from __future__ import print_function

import os.path, re, urllib, time, cgi
import json

import treq

from bs4 import BeautifulSoup

from twisted.application import service
from twisted.internet import defer
from twisted.internet.defer import inlineCallbacks, returnValue, maybeDeferred
from twisted.internet.task import Clock
from twisted.web import client, error, http
from twisted.python import failure, log

from nevow.context import WebContext
from nevow.inevow import (
    ICanHandleException,
    IRequest,
    IData,
)
from nevow.util import escapeToXML
from nevow.loaders import stan
from nevow.testutil import FakeRequest
from nevow.appserver import (
    processingFailed,
    DefaultExceptionHandler,
)

from allmydata import interfaces, uri, webish
from allmydata.storage_client import StorageFarmBroker, StubServer
from allmydata.immutable import upload
from allmydata.immutable.downloader.status import DownloadStatus
from allmydata.dirnode import DirectoryNode
from allmydata.nodemaker import NodeMaker
from allmydata.web import status
from allmydata.web.common import WebError, MultiFormatPage
from allmydata.util import fileutil, base32, hashutil
from allmydata.util.consumer import download_to_data
from allmydata.util.encodingutil import to_str
from ...util.connection_status import ConnectionStatus
from ..common import (
    EMPTY_CLIENT_CONFIG,
    FakeCHKFileNode,
    FakeMutableFileNode,
    create_chk_filenode,
    WebErrorMixin,
    make_mutable_file_uri,
    create_mutable_filenode,
    TrialTestCase,
)
from .common import (
    assert_soup_has_favicon,
    assert_soup_has_text,
    assert_soup_has_tag_with_attributes,
    assert_soup_has_tag_with_content,
    assert_soup_has_tag_with_attributes_and_content,
)

from allmydata.interfaces import IMutableFileNode, SDMF_VERSION, MDMF_VERSION
from allmydata.mutable import servermap, publish, retrieve
from .. import common_util as testutil
from ..common_web import (
    do_http,
    Error,
)
from allmydata.client import _Client, SecretHolder
from .common import unknown_rwcap, unknown_rocap, unknown_immcap, FAVICON_MARKUP

# create a fake uploader/downloader, and a couple of fake dirnodes, then
# create a webserver that works against them

class FakeStatsProvider(object):
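    # Stand-in for the client's stats provider: returns empty stats and counters.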
    def get_stats(self):
        stats = {'stats': {}, 'counters': {}}
        return stats

class FakeNodeMaker(NodeMaker):
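    # NodeMaker that builds fake in-memory filenodes instead of real ones,
    # all backed by the shared all_contents dict supplied by FakeClient.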
    encoding_params = {
        'k': 3,
        'n': 10,
        'happy': 7,
        'max_segment_size':128*1024 # 1024=KiB
        }
    def _create_lit(self, cap):
        return FakeCHKFileNode(cap, self.all_contents)
    def _create_immutable(self, cap):
        return FakeCHKFileNode(cap, self.all_contents)
    def _create_mutable(self, cap):
        return FakeMutableFileNode(None, None,
                                   self.encoding_params, None,
                                   self.all_contents).init_from_cap(cap)
    def create_mutable_file(self, contents="", keysize=None,
                            version=SDMF_VERSION):
        n = FakeMutableFileNode(None, None, self.encoding_params, None,
                                self.all_contents)
        return n.create(contents, version=version)

class FakeUploader(service.Service):
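    # Fake uploader service: reads the uploadable synchronously, stores the
    # data in a fake CHK filenode, and returns canned UploadResults.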
    name = "uploader"
    helper_furl = None
    helper_connected = False

    def upload(self, uploadable, **kw):
        d = uploadable.get_size()
        d.addCallback(lambda size: uploadable.read(size))
        def _got_data(datav):
            data = "".join(datav)
            n = create_chk_filenode(data, self.all_contents)
            ur = upload.UploadResults(file_size=len(data),
                                      ciphertext_fetched=0,
                                      preexisting_shares=0,
                                      pushed_shares=10,
                                      sharemap={},
                                      servermap={},
                                      timings={},
                                      uri_extension_data={},
                                      uri_extension_hash="fake",
                                      verifycapstr="fakevcap")
            ur.set_uri(n.get_uri())
            return ur
        d.addCallback(_got_data)
        return d

    def get_helper_info(self):
        return (self.helper_furl, self.helper_connected)

def build_one_ds():
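    # Build a DownloadStatus populated with a mix of segment, DYHB, read, and
    # block events (some finished, some left outstanding) so the download
    # status pages have realistic data to render.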
    ds = DownloadStatus("storage_index", 1234)
    now = time.time()

    serverA = StubServer(hashutil.tagged_hash("foo", "serverid_a")[:20])
    serverB = StubServer(hashutil.tagged_hash("foo", "serverid_b")[:20])
    storage_index = hashutil.storage_index_hash("SI")
    e0 = ds.add_segment_request(0, now)
    e0.activate(now+0.5)
    e0.deliver(now+1, 0, 100, 0.5) # when, start,len, decodetime
    e1 = ds.add_segment_request(1, now+2)
    e1.error(now+3)
    # two outstanding requests
    e2 = ds.add_segment_request(2, now+4)
    e3 = ds.add_segment_request(3, now+5)
    del e2,e3 # hush pyflakes

    # simulate a segment which gets delivered faster than a system clock tick (ticket #1166)
    e = ds.add_segment_request(4, now)
    e.activate(now)
    e.deliver(now, 0, 140, 0.5)

    e = ds.add_dyhb_request(serverA, now)
    e.finished([1,2], now+1)
    e = ds.add_dyhb_request(serverB, now+2) # left unfinished

    e = ds.add_read_event(0, 120, now)
    e.update(60, 0.5, 0.1) # bytes, decrypttime, pausetime
    e.finished(now+1)
    e = ds.add_read_event(120, 30, now+2) # left unfinished

    e = ds.add_block_request(serverA, 1, 100, 20, now)
    e.finished(20, now+1)
    e = ds.add_block_request(serverB, 1, 120, 30, now+1) # left unfinished

    # make sure that add_read_event() can come first too
    ds1 = DownloadStatus(storage_index, 1234)
    e = ds1.add_read_event(0, 120, now)
    e.update(60, 0.5, 0.1) # bytes, decrypttime, pausetime
    e.finished(now+1)

    return ds

class FakeHistory(object):
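    # Canned History: exposes one status object of each kind (upload, download,
    # mapupdate, publish, retrieve) for the status listings.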
    _all_upload_status = [upload.UploadStatus()]
    _all_download_status = [build_one_ds()]
    _all_mapupdate_statuses = [servermap.UpdateStatus()]
    _all_publish_statuses = [publish.PublishStatus()]
    _all_retrieve_statuses = [retrieve.RetrieveStatus()]

    def list_all_upload_statuses(self):
        return self._all_upload_status
    def list_all_download_statuses(self):
        return self._all_download_status
    def list_all_mapupdate_statuses(self):
        return self._all_mapupdate_statuses
    def list_all_publish_statuses(self):
        return self._all_publish_statuses
    def list_all_retrieve_statuses(self):
        return self._all_retrieve_statuses
    def list_all_helper_statuses(self):
        return []

class FakeDisplayableServer(StubServer):
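    # StubServer dressed up with just enough announcement and connection
    # state to appear in the server list on the welcome page.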
    def __init__(self, serverid, nickname, connected,
                 last_connect_time, last_loss_time, last_rx_time):
        StubServer.__init__(self, serverid)
        self.announcement = {"my-version": "tahoe-lafs-fake",
                             "service-name": "storage",
                             "nickname": nickname}
        self.connected = connected
        self.last_loss_time = last_loss_time
        self.last_rx_time = last_rx_time
        self.last_connect_time = last_connect_time

    def on_status_changed(self, cb): # TODO: try to remove me
        cb(self)
    def is_connected(self): # TODO: remove me
        return self.connected
    def get_version(self):
        return {
            "application-version": "1.0"
        }
    def get_permutation_seed(self):
        return ""
    def get_announcement(self):
        return self.announcement
    def get_nickname(self):
        return self.announcement["nickname"]
    def get_available_space(self):
        return 123456
    def get_connection_status(self):
        return ConnectionStatus(self.connected, "summary", {},
                                self.last_connect_time, self.last_rx_time)

class FakeBucketCounter(object):
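    # Reports a fixed, idle bucket-counting crawler state for the storage
    # server status page.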
    def get_state(self):
        return {"last-complete-bucket-count": 0}
    def get_progress(self):
        return {"estimated-time-per-cycle": 0,
                "cycle-in-progress": False,
                "remaining-wait-time": 0}

class FakeLeaseChecker(object):
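    # Lease-expiration crawler stand-in with expiration disabled and no history.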
    def __init__(self):
        self.expiration_enabled = False
        self.mode = "age"
        self.override_lease_duration = None
        self.sharetypes_to_expire = {}
    def get_state(self):
        return {"history": None}
    def get_progress(self):
        return {"estimated-time-per-cycle": 0,
                "cycle-in-progress": False,
                "remaining-wait-time": 0}

class FakeStorageServer(service.MultiService):
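    # Minimal storage-server service carrying the fake bucket counter and
    # lease checker defined above.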
    name = 'storage'
    def __init__(self, nodeid, nickname):
        service.MultiService.__init__(self)
        self.my_nodeid = nodeid
        self.nickname = nickname
        self.bucket_counter = FakeBucketCounter()
        self.lease_checker = FakeLeaseChecker()
    def get_stats(self):
        return {"storage_server.accepting_immutable_shares": False}
    def on_status_changed(self, cb):
        cb(self)

class FakeClient(_Client):
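    # A _Client that skips the real Client.__init__ and instead wires together
    # the fakes above (uploader, history, storage broker, nodemaker).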
    def __init__(self):
        # don't upcall to Client.__init__, since we only want to initialize a
        # minimal subset
        service.MultiService.__init__(self)
        self.all_contents = {}
        self.nodeid = "fake_nodeid"
        self.nickname = u"fake_nickname \u263A"
        self.introducer_furls = []
        self.introducer_clients = []
        self.stats_provider = FakeStatsProvider()
        self._secret_holder = SecretHolder("lease secret", "convergence secret")
        self.helper = None
        self.convergence = "some random string"
        self.storage_broker = StorageFarmBroker(
            permute_peers=True,
            tub_maker=None,
            node_config=EMPTY_CLIENT_CONFIG,
        )
        # fake knowledge of another server
        self.storage_broker.test_add_server("other_nodeid",
                                            FakeDisplayableServer(
                                                serverid="other_nodeid", nickname=u"other_nickname \u263B", connected = True,
                                                last_connect_time = 10, last_loss_time = 20, last_rx_time = 30))
        self.storage_broker.test_add_server("disconnected_nodeid",
                                            FakeDisplayableServer(
                                                serverid="disconnected_nodeid", nickname=u"disconnected_nickname \u263B", connected = False,
                                                last_connect_time = None, last_loss_time = 25, last_rx_time = 35))
        self.introducer_client = None
        self.history = FakeHistory()
        self.uploader = FakeUploader()
        self.uploader.all_contents = self.all_contents
        self.uploader.setServiceParent(self)
        self.blacklist = None
        self.nodemaker = FakeNodeMaker(None, self._secret_holder, None,
                                       self.uploader, None,
                                       None, None, None)
        self.nodemaker.all_contents = self.all_contents
        self.mutable_file_default = SDMF_VERSION
        self.addService(FakeStorageServer(self.nodeid, self.nickname))

    def get_long_nodeid(self):
        return "v0-nodeid"
    def get_long_tubid(self):
        return "tubid"

    def get_auth_token(self):
        return 'a fake debug auth token'

    def startService(self):
        return service.MultiService.startService(self)
    def stopService(self):
        return service.MultiService.stopService(self)

    MUTABLE_SIZELIMIT = FakeMutableFileNode.MUTABLE_SIZELIMIT

class WebMixin(testutil.TimezoneMixin):
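    # Mixin that starts a FakeClient and a WebishServer for the web-API tests.
    # It hands the server a twisted.internet.task.Clock and a fixed now_fn, so
    # tests can drive OphandleTable expirations and timestamps deterministically
    # via self.clock and self.fakeTime (see ticket #577).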
    def setUp(self):
        self.setTimezone('UTC-13:00')
        self.s = FakeClient()
        self.s.startService()
        self.staticdir = self.mktemp()
        self.clock = Clock()
        self.fakeTime = 86460 # 1d 0h 1m 0s
        self.ws = webish.WebishServer(self.s, "0", staticdir=self.staticdir,
                                      clock=self.clock, now_fn=lambda:self.fakeTime)
        self.ws.setServiceParent(self.s)
        self.webish_port = self.ws.getPortnum()
        self.webish_url = self.ws.getURL()
        assert self.webish_url.endswith("/")
        self.webish_url = self.webish_url[:-1] # these tests add their own /

        l = [ self.s.create_dirnode() for x in range(6) ]
        d = defer.DeferredList(l)
        def _then(res):
            self.public_root = res[0][1]
            assert interfaces.IDirectoryNode.providedBy(self.public_root), res
            self.public_url = "/uri/" + self.public_root.get_uri()
            self.private_root = res[1][1]

            foo = res[2][1]
            self._foo_node = foo
            self._foo_uri = foo.get_uri()
            self._foo_readonly_uri = foo.get_readonly_uri()
            self._foo_verifycap = foo.get_verify_cap().to_string()
            # NOTE: we ignore the deferred on all set_uri() calls, because we
            # know the fake nodes do these synchronously
            self.public_root.set_uri(u"foo", foo.get_uri(),
                                     foo.get_readonly_uri())

            self.BAR_CONTENTS, n, self._bar_txt_uri = self.makefile(0)
            foo.set_uri(u"bar.txt", self._bar_txt_uri, self._bar_txt_uri)
            self._bar_txt_verifycap = n.get_verify_cap().to_string()

            # sdmf
            # XXX: Do we ever use this?
            self.BAZ_CONTENTS, n, self._baz_txt_uri, self._baz_txt_readonly_uri = self.makefile_mutable(0)

            foo.set_uri(u"baz.txt", self._baz_txt_uri, self._baz_txt_readonly_uri)

            # mdmf
            self.QUUX_CONTENTS, n, self._quux_txt_uri, self._quux_txt_readonly_uri = self.makefile_mutable(0, mdmf=True)
            assert self._quux_txt_uri.startswith("URI:MDMF")
            foo.set_uri(u"quux.txt", self._quux_txt_uri, self._quux_txt_readonly_uri)

            foo.set_uri(u"empty", res[3][1].get_uri(),
                        res[3][1].get_readonly_uri())
            sub_uri = res[4][1].get_uri()
            self._sub_uri = sub_uri
            foo.set_uri(u"sub", sub_uri, sub_uri)
            sub = self.s.create_node_from_uri(sub_uri)
            self._sub_node = sub

            _ign, n, blocking_uri = self.makefile(1)
            foo.set_uri(u"blockingfile", blocking_uri, blocking_uri)

            # filenode to test for html encoding issues
            self._htmlname_unicode = u"<&weirdly'named\"file>>>_<iframe />.txt"
            self._htmlname_raw = self._htmlname_unicode.encode('utf-8')
            self._htmlname_urlencoded = urllib.quote(self._htmlname_raw, '')
            self._htmlname_escaped = escapeToXML(self._htmlname_raw)
            self._htmlname_escaped_attr = cgi.escape(self._htmlname_raw, quote=True)
            self._htmlname_escaped_double = escapeToXML(cgi.escape(self._htmlname_raw, quote=True))
            self.HTMLNAME_CONTENTS, n, self._htmlname_txt_uri = self.makefile(0)
            foo.set_uri(self._htmlname_unicode, self._htmlname_txt_uri, self._htmlname_txt_uri)

            unicode_filename = u"n\u00fc.txt" # n u-umlaut . t x t
            # ok, unicode calls it LATIN SMALL LETTER U WITH DIAERESIS but I
            # still think of it as an umlaut
|
2009-10-12 23:51:26 +00:00
|
|
|
foo.set_uri(unicode_filename, self._bar_txt_uri, self._bar_txt_uri)
|
2007-12-03 21:52:42 +00:00
|
|
|
|
2011-11-10 08:00:11 +00:00
|
|
|
self.SUBBAZ_CONTENTS, n, baz_file = self.makefile(2)
|
2009-01-23 05:01:36 +00:00
|
|
|
self._baz_file_uri = baz_file
|
2009-10-12 23:51:26 +00:00
|
|
|
sub.set_uri(u"baz.txt", baz_file, baz_file)
|
2007-12-03 21:52:42 +00:00
|
|
|
|
2007-12-05 06:01:37 +00:00
|
|
|
_ign, n, self._bad_file_uri = self.makefile(3)
|
|
|
|
# this uri should not be downloadable
|
2012-05-22 22:18:26 +00:00
|
|
|
del self.s.all_contents[self._bad_file_uri]
|
2007-12-03 21:52:42 +00:00
|
|
|
|
|
|
|
rodir = res[5][1]
|
2009-10-12 23:51:26 +00:00
|
|
|
self.public_root.set_uri(u"reedownlee", rodir.get_readonly_uri(),
|
|
|
|
rodir.get_readonly_uri())
|
|
|
|
rodir.set_uri(u"nor", baz_file, baz_file)
|
2007-12-03 21:52:42 +00:00
|
|
|
|
|
|
|
# public/
|
|
|
|
# public/foo/
|
|
|
|
# public/foo/bar.txt
|
2011-08-07 00:43:48 +00:00
|
|
|
# public/foo/baz.txt
|
|
|
|
# public/foo/quux.txt
|
2007-12-03 21:52:42 +00:00
|
|
|
# public/foo/blockingfile
|
2012-10-25 00:01:25 +00:00
|
|
|
# public/foo/<&weirdly'named\"file>>>_<iframe />.txt
|
2007-12-03 21:52:42 +00:00
|
|
|
# public/foo/empty/
|
|
|
|
# public/foo/sub/
|
|
|
|
# public/foo/sub/baz.txt
|
|
|
|
# public/reedownlee/
|
|
|
|
# public/reedownlee/nor
|
|
|
|
self.NEWFILE_CONTENTS = "newfile contents\n"
|
2008-02-12 02:14:10 +00:00
|
|
|
|
2008-02-14 22:45:56 +00:00
|
|
|
return foo.get_metadata_for(u"bar.txt")
|
2007-12-03 21:52:42 +00:00
|
|
|
d.addCallback(_then)
|
2008-02-12 02:14:10 +00:00
|
|
|
def _got_metadata(metadata):
|
|
|
|
self._bar_txt_metadata = metadata
|
|
|
|
d.addCallback(_got_metadata)
|
2007-12-03 21:52:42 +00:00
|
|
|
return d
|
2007-07-07 02:43:55 +00:00
|
|
|
|
2012-05-22 22:18:26 +00:00
|
|
|
def get_all_contents(self):
|
|
|
|
return self.s.all_contents
|
|
|
|
|
2007-07-07 07:16:36 +00:00
|
|
|
def makefile(self, number):
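# create an immutable CHK file whose contents are derived from `number`,
# and return (contents, filenode, cap-string)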
|
2007-12-05 06:01:37 +00:00
|
|
|
contents = "contents of file %s\n" % number
|
2012-05-22 22:18:26 +00:00
|
|
|
n = create_chk_filenode(contents, self.get_all_contents())
|
2007-12-05 06:01:37 +00:00
|
|
|
return contents, n, n.get_uri()
|
2007-07-12 23:17:49 +00:00
|
|
|
|
2011-08-07 00:43:48 +00:00
|
|
|
def makefile_mutable(self, number, mdmf=False):
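# create a mutable file (SDMF by default, MDMF if mdmf=True) and return
# (contents, filenode, writecap, readcap)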
|
|
|
|
contents = "contents of mutable file %s\n" % number
|
2012-05-22 22:18:26 +00:00
|
|
|
n = create_mutable_filenode(contents, mdmf, self.s.all_contents)
|
2011-08-07 00:43:48 +00:00
|
|
|
return contents, n, n.get_uri(), n.get_readonly_uri()
|
|
|
|
|
2007-07-07 02:43:55 +00:00
|
|
|
def tearDown(self):
|
|
|
|
return self.s.stopService()
|
|
|
|
|
|
|
|
def failUnlessIsBarDotTxt(self, res):
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(res, self.BAR_CONTENTS, res)
|
2007-07-07 02:43:55 +00:00
|
|
|
|
2011-08-07 00:43:48 +00:00
|
|
|
def failUnlessIsQuuxDotTxt(self, res):
|
|
|
|
self.failUnlessReallyEqual(res, self.QUUX_CONTENTS, res)
|
|
|
|
|
|
|
|
def failUnlessIsBazDotTxt(self, res):
|
|
|
|
self.failUnlessReallyEqual(res, self.BAZ_CONTENTS, res)
|
|
|
|
|
2011-11-10 08:00:11 +00:00
|
|
|
def failUnlessIsSubBazDotTxt(self, res):
|
|
|
|
self.failUnlessReallyEqual(res, self.SUBBAZ_CONTENTS, res)
|
|
|
|
|
2007-07-08 07:17:11 +00:00
|
|
|
def failUnlessIsBarJSON(self, res):
|
2017-01-19 22:39:53 +00:00
|
|
|
data = json.loads(res)
|
2007-07-08 07:17:11 +00:00
|
|
|
self.failUnless(isinstance(data, list))
|
2010-07-18 14:29:15 +00:00
|
|
|
self.failUnlessEqual(data[0], "filenode")
|
2007-07-08 07:17:11 +00:00
|
|
|
self.failUnless(isinstance(data[1], dict))
|
2008-05-20 22:40:49 +00:00
|
|
|
self.failIf(data[1]["mutable"])
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failIfIn("rw_uri", data[1]) # immutable
|
2010-07-18 14:29:15 +00:00
|
|
|
self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), self._bar_txt_uri)
|
|
|
|
self.failUnlessReallyEqual(to_str(data[1]["verify_uri"]), self._bar_txt_verifycap)
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(data[1]["size"], len(self.BAR_CONTENTS))
|
2007-07-08 07:17:11 +00:00
|
|
|
|
2011-08-07 00:43:48 +00:00
|
|
|
def failUnlessIsQuuxJSON(self, res, readonly=False):
|
2017-01-19 22:39:53 +00:00
|
|
|
data = json.loads(res)
|
2011-08-07 00:43:48 +00:00
|
|
|
self.failUnless(isinstance(data, list))
|
|
|
|
self.failUnlessEqual(data[0], "filenode")
|
|
|
|
self.failUnless(isinstance(data[1], dict))
|
|
|
|
metadata = data[1]
|
|
|
|
return self.failUnlessIsQuuxDotTxtMetadata(metadata, readonly)
|
|
|
|
|
|
|
|
def failUnlessIsQuuxDotTxtMetadata(self, metadata, readonly):
|
|
|
|
self.failUnless(metadata['mutable'])
|
|
|
|
if readonly:
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failIfIn("rw_uri", metadata)
|
2011-08-07 00:43:48 +00:00
|
|
|
else:
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failUnlessIn("rw_uri", metadata)
|
2011-08-07 00:43:48 +00:00
|
|
|
self.failUnlessEqual(metadata['rw_uri'], self._quux_txt_uri)
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failUnlessIn("ro_uri", metadata)
|
2011-08-07 00:43:48 +00:00
|
|
|
self.failUnlessEqual(metadata['ro_uri'], self._quux_txt_readonly_uri)
|
|
|
|
self.failUnlessReallyEqual(metadata['size'], len(self.QUUX_CONTENTS))
|
|
|
|
|
2007-07-08 03:06:58 +00:00
|
|
|
def failUnlessIsFooJSON(self, res):
|
2017-01-19 22:39:53 +00:00
|
|
|
data = json.loads(res)
|
2007-07-08 07:17:11 +00:00
|
|
|
self.failUnless(isinstance(data, list))
|
2010-07-18 14:29:15 +00:00
|
|
|
self.failUnlessEqual(data[0], "dirnode", res)
|
2007-07-08 07:17:11 +00:00
|
|
|
self.failUnless(isinstance(data[1], dict))
|
2008-05-20 22:40:49 +00:00
|
|
|
self.failUnless(data[1]["mutable"])
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failUnlessIn("rw_uri", data[1]) # mutable
|
2010-07-18 14:29:15 +00:00
|
|
|
self.failUnlessReallyEqual(to_str(data[1]["rw_uri"]), self._foo_uri)
|
|
|
|
self.failUnlessReallyEqual(to_str(data[1]["ro_uri"]), self._foo_readonly_uri)
|
|
|
|
self.failUnlessReallyEqual(to_str(data[1]["verify_uri"]), self._foo_verifycap)
|
2007-12-04 04:37:54 +00:00
|
|
|
|
2008-09-30 22:21:06 +00:00
|
|
|
kidnames = sorted([unicode(n) for n in data[1]["children"]])
|
2010-07-18 14:29:15 +00:00
|
|
|
self.failUnlessEqual(kidnames,
|
2012-10-25 00:01:25 +00:00
|
|
|
[self._htmlname_unicode, u"bar.txt", u"baz.txt",
|
|
|
|
u"blockingfile", u"empty", u"n\u00fc.txt", u"quux.txt", u"sub"])
|
2008-09-30 22:21:06 +00:00
|
|
|
kids = dict( [(unicode(name),value)
|
|
|
|
for (name,value)
|
|
|
|
in data[1]["children"].iteritems()] )
|
2010-07-18 14:29:15 +00:00
|
|
|
self.failUnlessEqual(kids[u"sub"][0], "dirnode")
|
2010-06-18 23:01:19 +00:00
|
|
|
self.failUnlessIn("metadata", kids[u"sub"][1])
|
|
|
|
self.failUnlessIn("tahoe", kids[u"sub"][1]["metadata"])
|
|
|
|
tahoe_md = kids[u"sub"][1]["metadata"]["tahoe"]
|
|
|
|
self.failUnlessIn("linkcrtime", tahoe_md)
|
|
|
|
self.failUnlessIn("linkmotime", tahoe_md)
|
2010-07-18 14:29:15 +00:00
|
|
|
self.failUnlessEqual(kids[u"bar.txt"][0], "filenode")
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(kids[u"bar.txt"][1]["size"], len(self.BAR_CONTENTS))
|
2010-07-18 14:29:15 +00:00
|
|
|
self.failUnlessReallyEqual(to_str(kids[u"bar.txt"][1]["ro_uri"]), self._bar_txt_uri)
|
|
|
|
self.failUnlessReallyEqual(to_str(kids[u"bar.txt"][1]["verify_uri"]),
|
2010-07-11 20:02:52 +00:00
|
|
|
self._bar_txt_verifycap)
|
2010-06-19 02:17:18 +00:00
|
|
|
self.failUnlessIn("metadata", kids[u"bar.txt"][1])
|
|
|
|
self.failUnlessIn("tahoe", kids[u"bar.txt"][1]["metadata"])
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(kids[u"bar.txt"][1]["metadata"]["tahoe"]["linkcrtime"],
|
|
|
|
self._bar_txt_metadata["tahoe"]["linkcrtime"])
|
2010-07-18 14:29:15 +00:00
|
|
|
self.failUnlessReallyEqual(to_str(kids[u"n\u00fc.txt"][1]["ro_uri"]),
|
2010-07-11 20:02:52 +00:00
|
|
|
self._bar_txt_uri)
|
2011-08-07 00:43:48 +00:00
|
|
|
self.failUnlessIn("quux.txt", kids)
|
2011-08-08 21:39:25 +00:00
|
|
|
self.failUnlessReallyEqual(to_str(kids[u"quux.txt"][1]["rw_uri"]),
|
2011-08-07 00:43:48 +00:00
|
|
|
self._quux_txt_uri)
|
2011-08-08 21:39:25 +00:00
|
|
|
self.failUnlessReallyEqual(to_str(kids[u"quux.txt"][1]["ro_uri"]),
|
2011-08-07 00:43:48 +00:00
|
|
|
self._quux_txt_readonly_uri)
|
2007-07-08 03:06:58 +00:00
|
|
|
|
2017-07-24 20:51:58 +00:00
|
|
|
@inlineCallbacks
|
2008-10-28 20:41:04 +00:00
|
|
|
def GET(self, urlpath, followRedirect=False, return_response=False,
|
|
|
|
**kwargs):
|
|
|
|
# if return_response=True, this fires with (data, statuscode,
|
|
|
|
# respheaders) instead of just data.
|
2017-07-24 20:51:58 +00:00
|
|
|
|
|
|
|
# treq can accept unicode URLs, unlike the old client.getPage
|
2007-07-07 02:43:55 +00:00
|
|
|
url = self.webish_url + urlpath
|
2017-07-24 20:51:58 +00:00
|
|
|
response = yield treq.request("get", url, persistent=False,
|
|
|
|
allow_redirects=followRedirect,
|
|
|
|
**kwargs)
|
|
|
|
data = yield response.content()
|
2008-10-28 20:41:04 +00:00
|
|
|
if return_response:
|
2017-07-24 20:51:58 +00:00
|
|
|
# we emulate the old HTTPClientGetFactory-based response, which
|
|
|
|
# wanted a tuple of (bytestring of data, bytestring of response
|
|
|
|
# code like "200" or "404", and a
|
|
|
|
# twisted.web.http_headers.Headers instance). Fortunately treq's
|
|
|
|
# response.headers has one.
|
|
|
|
returnValue( (data, str(response.code), response.headers) )
|
|
|
|
if 400 <= response.code < 600:
|
|
|
|
raise Error(response.code, response=data)
|
|
|
|
returnValue(data)
|
2007-07-07 02:43:55 +00:00
|
|
|
|
2017-02-22 00:06:14 +00:00
|
|
|
@inlineCallbacks
|
|
|
|
def HEAD(self, urlpath, return_response=False, headers={}):
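# Issue an HTTP HEAD via treq. Fires with a ("", statuscode, headers)
# tuple (regardless of return_response, the body is always empty);
# raises Error for 4xx/5xx responses.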
|
|
|
|
url = self.webish_url + urlpath
|
|
|
|
response = yield treq.request("head", url, persistent=False,
|
|
|
|
headers=headers)
|
|
|
|
if 400 <= response.code < 600:
|
|
|
|
raise Error(response.code, response="")
|
2017-07-24 20:51:58 +00:00
|
|
|
returnValue( ("", response.code, response.headers) )
|
2017-02-22 00:06:14 +00:00
|
|
|
|
|
|
|
def PUT(self, urlpath, data, headers={}):
|
2007-07-07 02:43:55 +00:00
|
|
|
url = self.webish_url + urlpath
|
2017-02-22 00:06:14 +00:00
|
|
|
return do_http("put", url, data=data, headers=headers)
|
2007-07-07 02:43:55 +00:00
|
|
|
|
|
|
|
def DELETE(self, urlpath):
|
|
|
|
url = self.webish_url + urlpath
|
2017-02-22 00:06:14 +00:00
|
|
|
return do_http("delete", url)
|
2007-07-07 02:43:55 +00:00
|
|
|
|
2017-07-24 21:24:14 +00:00
|
|
|
def build_form(self, **fields):
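# Encode `fields` as a multipart/form-data body with the fixed boundary
# "boogabooga", always starting with a _charset=UTF-8 field. A tuple value
# is treated as (filename, contents), simulating a file-upload control.
# Returns (body, headers) ready to be handed to POST2.
# e.g. build_form(t="mkdir") produces roughly:
#   --boogabooga
#   Content-Disposition: form-data; name="_charset"
#
#   UTF-8
#   --boogabooga
#   Content-Disposition: form-data; name="t"
#
#   mkdir
#   --boogabooga--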
|
2007-07-08 03:06:58 +00:00
|
|
|
sepbase = "boogabooga"
|
|
|
|
sep = "--" + sepbase
|
|
|
|
form = []
|
|
|
|
form.append(sep)
|
|
|
|
form.append('Content-Disposition: form-data; name="_charset"')
|
|
|
|
form.append('')
|
|
|
|
form.append('UTF-8')
|
|
|
|
form.append(sep)
|
|
|
|
for name, value in fields.iteritems():
|
|
|
|
if isinstance(value, tuple):
|
|
|
|
filename, value = value
|
|
|
|
form.append('Content-Disposition: form-data; name="%s"; '
|
2008-02-14 22:45:56 +00:00
|
|
|
'filename="%s"' % (name, filename.encode("utf-8")))
|
2007-07-08 03:06:58 +00:00
|
|
|
else:
|
|
|
|
form.append('Content-Disposition: form-data; name="%s"' % name)
|
|
|
|
form.append('')
|
2008-06-04 00:09:39 +00:00
|
|
|
if isinstance(value, unicode):
|
|
|
|
value = value.encode("utf-8")
|
|
|
|
else:
|
|
|
|
value = str(value)
|
|
|
|
assert isinstance(value, str)
|
|
|
|
form.append(value)
|
2007-07-08 03:06:58 +00:00
|
|
|
form.append(sep)
|
|
|
|
form[-1] += "--"
|
2009-11-18 07:09:00 +00:00
|
|
|
body = ""
|
|
|
|
headers = {}
|
|
|
|
if fields:
|
|
|
|
body = "\r\n".join(form) + "\r\n"
|
|
|
|
headers["content-type"] = "multipart/form-data; boundary=%s" % sepbase
|
2017-07-24 21:24:14 +00:00
|
|
|
return (body, headers)
|
|
|
|
|
2017-07-24 23:22:36 +00:00
|
|
|
def POST(self, urlpath, **fields):
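# POST encodes keyword fields as multipart/form-data (via build_form) and
# submits them; POST2 submits a raw body with explicit headers.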
|
2017-07-24 21:24:14 +00:00
|
|
|
body, headers = self.build_form(**fields)
|
2017-07-24 23:22:36 +00:00
|
|
|
return self.POST2(urlpath, body, headers)
|
2009-11-18 07:09:00 +00:00
|
|
|
|
|
|
|
def POST2(self, urlpath, body="", headers={}, followRedirect=False):
|
|
|
|
url = self.webish_url + urlpath
|
2017-07-24 23:31:44 +00:00
|
|
|
return do_http("POST", url, allow_redirects=followRedirect,
|
|
|
|
headers=headers, data=body)
|
2007-07-07 02:43:55 +00:00
|
|
|
|
2007-07-16 18:53:12 +00:00
|
|
|
def shouldFail(self, res, expected_failure, which,
|
|
|
|
substring=None, response_substring=None):
|
2007-07-07 02:43:55 +00:00
|
|
|
if isinstance(res, failure.Failure):
|
|
|
|
res.trap(expected_failure)
|
|
|
|
if substring:
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failUnlessIn(substring, str(res), which)
|
2007-07-16 18:53:12 +00:00
|
|
|
if response_substring:
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failUnlessIn(response_substring, res.value.response, which)
|
2007-07-07 02:43:55 +00:00
|
|
|
else:
|
|
|
|
self.fail("%s was supposed to raise %s, not get '%s'" %
|
|
|
|
(which, expected_failure, res))
|
|
|
|
|
2007-12-25 05:49:35 +00:00
|
|
|
def shouldFail2(self, expected_failure, which, substring,
|
2008-04-15 18:11:29 +00:00
|
|
|
response_substring,
|
2007-12-25 05:49:35 +00:00
|
|
|
callable, *args, **kwargs):
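# Call `callable`, expect it to fail with `expected_failure`, and check
# that `substring` appears in the exception text and `response_substring`
# in the HTTP response body. `which` labels the test in failure messages.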
|
|
|
|
assert substring is None or isinstance(substring, str)
|
2008-04-15 18:11:29 +00:00
|
|
|
assert response_substring is None or isinstance(response_substring, str)
|
2007-12-25 05:49:35 +00:00
|
|
|
d = defer.maybeDeferred(callable, *args, **kwargs)
|
|
|
|
def done(res):
|
2007-12-25 08:56:04 +00:00
|
|
|
if isinstance(res, failure.Failure):
|
2007-12-25 05:49:35 +00:00
|
|
|
res.trap(expected_failure)
|
|
|
|
if substring:
|
2012-05-09 21:18:37 +00:00
|
|
|
self.failUnlessIn(substring, str(res),
|
2013-04-05 05:36:14 +00:00
|
|
|
"'%s' not in '%s' (response is '%s') for test '%s'" % \
|
|
|
|
(substring, str(res),
|
|
|
|
getattr(res.value, "response", ""),
|
|
|
|
which))
|
2008-04-15 18:11:29 +00:00
|
|
|
if response_substring:
|
2012-05-09 21:18:37 +00:00
|
|
|
self.failUnlessIn(response_substring, res.value.response,
|
|
|
|
"'%s' not in '%s' for test '%s'" % \
|
|
|
|
(response_substring, res.value.response,
|
|
|
|
which))
|
2007-12-25 05:49:35 +00:00
|
|
|
else:
|
2020-04-18 07:43:12 +00:00
|
|
|
self.fail("%s was supposed to raise %s, not get '%s'" %
|
|
|
|
(which, expected_failure, res))
|
2007-12-25 05:49:35 +00:00
|
|
|
d.addBoth(done)
|
|
|
|
return d
|
|
|
|
|
2007-07-07 02:43:55 +00:00
|
|
|
def should404(self, res, which):
|
|
|
|
if isinstance(res, failure.Failure):
|
|
|
|
res.trap(error.Error)
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(res.value.status, "404")
|
2007-07-07 02:43:55 +00:00
|
|
|
else:
|
2007-07-07 07:16:36 +00:00
|
|
|
self.fail("%s was supposed to Error(404), not get '%s'" %
|
|
|
|
(which, res))
|
2007-07-07 02:43:55 +00:00
|
|
|
|
2010-02-21 01:04:55 +00:00
|
|
|
def should302(self, res, which):
|
|
|
|
if isinstance(res, failure.Failure):
|
|
|
|
res.trap(error.Error)
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(res.value.status, "302")
|
2010-02-21 01:04:55 +00:00
|
|
|
else:
|
|
|
|
self.fail("%s was supposed to Error(302), not get '%s'" %
|
2011-12-17 04:27:10 +00:00
|
|
|
(which, res))
|
2010-02-21 01:04:55 +00:00
|
|
|
|
2017-07-25 15:11:38 +00:00
|
|
|
|
|
|
|
|
2020-01-31 18:27:17 +00:00
|
|
|
class MultiFormatPageTests(TrialTestCase):
|
2017-07-25 15:11:38 +00:00
|
|
|
"""
|
|
|
|
Tests for ``MultiFormatPage``.
|
|
|
|
"""
|
|
|
|
def resource(self):
|
|
|
|
"""
|
|
|
|
Create and return an instance of a ``MultiFormatPage`` subclass with two
|
|
|
|
formats: ``a`` and ``b``.
|
|
|
|
"""
|
|
|
|
class Content(MultiFormatPage):
|
|
|
|
docFactory = stan("doc factory")
|
|
|
|
|
|
|
|
def render_A(self, req):
|
|
|
|
return "a"
|
|
|
|
|
|
|
|
def render_B(self, req):
|
|
|
|
return "b"
|
|
|
|
return Content()
|
|
|
|
|
|
|
|
|
|
|
|
def render(self, resource, **query_args):
|
|
|
|
"""
|
|
|
|
Render a Nevow ``Page`` against a request with the given query arguments.
|
|
|
|
|
|
|
|
:param resource: The Nevow resource to render.
|
|
|
|
|
|
|
|
:param query_args: The query arguments to put into the request being
|
|
|
|
rendered. A mapping from ``bytes`` to ``list`` of ``bytes``.
|
|
|
|
|
|
|
|
:return: The rendered response body as ``bytes``.
|
|
|
|
"""
|
|
|
|
ctx = WebContext(tag=resource)
|
|
|
|
req = FakeRequest(args=query_args)
|
|
|
|
ctx.remember(DefaultExceptionHandler(), ICanHandleException)
|
|
|
|
ctx.remember(req, IRequest)
|
|
|
|
ctx.remember(None, IData)
|
|
|
|
|
|
|
|
d = maybeDeferred(resource.renderHTTP, ctx)
|
|
|
|
d.addErrback(processingFailed, req, ctx)
|
|
|
|
res = self.successResultOf(d)
|
|
|
|
if isinstance(res, bytes):
|
|
|
|
return req.v + res
|
|
|
|
return req.v
|
|
|
|
|
|
|
|
|
|
|
|
def test_select_format(self):
|
|
|
|
"""
|
|
|
|
The ``formatArgument`` attribute of a ``MultiFormatPage`` subclass
|
|
|
|
identifies the query argument which selects the result format.
|
|
|
|
"""
|
|
|
|
resource = self.resource()
|
|
|
|
resource.formatArgument = "foo"
|
|
|
|
self.assertEqual("a", self.render(resource, foo=["a"]))
|
|
|
|
|
|
|
|
|
|
|
|
def test_default_format_argument(self):
|
|
|
|
"""
|
|
|
|
If a ``MultiFormatPage`` subclass does not set ``formatArgument`` then the
|
|
|
|
``t`` argument is used.
|
|
|
|
"""
|
|
|
|
resource = self.resource()
|
|
|
|
self.assertEqual("a", self.render(resource, t=["a"]))
|
|
|
|
|
|
|
|
|
|
|
|
def test_no_format(self):
|
|
|
|
"""
|
|
|
|
If no value is given for the format argument and no default format has
|
|
|
|
been defined, the base Nevow rendering behavior is used
|
|
|
|
(``renderHTTP``).
|
|
|
|
"""
|
|
|
|
resource = self.resource()
|
|
|
|
self.assertEqual("doc factory", self.render(resource))
|
|
|
|
|
|
|
|
|
|
|
|
def test_default_format(self):
|
|
|
|
"""
|
|
|
|
If no value is given for the format argument and the ``MultiFormatPage``
|
|
|
|
subclass defines a ``formatDefault``, that value is used as the format
|
|
|
|
to render.
|
|
|
|
"""
|
|
|
|
resource = self.resource()
|
|
|
|
resource.formatDefault = "b"
|
|
|
|
self.assertEqual("b", self.render(resource))
|
|
|
|
|
|
|
|
|
|
|
|
def test_explicit_none_format_renderer(self):
|
|
|
|
"""
|
|
|
|
If a format is selected which has a renderer set to ``None``, the base
|
|
|
|
Nevow rendering behavior is used (``renderHTTP``).
|
|
|
|
"""
|
|
|
|
resource = self.resource()
|
|
|
|
resource.render_FOO = None
|
|
|
|
self.assertEqual("doc factory", self.render(resource, t=["foo"]))
|
|
|
|
|
|
|
|
|
|
|
|
def test_unknown_format(self):
|
|
|
|
"""
|
|
|
|
If a format is selected for which there is no renderer, an error is
|
|
|
|
returned.
|
|
|
|
"""
|
|
|
|
resource = self.resource()
|
|
|
|
self.assertIn(
|
|
|
|
"<title>Exception</title>",
|
|
|
|
self.render(resource, t=["foo"]),
|
|
|
|
)
|
|
|
|
self.flushLoggedErrors(WebError)
|
|
|
|
|
|
|
|
|
|
|
|
|
2020-01-31 18:27:17 +00:00
|
|
|
class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixin, TrialTestCase):
|
2018-04-13 16:52:12 +00:00
|
|
|
maxDiff = None
|
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_create(self):
|
2007-07-07 02:43:55 +00:00
|
|
|
pass
|
|
|
|
|
2018-04-13 16:56:50 +00:00
|
|
|
def test_frame_options(self):
|
|
|
|
"""
|
|
|
|
All pages deny the ability to be loaded in frames.
|
|
|
|
"""
|
|
|
|
d = self.GET("/", return_response=True)
|
|
|
|
def responded(result):
|
|
|
|
_, _, headers = result
|
|
|
|
self.assertEqual(
|
|
|
|
[b"DENY"],
|
|
|
|
headers.getRawHeaders(b"X-Frame-Options"),
|
|
|
|
)
|
|
|
|
d.addCallback(responded)
|
|
|
|
return d
|
|
|
|
|
2017-01-20 08:49:35 +00:00
|
|
|
def test_welcome_json(self):
|
2017-07-25 12:29:52 +00:00
|
|
|
"""
|
|
|
|
There is a JSON version of the welcome page which can be selected with the
|
|
|
|
``t`` query argument.
|
|
|
|
"""
|
2017-01-20 08:49:35 +00:00
|
|
|
d = self.GET("/?t=json")
|
|
|
|
def _check(res):
|
2017-07-25 15:35:54 +00:00
|
|
|
decoded = json.loads(res)
|
2017-07-25 12:29:52 +00:00
|
|
|
expected = {
|
2017-07-25 15:56:42 +00:00
|
|
|
u'introducers': {
|
|
|
|
u'statuses': [],
|
2017-07-25 12:29:52 +00:00
|
|
|
},
|
2017-07-25 15:56:42 +00:00
|
|
|
u'servers': sorted([
|
|
|
|
{u"nodeid": u'other_nodeid',
|
2017-07-27 19:32:47 +00:00
|
|
|
u'available_space': 123456,
|
|
|
|
u'connection_status': u'summary',
|
2017-07-27 20:31:41 +00:00
|
|
|
u'last_received_data': 30,
|
2017-07-27 19:32:47 +00:00
|
|
|
u'nickname': u'other_nickname \u263b',
|
|
|
|
u'version': u'1.0',
|
2017-07-25 15:56:42 +00:00
|
|
|
},
|
|
|
|
{u"nodeid": u'disconnected_nodeid',
|
2017-07-27 19:32:47 +00:00
|
|
|
u'available_space': 123456,
|
|
|
|
u'connection_status': u'summary',
|
2017-07-27 20:31:41 +00:00
|
|
|
u'last_received_data': 35,
|
2017-07-27 19:32:47 +00:00
|
|
|
u'nickname': u'disconnected_nickname \u263b',
|
|
|
|
u'version': u'1.0',
|
2017-07-25 15:56:42 +00:00
|
|
|
},
|
|
|
|
]),
|
2017-07-25 12:29:52 +00:00
|
|
|
}
|
|
|
|
self.assertEqual(expected, decoded)
|
2017-01-20 08:49:35 +00:00
|
|
|
d.addCallback(_check)
|
|
|
|
return d
|
|
|
|
|
2013-03-21 00:25:49 +00:00
|
|
|
def test_introducer_status(self):
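# the welcome page should reflect introducer connection state and must
# never leak the secret swissnum from the introducer FURL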
|
|
|
|
class MockIntroducerClient(object):
|
|
|
|
def __init__(self, connected):
|
|
|
|
self.connected = connected
|
2016-12-08 23:15:49 +00:00
|
|
|
def connection_status(self):
|
2016-12-10 00:35:46 +00:00
|
|
|
return ConnectionStatus(self.connected, "summary", {}, 0, 0)
|
2013-03-21 00:25:49 +00:00
|
|
|
|
|
|
|
d = defer.succeed(None)
|
|
|
|
|
|
|
|
# introducer not connected, unguessable furl
|
|
|
|
def _set_introducer_not_connected_unguessable(ign):
|
2016-09-12 23:01:23 +00:00
|
|
|
self.s.introducer_furls = [ "pb://someIntroducer/secret" ]
|
|
|
|
self.s.introducer_clients = [ MockIntroducerClient(False) ]
|
2013-03-21 00:25:49 +00:00
|
|
|
return self.GET("/")
|
|
|
|
d.addCallback(_set_introducer_not_connected_unguessable)
|
|
|
|
def _check_introducer_not_connected_unguessable(res):
|
2020-04-30 14:09:15 +00:00
|
|
|
soup = BeautifulSoup(res, 'html5lib')
|
|
|
|
self.failIfIn('pb://someIntroducer/secret', res)
|
|
|
|
assert_soup_has_tag_with_attributes(
|
|
|
|
self, soup, u"img",
|
|
|
|
{u"alt": u"Disconnected", u"src": u"img/connected-no.png"}
|
|
|
|
)
|
|
|
|
assert_soup_has_tag_with_content(
|
|
|
|
self, soup, u"div",
|
|
|
|
u"No introducers connected"
|
|
|
|
)
|
2013-03-21 00:25:49 +00:00
|
|
|
d.addCallback(_check_introducer_not_connected_unguessable)
|
|
|
|
|
|
|
|
# introducer connected, unguessable furl
|
|
|
|
def _set_introducer_connected_unguessable(ign):
|
2016-09-12 23:01:23 +00:00
|
|
|
self.s.introducer_furls = [ "pb://someIntroducer/secret" ]
|
|
|
|
self.s.introducer_clients = [ MockIntroducerClient(True) ]
|
2013-03-21 00:25:49 +00:00
|
|
|
return self.GET("/")
|
|
|
|
d.addCallback(_set_introducer_connected_unguessable)
|
|
|
|
def _check_introducer_connected_unguessable(res):
|
2020-04-30 14:09:15 +00:00
|
|
|
soup = BeautifulSoup(res, 'html5lib')
|
|
|
|
assert_soup_has_tag_with_attributes_and_content(
|
|
|
|
self, soup, u"div",
|
|
|
|
u"summary",
|
|
|
|
{ u"class": u"connection-status", u"title": u"(no other hints)" }
|
|
|
|
)
|
|
|
|
self.failIfIn('pb://someIntroducer/secret', res)
|
|
|
|
assert_soup_has_tag_with_attributes(
|
|
|
|
self, soup, u"img",
|
|
|
|
{ u"alt": u"Connected", u"src": u"img/connected-yes.png" }
|
|
|
|
)
|
|
|
|
assert_soup_has_tag_with_content(
|
|
|
|
self, soup, u"div",
|
|
|
|
u"1 introducer connected"
|
|
|
|
)
|
2013-03-21 00:25:49 +00:00
|
|
|
d.addCallback(_check_introducer_connected_unguessable)
|
|
|
|
|
|
|
|
# introducer connected, guessable furl
|
|
|
|
def _set_introducer_connected_guessable(ign):
|
2016-09-12 23:01:23 +00:00
|
|
|
self.s.introducer_furls = [ "pb://someIntroducer/introducer" ]
|
|
|
|
self.s.introducer_clients = [ MockIntroducerClient(True) ]
|
2013-03-21 00:25:49 +00:00
|
|
|
return self.GET("/")
|
|
|
|
d.addCallback(_set_introducer_connected_guessable)
|
|
|
|
def _check_introducer_connected_guessable(res):
|
2020-04-30 14:09:15 +00:00
|
|
|
soup = BeautifulSoup(res, 'html5lib')
|
|
|
|
assert_soup_has_tag_with_attributes_and_content(
|
|
|
|
self, soup, u"div",
|
|
|
|
u"summary",
|
|
|
|
{ u"class": u"connection-status", u"title": u"(no other hints)" }
|
|
|
|
)
|
|
|
|
assert_soup_has_tag_with_attributes(
|
|
|
|
self, soup, u"img",
|
|
|
|
{ u"src": u"img/connected-yes.png", u"alt": u"Connected" }
|
|
|
|
)
|
|
|
|
assert_soup_has_tag_with_content(
|
|
|
|
self, soup, u"div",
|
|
|
|
u"1 introducer connected"
|
|
|
|
)
|
2013-03-21 00:25:49 +00:00
|
|
|
d.addCallback(_check_introducer_connected_guessable)
|
|
|
|
return d
|
|
|
|
|
2012-12-29 04:17:00 +00:00
|
|
|
def test_helper_status(self):
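# the welcome page should show helper status (not configured,
# configured-but-disconnected, connected), with the secret part of the
# helper FURL censored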
|
|
|
|
d = defer.succeed(None)
|
|
|
|
|
|
|
|
# set helper furl to None
|
2012-12-29 23:44:11 +00:00
|
|
|
def _set_no_helper(ign):
|
2012-12-29 04:17:00 +00:00
|
|
|
self.s.uploader.helper_furl = None
|
|
|
|
return self.GET("/")
|
2012-12-29 23:44:11 +00:00
|
|
|
d.addCallback(_set_no_helper)
|
2013-03-15 02:28:38 +00:00
|
|
|
def _check_no_helper(res):
|
2020-04-30 14:09:15 +00:00
|
|
|
soup = BeautifulSoup(res, 'html5lib')
|
|
|
|
assert_soup_has_tag_with_attributes(
|
|
|
|
self, soup, u"img",
|
|
|
|
{ u"src": u"img/connected-not-configured.png", u"alt": u"Not Configured" }
|
|
|
|
)
|
2013-03-15 02:28:38 +00:00
|
|
|
d.addCallback(_check_no_helper)
|
2012-12-29 04:17:00 +00:00
|
|
|
|
|
|
|
# enable helper, not connected
|
|
|
|
def _set_helper_not_connected(ign):
|
2013-03-21 00:25:49 +00:00
|
|
|
self.s.uploader.helper_furl = "pb://someHelper/secret"
|
2012-12-29 04:17:00 +00:00
|
|
|
self.s.uploader.helper_connected = False
|
|
|
|
return self.GET("/")
|
|
|
|
d.addCallback(_set_helper_not_connected)
|
2013-03-15 02:28:38 +00:00
|
|
|
def _check_helper_not_connected(res):
|
2020-04-30 14:09:15 +00:00
|
|
|
soup = BeautifulSoup(res, 'html5lib')
|
|
|
|
assert_soup_has_tag_with_attributes_and_content(
|
|
|
|
self, soup, u"div",
|
|
|
|
u"pb://someHelper/[censored]",
|
|
|
|
{ u"class": u"furl" }
|
|
|
|
)
|
|
|
|
self.failIfIn('pb://someHelper/secret', res)
|
|
|
|
assert_soup_has_tag_with_attributes(
|
|
|
|
self, soup, u"img",
|
|
|
|
{ u"src": u"img/connected-no.png", u"alt": u"Disconnected" }
|
|
|
|
)
|
2013-03-15 02:28:38 +00:00
|
|
|
d.addCallback(_check_helper_not_connected)
|
2012-12-29 04:17:00 +00:00
|
|
|
|
|
|
|
# enable helper, connected
|
|
|
|
def _set_helper_connected(ign):
|
2013-03-21 00:25:49 +00:00
|
|
|
self.s.uploader.helper_furl = "pb://someHelper/secret"
|
2012-12-29 04:17:00 +00:00
|
|
|
self.s.uploader.helper_connected = True
|
|
|
|
return self.GET("/")
|
|
|
|
d.addCallback(_set_helper_connected)
|
2013-03-15 02:28:38 +00:00
|
|
|
def _check_helper_connected(res):
|
2020-04-30 14:09:15 +00:00
|
|
|
soup = BeautifulSoup(res, 'html5lib')
|
|
|
|
assert_soup_has_tag_with_attributes_and_content(
|
|
|
|
self, soup, u"div",
|
|
|
|
u"pb://someHelper/[censored]",
|
|
|
|
{ u"class": u"furl" }
|
|
|
|
)
|
|
|
|
self.failIfIn('pb://someHelper/secret', res)
|
|
|
|
assert_soup_has_tag_with_attributes(
|
|
|
|
self, soup, u"img",
|
|
|
|
{ u"src": u"img/connected-yes.png", "alt": u"Connected" }
|
|
|
|
)
|
2013-03-15 02:28:38 +00:00
|
|
|
d.addCallback(_check_helper_connected)
|
2012-12-29 04:17:00 +00:00
|
|
|
return d
|
|
|
|
|
2012-12-27 05:08:53 +00:00
|
|
|
def test_storage(self):
|
|
|
|
d = self.GET("/storage")
|
|
|
|
def _check(res):
|
2020-02-14 15:10:48 +00:00
|
|
|
soup = BeautifulSoup(res, 'html5lib')
|
|
|
|
assert_soup_has_text(self, soup, 'Storage Server Status')
|
|
|
|
assert_soup_has_favicon(self, soup)
|
2012-12-27 05:08:53 +00:00
|
|
|
res_u = res.decode('utf-8')
|
|
|
|
self.failUnlessIn(u'<li>Server Nickname: <span class="nickname mine">fake_nickname \u263A</span></li>', res_u)
|
|
|
|
d.addCallback(_check)
|
|
|
|
return d
|
|
|
|
|
2008-02-13 20:57:39 +00:00
|
|
|
def test_status(self):
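# exercise the /status pages: the recent-operations overview, its JSON
# form, and the per-operation pages for download, upload, mapupdate,
# publish, and retrieve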
|
2009-08-15 11:02:56 +00:00
|
|
|
h = self.s.get_history()
|
|
|
|
dl_num = h.list_all_download_statuses()[0].get_counter()
|
|
|
|
ul_num = h.list_all_upload_statuses()[0].get_counter()
|
|
|
|
mu_num = h.list_all_mapupdate_statuses()[0].get_counter()
|
|
|
|
pub_num = h.list_all_publish_statuses()[0].get_counter()
|
|
|
|
ret_num = h.list_all_retrieve_statuses()[0].get_counter()
|
2008-03-01 05:19:03 +00:00
|
|
|
d = self.GET("/status", followRedirect=True)
|
2008-02-13 20:57:39 +00:00
|
|
|
def _check(res):
|
2012-12-29 23:54:54 +00:00
|
|
|
self.failUnlessIn('Recent and Active Operations', res)
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failUnlessIn('"down-%d"' % dl_num, res)
|
|
|
|
self.failUnlessIn('"up-%d"' % ul_num, res)
|
|
|
|
self.failUnlessIn('"mapupdate-%d"' % mu_num, res)
|
|
|
|
self.failUnlessIn('"publish-%d"' % pub_num, res)
|
|
|
|
self.failUnlessIn('"retrieve-%d"' % ret_num, res)
|
2008-02-13 20:57:39 +00:00
|
|
|
d.addCallback(_check)
|
2008-07-26 00:41:10 +00:00
|
|
|
d.addCallback(lambda res: self.GET("/status/?t=json"))
|
|
|
|
def _check_json(res):
|
2017-01-19 22:39:53 +00:00
|
|
|
data = json.loads(res)
|
2008-07-26 00:41:10 +00:00
|
|
|
self.failUnless(isinstance(data, dict))
|
2010-01-14 22:15:29 +00:00
|
|
|
#active = data["active"]
|
2008-07-26 00:41:10 +00:00
|
|
|
# TODO: test more. We need a way to fake an active operation
|
|
|
|
# here.
|
|
|
|
d.addCallback(_check_json)
|
|
|
|
|
2008-03-01 06:03:00 +00:00
|
|
|
d.addCallback(lambda res: self.GET("/status/down-%d" % dl_num))
|
|
|
|
def _check_dl(res):
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failUnlessIn("File Download Status", res)
|
2008-03-01 06:03:00 +00:00
|
|
|
d.addCallback(_check_dl)
|
2011-06-29 22:25:55 +00:00
|
|
|
d.addCallback(lambda res: self.GET("/status/down-%d/event_json" % dl_num))
|
2010-08-09 22:03:42 +00:00
|
|
|
def _check_dl_json(res):
|
2017-01-19 22:39:53 +00:00
|
|
|
data = json.loads(res)
|
2010-08-09 22:03:42 +00:00
|
|
|
self.failUnless(isinstance(data, dict))
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failUnlessIn("read", data)
|
2011-06-29 22:25:55 +00:00
|
|
|
self.failUnlessEqual(data["read"][0]["length"], 120)
|
|
|
|
self.failUnlessEqual(data["segment"][0]["segment_length"], 100)
|
|
|
|
self.failUnlessEqual(data["segment"][2]["segment_number"], 2)
|
|
|
|
self.failUnlessEqual(data["segment"][2]["finish_time"], None)
|
|
|
|
phwr_id = base32.b2a(hashutil.tagged_hash("foo", "serverid_a")[:20])
|
|
|
|
cmpu_id = base32.b2a(hashutil.tagged_hash("foo", "serverid_b")[:20])
|
|
|
|
# serverids[] keys are strings, since that's what JSON does, but
|
|
|
|
# we'd really like them to be ints
|
2012-06-12 21:01:24 +00:00
|
|
|
self.failUnlessEqual(data["serverids"]["0"], "phwrsjte")
|
|
|
|
self.failUnless(data["serverids"].has_key("1"),
|
|
|
|
str(data["serverids"]))
|
|
|
|
self.failUnlessEqual(data["serverids"]["1"], "cmpuvkjm",
|
|
|
|
str(data["serverids"]))
|
|
|
|
self.failUnlessEqual(data["server_info"][phwr_id]["short"],
|
|
|
|
"phwrsjte")
|
|
|
|
self.failUnlessEqual(data["server_info"][cmpu_id]["short"],
|
|
|
|
"cmpuvkjm")
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failUnlessIn("dyhb", data)
|
|
|
|
self.failUnlessIn("misc", data)
|
2010-08-09 22:03:42 +00:00
|
|
|
d.addCallback(_check_dl_json)
|
2008-03-01 06:03:00 +00:00
|
|
|
d.addCallback(lambda res: self.GET("/status/up-%d" % ul_num))
|
|
|
|
def _check_ul(res):
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failUnlessIn("File Upload Status", res)
|
2008-03-01 06:03:00 +00:00
|
|
|
d.addCallback(_check_ul)
|
2008-04-17 20:02:22 +00:00
|
|
|
d.addCallback(lambda res: self.GET("/status/mapupdate-%d" % mu_num))
|
|
|
|
def _check_mapupdate(res):
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failUnlessIn("Mutable File Servermap Update Status", res)
|
2008-04-17 20:02:22 +00:00
|
|
|
d.addCallback(_check_mapupdate)
|
|
|
|
d.addCallback(lambda res: self.GET("/status/publish-%d" % pub_num))
|
|
|
|
def _check_publish(res):
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failUnlessIn("Mutable File Publish Status", res)
|
2008-04-17 20:02:22 +00:00
|
|
|
d.addCallback(_check_publish)
|
|
|
|
d.addCallback(lambda res: self.GET("/status/retrieve-%d" % ret_num))
|
|
|
|
def _check_retrieve(res):
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failUnlessIn("Mutable File Retrieve Status", res)
|
2008-04-17 20:02:22 +00:00
|
|
|
d.addCallback(_check_retrieve)
|
|
|
|
|
2008-02-13 20:57:39 +00:00
|
|
|
return d
|
|
|
|
|
2008-03-04 04:56:23 +00:00
|
|
|
def test_status_numbers(self):
|
2008-03-05 21:59:56 +00:00
|
|
|
drrm = status.DownloadResultsRendererMixin()
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(drrm.render_time(None, None), "")
|
|
|
|
self.failUnlessReallyEqual(drrm.render_time(None, 2.5), "2.50s")
|
|
|
|
self.failUnlessReallyEqual(drrm.render_time(None, 0.25), "250ms")
|
|
|
|
self.failUnlessReallyEqual(drrm.render_time(None, 0.0021), "2.1ms")
|
|
|
|
self.failUnlessReallyEqual(drrm.render_time(None, 0.000123), "123us")
|
|
|
|
self.failUnlessReallyEqual(drrm.render_rate(None, None), "")
|
|
|
|
self.failUnlessReallyEqual(drrm.render_rate(None, 2500000), "2.50MBps")
|
|
|
|
self.failUnlessReallyEqual(drrm.render_rate(None, 30100), "30.1kBps")
|
|
|
|
self.failUnlessReallyEqual(drrm.render_rate(None, 123), "123Bps")
|
2008-03-04 04:56:23 +00:00
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_GET_FILEURL(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.GET(self.public_url + "/foo/bar.txt")
|
2007-07-07 02:43:55 +00:00
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
2007-12-12 01:04:44 +00:00
|
|
|
return d
|
|
|
|
|
2008-10-28 20:41:04 +00:00
|
|
|
def test_GET_FILEURL_range(self):
|
|
|
|
headers = {"range": "bytes=1-10"}
|
|
|
|
d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
|
|
|
|
return_response=True)
|
2019-04-12 14:18:36 +00:00
|
|
|
def _got(res_and_status_and_headers):
|
|
|
|
(res, status, headers) = res_and_status_and_headers
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(int(status), 206)
|
2017-07-24 20:51:58 +00:00
|
|
|
self.failUnless(headers.hasHeader("content-range"))
|
|
|
|
self.failUnlessReallyEqual(headers.getRawHeaders("content-range")[0],
|
2010-07-11 20:02:52 +00:00
|
|
|
"bytes 1-10/%d" % len(self.BAR_CONTENTS))
|
|
|
|
self.failUnlessReallyEqual(res, self.BAR_CONTENTS[1:11])
|
2008-10-28 20:41:04 +00:00
|
|
|
d.addCallback(_got)
|
|
|
|
return d
|
|
|
|
|
2008-11-18 14:41:35 +00:00
|
|
|
def test_GET_FILEURL_partial_range(self):
|
|
|
|
headers = {"range": "bytes=5-"}
|
|
|
|
length = len(self.BAR_CONTENTS)
|
|
|
|
d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
|
|
|
|
return_response=True)
|
2019-04-12 14:18:36 +00:00
|
|
|
def _got(res_and_status_and_headers):
|
|
|
|
(res, status, headers) = res_and_status_and_headers
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(int(status), 206)
|
2017-07-24 20:51:58 +00:00
|
|
|
self.failUnless(headers.hasHeader("content-range"))
|
|
|
|
self.failUnlessReallyEqual(headers.getRawHeaders("content-range")[0],
|
2010-07-11 20:02:52 +00:00
|
|
|
"bytes 5-%d/%d" % (length-1, length))
|
|
|
|
self.failUnlessReallyEqual(res, self.BAR_CONTENTS[5:])
|
2008-11-18 14:41:35 +00:00
|
|
|
d.addCallback(_got)
|
|
|
|
return d
|
|
|
|
|
2010-03-10 03:59:13 +00:00
|
|
|
def test_GET_FILEURL_partial_end_range(self):
|
|
|
|
headers = {"range": "bytes=-5"}
|
|
|
|
length = len(self.BAR_CONTENTS)
|
|
|
|
d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
|
|
|
|
return_response=True)
|
2019-04-12 14:18:36 +00:00
|
|
|
def _got(res_and_status_and_headers):
|
|
|
|
(res, status, headers) = res_and_status_and_headers
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(int(status), 206)
|
2017-07-24 20:51:58 +00:00
|
|
|
self.failUnless(headers.hasHeader("content-range"))
|
|
|
|
self.failUnlessReallyEqual(headers.getRawHeaders("content-range")[0],
|
2010-07-11 20:02:52 +00:00
|
|
|
"bytes %d-%d/%d" % (length-5, length-1, length))
|
|
|
|
self.failUnlessReallyEqual(res, self.BAR_CONTENTS[-5:])
|
2010-03-10 03:59:13 +00:00
|
|
|
d.addCallback(_got)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_GET_FILEURL_partial_range_overrun(self):
|
|
|
|
headers = {"range": "bytes=100-200"}
|
|
|
|
d = self.shouldFail2(error.Error, "test_GET_FILEURL_range_overrun",
|
|
|
|
"416 Requested Range not satisfiable",
|
|
|
|
"First beyond end of file",
|
|
|
|
self.GET, self.public_url + "/foo/bar.txt",
|
|
|
|
headers=headers)
|
|
|
|
return d
|
|
|
|
|
2008-10-28 20:41:04 +00:00
|
|
|
def test_HEAD_FILEURL_range(self):
|
|
|
|
headers = {"range": "bytes=1-10"}
|
|
|
|
d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
|
|
|
|
return_response=True)
|
2019-04-12 14:18:36 +00:00
|
|
|
def _got(res_and_status_and_headers):
|
|
|
|
(res, status, headers) = res_and_status_and_headers
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(res, "")
|
|
|
|
self.failUnlessReallyEqual(int(status), 206)
|
2017-07-24 20:51:58 +00:00
|
|
|
self.failUnless(headers.hasHeader("content-range"))
|
|
|
|
self.failUnlessReallyEqual(headers.getRawHeaders("content-range")[0],
|
2010-07-11 20:02:52 +00:00
|
|
|
"bytes 1-10/%d" % len(self.BAR_CONTENTS))
|
2008-10-28 20:41:04 +00:00
|
|
|
d.addCallback(_got)
|
|
|
|
return d
|
|
|
|
|
2008-11-18 14:41:35 +00:00
|
|
|
def test_HEAD_FILEURL_partial_range(self):
|
|
|
|
headers = {"range": "bytes=5-"}
|
|
|
|
length = len(self.BAR_CONTENTS)
|
|
|
|
d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
|
|
|
|
return_response=True)
|
2019-04-12 14:18:36 +00:00
|
|
|
def _got(res_and_status_and_headers):
|
|
|
|
(res, status, headers) = res_and_status_and_headers
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(int(status), 206)
|
2017-07-24 20:51:58 +00:00
|
|
|
self.failUnless(headers.hasHeader("content-range"))
|
|
|
|
self.failUnlessReallyEqual(headers.getRawHeaders("content-range")[0],
|
2010-07-11 20:02:52 +00:00
|
|
|
"bytes 5-%d/%d" % (length-1, length))
|
2008-11-18 14:41:35 +00:00
|
|
|
d.addCallback(_got)
|
|
|
|
return d
|
|
|
|
|
2010-03-10 03:59:13 +00:00
|
|
|
def test_HEAD_FILEURL_partial_end_range(self):
|
|
|
|
headers = {"range": "bytes=-5"}
|
|
|
|
length = len(self.BAR_CONTENTS)
|
|
|
|
d = self.HEAD(self.public_url + "/foo/bar.txt", headers=headers,
|
|
|
|
return_response=True)
|
2019-04-12 14:18:36 +00:00
|
|
|
def _got(res_and_status_and_headers):
|
|
|
|
(res, status, headers) = res_and_status_and_headers
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(int(status), 206)
|
2017-07-24 20:51:58 +00:00
|
|
|
self.failUnless(headers.hasHeader("content-range"))
|
|
|
|
self.failUnlessReallyEqual(headers.getRawHeaders("content-range")[0],
|
2010-07-11 20:02:52 +00:00
|
|
|
"bytes %d-%d/%d" % (length-5, length-1, length))
|
2010-03-10 03:59:13 +00:00
|
|
|
d.addCallback(_got)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_HEAD_FILEURL_partial_range_overrun(self):
|
|
|
|
headers = {"range": "bytes=100-200"}
|
|
|
|
d = self.shouldFail2(error.Error, "test_HEAD_FILEURL_range_overrun",
|
|
|
|
"416 Requested Range not satisfiable",
|
|
|
|
"",
|
|
|
|
self.HEAD, self.public_url + "/foo/bar.txt",
|
|
|
|
headers=headers)
|
|
|
|
return d
|
|
|
|
|
2008-10-28 20:41:04 +00:00
|
|
|
def test_GET_FILEURL_range_bad(self):
|
|
|
|
headers = {"range": "BOGUS=fizbop-quarnak"}
|
2010-03-10 03:59:13 +00:00
|
|
|
d = self.GET(self.public_url + "/foo/bar.txt", headers=headers,
|
|
|
|
return_response=True)
|
2019-04-12 14:18:36 +00:00
|
|
|
def _got(res_and_status_and_headers):
|
|
|
|
(res, status, headers) = res_and_status_and_headers
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(int(status), 200)
|
2017-07-24 20:51:58 +00:00
|
|
|
self.failUnless(not headers.hasHeader("content-range"))
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(res, self.BAR_CONTENTS)
|
2010-03-10 03:59:13 +00:00
|
|
|
d.addCallback(_got)
|
2008-10-28 20:41:04 +00:00
|
|
|
return d
|
|
|
|
|
2008-05-20 18:47:43 +00:00
|
|
|
def test_HEAD_FILEURL(self):
|
2008-10-28 20:41:04 +00:00
|
|
|
d = self.HEAD(self.public_url + "/foo/bar.txt", return_response=True)
|
2019-04-12 14:18:36 +00:00
|
|
|
def _got(res_and_status_and_headers):
|
|
|
|
(res, status, headers) = res_and_status_and_headers
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(res, "")
|
2017-07-24 20:51:58 +00:00
|
|
|
self.failUnlessReallyEqual(headers.getRawHeaders("content-length")[0],
|
2010-07-11 20:02:52 +00:00
|
|
|
str(len(self.BAR_CONTENTS)))
|
2017-07-24 20:51:58 +00:00
|
|
|
self.failUnlessReallyEqual(headers.getRawHeaders("content-type"),
|
|
|
|
["text/plain"])
|
2008-05-20 18:47:43 +00:00
|
|
|
d.addCallback(_got)
|
|
|
|
return d
|
|
|
|
|
2008-05-14 21:32:21 +00:00
|
|
|
def test_GET_FILEURL_named(self):
|
|
|
|
base = "/file/%s" % urllib.quote(self._bar_txt_uri)
|
2008-05-20 18:13:12 +00:00
|
|
|
base2 = "/named/%s" % urllib.quote(self._bar_txt_uri)
|
2008-05-14 21:32:21 +00:00
|
|
|
d = self.GET(base + "/@@name=/blah.txt")
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
d.addCallback(lambda res: self.GET(base + "/blah.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
d.addCallback(lambda res: self.GET(base + "/ignore/lots/blah.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
2008-05-20 18:13:12 +00:00
|
|
|
d.addCallback(lambda res: self.GET(base2 + "/@@name=/blah.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
2008-07-19 01:58:57 +00:00
|
|
|
save_url = base + "?save=true&filename=blah.txt"
|
|
|
|
d.addCallback(lambda res: self.GET(save_url))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
|
|
|
|
u_filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
|
|
|
|
u_fn_e = urllib.quote(u_filename.encode("utf-8"))
|
|
|
|
u_url = base + "?save=true&filename=" + u_fn_e
|
|
|
|
d.addCallback(lambda res: self.GET(u_url))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt) # TODO: check headers
|
2008-05-14 21:32:21 +00:00
|
|
|
return d
|
|
|
|
|
2008-05-20 06:28:52 +00:00
|
|
|
def test_PUT_FILEURL_named_bad(self):
|
|
|
|
base = "/file/%s" % urllib.quote(self._bar_txt_uri)
|
|
|
|
d = self.shouldFail2(error.Error, "test_PUT_FILEURL_named_bad",
|
|
|
|
"400 Bad Request",
|
|
|
|
"/file can only be used with GET or HEAD",
|
|
|
|
self.PUT, base + "/@@name=/blah.txt", "")
|
|
|
|
return d
|
|
|
|
|
2011-08-07 00:43:48 +00:00
|
|
|
|
2008-05-20 06:28:52 +00:00
|
|
|
def test_GET_DIRURL_named_bad(self):
|
|
|
|
base = "/file/%s" % urllib.quote(self._foo_uri)
|
|
|
|
d = self.shouldFail2(error.Error, "test_PUT_DIRURL_named_bad",
|
|
|
|
"400 Bad Request",
|
|
|
|
"is not a file-cap",
|
|
|
|
self.GET, base + "/@@name=/blah.txt")
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_GET_slash_file_bad(self):
|
|
|
|
d = self.shouldFail2(error.Error, "test_GET_slash_file_bad",
|
|
|
|
"404 Not Found",
|
|
|
|
"/file must be followed by a file-cap and a name",
|
|
|
|
self.GET, "/file")
|
|
|
|
return d
|
|
|
|
|
2008-05-20 01:38:39 +00:00
|
|
|
def test_GET_unhandled_URI_named(self):
|
|
|
|
contents, n, newuri = self.makefile(12)
|
2008-12-08 19:44:11 +00:00
|
|
|
verifier_cap = n.get_verify_cap().to_string()
|
2008-05-20 01:38:39 +00:00
|
|
|
base = "/file/%s" % urllib.quote(verifier_cap)
|
|
|
|
# client.create_node_from_uri() can't handle verify-caps
|
|
|
|
d = self.shouldFail2(error.Error, "GET_unhandled_URI_named",
|
2009-08-15 11:02:56 +00:00
|
|
|
"400 Bad Request", "is not a file-cap",
|
2008-05-20 01:38:39 +00:00
|
|
|
self.GET, base)
|
|
|
|
return d
|
|
|
|
|
2008-05-20 06:28:52 +00:00
|
|
|
def test_GET_unhandled_URI(self):
|
|
|
|
contents, n, newuri = self.makefile(12)
|
2008-12-08 19:44:11 +00:00
|
|
|
verifier_cap = n.get_verify_cap().to_string()
|
2008-05-20 06:28:52 +00:00
|
|
|
base = "/uri/%s" % urllib.quote(verifier_cap)
|
|
|
|
# client.create_node_from_uri() can't handle verify-caps
|
|
|
|
d = self.shouldFail2(error.Error, "test_GET_unhandled_URI",
|
|
|
|
"400 Bad Request",
|
2009-08-15 11:02:56 +00:00
|
|
|
"GET unknown URI type: can only do t=info",
|
2008-05-20 06:28:52 +00:00
|
|
|
self.GET, base)
|
|
|
|
return d
|
|
|
|
|
2008-05-20 18:33:14 +00:00
|
|
|
def test_GET_FILE_URI(self):
|
|
|
|
base = "/uri/%s" % urllib.quote(self._bar_txt_uri)
|
|
|
|
d = self.GET(base)
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
return d
|
|
|
|
|
2011-08-07 00:43:48 +00:00
|
|
|
def test_GET_FILE_URI_mdmf(self):
|
|
|
|
base = "/uri/%s" % urllib.quote(self._quux_txt_uri)
|
|
|
|
d = self.GET(base)
|
|
|
|
d.addCallback(self.failUnlessIsQuuxDotTxt)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_GET_FILE_URI_mdmf_extensions(self):
|
2011-10-01 23:35:53 +00:00
|
|
|
base = "/uri/%s" % urllib.quote("%s:RANDOMSTUFF" % self._quux_txt_uri)
|
2011-08-07 00:43:48 +00:00
|
|
|
d = self.GET(base)
|
|
|
|
d.addCallback(self.failUnlessIsQuuxDotTxt)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_GET_FILE_URI_mdmf_readonly(self):
|
|
|
|
base = "/uri/%s" % urllib.quote(self._quux_txt_readonly_uri)
|
|
|
|
d = self.GET(base)
|
|
|
|
d.addCallback(self.failUnlessIsQuuxDotTxt)
|
|
|
|
return d
|
|
|
|
|
2019-12-22 11:20:46 +00:00
|
|
|
def test_GET_FILE_URI_badchild(self):
|
2008-05-20 18:33:14 +00:00
|
|
|
base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
|
2019-09-24 19:55:09 +00:00
|
|
|
errmsg = "Files have no children named 'boguschild'"
|
2008-05-20 18:33:14 +00:00
|
|
|
d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
|
|
|
|
"400 Bad Request", errmsg,
|
|
|
|
self.GET, base)
|
|
|
|
return d
|
|
|
|
|
2019-12-22 11:20:46 +00:00
|
|
|
def test_PUT_FILE_URI_badchild(self):
|
2008-05-20 18:33:14 +00:00
|
|
|
base = "/uri/%s/boguschild" % urllib.quote(self._bar_txt_uri)
|
|
|
|
errmsg = "Cannot create directory 'boguschild', because its parent is a file, not a directory"
|
|
|
|
d = self.shouldFail2(error.Error, "test_GET_FILE_URI_badchild",
|
2019-11-18 23:55:25 +00:00
|
|
|
"409 Conflict", errmsg,
|
2008-05-20 18:33:14 +00:00
|
|
|
self.PUT, base, "")
|
|
|
|
return d
|
|
|
|
|
2011-08-07 00:43:48 +00:00
|
|
|
def test_PUT_FILE_URI_mdmf(self):
|
|
|
|
base = "/uri/%s" % urllib.quote(self._quux_txt_uri)
|
|
|
|
self._quux_new_contents = "new_contents"
|
|
|
|
d = self.GET(base)
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessIsQuuxDotTxt(res))
|
|
|
|
d.addCallback(lambda ignored:
|
|
|
|
self.PUT(base, self._quux_new_contents))
|
|
|
|
d.addCallback(lambda ignored:
|
|
|
|
self.GET(base))
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessReallyEqual(res, self._quux_new_contents))
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_PUT_FILE_URI_mdmf_extensions(self):
|
2011-10-01 23:35:53 +00:00
|
|
|
base = "/uri/%s" % urllib.quote("%s:EXTENSIONSTUFF" % self._quux_txt_uri)
|
2011-08-07 00:43:48 +00:00
|
|
|
self._quux_new_contents = "new_contents"
|
|
|
|
d = self.GET(base)
|
|
|
|
d.addCallback(lambda res: self.failUnlessIsQuuxDotTxt(res))
|
|
|
|
d.addCallback(lambda ignored: self.PUT(base, self._quux_new_contents))
|
|
|
|
d.addCallback(lambda ignored: self.GET(base))
|
|
|
|
d.addCallback(lambda res: self.failUnlessEqual(self._quux_new_contents,
|
|
|
|
res))
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_PUT_FILE_URI_mdmf_readonly(self):
|
|
|
|
# We're not allowed to PUT things to a readonly cap.
|
|
|
|
base = "/uri/%s" % self._quux_txt_readonly_uri
|
|
|
|
d = self.GET(base)
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessIsQuuxDotTxt(res))
|
|
|
|
# What should we get here? We get a 500 error now; that's not right.
|
|
|
|
d.addCallback(lambda ignored:
|
|
|
|
self.shouldFail2(error.Error, "test_PUT_FILE_URI_mdmf_readonly",
|
|
|
|
"400 Bad Request", "read-only cap",
|
|
|
|
self.PUT, base, "new data"))
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_PUT_FILE_URI_sdmf_readonly(self):
|
|
|
|
# We're not allowed to put things to a readonly cap.
|
|
|
|
base = "/uri/%s" % self._baz_txt_readonly_uri
|
|
|
|
d = self.GET(base)
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessIsBazDotTxt(res))
|
|
|
|
d.addCallback(lambda ignored:
|
|
|
|
self.shouldFail2(error.Error, "test_PUT_FILE_URI_sdmf_readonly",
|
|
|
|
"400 Bad Request", "read-only cap",
|
|
|
|
self.PUT, base, "new_data"))
|
|
|
|
return d
|
|
|
|
|
2012-03-31 20:46:34 +00:00
|
|
|
def test_GET_etags(self):
|
|
|
|
|
|
|
|
def _check_etags(uri):
|
|
|
|
d1 = _get_etag(uri)
|
|
|
|
d2 = _get_etag(uri, 'json')
|
|
|
|
d = defer.DeferredList([d1, d2], consumeErrors=True)
|
|
|
|
def _check(results):
|
2012-05-13 07:19:20 +00:00
|
|
|
# All deferreds must succeed
|
|
|
|
self.failUnless(all([r[0] for r in results]))
|
|
|
|
# the etag for the t=json form should be just like the etag
|
|
|
|
# of the default t='' form, but with a 'json' suffix
|
|
|
|
self.failUnlessEqual(results[0][1] + 'json', results[1][1])
|
2012-03-31 20:46:34 +00:00
|
|
|
d.addCallback(_check)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def _get_etag(uri, t=''):
|
|
|
|
targetbase = "/uri/%s?t=%s" % (urllib.quote(uri.strip()), t)
|
|
|
|
d = self.GET(targetbase, return_response=True, followRedirect=True)
|
|
|
|
def _just_the_etag(result):
|
|
|
|
data, response, headers = result
|
2017-07-24 20:51:58 +00:00
|
|
|
etag = headers.getRawHeaders('etag')[0]
|
2012-05-13 07:19:20 +00:00
|
|
|
if uri.startswith('URI:DIR'):
|
|
|
|
self.failUnless(etag.startswith('DIR:'), etag)
|
2012-03-31 20:46:34 +00:00
|
|
|
return etag
|
|
|
|
return d.addCallback(_just_the_etag)
|
|
|
|
|
|
|
|
# Check that etags work with immutable directories
|
|
|
|
(newkids, caps) = self._create_immutable_children()
|
|
|
|
d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-immutable",
|
2017-01-19 22:39:53 +00:00
|
|
|
json.dumps(newkids))
|
2012-05-13 07:41:53 +00:00
|
|
|
def _stash_immdir_uri(uri):
|
|
|
|
self._immdir_uri = uri
|
|
|
|
return uri
|
|
|
|
d.addCallback(_stash_immdir_uri)
|
2012-03-31 20:46:34 +00:00
|
|
|
d.addCallback(_check_etags)
|
|
|
|
|
|
|
|
# Check that etags work with immutable files
|
|
|
|
d.addCallback(lambda _: _check_etags(self._bar_txt_uri))
|
2012-05-13 07:19:20 +00:00
|
|
|
|
|
|
|
# use the ETag on GET
|
|
|
|
def _check_match(ign):
|
|
|
|
uri = "/uri/%s" % self._bar_txt_uri
|
|
|
|
d = self.GET(uri, return_response=True)
|
|
|
|
# extract the ETag
|
2019-04-12 14:18:36 +00:00
|
|
|
d.addCallback(lambda data_code_headers:
|
|
|
|
data_code_headers[2].getRawHeaders('etag')[0])
|
2012-05-13 07:19:20 +00:00
|
|
|
# do a GET that's supposed to match the ETag
|
|
|
|
d.addCallback(lambda etag:
|
|
|
|
self.GET(uri, return_response=True,
|
|
|
|
headers={"If-None-Match": etag}))
|
|
|
|
# make sure it short-circuited (304 instead of 200)
|
2019-04-12 14:18:36 +00:00
|
|
|
d.addCallback(lambda data_code_headers:
|
|
|
|
self.failUnlessEqual(int(data_code_headers[1]), http.NOT_MODIFIED))
|
2012-05-13 07:19:20 +00:00
|
|
|
return d
|
|
|
|
d.addCallback(_check_match)
|
2012-05-13 07:41:53 +00:00
|
|
|
|
|
|
|
def _no_etag(uri, t):
|
|
|
|
target = "/uri/%s?t=%s" % (uri, t)
|
|
|
|
d = self.GET(target, return_response=True, followRedirect=True)
|
2019-04-12 14:18:36 +00:00
|
|
|
d.addCallback(lambda data_code_headers:
|
|
|
|
self.failIf(data_code_headers[2].hasHeader("etag"), target))
|
2012-05-13 07:41:53 +00:00
|
|
|
return d
|
|
|
|
def _yes_etag(uri, t):
|
|
|
|
target = "/uri/%s?t=%s" % (uri, t)
|
|
|
|
d = self.GET(target, return_response=True, followRedirect=True)
|
2019-04-12 14:18:36 +00:00
|
|
|
d.addCallback(lambda data_code_headers:
|
|
|
|
self.failUnless(data_code_headers[2].hasHeader("etag"), target))
|
2012-05-13 07:41:53 +00:00
|
|
|
return d
|
|
|
|
|
|
|
|
d.addCallback(lambda ign: _yes_etag(self._bar_txt_uri, ""))
|
|
|
|
d.addCallback(lambda ign: _yes_etag(self._bar_txt_uri, "json"))
|
|
|
|
d.addCallback(lambda ign: _yes_etag(self._bar_txt_uri, "uri"))
|
|
|
|
d.addCallback(lambda ign: _yes_etag(self._bar_txt_uri, "readonly-uri"))
|
|
|
|
d.addCallback(lambda ign: _no_etag(self._bar_txt_uri, "info"))
|
|
|
|
|
|
|
|
d.addCallback(lambda ign: _yes_etag(self._immdir_uri, ""))
|
|
|
|
d.addCallback(lambda ign: _yes_etag(self._immdir_uri, "json"))
|
|
|
|
d.addCallback(lambda ign: _yes_etag(self._immdir_uri, "uri"))
|
|
|
|
d.addCallback(lambda ign: _yes_etag(self._immdir_uri, "readonly-uri"))
|
|
|
|
d.addCallback(lambda ign: _no_etag(self._immdir_uri, "info"))
|
|
|
|
d.addCallback(lambda ign: _no_etag(self._immdir_uri, "rename-form"))
|
|
|
|
|
2012-03-31 20:46:34 +00:00
|
|
|
return d
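# The conditional-GET behaviour exercised above can be driven from any plain
# HTTP client: remember the ETag from an earlier download and send it back
# in If-None-Match, treating a 304 as "cached copy still valid".  A minimal
# synchronous sketch, not part of this test suite; the host, port and cap
# value are illustrative assumptions only.
def fetch_if_unchanged(cap, known_etag, host="127.0.0.1", port=3456):
    import httplib, urllib
    conn = httplib.HTTPConnection(host, port)
    conn.request("GET", "/uri/%s" % urllib.quote(cap),
                 headers={"If-None-Match": known_etag})
    resp = conn.getresponse()
    if resp.status == 304:
        return (known_etag, None)      # unchanged since the previous fetch
    return (resp.getheader("etag"), resp.read())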
|
|
|
|
|
2010-01-27 07:03:09 +00:00
|
|
|
# TODO: version of this with a Unicode filename
|
2007-12-12 01:04:44 +00:00
|
|
|
def test_GET_FILEURL_save(self):
|
2010-01-27 07:03:09 +00:00
|
|
|
d = self.GET(self.public_url + "/foo/bar.txt?filename=bar.txt&save=true",
|
|
|
|
return_response=True)
|
2019-04-12 14:18:36 +00:00
|
|
|
def _got(res_and_status_and_headers):
|
|
|
|
(res, statuscode, headers) = res_and_status_and_headers
|
2017-07-24 20:51:58 +00:00
|
|
|
content_disposition = headers.getRawHeaders("content-disposition")[0]
|
2010-01-27 07:03:09 +00:00
|
|
|
self.failUnless(content_disposition == 'attachment; filename="bar.txt"', content_disposition)
|
|
|
|
self.failUnlessIsBarDotTxt(res)
|
|
|
|
d.addCallback(_got)
|
2007-07-07 02:43:55 +00:00
|
|
|
return d
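# The filename=/save=true query arguments checked above only shape the
# Content-Disposition header, so a browser-style download link can be built
# mechanically.  Tiny illustrative helper (the base path is an assumption):
def download_url(base_path, childname):
    import urllib
    return "%s/%s?%s" % (base_path, urllib.quote(childname),
                         urllib.urlencode({"filename": childname,
                                           "save": "true"}))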
|
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_GET_FILEURL_missing(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.GET(self.public_url + "/foo/missing")
|
2007-07-07 02:43:55 +00:00
|
|
|
d.addBoth(self.should404, "test_GET_FILEURL_missing")
|
|
|
|
return d
|
|
|
|
|
2011-08-07 00:43:48 +00:00
|
|
|
def test_GET_FILEURL_info_mdmf(self):
|
|
|
|
d = self.GET("/uri/%s?t=info" % self._quux_txt_uri)
|
|
|
|
def _got(res):
|
|
|
|
self.failUnlessIn("mutable file (mdmf)", res)
|
|
|
|
self.failUnlessIn(self._quux_txt_uri, res)
|
|
|
|
self.failUnlessIn(self._quux_txt_readonly_uri, res)
|
|
|
|
d.addCallback(_got)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_GET_FILEURL_info_mdmf_readonly(self):
|
|
|
|
d = self.GET("/uri/%s?t=info" % self._quux_txt_readonly_uri)
|
|
|
|
def _got(res):
|
|
|
|
self.failUnlessIn("mutable file (mdmf)", res)
|
|
|
|
self.failIfIn(self._quux_txt_uri, res)
|
|
|
|
self.failUnlessIn(self._quux_txt_readonly_uri, res)
|
|
|
|
d.addCallback(_got)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_GET_FILEURL_info_sdmf(self):
|
|
|
|
d = self.GET("/uri/%s?t=info" % self._baz_txt_uri)
|
|
|
|
def _got(res):
|
|
|
|
self.failUnlessIn("mutable file (sdmf)", res)
|
|
|
|
self.failUnlessIn(self._baz_txt_uri, res)
|
|
|
|
d.addCallback(_got)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_GET_FILEURL_info_mdmf_extensions(self):
|
2011-10-01 23:35:53 +00:00
|
|
|
d = self.GET("/uri/%s:STUFF?t=info" % self._quux_txt_uri)
|
2011-08-07 00:43:48 +00:00
|
|
|
def _got(res):
|
|
|
|
self.failUnlessIn("mutable file (mdmf)", res)
|
|
|
|
self.failUnlessIn(self._quux_txt_uri, res)
|
|
|
|
self.failUnlessIn(self._quux_txt_readonly_uri, res)
|
|
|
|
d.addCallback(_got)
|
|
|
|
return d
|
|
|
|
|
2009-07-20 03:46:32 +00:00
|
|
|
def test_PUT_overwrite_only_files(self):
|
|
|
|
# create a directory, put a file in that directory.
|
2009-08-15 11:02:56 +00:00
|
|
|
contents, n, filecap = self.makefile(8)
|
2009-07-20 03:46:32 +00:00
|
|
|
d = self.PUT(self.public_url + "/foo/dir?t=mkdir", "")
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.PUT(self.public_url + "/foo/dir/file1.txt",
|
|
|
|
self.NEWFILE_CONTENTS))
|
|
|
|
# try to overwrite the file with replace=only-files
|
|
|
|
# (this should work)
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.PUT(self.public_url + "/foo/dir/file1.txt?t=uri&replace=only-files",
|
2009-08-15 11:02:56 +00:00
|
|
|
filecap))
|
2009-07-20 03:46:32 +00:00
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.shouldFail2(error.Error, "PUT_bad_t", "409 Conflict",
|
|
|
|
"There was already a child by that name, and you asked me "
|
|
|
|
"to not replace it",
|
|
|
|
self.PUT, self.public_url + "/foo/dir?t=uri&replace=only-files",
|
2009-08-15 11:02:56 +00:00
|
|
|
filecap))
|
2009-07-20 03:46:32 +00:00
|
|
|
return d
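# The replace=only-files semantics shown above, outside the test harness:
# PUT with t=uri attaches an existing cap under a directory, and
# replace=only-files permits overwriting a file link but answers
# 409 Conflict when the existing child is a directory.  Hedged sketch; the
# host, port and arguments are placeholders, not values from this file.
def link_cap_only_files(dircap, childname, childcap, host="127.0.0.1", port=3456):
    import httplib, urllib
    conn = httplib.HTTPConnection(host, port)
    path = "/uri/%s/%s?t=uri&replace=only-files" % (urllib.quote(dircap),
                                                    urllib.quote(childname))
    conn.request("PUT", path, childcap)
    resp = conn.getresponse()
    return resp.status, resp.read()    # expect 409 if the child is a directory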
|
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_PUT_NEWFILEURL(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.PUT(self.public_url + "/foo/new.txt", self.NEWFILE_CONTENTS)
|
2007-12-05 06:01:37 +00:00
|
|
|
# TODO: we lose the response code, so we can't check this
|
2010-07-11 20:02:52 +00:00
|
|
|
#self.failUnlessReallyEqual(responsecode, 201)
|
2010-01-27 06:44:30 +00:00
|
|
|
d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
|
2007-12-05 06:01:37 +00:00
|
|
|
self.NEWFILE_CONTENTS))
|
2007-07-07 02:43:55 +00:00
|
|
|
return d
|
|
|
|
|
2009-04-08 02:13:40 +00:00
|
|
|
def test_PUT_NEWFILEURL_not_mutable(self):
|
|
|
|
d = self.PUT(self.public_url + "/foo/new.txt?mutable=false",
|
|
|
|
self.NEWFILE_CONTENTS)
|
|
|
|
# TODO: we lose the response code, so we can't check this
|
2010-07-11 20:02:52 +00:00
|
|
|
#self.failUnlessReallyEqual(responsecode, 201)
|
2010-01-27 06:44:30 +00:00
|
|
|
d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
|
2009-04-08 02:13:40 +00:00
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
|
|
|
|
self.NEWFILE_CONTENTS))
|
|
|
|
return d
|
|
|
|
|
2011-08-07 00:43:48 +00:00
|
|
|
def test_PUT_NEWFILEURL_unlinked_mdmf(self):
|
|
|
|
# this should get us a few segments of an MDMF mutable file,
|
|
|
|
# which we can then test for.
|
|
|
|
contents = self.NEWFILE_CONTENTS * 300000
|
2011-10-02 04:00:36 +00:00
|
|
|
d = self.PUT("/uri?format=mdmf",
|
2011-08-07 00:43:48 +00:00
|
|
|
contents)
|
|
|
|
def _got_filecap(filecap):
|
|
|
|
self.failUnless(filecap.startswith("URI:MDMF"))
|
|
|
|
return filecap
|
|
|
|
d.addCallback(_got_filecap)
|
|
|
|
d.addCallback(lambda filecap: self.GET("/uri/%s?t=json" % filecap))
|
2011-10-13 16:31:43 +00:00
|
|
|
d.addCallback(lambda json: self.failUnlessIn("MDMF", json))
|
2011-08-07 00:43:48 +00:00
|
|
|
return d
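# The unlinked-upload form used above works the same way from any client:
# PUT the bytes to /uri?format=... and the response body is the new file's
# cap (starting with "URI:MDMF" for format=mdmf).  Sketch only; the host
# and port are assumptions.
def upload_unlinked(data, fmt="mdmf", host="127.0.0.1", port=3456):
    import httplib
    conn = httplib.HTTPConnection(host, port)
    conn.request("PUT", "/uri?format=%s" % fmt, data)
    return conn.getresponse().read().strip()    # the new filecap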
|
|
|
|
|
|
|
|
def test_PUT_NEWFILEURL_unlinked_sdmf(self):
|
|
|
|
contents = self.NEWFILE_CONTENTS * 300000
|
2011-10-02 04:00:36 +00:00
|
|
|
d = self.PUT("/uri?format=sdmf",
|
2011-08-07 00:43:48 +00:00
|
|
|
contents)
|
|
|
|
d.addCallback(lambda filecap: self.GET("/uri/%s?t=json" % filecap))
|
2011-10-13 16:31:43 +00:00
|
|
|
d.addCallback(lambda json: self.failUnlessIn("SDMF", json))
|
2011-08-07 00:43:48 +00:00
|
|
|
return d
|
|
|
|
|
2017-07-25 00:53:48 +00:00
|
|
|
@inlineCallbacks
|
2011-10-02 04:00:36 +00:00
|
|
|
def test_PUT_NEWFILEURL_unlinked_bad_format(self):
|
2011-08-07 00:43:48 +00:00
|
|
|
contents = self.NEWFILE_CONTENTS * 300000
|
2017-07-25 00:53:48 +00:00
|
|
|
yield self.assertHTTPError(self.webish_url + "/uri?format=foo", 400,
|
|
|
|
"Unknown format: foo",
|
|
|
|
method="put", data=contents)
|
2011-08-07 00:43:48 +00:00
|
|
|
|
2008-10-28 20:41:04 +00:00
|
|
|
def test_PUT_NEWFILEURL_range_bad(self):
|
|
|
|
headers = {"content-range": "bytes 1-10/%d" % len(self.NEWFILE_CONTENTS)}
|
|
|
|
target = self.public_url + "/foo/new.txt"
|
|
|
|
d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_range_bad",
|
|
|
|
"501 Not Implemented",
|
|
|
|
"Content-Range in PUT not yet supported",
|
|
|
|
# (and certainly not for immutable files)
|
|
|
|
self.PUT, target, self.NEWFILE_CONTENTS[1:11],
|
|
|
|
headers=headers)
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failIfNodeHasChild(self._foo_node, u"new.txt"))
|
|
|
|
return d
|
|
|
|
|
2008-05-20 19:36:02 +00:00
|
|
|
def test_PUT_NEWFILEURL_mutable(self):
|
|
|
|
d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
|
|
|
|
self.NEWFILE_CONTENTS)
|
|
|
|
# TODO: we lose the response code, so we can't check this
|
2010-07-11 20:02:52 +00:00
|
|
|
#self.failUnlessReallyEqual(responsecode, 201)
|
2008-05-20 19:36:02 +00:00
|
|
|
def _check_uri(res):
|
|
|
|
u = uri.from_string_mutable_filenode(res)
|
|
|
|
self.failUnless(u.is_mutable())
|
|
|
|
self.failIf(u.is_readonly())
|
|
|
|
return res
|
|
|
|
d.addCallback(_check_uri)
|
2010-01-27 06:44:30 +00:00
|
|
|
d.addCallback(self.failUnlessURIMatchesRWChild, self._foo_node, u"new.txt")
|
2008-05-20 19:36:02 +00:00
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessMutableChildContentsAre(self._foo_node,
|
|
|
|
u"new.txt",
|
|
|
|
self.NEWFILE_CONTENTS))
|
|
|
|
return d
|
|
|
|
|
2008-06-03 07:03:16 +00:00
|
|
|
def test_PUT_NEWFILEURL_mutable_toobig(self):
|
2011-08-07 00:43:48 +00:00
|
|
|
# It is okay to upload large mutable files, so we should be able
|
|
|
|
# to do that.
|
|
|
|
d = self.PUT(self.public_url + "/foo/new.txt?mutable=true",
|
|
|
|
"b" * (self.s.MUTABLE_SIZELIMIT + 1))
|
2008-06-03 07:03:16 +00:00
|
|
|
return d
|
|
|
|
|
2007-08-15 20:22:23 +00:00
|
|
|
def test_PUT_NEWFILEURL_replace(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.PUT(self.public_url + "/foo/bar.txt", self.NEWFILE_CONTENTS)
|
2007-12-05 06:01:37 +00:00
|
|
|
# TODO: we lose the response code, so we can't check this
|
2010-07-11 20:02:52 +00:00
|
|
|
#self.failUnlessReallyEqual(responsecode, 200)
|
2010-01-27 06:44:30 +00:00
|
|
|
d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
|
2007-12-05 06:01:37 +00:00
|
|
|
self.NEWFILE_CONTENTS))
|
2007-08-15 20:22:23 +00:00
|
|
|
return d
|
|
|
|
|
2008-05-20 01:38:39 +00:00
|
|
|
def test_PUT_NEWFILEURL_bad_t(self):
|
|
|
|
d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
|
|
|
|
"PUT to a file: bad t=bogus",
|
|
|
|
self.PUT, self.public_url + "/foo/bar.txt?t=bogus",
|
|
|
|
"contents")
|
|
|
|
return d
|
|
|
|
|
2007-08-15 20:22:23 +00:00
|
|
|
def test_PUT_NEWFILEURL_no_replace(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.PUT(self.public_url + "/foo/bar.txt?replace=false",
|
2007-08-15 20:22:23 +00:00
|
|
|
self.NEWFILE_CONTENTS)
|
|
|
|
d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_no_replace",
|
|
|
|
"409 Conflict",
|
|
|
|
"There was already a child by that name, and you asked me "
|
|
|
|
"to not replace it")
|
|
|
|
return d
|
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_PUT_NEWFILEURL_mkdirs(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.PUT(self.public_url + "/foo/newdir/new.txt", self.NEWFILE_CONTENTS)
|
2007-12-05 06:01:37 +00:00
|
|
|
fn = self._foo_node
|
2010-01-27 06:44:30 +00:00
|
|
|
d.addCallback(self.failUnlessURIMatchesROChild, fn, u"newdir/new.txt")
|
2008-02-14 22:45:56 +00:00
|
|
|
d.addCallback(lambda res: self.failIfNodeHasChild(fn, u"new.txt"))
|
|
|
|
d.addCallback(lambda res: self.failUnlessNodeHasChild(fn, u"newdir"))
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failUnlessChildContentsAre(fn, u"newdir/new.txt",
|
2007-12-05 06:01:37 +00:00
|
|
|
self.NEWFILE_CONTENTS))
|
2007-07-07 02:43:55 +00:00
|
|
|
return d
|
|
|
|
|
2019-11-17 04:19:41 +00:00
|
|
|
def test_PUT_NEWFILEURL_blocked(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.PUT(self.public_url + "/foo/blockingfile/new.txt",
|
2007-07-07 02:43:55 +00:00
|
|
|
self.NEWFILE_CONTENTS)
|
|
|
|
d.addBoth(self.shouldFail, error.Error, "PUT_NEWFILEURL_blocked",
|
2008-05-19 19:56:02 +00:00
|
|
|
"409 Conflict",
|
|
|
|
"Unable to create directory 'blockingfile': a file was in the way")
|
2007-07-07 02:43:55 +00:00
|
|
|
return d
|
|
|
|
|
2009-12-27 20:10:43 +00:00
|
|
|
def test_PUT_NEWFILEURL_emptyname(self):
|
|
|
|
# an empty pathname component (i.e. a double-slash) is disallowed
|
|
|
|
d = self.shouldFail2(error.Error, "test_PUT_NEWFILEURL_emptyname",
|
|
|
|
"400 Bad Request",
|
|
|
|
"The webapi does not allow empty pathname components",
|
|
|
|
self.PUT, self.public_url + "/foo//new.txt", "")
|
|
|
|
return d
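# Because an empty pathname component ("//") is rejected with 400, client
# code that assembles webapi paths should drop empty components before
# joining.  Purely illustrative helper:
def join_webapi_path(*components):
    import urllib
    return "/".join(urllib.quote(c) for c in components if c)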
|
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_DELETE_FILEURL(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.DELETE(self.public_url + "/foo/bar.txt")
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
|
2007-07-07 02:43:55 +00:00
|
|
|
return d
|
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_DELETE_FILEURL_missing(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.DELETE(self.public_url + "/foo/missing")
|
2007-07-07 07:16:36 +00:00
|
|
|
d.addBoth(self.should404, "test_DELETE_FILEURL_missing")
|
|
|
|
return d
|
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_DELETE_FILEURL_missing2(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.DELETE(self.public_url + "/missing/missing")
|
2007-07-07 07:16:36 +00:00
|
|
|
d.addBoth(self.should404, "test_DELETE_FILEURL_missing2")
|
2007-07-07 02:43:55 +00:00
|
|
|
return d
|
|
|
|
|
2009-12-27 22:54:43 +00:00
|
|
|
def failUnlessHasBarDotTxtMetadata(self, res):
|
2017-01-19 22:39:53 +00:00
|
|
|
data = json.loads(res)
|
2009-12-27 22:54:43 +00:00
|
|
|
self.failUnless(isinstance(data, list))
|
2010-06-19 02:17:18 +00:00
|
|
|
self.failUnlessIn("metadata", data[1])
|
|
|
|
self.failUnlessIn("tahoe", data[1]["metadata"])
|
|
|
|
self.failUnlessIn("linkcrtime", data[1]["metadata"]["tahoe"])
|
|
|
|
self.failUnlessIn("linkmotime", data[1]["metadata"]["tahoe"])
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(data[1]["metadata"]["tahoe"]["linkcrtime"],
|
|
|
|
self._bar_txt_metadata["tahoe"]["linkcrtime"])
|
2009-12-27 22:54:43 +00:00
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_GET_FILEURL_json(self):
|
2007-07-07 02:43:55 +00:00
|
|
|
# twisted.web.http.parse_qs ignores any query args without an '=', so
|
|
|
|
# I can't do "GET /path?json", I have to do "GET /path?t=json"
|
|
|
|
# instead. This may make it tricky to emulate the S3 interface
|
|
|
|
# completely.
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.GET(self.public_url + "/foo/bar.txt?t=json")
|
2009-12-27 22:54:43 +00:00
|
|
|
def _check1(data):
|
|
|
|
self.failUnlessIsBarJSON(data)
|
|
|
|
self.failUnlessHasBarDotTxtMetadata(data)
|
|
|
|
return
|
|
|
|
d.addCallback(_check1)
|
2007-07-07 02:43:55 +00:00
|
|
|
return d
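# As the comment above notes, metadata is requested with "?t=json" rather
# than a bare "?json".  The response is a two-element JSON list of
# [nodetype, info]; for a file the nodetype is "filenode".  Standalone
# sketch of fetching and decoding it (host, port and cap are assumptions):
def get_node_metadata(cap, host="127.0.0.1", port=3456):
    import httplib, json, urllib
    conn = httplib.HTTPConnection(host, port)
    conn.request("GET", "/uri/%s?t=json" % urllib.quote(cap))
    nodetype, info = json.loads(conn.getresponse().read())
    return nodetype, info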
|
|
|
|
|
2011-08-07 00:43:48 +00:00
|
|
|
def test_GET_FILEURL_json_mutable_type(self):
|
2011-10-02 04:00:36 +00:00
|
|
|
# The JSON should include format, which says whether the
|
2011-08-07 00:43:48 +00:00
|
|
|
# file is SDMF or MDMF
|
2011-10-02 04:00:36 +00:00
|
|
|
d = self.PUT("/uri?format=mdmf",
|
2011-08-07 00:43:48 +00:00
|
|
|
self.NEWFILE_CONTENTS * 300000)
|
|
|
|
d.addCallback(lambda filecap: self.GET("/uri/%s?t=json" % filecap))
|
2017-01-19 22:39:53 +00:00
|
|
|
def _got_json(raw, version):
|
|
|
|
data = json.loads(raw)
|
2011-08-07 00:43:48 +00:00
|
|
|
assert "filenode" == data[0]
|
|
|
|
data = data[1]
|
|
|
|
assert isinstance(data, dict)
|
|
|
|
|
2011-10-02 04:00:36 +00:00
|
|
|
self.failUnlessIn("format", data)
|
|
|
|
self.failUnlessEqual(data["format"], version)
|
2011-08-07 00:43:48 +00:00
|
|
|
|
2011-10-13 16:31:43 +00:00
|
|
|
d.addCallback(_got_json, "MDMF")
|
2011-08-07 00:43:48 +00:00
|
|
|
# Now make an SDMF file and check that it is reported correctly.
|
|
|
|
d.addCallback(lambda ignored:
|
2011-10-02 04:00:36 +00:00
|
|
|
self.PUT("/uri?format=sdmf",
|
2011-08-07 00:43:48 +00:00
|
|
|
self.NEWFILE_CONTENTS * 300000))
|
|
|
|
d.addCallback(lambda filecap: self.GET("/uri/%s?t=json" % filecap))
|
2011-10-13 16:31:43 +00:00
|
|
|
d.addCallback(_got_json, "SDMF")
|
2011-08-07 00:43:48 +00:00
|
|
|
return d
|
|
|
|
|
|
|
|
def test_GET_FILEURL_json_mdmf(self):
|
|
|
|
d = self.GET("/uri/%s?t=json" % urllib.quote(self._quux_txt_uri))
|
|
|
|
d.addCallback(self.failUnlessIsQuuxJSON)
|
|
|
|
return d
|
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_GET_FILEURL_json_missing(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.GET(self.public_url + "/foo/missing?json")
|
2007-07-07 02:43:55 +00:00
|
|
|
d.addBoth(self.should404, "test_GET_FILEURL_json_missing")
|
|
|
|
return d
|
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_GET_FILEURL_uri(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.GET(self.public_url + "/foo/bar.txt?t=uri")
|
2007-07-07 02:43:55 +00:00
|
|
|
def _check(res):
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(res, self._bar_txt_uri)
|
2007-07-07 02:43:55 +00:00
|
|
|
d.addCallback(_check)
|
2007-07-08 05:47:18 +00:00
|
|
|
d.addCallback(lambda res:
|
2007-12-03 21:52:42 +00:00
|
|
|
self.GET(self.public_url + "/foo/bar.txt?t=readonly-uri"))
|
2007-07-08 05:47:18 +00:00
|
|
|
def _check2(res):
|
|
|
|
# for now, for files, uris and readonly-uris are the same
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(res, self._bar_txt_uri)
|
2007-07-08 05:47:18 +00:00
|
|
|
d.addCallback(_check2)
|
2007-07-07 02:43:55 +00:00
|
|
|
return d
|
|
|
|
|
2017-07-25 00:53:48 +00:00
|
|
|
@inlineCallbacks
|
2007-12-25 10:48:57 +00:00
|
|
|
def test_GET_FILEURL_badtype(self):
|
2017-07-25 00:53:48 +00:00
|
|
|
url = self.webish_url + self.public_url + "/foo/bar.txt?t=bogus"
|
|
|
|
yield self.assertHTTPError(url, 400, "bad t=bogus")
|
2007-12-25 10:48:57 +00:00
|
|
|
|
2010-06-22 21:47:14 +00:00
|
|
|
def test_CSS_FILE(self):
|
2011-11-17 21:49:23 +00:00
|
|
|
d = self.GET("/tahoe.css", followRedirect=True)
|
2010-06-22 21:47:14 +00:00
|
|
|
def _check(res):
|
|
|
|
CSS_STYLE = re.compile(r'toolbar\s{.+text-align:\scenter.+toolbar-item.+display:\sinline', re.DOTALL)
|
|
|
|
self.failUnless(CSS_STYLE.search(res), res)
|
|
|
|
d.addCallback(_check)
|
|
|
|
return d
|
2011-08-10 17:21:39 +00:00
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_GET_FILEURL_uri_missing(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.GET(self.public_url + "/foo/missing?t=uri")
|
2007-07-07 02:43:55 +00:00
|
|
|
d.addBoth(self.should404, "test_GET_FILEURL_uri_missing")
|
|
|
|
return d
|
|
|
|
|
2019-12-28 02:00:20 +00:00
|
|
|
def _check_upload_and_mkdir_forms(self, soup):
|
|
|
|
"""
|
|
|
|
Confirm `soup` contains a form to create a file, with radio
|
|
|
|
buttons that allow the user to toggle whether it is a CHK/LIT
|
|
|
|
(default), SDMF, or MDMF file.
|
|
|
|
"""
|
|
|
|
found = []
|
|
|
|
desired_ids = (
|
|
|
|
u"upload-chk",
|
|
|
|
u"upload-sdmf",
|
|
|
|
u"upload-mdmf",
|
|
|
|
u"mkdir-sdmf",
|
|
|
|
u"mkdir-mdmf",
|
|
|
|
)
|
|
|
|
for input_tag in soup.find_all(u"input"):
|
|
|
|
if input_tag.get(u"id", u"") in desired_ids:
|
|
|
|
found.append(input_tag)
|
|
|
|
else:
|
|
|
|
if input_tag.get(u"name", u"") == u"t" and input_tag.get(u"type", u"") == u"hidden":
|
|
|
|
if input_tag[u"value"] == u"upload":
|
|
|
|
found.append(input_tag)
|
|
|
|
elif input_tag[u"value"] == u"mkdir":
|
|
|
|
found.append(input_tag)
|
|
|
|
self.assertEqual(len(found), 7, u"Failed to find all 7 <input> tags")
|
|
|
|
assert_soup_has_favicon(self, soup)
|
2011-12-17 04:31:30 +00:00
|
|
|
|
2019-12-28 02:00:20 +00:00
|
|
|
@inlineCallbacks
|
2011-08-07 00:43:48 +00:00
|
|
|
def test_GET_DIRECTORY_html(self):
|
2019-12-28 02:00:20 +00:00
|
|
|
data = yield self.GET(self.public_url + "/foo", followRedirect=True)
|
|
|
|
soup = BeautifulSoup(data, 'html5lib')
|
|
|
|
self._check_upload_and_mkdir_forms(soup)
|
|
|
|
toolbars = soup.find_all(u"li", {u"class": u"toolbar-item"})
|
|
|
|
self.assertTrue(any(li.text == u"Return to Welcome page" for li in toolbars))
|
|
|
|
self.failUnlessIn("quux", data)
|
2010-06-22 20:14:03 +00:00
|
|
|
|
2019-12-28 02:15:17 +00:00
|
|
|
@inlineCallbacks
|
2012-10-25 00:01:25 +00:00
|
|
|
def test_GET_DIRECTORY_html_filenode_encoding(self):
|
2019-12-28 02:15:17 +00:00
|
|
|
data = yield self.GET(self.public_url + "/foo", followRedirect=True)
|
|
|
|
soup = BeautifulSoup(data, 'html5lib')
|
|
|
|
# Check if encoded entries are there
|
|
|
|
target_ref = u'@@named=/{}'.format(self._htmlname_urlencoded)
|
|
|
|
# at least one <a> tag has our weirdly-named file properly
|
|
|
|
# encoded (or else BeautifulSoup would produce an error)
|
|
|
|
self.assertTrue(
|
|
|
|
any(
|
|
|
|
a.text == self._htmlname_unicode and a[u"href"].endswith(target_ref)
|
|
|
|
for a in soup.find_all(u"a", {u"rel": u"noreferrer"})
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
|
|
|
# XXX leaving this as-is, but consider using BeautifulSoup here too?
|
|
|
|
# Make sure that Nevow escaping actually works by checking for unsafe characters
|
|
|
|
# and that '&' is escaped.
|
|
|
|
for entity in '<>':
|
|
|
|
self.failUnlessIn(entity, self._htmlname_raw)
|
|
|
|
self.failIfIn(entity, self._htmlname_escaped)
|
|
|
|
self.failUnlessIn('&', re.sub(r'&(amp|lt|gt|quot|apos);', '', self._htmlname_raw))
|
|
|
|
self.failIfIn('&', re.sub(r'&(amp|lt|gt|quot|apos);', '', self._htmlname_escaped))
|
2012-10-25 00:01:25 +00:00
|
|
|
|
2019-12-28 02:00:20 +00:00
|
|
|
@inlineCallbacks
|
2011-08-07 00:43:48 +00:00
|
|
|
def test_GET_root_html(self):
|
2019-12-28 02:00:20 +00:00
|
|
|
data = yield self.GET("/")
|
|
|
|
soup = BeautifulSoup(data, 'html5lib')
|
|
|
|
self._check_upload_and_mkdir_forms(soup)
|
2011-08-07 00:43:48 +00:00
|
|
|
|
2019-12-28 02:48:46 +00:00
|
|
|
@inlineCallbacks
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_GET_DIRURL(self):
|
2019-12-28 02:48:46 +00:00
|
|
|
data = yield self.GET(self.public_url + "/foo", followRedirect=True)
|
|
|
|
soup = BeautifulSoup(data, 'html5lib')
|
|
|
|
|
2008-06-18 02:49:40 +00:00
|
|
|
# from /uri/$URI/foo/ , we need ../../../ to get back to the root
|
2019-12-28 02:48:46 +00:00
|
|
|
root = u"../../.."
|
|
|
|
self.assertTrue(
|
|
|
|
any(
|
|
|
|
a.text == u"Return to Welcome page"
|
|
|
|
for a in soup.find_all(u"a", {u"href": root})
|
|
|
|
)
|
|
|
|
)
|
2007-07-07 02:43:55 +00:00
|
|
|
|
2019-12-28 02:48:46 +00:00
|
|
|
# the FILE reference points to a URI, but it should end in bar.txt
|
|
|
|
bar_url = "{}/file/{}/@@named=/bar.txt".format(root, urllib.quote(self._bar_txt_uri))
|
|
|
|
self.assertTrue(
|
|
|
|
any(
|
|
|
|
a.text == u"bar.txt"
|
|
|
|
for a in soup.find_all(u"a", {u"href": bar_url})
|
|
|
|
)
|
|
|
|
)
|
|
|
|
self.assertTrue(
|
|
|
|
any(
|
|
|
|
td.text == u"{}".format(len(self.BAR_CONTENTS))
|
|
|
|
for td in soup.find_all(u"td", {u"align": u"right"})
|
|
|
|
)
|
|
|
|
)
|
|
|
|
foo_url = urllib.quote("{}/uri/{}/".format(root, self._foo_uri))
|
|
|
|
forms = soup.find_all(u"form", {u"action": foo_url})
|
|
|
|
found = []
|
|
|
|
for form in forms:
|
|
|
|
if form.find_all(u"input", {u"name": u"name", u"value": u"bar.txt"}):
|
|
|
|
kind = form.find_all(u"input", {u"type": u"submit"})[0][u"value"]
|
|
|
|
found.append(kind)
|
|
|
|
if kind == u"unlink":
|
|
|
|
self.assertTrue(form[u"method"] == u"post")
|
|
|
|
self.assertEqual(
|
|
|
|
set(found),
|
|
|
|
{u"unlink", u"rename/relink"}
|
|
|
|
)
|
|
|
|
|
|
|
|
sub_url = "{}/uri/{}/".format(root, urllib.quote(self._sub_uri))
|
|
|
|
self.assertTrue(
|
|
|
|
any(
|
|
|
|
td.findNextSibling()(u"a")[0][u"href"] == sub_url
|
|
|
|
for td in soup.find_all(u"td")
|
|
|
|
if td.text == u"DIR"
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
|
|
|
@inlineCallbacks
|
|
|
|
def test_GET_DIRURL_readonly(self):
|
2011-08-09 00:11:17 +00:00
|
|
|
# look at a readonly directory
|
2019-12-28 02:48:46 +00:00
|
|
|
data = yield self.GET(self.public_url + "/reedownlee", followRedirect=True)
|
|
|
|
self.failUnlessIn("(read-only)", data)
|
|
|
|
self.failIfIn("Upload a file", data)
|
2007-07-08 05:47:18 +00:00
|
|
|
|
2019-12-28 02:48:46 +00:00
|
|
|
@inlineCallbacks
|
|
|
|
def test_GET_DIRURL_readonly_dir(self):
|
|
|
|
# look at a directory that contains a readonly directory
|
|
|
|
data = yield self.GET(self.public_url, followRedirect=True)
|
|
|
|
soup = BeautifulSoup(data, 'html5lib')
|
|
|
|
ro_links = list(
|
|
|
|
td.findNextSibling()(u"a")[0]
|
|
|
|
for td in soup.find_all(u"td")
|
|
|
|
if td.text == u"DIR-RO"
|
|
|
|
)
|
|
|
|
self.assertEqual(1, len(ro_links))
|
|
|
|
self.assertEqual(u"reedownlee", ro_links[0].text)
|
|
|
|
self.assertTrue(u"URI%3ADIR2-RO%3A" in ro_links[0][u"href"])
|
|
|
|
|
|
|
|
@inlineCallbacks
|
|
|
|
def test_GET_DIRURL_empty(self):
|
|
|
|
# look at an empty directory
|
2020-03-03 00:08:30 +00:00
|
|
|
data = yield self.GET(self.public_url + "/foo/empty")
|
2019-12-28 02:48:46 +00:00
|
|
|
soup = BeautifulSoup(data, 'html5lib')
|
|
|
|
self.failUnlessIn("directory is empty", data)
|
2019-12-28 04:07:28 +00:00
|
|
|
mkdir_inputs = soup.find_all(u"input", {u"type": u"hidden", u"name": u"t", u"value": u"mkdir"})
|
|
|
|
self.assertEqual(1, len(mkdir_inputs))
|
|
|
|
self.assertEqual(
|
|
|
|
u"Create a new directory in this directory",
|
|
|
|
mkdir_inputs[0].parent(u"legend")[0].text
|
|
|
|
)
|
2019-12-28 02:48:46 +00:00
|
|
|
|
|
|
|
@inlineCallbacks
|
|
|
|
def test_GET_DIRURL_literal(self):
|
|
|
|
# look at a literal directory
|
2010-02-25 04:18:24 +00:00
|
|
|
tiny_litdir_uri = "URI:DIR2-LIT:gqytunj2onug64tufqzdcosvkjetutcjkq5gw4tvm5vwszdgnz5hgyzufqydulbshj5x2lbm" # contains one child which is itself also LIT
|
2020-03-03 00:08:30 +00:00
|
|
|
data = yield self.GET("/uri/" + tiny_litdir_uri, followRedirect=True)
|
2019-12-28 02:48:46 +00:00
|
|
|
soup = BeautifulSoup(data, 'html5lib')
|
|
|
|
self.failUnlessIn('(immutable)', data)
|
|
|
|
file_links = list(
|
|
|
|
td.findNextSibling()(u"a")[0]
|
|
|
|
for td in soup.find_all(u"td")
|
|
|
|
if td.text == u"FILE"
|
|
|
|
)
|
|
|
|
self.assertEqual(1, len(file_links))
|
|
|
|
self.assertEqual(u"short", file_links[0].text)
|
|
|
|
self.assertTrue(file_links[0][u"href"].endswith(u"/file/URI%3ALIT%3Akrugkidfnzsc4/@@named=/short"))
|
2007-07-08 05:21:20 +00:00
|
|
|
|
2017-07-25 00:53:48 +00:00
|
|
|
@inlineCallbacks
|
2007-12-25 10:48:57 +00:00
|
|
|
def test_GET_DIRURL_badtype(self):
|
2017-07-25 00:53:48 +00:00
|
|
|
url = self.webish_url + self.public_url + "/foo?t=bogus"
|
|
|
|
yield self.assertHTTPError(url, 400, "bad t=bogus")
|
2007-12-25 10:48:57 +00:00
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_GET_DIRURL_json(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.GET(self.public_url + "/foo?t=json")
|
2007-07-08 05:47:18 +00:00
|
|
|
d.addCallback(self.failUnlessIsFooJSON)
|
|
|
|
return d
|
|
|
|
|
2011-10-02 04:00:36 +00:00
|
|
|
def test_GET_DIRURL_json_format(self):
|
2011-08-07 00:43:48 +00:00
|
|
|
d = self.PUT(self.public_url + \
|
2011-10-02 04:00:36 +00:00
|
|
|
"/foo/sdmf.txt?format=sdmf",
|
2011-08-07 00:43:48 +00:00
|
|
|
self.NEWFILE_CONTENTS * 300000)
|
|
|
|
d.addCallback(lambda ignored:
|
|
|
|
self.PUT(self.public_url + \
|
2011-10-02 04:00:36 +00:00
|
|
|
"/foo/mdmf.txt?format=mdmf",
|
2011-08-07 00:43:48 +00:00
|
|
|
self.NEWFILE_CONTENTS * 300000))
|
|
|
|
# Now we have an MDMF and SDMF file in the directory. If we GET
|
|
|
|
# its JSON, we should see their encodings.
|
|
|
|
d.addCallback(lambda ignored:
|
|
|
|
self.GET(self.public_url + "/foo?t=json"))
|
2017-01-19 22:39:53 +00:00
|
|
|
def _got_json(raw):
|
|
|
|
data = json.loads(raw)
|
2011-08-07 00:43:48 +00:00
|
|
|
assert data[0] == "dirnode"
|
|
|
|
|
|
|
|
data = data[1]
|
|
|
|
kids = data['children']
|
|
|
|
|
|
|
|
mdmf_data = kids['mdmf.txt'][1]
|
2011-10-02 04:00:36 +00:00
|
|
|
self.failUnlessIn("format", mdmf_data)
|
2011-10-13 16:31:43 +00:00
|
|
|
self.failUnlessEqual(mdmf_data["format"], "MDMF")
|
2011-08-07 00:43:48 +00:00
|
|
|
|
|
|
|
sdmf_data = kids['sdmf.txt'][1]
|
2011-10-02 04:00:36 +00:00
|
|
|
self.failUnlessIn("format", sdmf_data)
|
2011-10-13 16:31:43 +00:00
|
|
|
self.failUnlessEqual(sdmf_data["format"], "SDMF")
|
2011-08-07 00:43:48 +00:00
|
|
|
d.addCallback(_got_json)
|
|
|
|
return d
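# The directory t=json document nests each child as a [type, info] pair, and
# for mutable files the info dict carries "format" (SDMF or MDMF) as asserted
# above.  Sketch of summarising a listing that way (host, port and dircap
# are assumptions):
def list_child_formats(dircap, host="127.0.0.1", port=3456):
    import httplib, json, urllib
    conn = httplib.HTTPConnection(host, port)
    conn.request("GET", "/uri/%s?t=json" % urllib.quote(dircap))
    nodetype, info = json.loads(conn.getresponse().read())
    assert nodetype == "dirnode"
    return dict((name, child[1].get("format"))
                for name, child in info["children"].items())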
|
|
|
|
|
2008-10-22 00:03:07 +00:00
|
|
|
|
|
|
|
def test_POST_DIRURL_manifest_no_ophandle(self):
|
|
|
|
d = self.shouldFail2(error.Error,
|
|
|
|
"test_POST_DIRURL_manifest_no_ophandle",
|
|
|
|
"400 Bad Request",
|
|
|
|
"slow operation requires ophandle=",
|
|
|
|
self.POST, self.public_url, t="start-manifest")
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_DIRURL_manifest(self):
|
2008-10-07 04:36:18 +00:00
|
|
|
d = defer.succeed(None)
|
2008-10-22 00:03:07 +00:00
|
|
|
def getman(ignored, output):
|
2020-03-03 00:08:30 +00:00
|
|
|
url = self.webish_url + self.public_url + "/foo?t=start-manifest&ophandle=125"
|
2017-07-24 23:12:35 +00:00
|
|
|
d = do_http("post", url, allow_redirects=True,
|
|
|
|
browser_like_redirects=True)
|
2008-10-22 00:03:07 +00:00
|
|
|
d.addCallback(self.wait_for_operation, "125")
|
|
|
|
d.addCallback(self.get_operation_results, "125", output)
|
|
|
|
return d
|
|
|
|
d.addCallback(getman, None)
|
2008-10-07 04:36:18 +00:00
|
|
|
def _got_html(manifest):
|
2019-11-05 09:24:59 +00:00
|
|
|
soup = BeautifulSoup(manifest, 'html5lib')
|
|
|
|
assert_soup_has_text(self, soup, "Manifest of SI=")
|
|
|
|
assert_soup_has_text(self, soup, "sub")
|
|
|
|
assert_soup_has_text(self, soup, self._sub_uri)
|
|
|
|
assert_soup_has_text(self, soup, "sub/baz.txt")
|
|
|
|
assert_soup_has_favicon(self, soup)
|
2008-10-07 04:36:18 +00:00
|
|
|
d.addCallback(_got_html)
|
2008-10-23 22:56:58 +00:00
|
|
|
|
|
|
|
# both t=status and unadorned GET should be identical
|
|
|
|
d.addCallback(lambda res: self.GET("/operations/125"))
|
|
|
|
d.addCallback(_got_html)
|
|
|
|
|
2008-10-22 00:03:07 +00:00
|
|
|
d.addCallback(getman, "html")
|
2008-10-07 04:36:18 +00:00
|
|
|
d.addCallback(_got_html)
|
2008-10-22 00:03:07 +00:00
|
|
|
d.addCallback(getman, "text")
|
2008-10-07 04:36:18 +00:00
|
|
|
def _got_text(manifest):
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failUnlessIn("\nsub " + self._sub_uri + "\n", manifest)
|
|
|
|
self.failUnlessIn("\nsub/baz.txt URI:CHK:", manifest)
|
2008-10-07 04:36:18 +00:00
|
|
|
d.addCallback(_got_text)
|
2008-10-22 00:03:07 +00:00
|
|
|
d.addCallback(getman, "JSON")
|
2008-11-19 23:00:27 +00:00
|
|
|
def _got_json(res):
|
|
|
|
data = res["manifest"]
|
2008-10-07 04:36:18 +00:00
|
|
|
got = {}
|
|
|
|
for (path_list, cap) in data:
|
|
|
|
got[tuple(path_list)] = cap
|
2010-07-18 14:29:15 +00:00
|
|
|
self.failUnlessReallyEqual(to_str(got[(u"sub",)]), self._sub_uri)
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failUnlessIn((u"sub", u"baz.txt"), got)
|
|
|
|
self.failUnlessIn("finished", res)
|
|
|
|
self.failUnlessIn("origin", res)
|
|
|
|
self.failUnlessIn("storage-index", res)
|
|
|
|
self.failUnlessIn("verifycaps", res)
|
|
|
|
self.failUnlessIn("stats", res)
|
2008-10-07 04:36:18 +00:00
|
|
|
d.addCallback(_got_json)
|
2007-07-07 02:43:55 +00:00
|
|
|
return d
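# Slow directory operations follow the ophandle pattern used above: POST
# with t=start-...&ophandle=<token>, then poll /operations/<token> until it
# reports completion.  Rough synchronous sketch; the host, port and the
# "?output=JSON" status query follow the webapi's operation-handle
# convention and are assumptions here, not values taken from this file.
def run_slow_operation(dirpath, t, handle, host="127.0.0.1", port=3456):
    import httplib, json, time
    conn = httplib.HTTPConnection(host, port)
    conn.request("POST", "%s?t=%s&ophandle=%s" % (dirpath, t, handle))
    conn.getresponse().read()    # a redirect page pointing at /operations/<handle>
    while True:
        conn = httplib.HTTPConnection(host, port)
        conn.request("GET", "/operations/%s?output=JSON" % handle)
        status = json.loads(conn.getresponse().read())
        if status.get("finished"):
            return status
        time.sleep(1)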
|
|
|
|
|
2008-10-22 00:03:07 +00:00
|
|
|
def test_POST_DIRURL_deepsize_no_ophandle(self):
|
|
|
|
d = self.shouldFail2(error.Error,
|
|
|
|
"test_POST_DIRURL_deepsize_no_ophandle",
|
|
|
|
"400 Bad Request",
|
|
|
|
"slow operation requires ophandle=",
|
|
|
|
self.POST, self.public_url, t="start-deep-size")
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_DIRURL_deepsize(self):
|
2020-03-03 00:08:30 +00:00
|
|
|
url = self.webish_url + self.public_url + "/foo?t=start-deep-size&ophandle=126"
|
2017-07-24 23:12:35 +00:00
|
|
|
d = do_http("post", url, allow_redirects=True,
|
|
|
|
browser_like_redirects=True)
|
2008-10-22 00:03:07 +00:00
|
|
|
d.addCallback(self.wait_for_operation, "126")
|
|
|
|
d.addCallback(self.get_operation_results, "126", "json")
|
|
|
|
def _got_json(data):
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(data["finished"], True)
|
2008-10-22 00:03:07 +00:00
|
|
|
size = data["size"]
|
|
|
|
self.failUnless(size > 1000)
|
|
|
|
d.addCallback(_got_json)
|
|
|
|
d.addCallback(self.get_operation_results, "126", "text")
|
|
|
|
def _got_text(res):
|
|
|
|
mo = re.search(r'^size: (\d+)$', res, re.M)
|
|
|
|
self.failUnless(mo, res)
|
|
|
|
size = int(mo.group(1))
|
2008-10-07 05:11:47 +00:00
|
|
|
# with directories, the size varies.
|
|
|
|
self.failUnless(size > 1000)
|
2008-10-22 00:03:07 +00:00
|
|
|
d.addCallback(_got_text)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_DIRURL_deepstats_no_ophandle(self):
|
|
|
|
d = self.shouldFail2(error.Error,
|
|
|
|
"test_POST_DIRURL_deepstats_no_ophandle",
|
|
|
|
"400 Bad Request",
|
|
|
|
"slow operation requires ophandle=",
|
|
|
|
self.POST, self.public_url, t="start-deep-stats")
|
2008-03-27 18:33:42 +00:00
|
|
|
return d
|
|
|
|
|
2008-10-22 00:03:07 +00:00
|
|
|
def test_POST_DIRURL_deepstats(self):
|
2020-03-03 00:08:30 +00:00
|
|
|
url = self.webish_url + self.public_url + "/foo?t=start-deep-stats&ophandle=127"
|
2017-07-24 23:12:35 +00:00
|
|
|
d = do_http("post", url,
|
|
|
|
allow_redirects=True, browser_like_redirects=True)
|
2008-10-22 00:03:07 +00:00
|
|
|
d.addCallback(self.wait_for_operation, "127")
|
|
|
|
d.addCallback(self.get_operation_results, "127", "json")
|
|
|
|
def _got_json(stats):
|
2012-10-25 00:01:25 +00:00
|
|
|
expected = {"count-immutable-files": 4,
|
2011-08-07 00:43:48 +00:00
|
|
|
"count-mutable-files": 2,
|
2008-05-08 20:21:14 +00:00
|
|
|
"count-literal-files": 0,
|
2012-10-25 00:01:25 +00:00
|
|
|
"count-files": 6,
|
2008-05-08 20:21:14 +00:00
|
|
|
"count-directories": 3,
|
2012-10-25 00:01:25 +00:00
|
|
|
"size-immutable-files": 76,
|
2008-05-08 20:21:14 +00:00
|
|
|
"size-literal-files": 0,
|
|
|
|
#"size-directories": 1912, # varies
|
|
|
|
#"largest-directory": 1590,
|
2012-10-25 00:01:25 +00:00
|
|
|
"largest-directory-children": 8,
|
2008-05-08 20:21:14 +00:00
|
|
|
"largest-immutable-file": 19,
|
2016-12-01 23:45:27 +00:00
|
|
|
"api-version": 1,
|
2008-05-08 20:21:14 +00:00
|
|
|
}
|
|
|
|
for k,v in expected.iteritems():
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(stats[k], v,
|
|
|
|
"stats[%s] was %s, not %s" %
|
|
|
|
(k, stats[k], v))
|
|
|
|
self.failUnlessReallyEqual(stats["size-files-histogram"],
|
2012-10-25 00:01:25 +00:00
|
|
|
[ [11, 31, 4] ])
|
2008-10-22 00:03:07 +00:00
|
|
|
d.addCallback(_got_json)
|
2008-05-08 20:21:14 +00:00
|
|
|
return d
|
|
|
|
|
2009-01-23 05:01:36 +00:00
|
|
|
def test_POST_DIRURL_stream_manifest(self):
|
2020-03-03 00:08:30 +00:00
|
|
|
d = self.POST(self.public_url + "/foo?t=stream-manifest")
|
2009-01-23 05:01:36 +00:00
|
|
|
def _check(res):
|
|
|
|
self.failUnless(res.endswith("\n"))
|
2017-01-19 22:39:53 +00:00
|
|
|
units = [json.loads(t) for t in res[:-1].split("\n")]
|
2012-10-25 00:01:25 +00:00
|
|
|
self.failUnlessReallyEqual(len(units), 10)
|
2010-07-18 14:29:15 +00:00
|
|
|
self.failUnlessEqual(units[-1]["type"], "stats")
|
2009-01-23 05:01:36 +00:00
|
|
|
first = units[0]
|
2010-07-18 14:29:15 +00:00
|
|
|
self.failUnlessEqual(first["path"], [])
|
|
|
|
self.failUnlessReallyEqual(to_str(first["cap"]), self._foo_uri)
|
|
|
|
self.failUnlessEqual(first["type"], "directory")
|
|
|
|
baz = [u for u in units[:-1] if to_str(u["cap"]) == self._baz_file_uri][0]
|
|
|
|
self.failUnlessEqual(baz["path"], ["sub", "baz.txt"])
|
2009-01-23 05:01:36 +00:00
|
|
|
self.failIfEqual(baz["storage-index"], None)
|
|
|
|
self.failIfEqual(baz["verifycap"], None)
|
|
|
|
self.failIfEqual(baz["repaircap"], None)
|
2011-08-07 00:43:48 +00:00
|
|
|
# XXX: Add quux and baz to this test.
|
2009-01-23 05:01:36 +00:00
|
|
|
return
|
|
|
|
d.addCallback(_check)
|
|
|
|
return d
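# t=stream-manifest emits one JSON object per line with a final unit of type
# "stats", which is exactly how the test above decodes it.  A small
# standalone parser for such a response body (no webapi calls involved):
def parse_stream_manifest(body):
    import json
    units = [json.loads(line) for line in body.splitlines() if line]
    stats = units[-1]
    assert stats["type"] == "stats"
    entries = [(u["path"], u["cap"]) for u in units[:-1]]
    return entries, stats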
|
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_GET_DIRURL_uri(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.GET(self.public_url + "/foo?t=uri")
|
2007-07-07 02:43:55 +00:00
|
|
|
def _check(res):
|
2010-07-18 14:29:15 +00:00
|
|
|
self.failUnlessReallyEqual(to_str(res), self._foo_uri)
|
2007-07-07 02:43:55 +00:00
|
|
|
d.addCallback(_check)
|
|
|
|
return d
|
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_GET_DIRURL_readonly_uri(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.GET(self.public_url + "/foo?t=readonly-uri")
|
2007-07-07 02:43:55 +00:00
|
|
|
def _check(res):
|
2010-07-18 14:29:15 +00:00
|
|
|
self.failUnlessReallyEqual(to_str(res), self._foo_readonly_uri)
|
2007-07-07 02:43:55 +00:00
|
|
|
d.addCallback(_check)
|
|
|
|
return d
|
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_PUT_NEWDIRURL(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.PUT(self.public_url + "/foo/newdir?t=mkdir", "")
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
|
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"newdir"))
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(self.failUnlessNodeKeysAre, [])
|
2007-07-07 02:43:55 +00:00
|
|
|
return d
|
|
|
|
|
2011-08-07 00:43:48 +00:00
|
|
|
def test_PUT_NEWDIRURL_mdmf(self):
|
2011-10-02 04:00:36 +00:00
|
|
|
d = self.PUT(self.public_url + "/foo/newdir?t=mkdir&format=mdmf", "")
|
2011-08-07 00:43:48 +00:00
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
|
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"newdir"))
|
|
|
|
d.addCallback(lambda node:
|
|
|
|
self.failUnlessEqual(node._node.get_version(), MDMF_VERSION))
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_PUT_NEWDIRURL_sdmf(self):
|
2011-10-02 04:00:36 +00:00
|
|
|
d = self.PUT(self.public_url + "/foo/newdir?t=mkdir&format=sdmf",
|
2011-08-07 00:43:48 +00:00
|
|
|
"")
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
|
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"newdir"))
|
|
|
|
d.addCallback(lambda node:
|
|
|
|
self.failUnlessEqual(node._node.get_version(), SDMF_VERSION))
|
|
|
|
return d
|
|
|
|
|
2017-07-25 00:53:48 +00:00
|
|
|
@inlineCallbacks
|
2011-10-02 04:00:36 +00:00
|
|
|
def test_PUT_NEWDIRURL_bad_format(self):
|
2017-07-25 00:53:48 +00:00
|
|
|
url = (self.webish_url + self.public_url +
|
|
|
|
"/foo/newdir=?t=mkdir&format=foo")
|
|
|
|
yield self.assertHTTPError(url, 400, "Unknown format: foo",
|
|
|
|
method="put", data="")
|
2011-08-07 00:43:48 +00:00
|
|
|
|
2009-12-27 20:10:43 +00:00
|
|
|
def test_POST_NEWDIRURL(self):
|
|
|
|
d = self.POST2(self.public_url + "/foo/newdir?t=mkdir", "")
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
|
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"newdir"))
|
|
|
|
d.addCallback(self.failUnlessNodeKeysAre, [])
|
|
|
|
return d
|
|
|
|
|
2011-08-07 00:43:48 +00:00
|
|
|
def test_POST_NEWDIRURL_mdmf(self):
|
2011-10-02 04:00:36 +00:00
|
|
|
d = self.POST2(self.public_url + "/foo/newdir?t=mkdir&format=mdmf", "")
|
2011-08-07 00:43:48 +00:00
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
|
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"newdir"))
|
|
|
|
d.addCallback(lambda node:
|
|
|
|
self.failUnlessEqual(node._node.get_version(), MDMF_VERSION))
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_NEWDIRURL_sdmf(self):
|
2011-10-02 04:00:36 +00:00
|
|
|
d = self.POST2(self.public_url + "/foo/newdir?t=mkdir&format=sdmf", "")
|
2011-08-07 00:43:48 +00:00
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
|
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"newdir"))
|
|
|
|
d.addCallback(lambda node:
|
|
|
|
self.failUnlessEqual(node._node.get_version(), SDMF_VERSION))
|
|
|
|
return d
|
|
|
|
|
2017-07-25 00:53:48 +00:00
|
|
|
@inlineCallbacks
|
2011-10-02 04:00:36 +00:00
|
|
|
def test_POST_NEWDIRURL_bad_format(self):
|
2017-07-25 00:53:48 +00:00
|
|
|
url = (self.webish_url + self.public_url +
|
|
|
|
"/foo/newdir?t=mkdir&format=foo")
|
|
|
|
yield self.assertHTTPError(url, 400, "Unknown format: foo",
|
|
|
|
method="post", data="")
|
2011-08-07 00:43:48 +00:00
|
|
|
|
2009-12-27 20:10:43 +00:00
|
|
|
def test_POST_NEWDIRURL_emptyname(self):
|
|
|
|
# an empty pathname component (i.e. a double-slash) is disallowed
|
2011-10-02 04:03:32 +00:00
|
|
|
d = self.shouldFail2(error.Error, "POST_NEWDIRURL_emptyname",
|
2009-12-27 20:10:43 +00:00
|
|
|
"400 Bad Request",
|
|
|
|
"The webapi does not allow empty pathname components, i.e. a double slash",
|
|
|
|
self.POST, self.public_url + "//?t=mkdir")
|
|
|
|
return d
|
|
|
|
|
2011-08-07 00:43:48 +00:00
|
|
|
def _do_POST_NEWDIRURL_initial_children_test(self, version=None):
|
2010-01-27 06:44:30 +00:00
|
|
|
(newkids, caps) = self._create_initial_children()
|
2011-08-07 00:43:48 +00:00
|
|
|
query = "/foo/newdir?t=mkdir-with-children"
|
|
|
|
if version == MDMF_VERSION:
|
2011-10-02 04:00:36 +00:00
|
|
|
query += "&format=mdmf"
|
2011-08-07 00:43:48 +00:00
|
|
|
elif version == SDMF_VERSION:
|
2011-10-02 04:00:36 +00:00
|
|
|
query += "&format=sdmf"
|
2011-08-07 00:43:48 +00:00
|
|
|
else:
|
|
|
|
version = SDMF_VERSION # for later
|
|
|
|
d = self.POST2(self.public_url + query,
|
2017-01-19 22:39:53 +00:00
|
|
|
json.dumps(newkids))
|
2009-10-13 02:34:44 +00:00
|
|
|
def _check(uri):
|
|
|
|
n = self.s.create_node_from_uri(uri.strip())
|
|
|
|
d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
|
2011-08-07 00:43:48 +00:00
|
|
|
self.failUnlessEqual(n._node.get_version(), version)
|
2009-10-13 02:34:44 +00:00
|
|
|
d2.addCallback(lambda ign:
|
2010-01-27 06:44:30 +00:00
|
|
|
self.failUnlessROChildURIIs(n, u"child-imm",
|
|
|
|
caps['filecap1']))
|
2009-10-13 02:34:44 +00:00
|
|
|
d2.addCallback(lambda ign:
|
2010-01-27 06:44:30 +00:00
|
|
|
self.failUnlessRWChildURIIs(n, u"child-mutable",
|
|
|
|
caps['filecap2']))
|
2009-10-13 02:34:44 +00:00
|
|
|
d2.addCallback(lambda ign:
|
2010-01-27 06:44:30 +00:00
|
|
|
self.failUnlessROChildURIIs(n, u"child-mutable-ro",
|
|
|
|
caps['filecap3']))
|
2009-10-13 02:34:44 +00:00
|
|
|
d2.addCallback(lambda ign:
|
2010-01-27 06:44:30 +00:00
|
|
|
self.failUnlessROChildURIIs(n, u"unknownchild-ro",
|
|
|
|
caps['unknown_rocap']))
|
|
|
|
d2.addCallback(lambda ign:
|
|
|
|
self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
|
|
|
|
caps['unknown_rwcap']))
|
|
|
|
d2.addCallback(lambda ign:
|
|
|
|
self.failUnlessROChildURIIs(n, u"unknownchild-imm",
|
|
|
|
caps['unknown_immcap']))
|
|
|
|
d2.addCallback(lambda ign:
|
|
|
|
self.failUnlessRWChildURIIs(n, u"dirchild",
|
|
|
|
caps['dircap']))
|
2010-02-22 02:53:52 +00:00
|
|
|
d2.addCallback(lambda ign:
|
|
|
|
self.failUnlessROChildURIIs(n, u"dirchild-lit",
|
|
|
|
caps['litdircap']))
|
|
|
|
d2.addCallback(lambda ign:
|
|
|
|
self.failUnlessROChildURIIs(n, u"dirchild-empty",
|
|
|
|
caps['emptydircap']))
|
2009-10-13 02:34:44 +00:00
|
|
|
return d2
|
|
|
|
d.addCallback(_check)
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
|
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"newdir"))
|
|
|
|
d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
|
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"newdir"))
|
2010-01-27 06:44:30 +00:00
|
|
|
d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
|
2009-10-13 02:34:44 +00:00
|
|
|
return d
|
|
|
|
|
2011-08-07 00:43:48 +00:00
|
|
|
def test_POST_NEWDIRURL_initial_children(self):
|
|
|
|
return self._do_POST_NEWDIRURL_initial_children_test()
|
|
|
|
|
|
|
|
def test_POST_NEWDIRURL_initial_children_mdmf(self):
|
|
|
|
return self._do_POST_NEWDIRURL_initial_children_test(MDMF_VERSION)
|
|
|
|
|
|
|
|
def test_POST_NEWDIRURL_initial_children_sdmf(self):
|
|
|
|
return self._do_POST_NEWDIRURL_initial_children_test(SDMF_VERSION)
|
|
|
|
|
2017-07-25 00:53:48 +00:00
|
|
|
@inlineCallbacks
|
2011-10-02 04:00:36 +00:00
|
|
|
def test_POST_NEWDIRURL_initial_children_bad_format(self):
|
2011-08-07 00:43:48 +00:00
|
|
|
(newkids, caps) = self._create_initial_children()
|
2017-07-25 00:53:48 +00:00
|
|
|
url = (self.webish_url + self.public_url +
|
|
|
|
"/foo/newdir?t=mkdir-with-children&format=foo")
|
|
|
|
yield self.assertHTTPError(url, 400, "Unknown format: foo",
|
|
|
|
method="post", data=json.dumps(newkids))
|
2011-08-07 00:43:48 +00:00
|
|
|
|
2009-11-18 07:09:00 +00:00
|
|
|
def test_POST_NEWDIRURL_immutable(self):
|
2010-01-27 06:44:30 +00:00
|
|
|
(newkids, caps) = self._create_immutable_children()
|
2009-11-18 07:09:00 +00:00
|
|
|
d = self.POST2(self.public_url + "/foo/newdir?t=mkdir-immutable",
|
2017-01-19 22:39:53 +00:00
|
|
|
json.dumps(newkids))
|
2009-11-18 07:09:00 +00:00
|
|
|
def _check(uri):
|
|
|
|
n = self.s.create_node_from_uri(uri.strip())
|
|
|
|
d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
|
|
|
|
d2.addCallback(lambda ign:
|
2010-01-27 06:44:30 +00:00
|
|
|
self.failUnlessROChildURIIs(n, u"child-imm",
|
|
|
|
caps['filecap1']))
|
|
|
|
d2.addCallback(lambda ign:
|
|
|
|
self.failUnlessROChildURIIs(n, u"unknownchild-imm",
|
|
|
|
caps['unknown_immcap']))
|
2009-11-18 07:09:00 +00:00
|
|
|
d2.addCallback(lambda ign:
|
2010-01-27 06:44:30 +00:00
|
|
|
self.failUnlessROChildURIIs(n, u"dirchild-imm",
|
|
|
|
caps['immdircap']))
|
2010-02-22 02:53:52 +00:00
|
|
|
d2.addCallback(lambda ign:
|
|
|
|
self.failUnlessROChildURIIs(n, u"dirchild-lit",
|
|
|
|
caps['litdircap']))
|
|
|
|
d2.addCallback(lambda ign:
|
|
|
|
self.failUnlessROChildURIIs(n, u"dirchild-empty",
|
|
|
|
caps['emptydircap']))
|
2009-11-18 07:09:00 +00:00
|
|
|
return d2
|
|
|
|
d.addCallback(_check)
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
|
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"newdir"))
|
|
|
|
d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
|
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"newdir"))
|
2010-01-27 06:44:30 +00:00
|
|
|
d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
|
2009-11-18 07:09:00 +00:00
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"newdir"))
|
2010-01-27 06:44:30 +00:00
|
|
|
d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
|
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"newdir"))
|
|
|
|
d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
|
2010-02-22 02:53:52 +00:00
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"newdir"))
|
|
|
|
d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap'])
|
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"newdir"))
|
|
|
|
d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap'])
|
2009-11-18 07:09:00 +00:00
|
|
|
d.addErrback(self.explain_web_error)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_NEWDIRURL_immutable_bad(self):
|
2010-01-27 06:44:30 +00:00
|
|
|
(newkids, caps) = self._create_initial_children()
|
2009-11-18 07:09:00 +00:00
|
|
|
d = self.shouldFail2(error.Error, "test_POST_NEWDIRURL_immutable_bad",
|
|
|
|
"400 Bad Request",
|
2010-01-27 06:44:30 +00:00
|
|
|
"needed to be immutable but was not",
|
2009-11-18 07:09:00 +00:00
|
|
|
self.POST2,
|
|
|
|
self.public_url + "/foo/newdir?t=mkdir-immutable",
|
2017-01-19 22:39:53 +00:00
|
|
|
json.dumps(newkids))
|
2009-11-18 07:09:00 +00:00
|
|
|
return d
|
|
|
|
|
2008-05-19 19:56:02 +00:00
|
|
|
def test_PUT_NEWDIRURL_exists(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.PUT(self.public_url + "/foo/sub?t=mkdir", "")
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failUnlessNodeHasChild(self._foo_node, u"sub"))
|
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"sub"))
|
2008-05-19 19:56:02 +00:00
|
|
|
d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
|
2007-08-15 20:22:23 +00:00
|
|
|
return d
|
|
|
|
|
2008-05-19 19:56:02 +00:00
|
|
|
def test_PUT_NEWDIRURL_blocked(self):
|
|
|
|
d = self.shouldFail2(error.Error, "PUT_NEWDIRURL_blocked",
|
|
|
|
"409 Conflict", "Unable to create directory 'bar.txt': a file was in the way",
|
|
|
|
self.PUT,
|
|
|
|
self.public_url + "/foo/bar.txt/sub?t=mkdir", "")
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failUnlessNodeHasChild(self._foo_node, u"sub"))
|
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"sub"))
|
|
|
|
d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
|
2007-08-15 20:22:23 +00:00
|
|
|
return d
|
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_PUT_NEWDIRURL_mkdirs(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir", "")
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failIfNodeHasChild(self._foo_node, u"newdir"))
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self._foo_node.get_child_at_path(u"subdir/newdir"))
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(self.failUnlessNodeKeysAre, [])
|
2007-07-07 02:43:55 +00:00
|
|
|
return d
|
|
|
|
|
2011-08-07 00:43:48 +00:00
|
|
|
def test_PUT_NEWDIRURL_mkdirs_mdmf(self):
|
2011-10-02 04:00:36 +00:00
|
|
|
d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir&format=mdmf", "")
|
2011-08-07 00:43:48 +00:00
|
|
|
d.addCallback(lambda ignored:
|
|
|
|
self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
|
|
|
|
d.addCallback(lambda ignored:
|
|
|
|
self.failIfNodeHasChild(self._foo_node, u"newdir"))
|
|
|
|
d.addCallback(lambda ignored:
|
|
|
|
self._foo_node.get_child_at_path(u"subdir"))
|
|
|
|
def _got_subdir(subdir):
|
|
|
|
# XXX: Is this what we want?
|
|
|
|
#self.failUnlessEqual(subdir._node.get_version(), MDMF_VERSION)
|
|
|
|
self.failUnlessNodeHasChild(subdir, u"newdir")
|
|
|
|
return subdir.get_child_at_path(u"newdir")
|
|
|
|
d.addCallback(_got_subdir)
|
|
|
|
d.addCallback(lambda newdir:
|
|
|
|
self.failUnlessEqual(newdir._node.get_version(), MDMF_VERSION))
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_PUT_NEWDIRURL_mkdirs_sdmf(self):
|
2011-10-02 04:00:36 +00:00
|
|
|
d = self.PUT(self.public_url + "/foo/subdir/newdir?t=mkdir&format=sdmf", "")
|
2011-08-07 00:43:48 +00:00
|
|
|
d.addCallback(lambda ignored:
|
|
|
|
self.failUnlessNodeHasChild(self._foo_node, u"subdir"))
|
|
|
|
d.addCallback(lambda ignored:
|
|
|
|
self.failIfNodeHasChild(self._foo_node, u"newdir"))
|
|
|
|
d.addCallback(lambda ignored:
|
|
|
|
self._foo_node.get_child_at_path(u"subdir"))
|
|
|
|
def _got_subdir(subdir):
|
|
|
|
# XXX: Is this what we want?
|
|
|
|
#self.failUnlessEqual(subdir._node.get_version(), SDMF_VERSION)
|
|
|
|
self.failUnlessNodeHasChild(subdir, u"newdir")
|
|
|
|
return subdir.get_child_at_path(u"newdir")
|
|
|
|
d.addCallback(_got_subdir)
|
|
|
|
d.addCallback(lambda newdir:
|
|
|
|
self.failUnlessEqual(newdir._node.get_version(), SDMF_VERSION))
|
|
|
|
return d
|
|
|
|
|
2017-07-25 00:53:48 +00:00
|
|
|
@inlineCallbacks
|
2011-10-02 04:00:36 +00:00
|
|
|
def test_PUT_NEWDIRURL_mkdirs_bad_format(self):
|
2017-07-25 00:53:48 +00:00
|
|
|
url = (self.webish_url + self.public_url +
|
|
|
|
"/foo/subdir/newdir?t=mkdir&format=foo")
|
|
|
|
yield self.assertHTTPError(url, 400, "Unknown format: foo",
|
|
|
|
method="put", data="")
|
2011-08-07 00:43:48 +00:00
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_DELETE_DIRURL(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.DELETE(self.public_url + "/foo")
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failIfNodeHasChild(self.public_root, u"foo"))
|
2007-07-07 02:43:55 +00:00
|
|
|
return d
|
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_DELETE_DIRURL_missing(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.DELETE(self.public_url + "/foo/missing")
|
2007-07-07 07:16:36 +00:00
|
|
|
d.addBoth(self.should404, "test_DELETE_DIRURL_missing")
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failUnlessNodeHasChild(self.public_root, u"foo"))
|
2007-07-07 07:16:36 +00:00
|
|
|
return d
|
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_DELETE_DIRURL_missing2(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.DELETE(self.public_url + "/missing")
|
2007-07-07 07:16:36 +00:00
|
|
|
d.addBoth(self.should404, "test_DELETE_DIRURL_missing2")
|
2007-07-07 02:43:55 +00:00
|
|
|
return d
|
|
|
|
|
2007-07-07 17:34:05 +00:00
|
|
|
def dump_root(self):
|
2019-03-22 16:25:11 +00:00
|
|
|
print("NODEWALK")
|
2007-12-05 06:11:00 +00:00
|
|
|
w = webish.DirnodeWalkerMixin()
|
|
|
|
def visitor(childpath, childnode, metadata):
|
2019-03-22 16:25:11 +00:00
|
|
|
print(childpath)
|
2007-12-05 06:11:00 +00:00
|
|
|
d = w.walk(self.public_root, visitor)
|
|
|
|
return d
|
2007-07-07 17:34:05 +00:00
|
|
|
|
2007-12-05 06:01:37 +00:00
|
|
|
def failUnlessNodeKeysAre(self, node, expected_keys):
|
2008-02-14 22:45:56 +00:00
|
|
|
for k in expected_keys:
|
|
|
|
assert isinstance(k, unicode)
|
2007-12-05 06:01:37 +00:00
|
|
|
d = node.list()
|
|
|
|
def _check(children):
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(sorted(children.keys()), sorted(expected_keys))
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(_check)
|
|
|
|
return d
|
|
|
|
def failUnlessNodeHasChild(self, node, name):
|
2008-02-14 22:45:56 +00:00
|
|
|
assert isinstance(name, unicode)
|
2007-12-05 06:01:37 +00:00
|
|
|
d = node.list()
|
|
|
|
def _check(children):
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failUnlessIn(name, children)
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(_check)
|
|
|
|
return d
|
|
|
|
def failIfNodeHasChild(self, node, name):
|
2008-02-14 22:45:56 +00:00
|
|
|
assert isinstance(name, unicode)
|
2007-12-05 06:01:37 +00:00
|
|
|
d = node.list()
|
|
|
|
def _check(children):
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failIfIn(name, children)
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(_check)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def failUnlessChildContentsAre(self, node, name, expected_contents):
|
2008-02-14 22:45:56 +00:00
|
|
|
assert isinstance(name, unicode)
|
2007-12-05 06:01:37 +00:00
|
|
|
d = node.get_child_at_path(name)
|
2009-12-01 22:44:35 +00:00
|
|
|
d.addCallback(lambda node: download_to_data(node))
|
2007-12-05 06:01:37 +00:00
|
|
|
def _check(contents):
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(contents, expected_contents)
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(_check)
|
|
|
|
return d
|
|
|
|
|
2008-04-18 00:51:38 +00:00
|
|
|
def failUnlessMutableChildContentsAre(self, node, name, expected_contents):
|
|
|
|
assert isinstance(name, unicode)
|
|
|
|
d = node.get_child_at_path(name)
|
|
|
|
d.addCallback(lambda node: node.download_best_version())
|
|
|
|
def _check(contents):
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(contents, expected_contents)
|
2008-04-18 00:51:38 +00:00
|
|
|
d.addCallback(_check)
|
|
|
|
return d
|
|
|
|
|
2010-01-27 06:44:30 +00:00
|
|
|
def failUnlessRWChildURIIs(self, node, name, expected_uri):
|
|
|
|
assert isinstance(name, unicode)
|
|
|
|
d = node.get_child_at_path(name)
|
|
|
|
def _check(child):
|
|
|
|
self.failUnless(child.is_unknown() or not child.is_readonly())
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(child.get_uri(), expected_uri.strip())
|
|
|
|
self.failUnlessReallyEqual(child.get_write_uri(), expected_uri.strip())
|
2010-01-27 06:44:30 +00:00
|
|
|
expected_ro_uri = self._make_readonly(expected_uri)
|
|
|
|
if expected_ro_uri:
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(child.get_readonly_uri(), expected_ro_uri.strip())
|
2010-01-27 06:44:30 +00:00
|
|
|
d.addCallback(_check)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def failUnlessROChildURIIs(self, node, name, expected_uri):
|
2008-02-14 22:45:56 +00:00
|
|
|
assert isinstance(name, unicode)
|
2007-12-05 06:01:37 +00:00
|
|
|
d = node.get_child_at_path(name)
|
|
|
|
def _check(child):
|
2010-01-27 06:44:30 +00:00
|
|
|
self.failUnless(child.is_unknown() or child.is_readonly())
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(child.get_write_uri(), None)
|
|
|
|
self.failUnlessReallyEqual(child.get_uri(), expected_uri.strip())
|
|
|
|
self.failUnlessReallyEqual(child.get_readonly_uri(), expected_uri.strip())
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(_check)
|
|
|
|
return d
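# (Aside summarizing the convention the two helpers above encode: a
# read-write child answers get_uri() and get_write_uri() with its writecap
# and get_readonly_uri() with the derived readcap, while a read-only or
# unknown child answers get_write_uri() with None and returns the same
# readcap from get_uri() and get_readonly_uri(). An illustrative check for
# an immutable child such as bar.txt:
#
#   d = self._foo_node.get(u"bar.txt")
#   d.addCallback(lambda child:
#                 self.failUnlessEqual(child.get_write_uri(), None))
# )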
|
|
|
|
|
2010-01-27 06:44:30 +00:00
|
|
|
def failUnlessURIMatchesRWChild(self, got_uri, node, name):
|
2008-02-14 22:45:56 +00:00
|
|
|
assert isinstance(name, unicode)
|
2007-12-05 06:01:37 +00:00
|
|
|
d = node.get_child_at_path(name)
|
|
|
|
def _check(child):
|
2010-01-27 06:44:30 +00:00
|
|
|
self.failUnless(child.is_unknown() or not child.is_readonly())
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(child.get_uri(), got_uri.strip())
|
|
|
|
self.failUnlessReallyEqual(child.get_write_uri(), got_uri.strip())
|
2010-01-27 06:44:30 +00:00
|
|
|
expected_ro_uri = self._make_readonly(got_uri)
|
|
|
|
if expected_ro_uri:
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(child.get_readonly_uri(), expected_ro_uri.strip())
|
2010-01-27 06:44:30 +00:00
|
|
|
d.addCallback(_check)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def failUnlessURIMatchesROChild(self, got_uri, node, name):
|
|
|
|
assert isinstance(name, unicode)
|
|
|
|
d = node.get_child_at_path(name)
|
|
|
|
def _check(child):
|
|
|
|
self.failUnless(child.is_unknown() or child.is_readonly())
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(child.get_write_uri(), None)
|
|
|
|
self.failUnlessReallyEqual(got_uri.strip(), child.get_uri())
|
|
|
|
self.failUnlessReallyEqual(got_uri.strip(), child.get_readonly_uri())
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(_check)
|
|
|
|
return d
|
|
|
|
|
2007-12-07 00:17:02 +00:00
|
|
|
def failUnlessCHKURIHasContents(self, got_uri, contents):
|
2012-05-22 22:18:26 +00:00
|
|
|
self.failUnless(self.get_all_contents()[got_uri] == contents)
|
2007-12-07 00:17:02 +00:00
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_POST_upload(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.POST(self.public_url + "/foo", t="upload",
|
2007-07-08 03:06:58 +00:00
|
|
|
file=("new.txt", self.NEWFILE_CONTENTS))
|
2007-12-05 06:01:37 +00:00
|
|
|
fn = self._foo_node
|
2010-01-27 06:44:30 +00:00
|
|
|
d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
|
2008-02-14 22:45:56 +00:00
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessChildContentsAre(fn, u"new.txt",
|
|
|
|
self.NEWFILE_CONTENTS))
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_upload_unicode(self):
|
|
|
|
filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
|
|
|
|
d = self.POST(self.public_url + "/foo", t="upload",
|
|
|
|
file=(filename, self.NEWFILE_CONTENTS))
|
|
|
|
fn = self._foo_node
|
2010-01-27 06:44:30 +00:00
|
|
|
d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failUnlessChildContentsAre(fn, filename,
|
2007-12-05 06:01:37 +00:00
|
|
|
self.NEWFILE_CONTENTS))
|
2017-07-24 20:51:58 +00:00
|
|
|
target_url = self.public_url + u"/foo/" + filename
|
2008-06-04 00:09:39 +00:00
|
|
|
d.addCallback(lambda res: self.GET(target_url))
|
2010-07-11 20:02:52 +00:00
|
|
|
d.addCallback(lambda contents: self.failUnlessReallyEqual(contents,
|
|
|
|
self.NEWFILE_CONTENTS,
|
|
|
|
contents))
|
2008-06-04 00:09:39 +00:00
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_upload_unicode_named(self):
|
|
|
|
filename = u"n\u00e9wer.txt" # n e-acute w e r . t x t
|
|
|
|
d = self.POST(self.public_url + "/foo", t="upload",
|
|
|
|
name=filename,
|
|
|
|
file=("overridden", self.NEWFILE_CONTENTS))
|
|
|
|
fn = self._foo_node
|
2010-01-27 06:44:30 +00:00
|
|
|
d.addCallback(self.failUnlessURIMatchesROChild, fn, filename)
|
2008-06-04 00:09:39 +00:00
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessChildContentsAre(fn, filename,
|
|
|
|
self.NEWFILE_CONTENTS))
|
2017-07-24 20:51:58 +00:00
|
|
|
target_url = self.public_url + u"/foo/" + filename
|
2008-02-14 22:45:56 +00:00
|
|
|
d.addCallback(lambda res: self.GET(target_url))
|
2010-07-11 20:02:52 +00:00
|
|
|
d.addCallback(lambda contents: self.failUnlessReallyEqual(contents,
|
|
|
|
self.NEWFILE_CONTENTS,
|
|
|
|
contents))
|
2007-07-07 02:43:55 +00:00
|
|
|
return d
|
|
|
|
|
2007-12-07 00:17:02 +00:00
|
|
|
def test_POST_upload_no_link(self):
|
2007-12-25 00:55:38 +00:00
|
|
|
d = self.POST("/uri", t="upload",
|
2007-12-07 00:17:02 +00:00
|
|
|
file=("new.txt", self.NEWFILE_CONTENTS))
|
2008-02-06 06:01:37 +00:00
|
|
|
def _check_upload_results(page):
|
|
|
|
# this should be a page which describes the results of the upload
|
|
|
|
# that just finished.
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failUnlessIn("Upload Results:", page)
|
|
|
|
self.failUnlessIn("URI:", page)
|
2008-02-06 06:01:37 +00:00
|
|
|
uri_re = re.compile("URI: <tt><span>(.*)</span>")
|
|
|
|
mo = uri_re.search(page)
|
|
|
|
self.failUnless(mo, page)
|
|
|
|
new_uri = mo.group(1)
|
|
|
|
return new_uri
|
|
|
|
d.addCallback(_check_upload_results)
|
2007-12-07 00:17:02 +00:00
|
|
|
d.addCallback(self.failUnlessCHKURIHasContents, self.NEWFILE_CONTENTS)
|
|
|
|
return d
|
|
|
|
|
2017-07-24 22:16:24 +00:00
|
|
|
@inlineCallbacks
|
2007-12-07 00:17:02 +00:00
|
|
|
def test_POST_upload_no_link_whendone(self):
|
2017-07-24 22:16:24 +00:00
|
|
|
body, headers = self.build_form(t="upload", when_done="/",
|
|
|
|
file=("new.txt", self.NEWFILE_CONTENTS))
|
|
|
|
yield self.shouldRedirectTo(self.webish_url + "/uri",
|
|
|
|
self.webish_url + "/",
|
|
|
|
method="post", data=body, headers=headers,
|
|
|
|
code=http.FOUND)
|
2007-12-07 00:17:02 +00:00
|
|
|
|
2017-07-24 21:40:11 +00:00
|
|
|
@inlineCallbacks
|
2008-05-20 06:28:52 +00:00
|
|
|
def test_POST_upload_no_link_whendone_results(self):
|
2017-07-24 22:02:23 +00:00
|
|
|
# We encode "uri" as "%75ri" to exercise a case affected by ticket #1860
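# (Illustrative aside, assuming only standard percent-decoding: "%75" is the
# percent-encoded form of "u", so once the path is unquoted, "/%75ri/<cap>"
# and "/uri/<cap>" name the same resource, e.g.:
#
#   import urllib
#   assert urllib.unquote("/%75ri/URI:CHK:x") == "/uri/URI:CHK:x"
# )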
|
|
|
|
body, headers = self.build_form(t="upload",
|
|
|
|
when_done="/%75ri/%(uri)s",
|
|
|
|
file=("new.txt", self.NEWFILE_CONTENTS),
|
|
|
|
)
|
|
|
|
redir_url = yield self.shouldRedirectTo(self.webish_url + "/uri", None,
|
|
|
|
method="post",
|
|
|
|
data=body, headers=headers,
|
|
|
|
code=http.FOUND)
|
|
|
|
res = yield do_http("get", redir_url)
|
2017-07-24 21:40:11 +00:00
|
|
|
self.failUnlessReallyEqual(res, self.NEWFILE_CONTENTS)
|
2008-05-20 06:28:52 +00:00
|
|
|
|
2008-02-06 05:10:22 +00:00
|
|
|
def test_POST_upload_no_link_mutable(self):
|
|
|
|
d = self.POST("/uri", t="upload", mutable="true",
|
|
|
|
file=("new.txt", self.NEWFILE_CONTENTS))
|
2009-08-15 11:02:56 +00:00
|
|
|
def _check(filecap):
|
|
|
|
filecap = filecap.strip()
|
|
|
|
self.failUnless(filecap.startswith("URI:SSK:"), filecap)
|
|
|
|
self.filecap = filecap
|
|
|
|
u = uri.WriteableSSKFileURI.init_from_string(filecap)
|
2012-05-22 22:18:26 +00:00
|
|
|
self.failUnlessIn(u.get_storage_index(), self.get_all_contents())
|
2009-08-15 11:02:56 +00:00
|
|
|
n = self.s.create_node_from_uri(filecap)
|
2008-04-18 00:51:38 +00:00
|
|
|
return n.download_best_version()
|
2008-02-06 05:10:22 +00:00
|
|
|
d.addCallback(_check)
|
|
|
|
def _check2(data):
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
|
2009-08-15 11:02:56 +00:00
|
|
|
return self.GET("/uri/%s" % urllib.quote(self.filecap))
|
2008-02-06 05:10:22 +00:00
|
|
|
d.addCallback(_check2)
|
2008-05-20 01:38:39 +00:00
|
|
|
def _check3(data):
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
|
2009-08-15 11:02:56 +00:00
|
|
|
return self.GET("/file/%s" % urllib.quote(self.filecap))
|
2008-05-20 01:38:39 +00:00
|
|
|
d.addCallback(_check3)
|
|
|
|
def _check4(data):
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(data, self.NEWFILE_CONTENTS)
|
2008-05-20 01:38:39 +00:00
|
|
|
d.addCallback(_check4)
|
2008-02-06 05:10:22 +00:00
|
|
|
return d
|
|
|
|
|
2008-06-03 07:03:16 +00:00
|
|
|
def test_POST_upload_no_link_mutable_toobig(self):
|
2011-08-07 00:43:48 +00:00
|
|
|
# The SDMF size limit is no longer in place, so we should be
|
|
|
|
# able to upload mutable files that are as large as we want them
|
|
|
|
# to be.
|
|
|
|
d = self.POST("/uri", t="upload", mutable="true",
|
|
|
|
file=("new.txt", "b" * (self.s.MUTABLE_SIZELIMIT + 1)))
|
|
|
|
return d
|
|
|
|
|
|
|
|
|
2011-10-02 04:00:36 +00:00
|
|
|
def test_POST_upload_format_unlinked(self):
|
|
|
|
def _check_upload_unlinked(ign, format, uri_prefix):
|
|
|
|
filename = format + ".txt"
|
|
|
|
d = self.POST("/uri?t=upload&format=" + format,
|
|
|
|
file=(filename, self.NEWFILE_CONTENTS * 300000))
|
2011-10-13 16:29:51 +00:00
|
|
|
def _got_results(results):
|
|
|
|
if format.upper() in ("SDMF", "MDMF"):
|
|
|
|
# webapi.rst says this returns a filecap
|
|
|
|
filecap = results
|
|
|
|
else:
|
|
|
|
# for immutable, it returns an "upload results page", and
|
|
|
|
# the filecap is buried inside
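# (Illustrative: the results page contains a line along the lines of
#   URI: <tt><span>URI:CHK:...</span>
# which is why this regex, like the one in _check_upload_results above,
# pulls the filecap out of the first <span>.)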
|
|
|
|
line = [l for l in results.split("\n") if "URI: " in l][0]
|
|
|
|
mo = re.search(r'<span>([^<]+)</span>', line)
|
|
|
|
filecap = mo.group(1)
|
|
|
|
self.failUnless(filecap.startswith(uri_prefix),
|
|
|
|
(uri_prefix, filecap))
|
2011-10-02 04:00:36 +00:00
|
|
|
return self.GET("/uri/%s?t=json" % filecap)
|
2011-10-13 16:29:51 +00:00
|
|
|
d.addCallback(_got_results)
|
2017-01-19 22:39:53 +00:00
|
|
|
def _got_json(raw):
|
|
|
|
data = json.loads(raw)
|
2011-10-02 04:00:36 +00:00
|
|
|
data = data[1]
|
|
|
|
self.failUnlessIn("format", data)
|
2011-10-13 16:31:43 +00:00
|
|
|
self.failUnlessEqual(data["format"], format.upper())
|
2011-10-02 04:00:36 +00:00
|
|
|
d.addCallback(_got_json)
|
2011-10-13 16:29:51 +00:00
|
|
|
return d
|
2011-10-02 04:00:36 +00:00
|
|
|
d = defer.succeed(None)
|
|
|
|
d.addCallback(_check_upload_unlinked, "chk", "URI:CHK")
|
|
|
|
d.addCallback(_check_upload_unlinked, "CHK", "URI:CHK")
|
|
|
|
d.addCallback(_check_upload_unlinked, "sdmf", "URI:SSK")
|
|
|
|
d.addCallback(_check_upload_unlinked, "mdmf", "URI:MDMF")
|
2011-08-07 00:43:48 +00:00
|
|
|
return d
|
|
|
|
|
2017-07-25 00:53:48 +00:00
|
|
|
@inlineCallbacks
|
2011-10-02 04:00:36 +00:00
|
|
|
def test_POST_upload_bad_format_unlinked(self):
|
2017-07-25 00:53:48 +00:00
|
|
|
url = self.webish_url + "/uri?t=upload&format=foo"
|
|
|
|
body, headers = self.build_form(file=("foo.txt", self.NEWFILE_CONTENTS * 300000))
|
|
|
|
yield self.assertHTTPError(url, 400,
|
|
|
|
"Unknown format: foo",
|
|
|
|
method="post", data=body, headers=headers)
|
2011-08-07 00:43:48 +00:00
|
|
|
|
2011-10-02 04:00:36 +00:00
|
|
|
def test_POST_upload_format(self):
|
|
|
|
def _check_upload(ign, format, uri_prefix, fn=None):
|
|
|
|
filename = format + ".txt"
|
|
|
|
d = self.POST(self.public_url +
|
|
|
|
"/foo?t=upload&format=" + format,
|
|
|
|
file=(filename, self.NEWFILE_CONTENTS * 300000))
|
|
|
|
def _got_filecap(filecap):
|
|
|
|
if fn is not None:
|
|
|
|
filenameu = unicode(filename)
|
|
|
|
self.failUnlessURIMatchesRWChild(filecap, fn, filenameu)
|
|
|
|
self.failUnless(filecap.startswith(uri_prefix))
|
|
|
|
return self.GET(self.public_url + "/foo/%s?t=json" % filename)
|
|
|
|
d.addCallback(_got_filecap)
|
2017-01-19 22:39:53 +00:00
|
|
|
def _got_json(raw):
|
|
|
|
data = json.loads(raw)
|
2011-10-02 04:00:36 +00:00
|
|
|
data = data[1]
|
|
|
|
self.failUnlessIn("format", data)
|
2011-10-13 16:31:43 +00:00
|
|
|
self.failUnlessEqual(data["format"], format.upper())
|
2011-10-02 04:00:36 +00:00
|
|
|
d.addCallback(_got_json)
|
2011-10-13 16:29:51 +00:00
|
|
|
return d
|
2011-08-07 00:43:48 +00:00
|
|
|
|
2011-10-02 04:00:36 +00:00
|
|
|
d = defer.succeed(None)
|
|
|
|
d.addCallback(_check_upload, "chk", "URI:CHK")
|
|
|
|
d.addCallback(_check_upload, "sdmf", "URI:SSK", self._foo_node)
|
|
|
|
d.addCallback(_check_upload, "mdmf", "URI:MDMF")
|
|
|
|
d.addCallback(_check_upload, "MDMF", "URI:MDMF")
|
2008-06-03 07:03:16 +00:00
|
|
|
return d
|
|
|
|
|
2017-07-25 00:53:48 +00:00
|
|
|
@inlineCallbacks
|
2011-10-02 04:00:36 +00:00
|
|
|
def test_POST_upload_bad_format(self):
|
2017-07-25 00:53:48 +00:00
|
|
|
url = self.webish_url + self.public_url + "/foo?t=upload&format=foo"
|
|
|
|
body, headers = self.build_form(file=("foo.txt", self.NEWFILE_CONTENTS * 300000))
|
|
|
|
yield self.assertHTTPError(url, 400, "Unknown format: foo",
|
|
|
|
method="post", data=body, headers=headers)
|
2011-08-07 00:43:48 +00:00
|
|
|
|
2007-12-05 06:42:54 +00:00
|
|
|
def test_POST_upload_mutable(self):
|
|
|
|
# this creates a mutable file
|
|
|
|
d = self.POST(self.public_url + "/foo", t="upload", mutable="true",
|
|
|
|
file=("new.txt", self.NEWFILE_CONTENTS))
|
|
|
|
fn = self._foo_node
|
2010-01-27 06:44:30 +00:00
|
|
|
d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
|
2007-12-05 06:42:54 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-04-18 00:51:38 +00:00
|
|
|
self.failUnlessMutableChildContentsAre(fn, u"new.txt",
|
|
|
|
self.NEWFILE_CONTENTS))
|
2008-02-14 22:45:56 +00:00
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
|
2007-12-05 06:42:54 +00:00
|
|
|
def _got(newnode):
|
|
|
|
self.failUnless(IMutableFileNode.providedBy(newnode))
|
|
|
|
self.failUnless(newnode.is_mutable())
|
|
|
|
self.failIf(newnode.is_readonly())
|
2008-05-20 01:37:28 +00:00
|
|
|
self._mutable_node = newnode
|
2007-12-05 06:42:54 +00:00
|
|
|
self._mutable_uri = newnode.get_uri()
|
|
|
|
d.addCallback(_got)
|
|
|
|
|
|
|
|
# now upload it again and make sure that the URI doesn't change
|
|
|
|
NEWER_CONTENTS = self.NEWFILE_CONTENTS + "newer\n"
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.POST(self.public_url + "/foo", t="upload",
|
|
|
|
mutable="true",
|
|
|
|
file=("new.txt", NEWER_CONTENTS)))
|
2010-01-27 06:44:30 +00:00
|
|
|
d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
|
2007-12-05 06:42:54 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-04-18 00:51:38 +00:00
|
|
|
self.failUnlessMutableChildContentsAre(fn, u"new.txt",
|
|
|
|
NEWER_CONTENTS))
|
2008-02-14 22:45:56 +00:00
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
|
2007-12-05 06:42:54 +00:00
|
|
|
def _got2(newnode):
|
|
|
|
self.failUnless(IMutableFileNode.providedBy(newnode))
|
|
|
|
self.failUnless(newnode.is_mutable())
|
|
|
|
self.failIf(newnode.is_readonly())
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(self._mutable_uri, newnode.get_uri())
|
2007-12-05 06:42:54 +00:00
|
|
|
d.addCallback(_got2)
|
|
|
|
|
2008-05-20 01:38:39 +00:00
|
|
|
# upload a second time, using PUT instead of POST
|
|
|
|
NEW2_CONTENTS = NEWER_CONTENTS + "overwrite with PUT\n"
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.PUT(self.public_url + "/foo/new.txt", NEW2_CONTENTS))
|
2010-01-27 06:44:30 +00:00
|
|
|
d.addCallback(self.failUnlessURIMatchesRWChild, fn, u"new.txt")
|
2008-05-20 01:38:39 +00:00
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessMutableChildContentsAre(fn, u"new.txt",
|
|
|
|
NEW2_CONTENTS))
|
|
|
|
|
2007-12-05 06:57:40 +00:00
|
|
|
# finally list the directory, since mutable files are displayed
|
2008-09-18 05:00:41 +00:00
|
|
|
# slightly differently
|
2007-12-05 06:57:40 +00:00
|
|
|
|
|
|
|
d.addCallback(lambda res:
|
2020-03-03 00:08:30 +00:00
|
|
|
self.GET(self.public_url + "/foo",
|
2007-12-05 06:57:40 +00:00
|
|
|
followRedirect=True))
|
|
|
|
def _check_page(res):
|
2007-12-05 07:15:13 +00:00
|
|
|
# TODO: assert more about the contents
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failUnlessIn("SSK", res)
|
2007-12-15 00:52:05 +00:00
|
|
|
return res
|
2007-12-05 06:57:40 +00:00
|
|
|
d.addCallback(_check_page)
|
|
|
|
|
2008-02-14 22:45:56 +00:00
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"new.txt"))
|
2007-12-15 00:52:05 +00:00
|
|
|
def _got3(newnode):
|
|
|
|
self.failUnless(IMutableFileNode.providedBy(newnode))
|
|
|
|
self.failUnless(newnode.is_mutable())
|
|
|
|
self.failIf(newnode.is_readonly())
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(self._mutable_uri, newnode.get_uri())
|
2007-12-15 00:52:05 +00:00
|
|
|
d.addCallback(_got3)
|
|
|
|
|
2008-05-20 22:14:19 +00:00
|
|
|
# look at the JSON form of the enclosing directory
|
2008-05-20 01:37:28 +00:00
|
|
|
d.addCallback(lambda res:
|
2020-03-03 00:08:30 +00:00
|
|
|
self.GET(self.public_url + "/foo?t=json",
|
2008-05-20 01:37:28 +00:00
|
|
|
followRedirect=True))
|
|
|
|
def _check_page_json(res):
|
2017-01-19 22:39:53 +00:00
|
|
|
parsed = json.loads(res)
|
2010-07-18 14:29:15 +00:00
|
|
|
self.failUnlessEqual(parsed[0], "dirnode")
|
2008-09-30 22:21:06 +00:00
|
|
|
children = dict( [(unicode(name),value)
|
|
|
|
for (name,value)
|
|
|
|
in parsed[1]["children"].iteritems()] )
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failUnlessIn(u"new.txt", children)
|
2010-07-18 14:29:15 +00:00
|
|
|
new_json = children[u"new.txt"]
|
|
|
|
self.failUnlessEqual(new_json[0], "filenode")
|
2008-05-20 22:40:49 +00:00
|
|
|
self.failUnless(new_json[1]["mutable"])
|
2010-07-18 14:29:15 +00:00
|
|
|
self.failUnlessReallyEqual(to_str(new_json[1]["rw_uri"]), self._mutable_uri)
|
2010-07-11 20:02:52 +00:00
|
|
|
ro_uri = self._mutable_node.get_readonly().to_string()
|
2010-07-18 14:29:15 +00:00
|
|
|
self.failUnlessReallyEqual(to_str(new_json[1]["ro_uri"]), ro_uri)
|
2008-05-20 01:37:28 +00:00
|
|
|
d.addCallback(_check_page_json)
|
|
|
|
|
2008-05-20 22:14:19 +00:00
|
|
|
# and the JSON form of the file
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.GET(self.public_url + "/foo/new.txt?t=json"))
|
|
|
|
def _check_file_json(res):
|
2017-01-19 22:39:53 +00:00
|
|
|
parsed = json.loads(res)
|
2010-07-18 14:29:15 +00:00
|
|
|
self.failUnlessEqual(parsed[0], "filenode")
|
2008-05-20 22:40:49 +00:00
|
|
|
self.failUnless(parsed[1]["mutable"])
|
2010-07-18 14:29:15 +00:00
|
|
|
self.failUnlessReallyEqual(to_str(parsed[1]["rw_uri"]), self._mutable_uri)
|
2010-07-11 20:02:52 +00:00
|
|
|
ro_uri = self._mutable_node.get_readonly().to_string()
|
2010-07-18 14:29:15 +00:00
|
|
|
self.failUnlessReallyEqual(to_str(parsed[1]["ro_uri"]), ro_uri)
|
2008-05-20 22:14:19 +00:00
|
|
|
d.addCallback(_check_file_json)
|
|
|
|
|
2008-05-20 18:33:14 +00:00
|
|
|
# and look at t=uri and t=readonly-uri
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.GET(self.public_url + "/foo/new.txt?t=uri"))
|
2010-07-11 20:02:52 +00:00
|
|
|
d.addCallback(lambda res: self.failUnlessReallyEqual(res, self._mutable_uri))
|
2008-05-20 18:33:14 +00:00
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.GET(self.public_url + "/foo/new.txt?t=readonly-uri"))
|
|
|
|
def _check_ro_uri(res):
|
2010-07-11 20:02:52 +00:00
|
|
|
ro_uri = self._mutable_node.get_readonly().to_string()
|
|
|
|
self.failUnlessReallyEqual(res, ro_uri)
|
2008-05-20 18:33:14 +00:00
|
|
|
d.addCallback(_check_ro_uri)
|
|
|
|
|
2008-05-20 18:35:47 +00:00
|
|
|
# make sure we can get to it from /uri/URI
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.GET("/uri/%s" % urllib.quote(self._mutable_uri)))
|
|
|
|
d.addCallback(lambda res:
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(res, NEW2_CONTENTS))
|
2008-05-20 18:35:47 +00:00
|
|
|
|
2008-05-20 18:47:43 +00:00
|
|
|
# and that HEAD computes the size correctly
|
|
|
|
d.addCallback(lambda res:
|
2008-10-28 20:41:04 +00:00
|
|
|
self.HEAD(self.public_url + "/foo/new.txt",
|
|
|
|
return_response=True))
|
2019-04-12 14:18:36 +00:00
|
|
|
def _got_headers(res_and_status_and_headers):
|
|
|
|
(res, status, headers) = res_and_status_and_headers
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(res, "")
|
2017-07-24 20:51:58 +00:00
|
|
|
self.failUnlessReallyEqual(headers.getRawHeaders("content-length")[0],
|
2010-07-11 20:02:52 +00:00
|
|
|
str(len(NEW2_CONTENTS)))
|
2017-07-24 20:51:58 +00:00
|
|
|
self.failUnlessReallyEqual(headers.getRawHeaders("content-type"),
|
|
|
|
["text/plain"])
|
2008-05-20 18:47:43 +00:00
|
|
|
d.addCallback(_got_headers)
|
|
|
|
|
2011-08-07 00:43:48 +00:00
|
|
|
# make sure that outdated size limits aren't enforced anymore.
|
|
|
|
d.addCallback(lambda ignored:
|
|
|
|
self.POST(self.public_url + "/foo", t="upload",
|
|
|
|
mutable="true",
|
|
|
|
file=("new.txt",
|
|
|
|
"b" * (self.s.MUTABLE_SIZELIMIT+1))))
|
2008-05-19 19:33:39 +00:00
|
|
|
d.addErrback(self.dump_error)
|
2007-12-05 06:42:54 +00:00
|
|
|
return d
|
|
|
|
|
2008-06-03 07:03:16 +00:00
|
|
|
def test_POST_upload_mutable_toobig(self):
|
2011-08-07 00:43:48 +00:00
|
|
|
# SDMF had a size limit that was removed a while ago. MDMF has
|
|
|
|
# never had a size limit. Test to make sure that we do not
|
|
|
|
# encounter errors when trying to upload large mutable files,
|
|
|
|
# since there should be no coded prohibitions regarding large
|
|
|
|
# mutable files.
|
|
|
|
d = self.POST(self.public_url + "/foo",
|
|
|
|
t="upload", mutable="true",
|
|
|
|
file=("new.txt", "b" * (self.s.MUTABLE_SIZELIMIT + 1)))
|
2008-06-03 07:03:16 +00:00
|
|
|
return d
|
|
|
|
|
2008-05-19 19:33:39 +00:00
|
|
|
def dump_error(self, f):
|
|
|
|
# if the web server returns an error code (like 400 Bad Request),
|
|
|
|
# web.client.getPage puts the HTTP response body into the .response
|
|
|
|
# attribute of the exception object that it gives back. It does not
|
|
|
|
# appear in the Failure's repr(), so the ERROR that trial displays
|
|
|
|
# will be rather terse and unhelpful. addErrback this method to the
|
|
|
|
# end of your chain to get more information out of these errors.
|
|
|
|
if f.check(error.Error):
|
2019-03-22 16:25:11 +00:00
|
|
|
print("web.error.Error:")
|
|
|
|
print(f)
|
|
|
|
print(f.value.response)
|
2008-05-19 19:33:39 +00:00
|
|
|
return f
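# A minimal usage sketch for dump_error (illustrative, mirroring how the
# tests above use it): add it as the last errback on a chain of web calls so
# that the body of any error.Error is printed before trial reports the
# failure.
#
#   d = self.POST(self.public_url + "/foo", t="upload",
#                 file=("new.txt", self.NEWFILE_CONTENTS))
#   d.addErrback(self.dump_error)
#   return d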
|
|
|
|
|
2007-08-15 22:21:38 +00:00
|
|
|
def test_POST_upload_replace(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.POST(self.public_url + "/foo", t="upload",
|
2007-08-15 22:21:38 +00:00
|
|
|
file=("bar.txt", self.NEWFILE_CONTENTS))
|
2007-12-05 06:01:37 +00:00
|
|
|
fn = self._foo_node
|
2010-01-27 06:44:30 +00:00
|
|
|
d.addCallback(self.failUnlessURIMatchesROChild, fn, u"bar.txt")
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failUnlessChildContentsAre(fn, u"bar.txt",
|
2007-12-05 06:01:37 +00:00
|
|
|
self.NEWFILE_CONTENTS))
|
2007-08-15 22:21:38 +00:00
|
|
|
return d
|
|
|
|
|
2007-12-25 10:48:57 +00:00
|
|
|
def test_POST_upload_no_replace_ok(self):
|
|
|
|
d = self.POST(self.public_url + "/foo?replace=false", t="upload",
|
|
|
|
file=("new.txt", self.NEWFILE_CONTENTS))
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/new.txt"))
|
2010-07-11 20:02:52 +00:00
|
|
|
d.addCallback(lambda res: self.failUnlessReallyEqual(res,
|
|
|
|
self.NEWFILE_CONTENTS))
|
2007-12-25 10:48:57 +00:00
|
|
|
return d
|
|
|
|
|
2007-08-15 22:21:38 +00:00
|
|
|
def test_POST_upload_no_replace_queryarg(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.POST(self.public_url + "/foo?replace=false", t="upload",
|
2007-08-15 22:21:38 +00:00
|
|
|
file=("bar.txt", self.NEWFILE_CONTENTS))
|
|
|
|
d.addBoth(self.shouldFail, error.Error,
|
|
|
|
"POST_upload_no_replace_queryarg",
|
|
|
|
"409 Conflict",
|
|
|
|
"There was already a child by that name, and you asked me "
|
|
|
|
"to not replace it")
|
2007-12-03 21:52:42 +00:00
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
|
2007-08-15 22:21:38 +00:00
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_upload_no_replace_field(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.POST(self.public_url + "/foo", t="upload", replace="false",
|
2007-08-15 22:21:38 +00:00
|
|
|
file=("bar.txt", self.NEWFILE_CONTENTS))
|
|
|
|
d.addBoth(self.shouldFail, error.Error, "POST_upload_no_replace_field",
|
|
|
|
"409 Conflict",
|
|
|
|
"There was already a child by that name, and you asked me "
|
|
|
|
"to not replace it")
|
2007-12-03 21:52:42 +00:00
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
|
2007-08-15 22:21:38 +00:00
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
return d
|
|
|
|
|
2017-07-24 22:16:24 +00:00
|
|
|
@inlineCallbacks
|
2007-08-14 00:45:02 +00:00
|
|
|
def test_POST_upload_whendone(self):
|
2017-07-24 22:16:24 +00:00
|
|
|
body, headers = self.build_form(t="upload", when_done="/THERE",
|
|
|
|
file=("new.txt", self.NEWFILE_CONTENTS))
|
|
|
|
yield self.shouldRedirectTo(self.webish_url + self.public_url + "/foo",
|
2019-10-13 09:47:59 +00:00
|
|
|
"/THERE",
|
2017-07-24 22:16:24 +00:00
|
|
|
method="post", data=body, headers=headers,
|
|
|
|
code=http.FOUND)
|
2007-12-05 06:01:37 +00:00
|
|
|
fn = self._foo_node
|
2017-07-24 22:16:24 +00:00
|
|
|
yield self.failUnlessChildContentsAre(fn, u"new.txt",
|
|
|
|
self.NEWFILE_CONTENTS)
|
2007-08-14 00:45:02 +00:00
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_POST_upload_named(self):
|
2007-12-05 06:01:37 +00:00
|
|
|
fn = self._foo_node
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.POST(self.public_url + "/foo", t="upload",
|
2007-07-08 03:06:58 +00:00
|
|
|
name="new.txt", file=self.NEWFILE_CONTENTS)
|
2010-01-27 06:44:30 +00:00
|
|
|
d.addCallback(self.failUnlessURIMatchesROChild, fn, u"new.txt")
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failUnlessChildContentsAre(fn, u"new.txt",
|
2007-12-05 06:01:37 +00:00
|
|
|
self.NEWFILE_CONTENTS))
|
2007-07-07 02:43:55 +00:00
|
|
|
return d
|
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_POST_upload_named_badfilename(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.POST(self.public_url + "/foo", t="upload",
|
2007-07-16 18:53:12 +00:00
|
|
|
name="slashes/are/bad.txt", file=self.NEWFILE_CONTENTS)
|
|
|
|
d.addBoth(self.shouldFail, error.Error,
|
|
|
|
"test_POST_upload_named_badfilename",
|
|
|
|
"400 Bad Request",
|
|
|
|
"name= may not contain a slash",
|
|
|
|
)
|
2007-12-05 06:01:37 +00:00
|
|
|
# make sure that nothing was added
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessNodeKeysAre(self._foo_node,
|
2012-10-25 00:01:25 +00:00
|
|
|
[self._htmlname_unicode,
|
|
|
|
u"bar.txt", u"baz.txt", u"blockingfile",
|
2011-08-07 00:43:48 +00:00
|
|
|
u"empty", u"n\u00fc.txt", u"quux.txt",
|
2008-02-14 22:45:56 +00:00
|
|
|
u"sub"]))
|
2007-07-16 18:53:12 +00:00
|
|
|
return d
|
|
|
|
|
2017-07-24 21:40:11 +00:00
|
|
|
@inlineCallbacks
|
2008-05-20 18:33:14 +00:00
|
|
|
def test_POST_FILEURL_check(self):
|
2008-07-16 22:42:56 +00:00
|
|
|
bar_url = self.public_url + "/foo/bar.txt"
|
2017-07-24 21:40:11 +00:00
|
|
|
res = yield self.POST(bar_url, t="check")
|
|
|
|
self.failUnlessIn("Healthy :", res)
|
|
|
|
|
2008-07-16 22:42:56 +00:00
|
|
|
redir_url = "http://allmydata.org/TARGET"
|
2017-07-24 22:02:23 +00:00
|
|
|
body, headers = self.build_form(t="check", when_done=redir_url)
|
|
|
|
yield self.shouldRedirectTo(self.webish_url + bar_url, redir_url,
|
|
|
|
method="post", data=body, headers=headers,
|
|
|
|
code=http.FOUND)
|
|
|
|
|
2017-07-24 21:40:11 +00:00
|
|
|
res = yield self.POST(bar_url, t="check", return_to=redir_url)
|
|
|
|
self.failUnlessIn("Healthy :", res)
|
|
|
|
self.failUnlessIn("Return to file", res)
|
|
|
|
self.failUnlessIn(redir_url, res)
|
|
|
|
|
|
|
|
res = yield self.POST(bar_url, t="check", output="JSON")
|
|
|
|
data = json.loads(res)
|
|
|
|
self.failUnlessIn("storage-index", data)
|
|
|
|
self.failUnless(data["results"]["healthy"])
|
2008-07-16 22:42:56 +00:00
|
|
|
|
2017-07-24 21:40:11 +00:00
|
|
|
@inlineCallbacks
|
2008-09-07 19:44:56 +00:00
|
|
|
def test_POST_FILEURL_check_and_repair(self):
|
|
|
|
bar_url = self.public_url + "/foo/bar.txt"
|
2017-07-24 21:40:11 +00:00
|
|
|
res = yield self.POST(bar_url, t="check", repair="true")
|
|
|
|
self.failUnlessIn("Healthy :", res)
|
|
|
|
|
2008-09-07 19:44:56 +00:00
|
|
|
redir_url = "http://allmydata.org/TARGET"
|
2017-07-24 22:02:23 +00:00
|
|
|
body, headers = self.build_form(t="check", repair="true",
|
|
|
|
when_done=redir_url)
|
|
|
|
yield self.shouldRedirectTo(self.webish_url + bar_url, redir_url,
|
|
|
|
method="post", data=body, headers=headers,
|
|
|
|
code=http.FOUND)
|
|
|
|
|
2017-07-24 21:40:11 +00:00
|
|
|
res = yield self.POST(bar_url, t="check", return_to=redir_url)
|
|
|
|
self.failUnlessIn("Healthy :", res)
|
|
|
|
self.failUnlessIn("Return to file", res)
|
|
|
|
self.failUnlessIn(redir_url, res)
|
2008-09-07 19:44:56 +00:00
|
|
|
|
2017-07-24 21:40:11 +00:00
|
|
|
@inlineCallbacks
|
2008-07-16 22:42:56 +00:00
|
|
|
def test_POST_DIRURL_check(self):
|
2020-03-03 00:08:30 +00:00
|
|
|
foo_url = self.public_url + "/foo"
|
2017-07-24 21:40:11 +00:00
|
|
|
res = yield self.POST(foo_url, t="check")
|
|
|
|
self.failUnlessIn("Healthy :", res)
|
|
|
|
|
2008-07-16 22:42:56 +00:00
|
|
|
redir_url = "http://allmydata.org/TARGET"
|
2017-07-24 22:02:23 +00:00
|
|
|
body, headers = self.build_form(t="check", when_done=redir_url)
|
|
|
|
yield self.shouldRedirectTo(self.webish_url + foo_url, redir_url,
|
|
|
|
method="post", data=body, headers=headers,
|
|
|
|
code=http.FOUND)
|
|
|
|
|
2017-07-24 21:40:11 +00:00
|
|
|
res = yield self.POST(foo_url, t="check", return_to=redir_url)
|
|
|
|
self.failUnlessIn("Healthy :", res)
|
|
|
|
self.failUnlessIn("Return to file/directory", res)
|
|
|
|
self.failUnlessIn(redir_url, res)
|
|
|
|
|
|
|
|
res = yield self.POST(foo_url, t="check", output="JSON")
|
|
|
|
data = json.loads(res)
|
|
|
|
self.failUnlessIn("storage-index", data)
|
|
|
|
self.failUnless(data["results"]["healthy"])
|
2008-05-20 18:33:14 +00:00
|
|
|
|
2017-07-24 21:40:11 +00:00
|
|
|
@inlineCallbacks
|
2008-09-07 19:44:56 +00:00
|
|
|
def test_POST_DIRURL_check_and_repair(self):
|
2020-03-03 00:08:30 +00:00
|
|
|
foo_url = self.public_url + "/foo"
|
2017-07-24 21:40:11 +00:00
|
|
|
res = yield self.POST(foo_url, t="check", repair="true")
|
|
|
|
self.failUnlessIn("Healthy :", res)
|
|
|
|
|
2008-09-07 19:44:56 +00:00
|
|
|
redir_url = "http://allmydata.org/TARGET"
|
2017-07-24 22:02:23 +00:00
|
|
|
body, headers = self.build_form(t="check", repair="true",
|
|
|
|
when_done=redir_url)
|
|
|
|
yield self.shouldRedirectTo(self.webish_url + foo_url, redir_url,
|
|
|
|
method="post", data=body, headers=headers,
|
|
|
|
code=http.FOUND)
|
2017-07-24 21:40:11 +00:00
|
|
|
res = yield self.POST(foo_url, t="check", return_to=redir_url)
|
|
|
|
self.failUnlessIn("Healthy :", res)
|
|
|
|
self.failUnlessIn("Return to file/directory", res)
|
|
|
|
self.failUnlessIn(redir_url, res)
|
2008-09-07 19:44:56 +00:00
|
|
|
|
2011-08-07 00:43:48 +00:00
|
|
|
def test_POST_FILEURL_mdmf_check(self):
|
|
|
|
quux_url = "/uri/%s" % urllib.quote(self._quux_txt_uri)
|
|
|
|
d = self.POST(quux_url, t="check")
|
|
|
|
def _check(res):
|
|
|
|
self.failUnlessIn("Healthy", res)
|
|
|
|
d.addCallback(_check)
|
|
|
|
quux_extension_url = "/uri/%s" % urllib.quote("%s:3:131073" % self._quux_txt_uri)
|
|
|
|
d.addCallback(lambda ignored:
|
2011-12-17 04:27:10 +00:00
|
|
|
self.POST(quux_extension_url, t="check"))
|
2011-08-07 00:43:48 +00:00
|
|
|
d.addCallback(_check)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_FILEURL_mdmf_check_and_repair(self):
|
|
|
|
quux_url = "/uri/%s" % urllib.quote(self._quux_txt_uri)
|
|
|
|
d = self.POST(quux_url, t="check", repair="true")
|
|
|
|
def _check(res):
|
|
|
|
self.failUnlessIn("Healthy", res)
|
|
|
|
d.addCallback(_check)
|
2011-12-17 04:27:10 +00:00
|
|
|
quux_extension_url = "/uri/%s" % urllib.quote("%s:3:131073" % self._quux_txt_uri)
|
2011-08-07 00:43:48 +00:00
|
|
|
d.addCallback(lambda ignored:
|
2011-12-17 04:27:10 +00:00
|
|
|
self.POST(quux_extension_url, t="check", repair="true"))
|
2011-08-07 00:43:48 +00:00
|
|
|
d.addCallback(_check)
|
|
|
|
return d
|
|
|
|
|
2008-10-22 00:03:07 +00:00
|
|
|
def wait_for_operation(self, ignored, ophandle):
|
|
|
|
url = "/operations/" + ophandle
|
|
|
|
url += "?t=status&output=JSON"
|
|
|
|
d = self.GET(url)
|
|
|
|
def _got(res):
|
2017-01-19 22:39:53 +00:00
|
|
|
data = json.loads(res)
|
2008-10-22 00:03:07 +00:00
|
|
|
if not data["finished"]:
|
|
|
|
d = self.stall(delay=1.0)
|
|
|
|
d.addCallback(self.wait_for_operation, ophandle)
|
|
|
|
return d
|
|
|
|
return data
|
|
|
|
d.addCallback(_got)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def get_operation_results(self, ignored, ophandle, output=None):
|
|
|
|
url = "/operations/" + ophandle
|
|
|
|
url += "?t=status"
|
|
|
|
if output:
|
|
|
|
url += "&output=" + output
|
|
|
|
d = self.GET(url)
|
|
|
|
def _got(res):
|
|
|
|
if output and output.lower() == "json":
|
2017-01-19 22:39:53 +00:00
|
|
|
return json.loads(res)
|
2008-10-22 00:03:07 +00:00
|
|
|
return res
|
|
|
|
d.addCallback(_got)
|
|
|
|
return d
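# (Sketch of the operation-handle flow that the two helpers above assume: a
# slow operation is started with an ophandle= query argument, the server
# redirects to /operations/HANDLE, and the client then polls
# /operations/HANDLE?t=status&output=JSON until "finished" is true; the
# deep-check tests below do roughly this:
#
#   d = self.POST(self.public_url, t="start-deep-check", ophandle="123")
#   d.addCallback(self.wait_for_operation, "123")
#   d.addCallback(lambda data: self.failUnless(data["finished"]))
# )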
|
|
|
|
|
|
|
|
def test_POST_DIRURL_deepcheck_no_ophandle(self):
|
|
|
|
d = self.shouldFail2(error.Error,
|
|
|
|
"test_POST_DIRURL_deepcheck_no_ophandle",
|
|
|
|
"400 Bad Request",
|
|
|
|
"slow operation requires ophandle=",
|
|
|
|
self.POST, self.public_url, t="start-deep-check")
|
|
|
|
return d

    @inlineCallbacks
    def test_POST_DIRURL_deepcheck(self):
        body, headers = self.build_form(t="start-deep-check", ophandle="123")
        yield self.shouldRedirectTo(self.webish_url + self.public_url,
                                    self.webish_url + "/operations/123",
                                    method="post", data=body, headers=headers,
                                    code=http.FOUND)

        data = yield self.wait_for_operation(None, "123")
        self.failUnlessReallyEqual(data["finished"], True)
        self.failUnlessReallyEqual(data["count-objects-checked"], 11)
        self.failUnlessReallyEqual(data["count-objects-healthy"], 11)

        res = yield self.get_operation_results(None, "123", "html")
        self.failUnlessIn("Objects Checked: <span>11</span>", res)
        self.failUnlessIn("Objects Healthy: <span>11</span>", res)
        self.failUnlessIn(FAVICON_MARKUP, res)

        res = yield self.GET("/operations/123/")
        # should be the same as without the slash
        self.failUnlessIn("Objects Checked: <span>11</span>", res)
        self.failUnlessIn("Objects Healthy: <span>11</span>", res)
        self.failUnlessIn(FAVICON_MARKUP, res)

        yield self.shouldFail2(error.Error, "one", "404 Not Found",
                               "No detailed results for SI bogus",
                               self.GET, "/operations/123/bogus")

        foo_si = self._foo_node.get_storage_index()
        foo_si_s = base32.b2a(foo_si)
        res = yield self.GET("/operations/123/%s?output=JSON" % foo_si_s)
        data = json.loads(res)
        self.failUnlessEqual(data["storage-index"], foo_si_s)
        self.failUnless(data["results"]["healthy"])

    def test_POST_DIRURL_deepcheck_and_repair(self):
        url = self.webish_url + self.public_url
        body, headers = self.build_form(t="start-deep-check", repair="true",
                                        ophandle="124", output="json")
        d = do_http("post", url, data=body, headers=headers,
                    allow_redirects=True,
                    browser_like_redirects=True)
        d.addCallback(self.wait_for_operation, "124")
        def _check_json(data):
            self.failUnlessReallyEqual(data["finished"], True)
            self.failUnlessReallyEqual(data["count-objects-checked"], 11)
            self.failUnlessReallyEqual(data["count-objects-healthy-pre-repair"], 11)
            self.failUnlessReallyEqual(data["count-objects-unhealthy-pre-repair"], 0)
            self.failUnlessReallyEqual(data["count-corrupt-shares-pre-repair"], 0)
            self.failUnlessReallyEqual(data["count-repairs-attempted"], 0)
            self.failUnlessReallyEqual(data["count-repairs-successful"], 0)
            self.failUnlessReallyEqual(data["count-repairs-unsuccessful"], 0)
            self.failUnlessReallyEqual(data["count-objects-healthy-post-repair"], 11)
            self.failUnlessReallyEqual(data["count-objects-unhealthy-post-repair"], 0)
            self.failUnlessReallyEqual(data["count-corrupt-shares-post-repair"], 0)
        d.addCallback(_check_json)
        d.addCallback(self.get_operation_results, "124", "html")
        def _check_html(res):
            self.failUnlessIn("Objects Checked: <span>11</span>", res)

            self.failUnlessIn("Objects Healthy (before repair): <span>11</span>", res)
            self.failUnlessIn("Objects Unhealthy (before repair): <span>0</span>", res)
            self.failUnlessIn("Corrupt Shares (before repair): <span>0</span>", res)

            self.failUnlessIn("Repairs Attempted: <span>0</span>", res)
            self.failUnlessIn("Repairs Successful: <span>0</span>", res)
            self.failUnlessIn("Repairs Unsuccessful: <span>0</span>", res)

            self.failUnlessIn("Objects Healthy (after repair): <span>11</span>", res)
            self.failUnlessIn("Objects Unhealthy (after repair): <span>0</span>", res)
            self.failUnlessIn("Corrupt Shares (after repair): <span>0</span>", res)

            self.failUnlessIn(FAVICON_MARKUP, res)
        d.addCallback(_check_html)
        return d
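
    # For reference, a finished deep-check-and-repair status (as polled above)
    # is a JSON object whose counter keys are exactly the ones asserted in
    # _check_json.  The sketch below just collects those keys in one place;
    # the values shown are what this particular 11-object tree produces and
    # are assumptions outside that context.
    _EXAMPLE_DEEPCHECK_REPAIR_STATUS = {
        "finished": True,
        "count-objects-checked": 11,
        "count-objects-healthy-pre-repair": 11,
        "count-objects-unhealthy-pre-repair": 0,
        "count-corrupt-shares-pre-repair": 0,
        "count-repairs-attempted": 0,
        "count-repairs-successful": 0,
        "count-repairs-unsuccessful": 0,
        "count-objects-healthy-post-repair": 11,
        "count-objects-unhealthy-post-repair": 0,
        "count-corrupt-shares-post-repair": 0,
    }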

    def test_POST_FILEURL_bad_t(self):
        d = self.shouldFail2(error.Error, "POST_bad_t", "400 Bad Request",
                             "POST to file: bad t=bogus",
                             self.POST, self.public_url + "/foo/bar.txt",
                             t="bogus")
        return d

    def test_POST_mkdir(self): # return value?
        d = self.POST(self.public_url + "/foo", t="mkdir", name="newdir")
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessNodeKeysAre, [])
        return d

    def test_POST_mkdir_mdmf(self):
        d = self.POST(self.public_url + "/foo?t=mkdir&name=newdir&format=mdmf")
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(lambda node:
            self.failUnlessEqual(node._node.get_version(), MDMF_VERSION))
        return d

    def test_POST_mkdir_sdmf(self):
        d = self.POST(self.public_url + "/foo?t=mkdir&name=newdir&format=sdmf")
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(lambda node:
            self.failUnlessEqual(node._node.get_version(), SDMF_VERSION))
        return d
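
    # The format= query argument selects the mutable-directory variant:
    # "mdmf" and "sdmf" are accepted (checked against MDMF_VERSION and
    # SDMF_VERSION above), and any other value is rejected with
    # "400 Unknown format", as the next test shows.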

    @inlineCallbacks
    def test_POST_mkdir_bad_format(self):
        url = (self.webish_url + self.public_url +
               "/foo?t=mkdir&name=newdir&format=foo")
        yield self.assertHTTPError(url, 400, "Unknown format: foo",
                                   method="post")

    def test_POST_mkdir_initial_children(self):
        (newkids, caps) = self._create_initial_children()
        d = self.POST2(self.public_url +
                       "/foo?t=mkdir-with-children&name=newdir",
                       json.dumps(newkids))
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
        return d

    def test_POST_mkdir_initial_children_mdmf(self):
        (newkids, caps) = self._create_initial_children()
        d = self.POST2(self.public_url +
                       "/foo?t=mkdir-with-children&name=newdir&format=mdmf",
                       json.dumps(newkids))
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(lambda node:
                      self.failUnlessEqual(node._node.get_version(), MDMF_VERSION))
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessROChildURIIs, u"child-imm",
                      caps['filecap1'])
        return d

    # XXX: Duplication.
    def test_POST_mkdir_initial_children_sdmf(self):
        (newkids, caps) = self._create_initial_children()
        d = self.POST2(self.public_url +
                       "/foo?t=mkdir-with-children&name=newdir&format=sdmf",
                       json.dumps(newkids))
        d.addCallback(lambda res:
                      self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(lambda node:
                      self.failUnlessEqual(node._node.get_version(), SDMF_VERSION))
        d.addCallback(lambda res: self._foo_node.get(u"newdir"))
        d.addCallback(self.failUnlessROChildURIIs, u"child-imm",
                      caps['filecap1'])
        return d

    @inlineCallbacks
    def test_POST_mkdir_initial_children_bad_format(self):
        (newkids, caps) = self._create_initial_children()
        url = (self.webish_url + self.public_url +
               "/foo?t=mkdir-with-children&name=newdir&format=foo")
        yield self.assertHTTPError(url, 400, "Unknown format: foo",
                                   method="post", data=json.dumps(newkids))
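
    # The body accepted by t=mkdir-with-children (and t=mkdir-immutable) is a
    # JSON map from child name to a [node-type, {caps/metadata}] pair, in the
    # same shape that _create_initial_children() builds further down.  The
    # method below is an illustrative sketch only; the cap strings are
    # placeholders, not real caps.
    def _example_mkdir_with_children_body_sketch(self):
        newkids = {
            u"child-imm": ["filenode", {"ro_uri": "URI:CHK:placeholder",
                                        "metadata": {"metakey1": "metavalue1"}}],
            u"dirchild": ["dirnode", {"rw_uri": "URI:DIR2:placeholder"}],
        }
        return json.dumps(newkids)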
|
2011-08-07 00:43:48 +00:00
|
|
|
|
2009-11-18 07:09:00 +00:00
|
|
|
def test_POST_mkdir_immutable(self):
|
2010-01-27 06:44:30 +00:00
|
|
|
(newkids, caps) = self._create_immutable_children()
|
2009-11-18 07:09:00 +00:00
|
|
|
d = self.POST2(self.public_url +
|
|
|
|
"/foo?t=mkdir-immutable&name=newdir",
|
2017-01-19 22:39:53 +00:00
|
|
|
json.dumps(newkids))
|
2009-10-13 02:34:44 +00:00
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
|
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"newdir"))
|
|
|
|
d.addCallback(self.failUnlessNodeKeysAre, newkids.keys())
|
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"newdir"))
|
2010-01-27 06:44:30 +00:00
|
|
|
d.addCallback(self.failUnlessROChildURIIs, u"child-imm", caps['filecap1'])
|
2009-11-18 07:09:00 +00:00
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"newdir"))
|
2010-01-27 06:44:30 +00:00
|
|
|
d.addCallback(self.failUnlessROChildURIIs, u"unknownchild-imm", caps['unknown_immcap'])
|
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"newdir"))
|
|
|
|
d.addCallback(self.failUnlessROChildURIIs, u"dirchild-imm", caps['immdircap'])
|
2010-02-22 02:53:52 +00:00
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"newdir"))
|
|
|
|
d.addCallback(self.failUnlessROChildURIIs, u"dirchild-lit", caps['litdircap'])
|
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"newdir"))
|
|
|
|
d.addCallback(self.failUnlessROChildURIIs, u"dirchild-empty", caps['emptydircap'])
|
2009-11-18 07:09:00 +00:00
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_mkdir_immutable_bad(self):
|
2010-01-27 06:44:30 +00:00
|
|
|
(newkids, caps) = self._create_initial_children()
|
2011-10-02 04:03:32 +00:00
|
|
|
d = self.shouldFail2(error.Error, "POST_mkdir_immutable_bad",
|
2009-11-18 07:09:00 +00:00
|
|
|
"400 Bad Request",
|
2010-01-27 06:44:30 +00:00
|
|
|
"needed to be immutable but was not",
|
2009-11-18 07:09:00 +00:00
|
|
|
self.POST2,
|
|
|
|
self.public_url +
|
|
|
|
"/foo?t=mkdir-immutable&name=newdir",
|
2017-01-19 22:39:53 +00:00
|
|
|
json.dumps(newkids))
|
2009-10-13 02:34:44 +00:00
|
|
|
return d
|
|
|
|
|
2008-05-19 19:56:02 +00:00
|
|
|
def test_POST_mkdir_2(self):
|
2017-07-24 23:22:36 +00:00
|
|
|
d = self.POST2(self.public_url + "/foo/newdir?t=mkdir", "")
|
2008-05-19 19:56:02 +00:00
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessNodeHasChild(self._foo_node, u"newdir"))
|
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"newdir"))
|
|
|
|
d.addCallback(self.failUnlessNodeKeysAre, [])
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_mkdirs_2(self):
|
2017-07-24 23:22:36 +00:00
|
|
|
d = self.POST2(self.public_url + "/foo/bardir/newdir?t=mkdir", "")
|
2008-05-19 19:56:02 +00:00
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessNodeHasChild(self._foo_node, u"bardir"))
|
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"bardir"))
|
|
|
|
d.addCallback(lambda bardirnode: bardirnode.get(u"newdir"))
|
|
|
|
d.addCallback(self.failUnlessNodeKeysAre, [])
|
|
|
|
return d
|
|
|
|
|
2007-12-19 00:47:49 +00:00
|
|
|
def test_POST_mkdir_no_parentdir_noredirect(self):
|
2007-12-25 00:46:52 +00:00
|
|
|
d = self.POST("/uri?t=mkdir")
|
2007-12-18 20:15:08 +00:00
|
|
|
def _after_mkdir(res):
|
2009-07-17 01:01:03 +00:00
|
|
|
uri.DirectoryURI.init_from_string(res)
|
2007-12-18 20:15:08 +00:00
|
|
|
d.addCallback(_after_mkdir)
|
|
|
|
return d
|
|
|
|
|
2011-08-07 00:43:48 +00:00
|
|
|
def test_POST_mkdir_no_parentdir_noredirect_mdmf(self):
|
2011-10-02 04:00:36 +00:00
|
|
|
d = self.POST("/uri?t=mkdir&format=mdmf")
|
2011-08-07 00:43:48 +00:00
|
|
|
def _after_mkdir(res):
|
|
|
|
u = uri.from_string(res)
|
|
|
|
# Check that this is an MDMF writecap
|
|
|
|
self.failUnlessIsInstance(u, uri.MDMFDirectoryURI)
|
|
|
|
d.addCallback(_after_mkdir)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_mkdir_no_parentdir_noredirect_sdmf(self):
|
2011-10-02 04:00:36 +00:00
|
|
|
d = self.POST("/uri?t=mkdir&format=sdmf")
|
2011-08-07 00:43:48 +00:00
|
|
|
def _after_mkdir(res):
|
|
|
|
u = uri.from_string(res)
|
|
|
|
self.failUnlessIsInstance(u, uri.DirectoryURI)
|
|
|
|
d.addCallback(_after_mkdir)
|
|
|
|
return d

    @inlineCallbacks
    def test_POST_mkdir_no_parentdir_noredirect_bad_format(self):
        url = self.webish_url + "/uri?t=mkdir&format=foo"
        yield self.assertHTTPError(url, 400, "Unknown format: foo",
                                   method="post")
|
2011-08-07 00:43:48 +00:00
|
|
|
|
2010-01-21 06:50:52 +00:00
|
|
|
def test_POST_mkdir_no_parentdir_noredirect2(self):
|
|
|
|
# make sure form-based arguments (as on the welcome page) still work
|
|
|
|
d = self.POST("/uri", t="mkdir")
|
|
|
|
def _after_mkdir(res):
|
|
|
|
uri.DirectoryURI.init_from_string(res)
|
|
|
|
d.addCallback(_after_mkdir)
|
|
|
|
d.addErrback(self.explain_web_error)
|
|
|
|
return d
|
|
|
|
|
2017-07-24 22:16:24 +00:00
|
|
|
@inlineCallbacks
|
2007-12-20 19:58:17 +00:00
|
|
|
def test_POST_mkdir_no_parentdir_redirect(self):
|
2017-07-24 22:16:24 +00:00
|
|
|
url = self.webish_url + "/uri?t=mkdir&redirect_to_result=true"
|
|
|
|
target = yield self.shouldRedirectTo(url, None, method="post",
|
|
|
|
code=http.SEE_OTHER)
|
|
|
|
target = urllib.unquote(target)
|
|
|
|
self.failUnless(target.startswith("uri/URI:DIR2:"), target)
|
2007-12-20 19:58:17 +00:00
|
|
|
|
2017-07-24 22:16:24 +00:00
|
|
|
@inlineCallbacks
|
2010-01-21 06:50:52 +00:00
|
|
|
def test_POST_mkdir_no_parentdir_redirect2(self):
|
2017-07-24 22:16:24 +00:00
|
|
|
body, headers = self.build_form(t="mkdir", redirect_to_result="true")
|
|
|
|
target = yield self.shouldRedirectTo(self.webish_url + "/uri", None,
|
|
|
|
method="post",
|
|
|
|
data=body, headers=headers,
|
|
|
|
code=http.SEE_OTHER)
|
|
|
|
target = urllib.unquote(target)
|
|
|
|
self.failUnless(target.startswith("uri/URI:DIR2:"), target)
|
2010-01-21 06:50:52 +00:00
|
|
|
|
2010-01-27 06:44:30 +00:00
|
|
|
def _make_readonly(self, u):
|
|
|
|
ro_uri = uri.from_string(u).get_readonly()
|
|
|
|
if ro_uri is None:
|
|
|
|
return None
|
|
|
|
return ro_uri.to_string()
|
|
|
|
|
2009-10-13 02:34:44 +00:00
|
|
|
def _create_initial_children(self):
|
|
|
|
contents, n, filecap1 = self.makefile(12)
|
|
|
|
md1 = {"metakey1": "metavalue1"}
|
|
|
|
filecap2 = make_mutable_file_uri()
|
|
|
|
node3 = self.s.create_node_from_uri(make_mutable_file_uri())
|
|
|
|
filecap3 = node3.get_readonly_uri()
|
|
|
|
node4 = self.s.create_node_from_uri(make_mutable_file_uri())
|
|
|
|
dircap = DirectoryNode(node4, None, None).get_uri()
|
2011-08-07 00:43:48 +00:00
|
|
|
mdmfcap = make_mutable_file_uri(mdmf=True)
|
2010-02-22 02:53:52 +00:00
|
|
|
litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm"
|
|
|
|
emptydircap = "URI:DIR2-LIT:"
|
2010-01-27 06:44:30 +00:00
|
|
|
newkids = {u"child-imm": ["filenode", {"rw_uri": filecap1,
|
|
|
|
"ro_uri": self._make_readonly(filecap1),
|
|
|
|
"metadata": md1, }],
|
|
|
|
u"child-mutable": ["filenode", {"rw_uri": filecap2,
|
|
|
|
"ro_uri": self._make_readonly(filecap2)}],
|
2009-10-13 02:34:44 +00:00
|
|
|
u"child-mutable-ro": ["filenode", {"ro_uri": filecap3}],
|
2010-01-27 06:44:30 +00:00
|
|
|
u"unknownchild-rw": ["unknown", {"rw_uri": unknown_rwcap,
|
|
|
|
"ro_uri": unknown_rocap}],
|
|
|
|
u"unknownchild-ro": ["unknown", {"ro_uri": unknown_rocap}],
|
|
|
|
u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
|
|
|
|
u"dirchild": ["dirnode", {"rw_uri": dircap,
|
|
|
|
"ro_uri": self._make_readonly(dircap)}],
|
2010-02-22 02:53:52 +00:00
|
|
|
u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}],
|
|
|
|
u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}],
|
2011-08-07 00:43:48 +00:00
|
|
|
u"child-mutable-mdmf": ["filenode", {"rw_uri": mdmfcap,
|
|
|
|
"ro_uri": self._make_readonly(mdmfcap)}],
|
2009-10-13 02:34:44 +00:00
|
|
|
}
|
2010-01-27 06:44:30 +00:00
|
|
|
return newkids, {'filecap1': filecap1,
|
|
|
|
'filecap2': filecap2,
|
|
|
|
'filecap3': filecap3,
|
|
|
|
'unknown_rwcap': unknown_rwcap,
|
|
|
|
'unknown_rocap': unknown_rocap,
|
|
|
|
'unknown_immcap': unknown_immcap,
|
2010-02-22 02:53:52 +00:00
|
|
|
'dircap': dircap,
|
|
|
|
'litdircap': litdircap,
|
2011-08-07 00:43:48 +00:00
|
|
|
'emptydircap': emptydircap,
|
|
|
|
'mdmfcap': mdmfcap}
|
2009-10-13 02:34:44 +00:00
|
|
|
|
2009-11-18 07:09:00 +00:00
|
|
|
def _create_immutable_children(self):
|
|
|
|
contents, n, filecap1 = self.makefile(12)
|
|
|
|
md1 = {"metakey1": "metavalue1"}
|
2012-05-22 22:18:26 +00:00
|
|
|
tnode = create_chk_filenode("immutable directory contents\n"*10,
|
|
|
|
self.get_all_contents())
|
2009-11-18 07:09:00 +00:00
|
|
|
dnode = DirectoryNode(tnode, None, None)
|
|
|
|
assert not dnode.is_mutable()
|
|
|
|
immdircap = dnode.get_uri()
|
2010-02-22 02:53:52 +00:00
|
|
|
litdircap = "URI:DIR2-LIT:ge3dumj2mewdcotyfqydulbshj5x2lbm"
|
|
|
|
emptydircap = "URI:DIR2-LIT:"
|
2010-01-27 06:44:30 +00:00
|
|
|
newkids = {u"child-imm": ["filenode", {"ro_uri": filecap1,
|
|
|
|
"metadata": md1, }],
|
|
|
|
u"unknownchild-imm": ["unknown", {"ro_uri": unknown_immcap}],
|
|
|
|
u"dirchild-imm": ["dirnode", {"ro_uri": immdircap}],
|
2010-02-22 02:53:52 +00:00
|
|
|
u"dirchild-lit": ["dirnode", {"ro_uri": litdircap}],
|
|
|
|
u"dirchild-empty": ["dirnode", {"ro_uri": emptydircap}],
|
2009-11-18 07:09:00 +00:00
|
|
|
}
|
2010-01-27 06:44:30 +00:00
|
|
|
return newkids, {'filecap1': filecap1,
|
|
|
|
'unknown_immcap': unknown_immcap,
|
2010-02-22 02:53:52 +00:00
|
|
|
'immdircap': immdircap,
|
|
|
|
'litdircap': litdircap,
|
|
|
|
'emptydircap': emptydircap}
|
2009-11-18 07:09:00 +00:00
|
|
|
|
2009-10-13 02:34:44 +00:00
|
|
|
def test_POST_mkdir_no_parentdir_initial_children(self):
|
2010-01-27 06:44:30 +00:00
|
|
|
(newkids, caps) = self._create_initial_children()
|
2017-01-19 22:39:53 +00:00
|
|
|
d = self.POST2("/uri?t=mkdir-with-children", json.dumps(newkids))
|
2009-10-13 02:34:44 +00:00
|
|
|
def _after_mkdir(res):
|
|
|
|
self.failUnless(res.startswith("URI:DIR"), res)
|
|
|
|
n = self.s.create_node_from_uri(res)
|
|
|
|
d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
|
|
|
|
d2.addCallback(lambda ign:
|
2010-01-27 06:44:30 +00:00
|
|
|
self.failUnlessROChildURIIs(n, u"child-imm",
|
|
|
|
caps['filecap1']))
|
2009-10-13 02:34:44 +00:00
|
|
|
d2.addCallback(lambda ign:
|
2010-01-27 06:44:30 +00:00
|
|
|
self.failUnlessRWChildURIIs(n, u"child-mutable",
|
|
|
|
caps['filecap2']))
|
2009-10-13 02:34:44 +00:00
|
|
|
d2.addCallback(lambda ign:
|
2010-01-27 06:44:30 +00:00
|
|
|
self.failUnlessROChildURIIs(n, u"child-mutable-ro",
|
|
|
|
caps['filecap3']))
|
2009-10-13 02:34:44 +00:00
|
|
|
d2.addCallback(lambda ign:
|
2010-01-27 06:44:30 +00:00
|
|
|
self.failUnlessRWChildURIIs(n, u"unknownchild-rw",
|
|
|
|
caps['unknown_rwcap']))
|
|
|
|
d2.addCallback(lambda ign:
|
|
|
|
self.failUnlessROChildURIIs(n, u"unknownchild-ro",
|
|
|
|
caps['unknown_rocap']))
|
|
|
|
d2.addCallback(lambda ign:
|
|
|
|
self.failUnlessROChildURIIs(n, u"unknownchild-imm",
|
|
|
|
caps['unknown_immcap']))
|
|
|
|
d2.addCallback(lambda ign:
|
|
|
|
self.failUnlessRWChildURIIs(n, u"dirchild",
|
|
|
|
caps['dircap']))
|
2009-10-13 02:34:44 +00:00
|
|
|
return d2
|
|
|
|
d.addCallback(_after_mkdir)
|
|
|
|
return d
|
|
|
|
|
2017-07-25 00:53:48 +00:00
|
|
|
@inlineCallbacks
|
2009-10-26 01:13:21 +00:00
|
|
|
def test_POST_mkdir_no_parentdir_unexpected_children(self):
|
|
|
|
# the regular /uri?t=mkdir operation is specified to ignore its body.
|
|
|
|
# Only t=mkdir-with-children pays attention to it.
|
2010-01-27 06:44:30 +00:00
|
|
|
(newkids, caps) = self._create_initial_children()
|
2017-07-25 00:53:48 +00:00
|
|
|
url = self.webish_url + "/uri?t=mkdir" # without children
|
|
|
|
yield self.assertHTTPError(url, 400,
|
|
|
|
"t=mkdir does not accept children=, "
|
|
|
|
"try t=mkdir-with-children instead",
|
|
|
|
method="post", data=json.dumps(newkids))
|
2009-10-26 01:13:21 +00:00
|
|
|
|
2017-07-25 00:53:48 +00:00
|
|
|
@inlineCallbacks
|
2007-12-25 05:49:35 +00:00
|
|
|
def test_POST_noparent_bad(self):
|
2017-07-25 00:53:48 +00:00
|
|
|
url = self.webish_url + "/uri?t=bogus"
|
|
|
|
yield self.assertHTTPError(url, 400,
|
|
|
|
"/uri accepts only PUT, PUT?t=mkdir, "
|
|
|
|
"POST?t=upload, and POST?t=mkdir",
|
|
|
|
method="post")
|
2007-12-25 05:49:35 +00:00
|
|
|
|
2009-11-18 07:09:00 +00:00
|
|
|
def test_POST_mkdir_no_parentdir_immutable(self):
|
2010-01-27 06:44:30 +00:00
|
|
|
(newkids, caps) = self._create_immutable_children()
|
2017-01-19 22:39:53 +00:00
|
|
|
d = self.POST2("/uri?t=mkdir-immutable", json.dumps(newkids))
|
2009-11-18 07:09:00 +00:00
|
|
|
def _after_mkdir(res):
|
|
|
|
self.failUnless(res.startswith("URI:DIR"), res)
|
|
|
|
n = self.s.create_node_from_uri(res)
|
|
|
|
d2 = self.failUnlessNodeKeysAre(n, newkids.keys())
|
|
|
|
d2.addCallback(lambda ign:
|
2010-01-27 06:44:30 +00:00
|
|
|
self.failUnlessROChildURIIs(n, u"child-imm",
|
|
|
|
caps['filecap1']))
|
|
|
|
d2.addCallback(lambda ign:
|
|
|
|
self.failUnlessROChildURIIs(n, u"unknownchild-imm",
|
|
|
|
caps['unknown_immcap']))
|
2009-11-18 07:09:00 +00:00
|
|
|
d2.addCallback(lambda ign:
|
2010-01-27 06:44:30 +00:00
|
|
|
self.failUnlessROChildURIIs(n, u"dirchild-imm",
|
|
|
|
caps['immdircap']))
|
2010-02-22 02:53:52 +00:00
|
|
|
d2.addCallback(lambda ign:
|
|
|
|
self.failUnlessROChildURIIs(n, u"dirchild-lit",
|
|
|
|
caps['litdircap']))
|
|
|
|
d2.addCallback(lambda ign:
|
|
|
|
self.failUnlessROChildURIIs(n, u"dirchild-empty",
|
|
|
|
caps['emptydircap']))
|
2009-11-18 07:09:00 +00:00
|
|
|
return d2
|
|
|
|
d.addCallback(_after_mkdir)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_mkdir_no_parentdir_immutable_bad(self):
|
2010-01-27 06:44:30 +00:00
|
|
|
(newkids, caps) = self._create_initial_children()
|
2009-11-18 07:09:00 +00:00
|
|
|
d = self.shouldFail2(error.Error,
|
|
|
|
"test_POST_mkdir_no_parentdir_immutable_bad",
|
|
|
|
"400 Bad Request",
|
2010-01-27 06:44:30 +00:00
|
|
|
"needed to be immutable but was not",
|
2009-11-18 07:09:00 +00:00
|
|
|
self.POST2,
|
|
|
|
"/uri?t=mkdir-immutable",
|
2017-01-19 22:39:53 +00:00
|
|
|
json.dumps(newkids))
|
2009-11-18 07:09:00 +00:00
|
|
|
return d
|
|
|
|
|
2017-07-24 22:26:07 +00:00
|
|
|
@inlineCallbacks
|
2007-12-20 22:31:12 +00:00
|
|
|
def test_welcome_page_mkdir_button(self):
|
|
|
|
# Fetch the welcome page.
|
2017-07-24 22:26:07 +00:00
|
|
|
res = yield self.GET("/")
|
|
|
|
MKDIR_BUTTON_RE = re.compile(
|
|
|
|
'<form(?: action="([^"]*)"| method="post"| enctype="multipart/form-data"){3}>.*'
|
|
|
|
'<input (?:type="hidden" |name="t" |value="([^"]*?)" ){3}/>[ ]*'
|
|
|
|
'<input (?:type="hidden" |name="([^"]*)" |value="([^"]*)" ){3}/>[ ]*'
|
|
|
|
'<input (type="submit" |class="btn" |value="Create a directory[^"]*" ){3}/>')
|
|
|
|
html = res.replace('\n', ' ')
|
|
|
|
mo = MKDIR_BUTTON_RE.search(html)
|
|
|
|
self.failUnless(mo, html)
|
|
|
|
formaction = mo.group(1)
|
|
|
|
formt = mo.group(2)
|
|
|
|
formaname = mo.group(3)
|
|
|
|
formavalue = mo.group(4)
|
|
|
|
|
|
|
|
url = self.webish_url + "/%s?t=%s&%s=%s" % (formaction, formt,
|
|
|
|
formaname, formavalue)
|
|
|
|
target = yield self.shouldRedirectTo(url, None,
|
|
|
|
method="post",
|
|
|
|
code=http.SEE_OTHER)
|
|
|
|
target = urllib.unquote(target)
|
|
|
|
self.failUnless(target.startswith("uri/URI:DIR2:"), target)
|
2007-12-20 22:31:12 +00:00
|
|
|
|
2007-08-15 22:21:38 +00:00
|
|
|
def test_POST_mkdir_replace(self): # return value?
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.POST(self.public_url + "/foo", t="mkdir", name="sub")
|
2008-02-14 22:45:56 +00:00
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"sub"))
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(self.failUnlessNodeKeysAre, [])
|
2007-08-15 22:21:38 +00:00
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_mkdir_no_replace_queryarg(self): # return value?
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.POST(self.public_url + "/foo?replace=false", t="mkdir", name="sub")
|
2007-08-15 22:21:38 +00:00
|
|
|
d.addBoth(self.shouldFail, error.Error,
|
|
|
|
"POST_mkdir_no_replace_queryarg",
|
|
|
|
"409 Conflict",
|
|
|
|
"There was already a child by that name, and you asked me "
|
|
|
|
"to not replace it")
|
2008-02-14 22:45:56 +00:00
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"sub"))
|
|
|
|
d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
|
2007-08-15 22:21:38 +00:00
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_mkdir_no_replace_field(self): # return value?
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.POST(self.public_url + "/foo", t="mkdir", name="sub",
|
2007-08-15 22:21:38 +00:00
|
|
|
replace="false")
|
|
|
|
d.addBoth(self.shouldFail, error.Error, "POST_mkdir_no_replace_field",
|
|
|
|
"409 Conflict",
|
|
|
|
"There was already a child by that name, and you asked me "
|
|
|
|
"to not replace it")
|
2008-02-14 22:45:56 +00:00
|
|
|
d.addCallback(lambda res: self._foo_node.get(u"sub"))
|
|
|
|
d.addCallback(self.failUnlessNodeKeysAre, [u"baz.txt"])
|
2007-08-15 22:21:38 +00:00
|
|
|
return d
|
|
|
|
|
2017-07-24 22:26:07 +00:00
|
|
|
@inlineCallbacks
|
2007-08-14 00:45:02 +00:00
|
|
|
def test_POST_mkdir_whendone_field(self):
|
2017-07-24 22:26:07 +00:00
|
|
|
body, headers = self.build_form(t="mkdir", name="newdir",
|
|
|
|
when_done="/THERE")
|
|
|
|
yield self.shouldRedirectTo(self.webish_url + self.public_url + "/foo",
|
2019-10-13 09:47:59 +00:00
|
|
|
"/THERE",
|
2017-07-24 22:26:07 +00:00
|
|
|
method="post", data=body, headers=headers,
|
|
|
|
code=http.FOUND)
|
|
|
|
res = yield self._foo_node.get(u"newdir")
|
|
|
|
self.failUnlessNodeKeysAre(res, [])
|
2007-08-14 00:45:02 +00:00
|
|
|
|
2017-07-24 22:26:07 +00:00
|
|
|
@inlineCallbacks
|
2007-08-14 00:45:02 +00:00
|
|
|
def test_POST_mkdir_whendone_queryarg(self):
|
2017-07-24 22:26:07 +00:00
|
|
|
body, headers = self.build_form(t="mkdir", name="newdir")
|
|
|
|
url = self.webish_url + self.public_url + "/foo?when_done=/THERE"
|
2019-10-13 09:47:59 +00:00
|
|
|
yield self.shouldRedirectTo(url, "/THERE",
|
2017-07-24 22:26:07 +00:00
|
|
|
method="post", data=body, headers=headers,
|
|
|
|
code=http.FOUND)
|
|
|
|
res = yield self._foo_node.get(u"newdir")
|
|
|
|
self.failUnlessNodeKeysAre(res, [])
|
2007-07-08 05:47:18 +00:00
|
|
|
|
2008-04-15 18:11:29 +00:00
|
|
|
def test_POST_bad_t(self):
|
2011-10-02 04:03:32 +00:00
|
|
|
d = self.shouldFail2(error.Error, "POST_bad_t",
|
|
|
|
"400 Bad Request",
|
2008-05-19 19:56:02 +00:00
|
|
|
"POST to a directory with bad t=BOGUS",
|
2008-04-15 18:11:29 +00:00
|
|
|
self.POST, self.public_url + "/foo", t="BOGUS")
|
|
|
|
return d
|
|
|
|
|
2010-01-24 03:00:20 +00:00
|
|
|
def test_POST_set_children(self, command_name="set_children"):
|
2008-03-01 01:40:27 +00:00
|
|
|
contents9, n9, newuri9 = self.makefile(9)
|
|
|
|
contents10, n10, newuri10 = self.makefile(10)
|
|
|
|
contents11, n11, newuri11 = self.makefile(11)
|
|
|
|
|
|
|
|
reqbody = """{
|
|
|
|
"atomic_added_1": [ "filenode", { "rw_uri": "%s",
|
|
|
|
"size": 0,
|
|
|
|
"metadata": {
|
|
|
|
"ctime": 1002777696.7564139,
|
|
|
|
"mtime": 1002777696.7564139
|
|
|
|
}
|
2008-03-03 21:48:52 +00:00
|
|
|
} ],
|
2008-03-01 01:40:27 +00:00
|
|
|
"atomic_added_2": [ "filenode", { "rw_uri": "%s",
|
|
|
|
"size": 1,
|
|
|
|
"metadata": {
|
|
|
|
"ctime": 1002777696.7564139,
|
|
|
|
"mtime": 1002777696.7564139
|
|
|
|
}
|
|
|
|
} ],
|
|
|
|
"atomic_added_3": [ "filenode", { "rw_uri": "%s",
|
|
|
|
"size": 2,
|
|
|
|
"metadata": {
|
|
|
|
"ctime": 1002777696.7564139,
|
|
|
|
"mtime": 1002777696.7564139
|
|
|
|
}
|
|
|
|
} ]
|
|
|
|
}""" % (newuri9, newuri10, newuri11)
|
|
|
|
|
2010-01-24 03:00:20 +00:00
|
|
|
url = self.webish_url + self.public_url + "/foo" + "?t=" + command_name
|
2008-03-01 01:40:27 +00:00
|
|
|
|
2017-07-24 23:31:44 +00:00
|
|
|
d = do_http("post", url, data=reqbody)
|
2008-03-01 01:40:27 +00:00
|
|
|
def _then(res):
|
2010-01-27 06:44:30 +00:00
|
|
|
self.failUnlessURIMatchesROChild(newuri9, self._foo_node, u"atomic_added_1")
|
|
|
|
self.failUnlessURIMatchesROChild(newuri10, self._foo_node, u"atomic_added_2")
|
|
|
|
self.failUnlessURIMatchesROChild(newuri11, self._foo_node, u"atomic_added_3")
|
2008-03-01 01:40:27 +00:00
|
|
|
|
|
|
|
d.addCallback(_then)
|
2008-05-19 19:33:39 +00:00
|
|
|
d.addErrback(self.dump_error)
|
2008-03-01 01:40:27 +00:00
|
|
|
return d
|
|
|
|
|
2010-01-24 03:00:20 +00:00
|
|
|
def test_POST_set_children_with_hyphen(self):
|
|
|
|
return self.test_POST_set_children(command_name="set-children")
|
|
|
|
|
2010-01-27 23:06:42 +00:00
|
|
|
def test_POST_link_uri(self):
|
2007-12-05 06:01:37 +00:00
|
|
|
contents, n, newuri = self.makefile(8)
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.POST(self.public_url + "/foo", t="uri", name="new.txt", uri=newuri)
|
2010-01-27 06:44:30 +00:00
|
|
|
d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"new.txt")
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
|
2007-12-05 06:01:37 +00:00
|
|
|
contents))
|
2007-07-07 02:43:55 +00:00
|
|
|
return d
|
|
|
|
|
2010-01-27 23:06:42 +00:00
|
|
|
def test_POST_link_uri_replace(self):
|
2007-12-05 06:01:37 +00:00
|
|
|
contents, n, newuri = self.makefile(8)
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.POST(self.public_url + "/foo", t="uri", name="bar.txt", uri=newuri)
|
2010-01-27 06:44:30 +00:00
|
|
|
d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"bar.txt")
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
|
2007-12-05 06:01:37 +00:00
|
|
|
contents))
|
2007-08-15 22:21:38 +00:00
|
|
|
return d
|
|
|
|
|
2010-01-27 23:06:42 +00:00
|
|
|
def test_POST_link_uri_unknown_bad(self):
|
2010-05-19 05:51:46 +00:00
|
|
|
d = self.POST(self.public_url + "/foo", t="uri", name="future.txt", uri=unknown_rwcap)
|
2010-01-27 23:06:42 +00:00
|
|
|
d.addBoth(self.shouldFail, error.Error,
|
|
|
|
"POST_link_uri_unknown_bad",
|
|
|
|
"400 Bad Request",
|
|
|
|
"unknown cap in a write slot")
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_link_uri_unknown_ro_good(self):
|
2010-05-19 05:51:46 +00:00
|
|
|
d = self.POST(self.public_url + "/foo", t="uri", name="future-ro.txt", uri=unknown_rocap)
|
2010-01-27 23:06:42 +00:00
|
|
|
d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-ro.txt")
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_link_uri_unknown_imm_good(self):
|
2010-05-19 05:51:46 +00:00
|
|
|
d = self.POST(self.public_url + "/foo", t="uri", name="future-imm.txt", uri=unknown_immcap)
|
2010-01-27 23:06:42 +00:00
|
|
|
d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node, u"future-imm.txt")
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_link_uri_no_replace_queryarg(self):
|
2007-12-05 06:01:37 +00:00
|
|
|
contents, n, newuri = self.makefile(8)
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.POST(self.public_url + "/foo?replace=false", t="uri",
|
2007-08-15 22:21:38 +00:00
|
|
|
name="bar.txt", uri=newuri)
|
|
|
|
d.addBoth(self.shouldFail, error.Error,
|
2010-01-27 23:06:42 +00:00
|
|
|
"POST_link_uri_no_replace_queryarg",
|
2007-08-15 22:21:38 +00:00
|
|
|
"409 Conflict",
|
|
|
|
"There was already a child by that name, and you asked me "
|
|
|
|
"to not replace it")
|
2007-12-03 21:52:42 +00:00
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
|
2007-08-15 22:21:38 +00:00
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
return d
|
|
|
|
|
2010-01-27 23:06:42 +00:00
|
|
|
def test_POST_link_uri_no_replace_field(self):
|
2007-12-05 06:01:37 +00:00
|
|
|
contents, n, newuri = self.makefile(8)
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.POST(self.public_url + "/foo", t="uri", replace="false",
|
2007-08-15 22:21:38 +00:00
|
|
|
name="bar.txt", uri=newuri)
|
|
|
|
d.addBoth(self.shouldFail, error.Error,
|
2010-01-27 23:06:42 +00:00
|
|
|
"POST_link_uri_no_replace_field",
|
2007-08-15 22:21:38 +00:00
|
|
|
"409 Conflict",
|
|
|
|
"There was already a child by that name, and you asked me "
|
|
|
|
"to not replace it")
|
2007-12-03 21:52:42 +00:00
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
|
2007-08-15 22:21:38 +00:00
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
return d
|
|
|
|
|
2011-07-13 00:12:18 +00:00
|
|
|
def test_POST_delete(self, command_name='delete'):
|
|
|
|
d = self._foo_node.list()
|
|
|
|
def _check_before(children):
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failUnlessIn(u"bar.txt", children)
|
2011-07-13 00:12:18 +00:00
|
|
|
d.addCallback(_check_before)
|
|
|
|
d.addCallback(lambda res: self.POST(self.public_url + "/foo", t=command_name, name="bar.txt"))
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res: self._foo_node.list())
|
2011-07-13 00:12:18 +00:00
|
|
|
def _check_after(children):
|
2011-12-17 04:27:10 +00:00
|
|
|
self.failIfIn(u"bar.txt", children)
|
2011-07-13 00:12:18 +00:00
|
|
|
d.addCallback(_check_after)
|
2007-07-07 02:43:55 +00:00
|
|
|
return d
|
|
|
|
|
2011-07-13 00:12:18 +00:00
|
|
|
def test_POST_unlink(self):
|
|
|
|
return self.test_POST_delete(command_name='unlink')
|
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_POST_rename_file(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.POST(self.public_url + "/foo", t="rename",
|
2007-07-17 00:05:01 +00:00
|
|
|
from_name="bar.txt", to_name='wibble.txt')
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failUnlessNodeHasChild(self._foo_node, u"wibble.txt"))
|
2007-12-03 21:52:42 +00:00
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt"))
|
2007-07-17 00:05:01 +00:00
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
2007-12-03 21:52:42 +00:00
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/wibble.txt?t=json"))
|
2007-07-17 00:05:01 +00:00
|
|
|
d.addCallback(self.failUnlessIsBarJSON)
|
|
|
|
return d
|
|
|
|
|
2008-10-23 23:32:36 +00:00
|
|
|
def test_POST_rename_file_redundant(self):
|
|
|
|
d = self.POST(self.public_url + "/foo", t="rename",
|
|
|
|
from_name="bar.txt", to_name='bar.txt')
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
|
|
|
|
d.addCallback(self.failUnlessIsBarJSON)
|
|
|
|
return d
|
|
|
|
|
2007-08-15 22:21:38 +00:00
|
|
|
def test_POST_rename_file_replace(self):
|
|
|
|
# rename a file and replace a directory with it
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.POST(self.public_url + "/foo", t="rename",
|
2007-08-15 22:21:38 +00:00
|
|
|
from_name="bar.txt", to_name='empty')
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failUnlessNodeHasChild(self._foo_node, u"empty"))
|
2007-12-03 21:52:42 +00:00
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty"))
|
2007-08-15 22:21:38 +00:00
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
2007-12-03 21:52:42 +00:00
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
|
2007-08-15 22:21:38 +00:00
|
|
|
d.addCallback(self.failUnlessIsBarJSON)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_rename_file_no_replace_queryarg(self):
|
|
|
|
# rename a file and replace a directory with it
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.POST(self.public_url + "/foo?replace=false", t="rename",
|
2007-08-15 22:21:38 +00:00
|
|
|
from_name="bar.txt", to_name='empty')
|
|
|
|
d.addBoth(self.shouldFail, error.Error,
|
|
|
|
"POST_rename_file_no_replace_queryarg",
|
|
|
|
"409 Conflict",
|
|
|
|
"There was already a child by that name, and you asked me "
|
|
|
|
"to not replace it")
|
2007-12-03 21:52:42 +00:00
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
|
2007-08-15 22:21:38 +00:00
|
|
|
d.addCallback(self.failUnlessIsEmptyJSON)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_rename_file_no_replace_field(self):
|
|
|
|
# rename a file and replace a directory with it
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.POST(self.public_url + "/foo", t="rename", replace="false",
|
2007-08-15 22:21:38 +00:00
|
|
|
from_name="bar.txt", to_name='empty')
|
|
|
|
d.addBoth(self.shouldFail, error.Error,
|
|
|
|
"POST_rename_file_no_replace_field",
|
|
|
|
"409 Conflict",
|
|
|
|
"There was already a child by that name, and you asked me "
|
|
|
|
"to not replace it")
|
2007-12-03 21:52:42 +00:00
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/empty?t=json"))
|
2007-08-15 22:21:38 +00:00
|
|
|
d.addCallback(self.failUnlessIsEmptyJSON)
|
|
|
|
return d
|
|
|
|
|
2013-04-05 05:36:14 +00:00
|
|
|
def test_POST_rename_file_no_replace_same_link(self):
|
|
|
|
d = self.POST(self.public_url + "/foo", t="rename",
|
|
|
|
replace="false", from_name="bar.txt", to_name="bar.txt")
|
|
|
|
d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
|
|
|
|
d.addCallback(self.failUnlessIsBarJSON)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_rename_file_replace_only_files(self):
|
|
|
|
d = self.POST(self.public_url + "/foo", t="rename",
|
|
|
|
replace="only-files", from_name="bar.txt",
|
|
|
|
to_name="baz.txt")
|
|
|
|
d.addCallback(lambda res: self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/baz.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/baz.txt?t=json"))
|
|
|
|
d.addCallback(self.failUnlessIsBarJSON)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_rename_file_replace_only_files_conflict(self):
|
|
|
|
d = self.shouldFail2(error.Error, "POST_relink_file_replace_only_files_conflict",
|
|
|
|
"409 Conflict",
|
|
|
|
"There was already a child by that name, and you asked me to not replace it.",
|
|
|
|
self.POST, self.public_url + "/foo", t="relink",
|
|
|
|
replace="only-files", from_name="bar.txt",
|
|
|
|
to_name="empty")
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
|
|
|
|
d.addCallback(self.failUnlessIsBarJSON)
|
|
|
|
return d
|
|
|
|
|
2007-08-15 22:21:38 +00:00
|
|
|
def failUnlessIsEmptyJSON(self, res):
|
2017-01-19 22:39:53 +00:00
|
|
|
data = json.loads(res)
|
2010-07-18 14:29:15 +00:00
|
|
|
self.failUnlessEqual(data[0], "dirnode", data)
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(len(data[1]["children"]), 0)
|
2007-08-15 22:21:38 +00:00
|
|
|
|
2013-04-05 05:36:14 +00:00
|
|
|
def test_POST_rename_file_to_slash_fail(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.POST(self.public_url + "/foo", t="rename",
|
2007-07-17 00:05:01 +00:00
|
|
|
from_name="bar.txt", to_name='kirk/spock.txt')
|
|
|
|
d.addBoth(self.shouldFail, error.Error,
|
2013-04-05 05:36:14 +00:00
|
|
|
"test_POST_rename_file_to_slash_fail",
|
2007-07-17 00:05:01 +00:00
|
|
|
"400 Bad Request",
|
|
|
|
"to_name= may not contain a slash",
|
|
|
|
)
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
|
2007-07-17 00:05:01 +00:00
|
|
|
return d
|
|
|
|
|
2013-04-05 05:36:14 +00:00
|
|
|
def test_POST_rename_file_from_slash_fail(self):
|
|
|
|
d = self.POST(self.public_url + "/foo", t="rename",
|
|
|
|
from_name="sub/bar.txt", to_name='spock.txt')
|
|
|
|
d.addBoth(self.shouldFail, error.Error,
|
|
|
|
"test_POST_rename_from_file_slash_fail",
|
|
|
|
"400 Bad Request",
|
|
|
|
"from_name= may not contain a slash",
|
|
|
|
)
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
|
|
|
|
return d
|
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_POST_rename_dir(self):
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.POST(self.public_url, t="rename",
|
2007-07-17 00:05:01 +00:00
|
|
|
from_name="foo", to_name='plunk')
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failIfNodeHasChild(self.public_root, u"foo"))
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failUnlessNodeHasChild(self.public_root, u"plunk"))
|
2007-12-03 21:52:42 +00:00
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/plunk?t=json"))
|
2007-07-17 00:05:01 +00:00
|
|
|
d.addCallback(self.failUnlessIsFooJSON)
|
|
|
|
return d
|
|
|
|
|
2013-04-05 05:36:14 +00:00
|
|
|
def test_POST_relink_file(self):
|
|
|
|
d = self.POST(self.public_url + "/foo", t="relink",
|
|
|
|
from_name="bar.txt",
|
|
|
|
to_dir=self.public_root.get_uri() + "/foo/sub")
|
2011-11-10 08:00:11 +00:00
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessNodeHasChild(self._sub_node, u"bar.txt"))
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/sub/bar.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/sub/bar.txt?t=json"))
|
|
|
|
d.addCallback(self.failUnlessIsBarJSON)
|
|
|
|
return d
|
|
|
|
|
2013-04-05 05:36:14 +00:00
|
|
|
def test_POST_relink_file_new_name(self):
|
|
|
|
d = self.POST(self.public_url + "/foo", t="relink",
|
|
|
|
from_name="bar.txt",
|
|
|
|
to_name="wibble.txt", to_dir=self.public_root.get_uri() + "/foo/sub")
|
2011-11-10 08:00:11 +00:00
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failIfNodeHasChild(self._sub_node, u"bar.txt"))
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessNodeHasChild(self._sub_node, u"wibble.txt"))
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/sub/wibble.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/sub/wibble.txt?t=json"))
|
|
|
|
d.addCallback(self.failUnlessIsBarJSON)
|
|
|
|
return d
|
|
|
|
|
2013-04-05 05:36:14 +00:00
|
|
|
def test_POST_relink_file_replace(self):
|
|
|
|
d = self.POST(self.public_url + "/foo", t="relink",
|
|
|
|
from_name="bar.txt",
|
|
|
|
to_name="baz.txt", to_dir=self.public_root.get_uri() + "/foo/sub")
|
2011-11-10 08:00:11 +00:00
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/sub/baz.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/sub/baz.txt?t=json"))
|
|
|
|
d.addCallback(self.failUnlessIsBarJSON)
|
|
|
|
return d
|
|
|
|
|
2013-04-05 05:36:14 +00:00
|
|
|
def test_POST_relink_file_no_replace(self):
|
|
|
|
d = self.shouldFail2(error.Error, "POST_relink_file_no_replace",
|
2012-05-09 21:18:27 +00:00
|
|
|
"409 Conflict",
|
|
|
|
"There was already a child by that name, and you asked me to not replace it",
|
2013-04-05 05:36:14 +00:00
|
|
|
self.POST, self.public_url + "/foo", t="relink",
|
2012-05-09 21:18:27 +00:00
|
|
|
replace="false", from_name="bar.txt",
|
2013-04-05 05:36:14 +00:00
|
|
|
to_name="baz.txt", to_dir=self.public_root.get_uri() + "/foo/sub")
|
2011-11-10 08:00:11 +00:00
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
|
|
|
|
d.addCallback(self.failUnlessIsBarJSON)
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/sub/baz.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsSubBazDotTxt)
|
|
|
|
return d
|
|
|
|
|
2013-04-05 05:36:14 +00:00
|
|
|
def test_POST_relink_file_no_replace_explicitly_same_link(self):
|
|
|
|
d = self.POST(self.public_url + "/foo", t="relink",
|
|
|
|
replace="false", from_name="bar.txt",
|
|
|
|
to_name="bar.txt", to_dir=self.public_root.get_uri() + "/foo")
|
|
|
|
d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
|
|
|
|
d.addCallback(self.failUnlessIsBarJSON)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_relink_file_replace_only_files(self):
|
|
|
|
d = self.POST(self.public_url + "/foo", t="relink",
|
|
|
|
replace="only-files", from_name="bar.txt",
|
|
|
|
to_name="baz.txt", to_dir=self.public_root.get_uri() + "/foo/sub")
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/sub/baz.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/sub/baz.txt?t=json"))
|
|
|
|
d.addCallback(self.failUnlessIsBarJSON)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_relink_file_replace_only_files_conflict(self):
|
|
|
|
d = self.shouldFail2(error.Error, "POST_relink_file_replace_only_files_conflict",
|
|
|
|
"409 Conflict",
|
|
|
|
"There was already a child by that name, and you asked me to not replace it.",
|
|
|
|
self.POST, self.public_url + "/foo", t="relink",
|
|
|
|
replace="only-files", from_name="bar.txt",
|
|
|
|
to_name="sub", to_dir=self.public_root.get_uri() + "/foo")
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
|
|
|
|
d.addCallback(self.failUnlessIsBarJSON)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_relink_file_to_slash_fail(self):
|
2012-05-09 21:18:27 +00:00
|
|
|
d = self.shouldFail2(error.Error, "test_POST_rename_file_slash_fail",
|
|
|
|
"400 Bad Request",
|
|
|
|
"to_name= may not contain a slash",
|
2013-04-05 05:36:14 +00:00
|
|
|
self.POST, self.public_url + "/foo", t="relink",
|
2012-05-09 21:18:27 +00:00
|
|
|
from_name="bar.txt",
|
2013-04-05 05:36:14 +00:00
|
|
|
to_name="slash/fail.txt", to_dir=self.public_root.get_uri() + "/foo/sub")
|
2011-11-10 08:00:11 +00:00
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failIfNodeHasChild(self._sub_node, u"slash/fail.txt"))
|
2012-05-09 21:18:27 +00:00
|
|
|
d.addCallback(lambda ign:
|
|
|
|
self.shouldFail2(error.Error,
|
|
|
|
"test_POST_rename_file_slash_fail2",
|
|
|
|
"400 Bad Request",
|
|
|
|
"from_name= may not contain a slash",
|
|
|
|
self.POST, self.public_url + "/foo",
|
2013-04-05 05:36:14 +00:00
|
|
|
t="relink",
|
2012-05-09 21:18:27 +00:00
|
|
|
from_name="nope/bar.txt",
|
2013-04-05 05:36:14 +00:00
|
|
|
to_name="fail.txt",
|
|
|
|
to_dir=self.public_root.get_uri() + "/foo/sub"))
|
2011-11-10 08:00:11 +00:00
|
|
|
return d
|
|
|
|
|
2013-04-05 05:36:14 +00:00
|
|
|
def test_POST_relink_file_explicitly_same_link(self):
|
|
|
|
d = self.POST(self.public_url + "/foo", t="relink",
|
|
|
|
from_name="bar.txt",
|
|
|
|
to_name="bar.txt", to_dir=self.public_root.get_uri() + "/foo")
|
|
|
|
d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
|
|
|
|
d.addCallback(self.failUnlessIsBarJSON)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_relink_file_implicitly_same_link(self):
|
|
|
|
d = self.POST(self.public_url + "/foo", t="relink",
|
|
|
|
from_name="bar.txt")
|
|
|
|
d.addCallback(lambda res: self.failUnlessNodeHasChild(self._foo_node, u"bar.txt"))
|
2011-11-10 08:00:11 +00:00
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
|
|
|
|
d.addCallback(self.failUnlessIsBarJSON)
|
2013-04-05 05:36:14 +00:00
|
|
|
return d
|
|
|
|
|
|
|
|
def test_POST_relink_file_same_dir(self):
|
|
|
|
d = self.POST(self.public_url + "/foo", t="relink",
|
|
|
|
from_name="bar.txt",
|
|
|
|
to_name="baz.txt", to_dir=self.public_root.get_uri() + "/foo")
|
|
|
|
d.addCallback(lambda res: self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
|
|
|
|
d.addCallback(lambda res: self.failUnlessNodeHasChild(self._sub_node, u"baz.txt"))
|
2011-11-10 08:00:11 +00:00
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/baz.txt"))
|
2013-04-05 05:36:14 +00:00
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/baz.txt?t=json"))
|
|
|
|
d.addCallback(self.failUnlessIsBarJSON)
|
2011-11-10 08:00:11 +00:00
|
|
|
return d
|
|
|
|
|
2013-04-05 05:36:14 +00:00
|
|
|
def test_POST_relink_file_bad_replace(self):
|
|
|
|
d = self.shouldFail2(error.Error, "test_POST_relink_file_bad_replace",
|
|
|
|
"400 Bad Request", "invalid replace= argument: 'boogabooga'",
|
2012-05-09 21:18:27 +00:00
|
|
|
self.POST,
|
2013-04-05 05:36:14 +00:00
|
|
|
self.public_url + "/foo", t="relink",
|
|
|
|
replace="boogabooga", from_name="bar.txt",
|
|
|
|
to_dir=self.public_root.get_uri() + "/foo/sub")
|
2011-11-19 01:42:10 +00:00
|
|
|
return d
|
|
|
|
|
2013-04-05 05:36:14 +00:00
|
|
|
def test_POST_relink_file_multi_level(self):
|
2017-07-24 23:22:36 +00:00
|
|
|
d = self.POST2(self.public_url + "/foo/sub/level2?t=mkdir", "")
|
2013-04-05 05:36:14 +00:00
|
|
|
d.addCallback(lambda res: self.POST(self.public_url + "/foo", t="relink",
|
|
|
|
from_name="bar.txt", to_dir=self.public_root.get_uri() + "/foo/sub/level2"))
|
2011-11-10 08:00:11 +00:00
|
|
|
d.addCallback(lambda res: self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
|
|
|
|
d.addCallback(lambda res: self.failIfNodeHasChild(self._sub_node, u"bar.txt"))
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/sub/level2/bar.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/sub/level2/bar.txt?t=json"))
|
|
|
|
d.addCallback(self.failUnlessIsBarJSON)
|
|
|
|
return d
|
|
|
|
|
2013-04-05 05:36:14 +00:00
|
|
|
def test_POST_relink_file_to_uri(self):
|
|
|
|
d = self.POST(self.public_url + "/foo", t="relink", target_type="uri",
|
2011-11-10 08:00:11 +00:00
|
|
|
from_name="bar.txt", to_dir=self._sub_uri)
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failIfNodeHasChild(self._foo_node, u"bar.txt"))
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/sub/bar.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/sub/bar.txt?t=json"))
|
|
|
|
d.addCallback(self.failUnlessIsBarJSON)
|
|
|
|
return d
|
|
|
|
|
2013-04-05 05:36:14 +00:00
|
|
|
def test_POST_relink_file_to_nonexistent_dir(self):
|
|
|
|
d = self.shouldFail2(error.Error, "POST_relink_file_to_nonexistent_dir",
|
2012-05-09 21:18:27 +00:00
|
|
|
"404 Not Found", "No such child: nopechucktesta",
|
2013-04-05 05:36:14 +00:00
|
|
|
self.POST, self.public_url + "/foo", t="relink",
|
|
|
|
from_name="bar.txt",
|
|
|
|
to_dir=self.public_root.get_uri() + "/nopechucktesta")
|
2011-11-10 08:00:11 +00:00
|
|
|
return d
|
|
|
|
|
2013-04-05 05:36:14 +00:00
|
|
|
def test_POST_relink_file_into_file(self):
|
|
|
|
d = self.shouldFail2(error.Error, "POST_relink_file_into_file",
|
2012-05-09 21:18:27 +00:00
|
|
|
"400 Bad Request", "to_dir is not a directory",
|
2013-04-05 05:36:14 +00:00
|
|
|
self.POST, self.public_url + "/foo", t="relink",
|
|
|
|
from_name="bar.txt",
|
|
|
|
to_dir=self.public_root.get_uri() + "/foo/baz.txt")
|
2011-11-10 08:00:11 +00:00
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/baz.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsBazDotTxt)
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
|
|
|
|
d.addCallback(self.failUnlessIsBarJSON)
|
|
|
|
return d
|
|
|
|
|
2013-04-05 05:36:14 +00:00
|
|
|
def test_POST_relink_file_to_bad_uri(self):
|
|
|
|
d = self.shouldFail2(error.Error, "POST_relink_file_to_bad_uri",
|
2012-05-09 21:18:27 +00:00
|
|
|
"400 Bad Request", "to_dir is not a directory",
|
2013-04-05 05:36:14 +00:00
|
|
|
self.POST, self.public_url + "/foo", t="relink",
|
|
|
|
from_name="bar.txt",
|
2012-05-09 21:18:27 +00:00
|
|
|
to_dir="URI:DIR2:mn5jlyjnrjeuydyswlzyui72i:rmneifcj6k6sycjljjhj3f6majsq2zqffydnnul5hfa4j577arma")
|
2011-11-10 08:00:11 +00:00
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
d.addCallback(lambda res: self.GET(self.public_url + "/foo/bar.txt?t=json"))
|
|
|
|
d.addCallback(self.failUnlessIsBarJSON)
|
|
|
|
return d
|
|
|
|
|
2013-04-05 05:36:14 +00:00
|
|
|
def test_POST_relink_dir(self):
|
|
|
|
d = self.POST(self.public_url + "/foo", t="relink",
|
|
|
|
from_name="bar.txt",
|
|
|
|
to_dir=self.public_root.get_uri() + "/foo/empty")
|
2011-11-24 06:51:42 +00:00
|
|
|
d.addCallback(lambda res: self.POST(self.public_url + "/foo",
|
2013-04-05 05:36:14 +00:00
|
|
|
t="relink", from_name="empty",
|
|
|
|
to_dir=self.public_root.get_uri() + "/foo/sub"))
|
2011-11-24 06:51:42 +00:00
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failIfNodeHasChild(self._foo_node, u"empty"))
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.failUnlessNodeHasChild(self._sub_node, u"empty"))
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self._sub_node.get_child_at_path(u"empty"))
|
|
|
|
d.addCallback(lambda node:
|
|
|
|
self.failUnlessNodeHasChild(node, u"bar.txt"))
|
|
|
|
d.addCallback(lambda res:
|
|
|
|
self.GET(self.public_url + "/foo/sub/empty/bar.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
return d

    @inlineCallbacks
    def shouldRedirectTo(self, url, target_location, method="get",
                         code=None, **args):
        response = yield treq.request(method, url, persistent=False,
                                      allow_redirects=False, **args)
        codes = [http.MOVED_PERMANENTLY,
                 http.FOUND,
                 http.TEMPORARY_REDIRECT,
                 ] if code is None else [code]
        self.assertIn(response.code, codes)
        location = response.headers.getRawHeaders(b"location")[0]
        if target_location is not None:
            self.assertEquals(location, target_location)
        returnValue(location)
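
    # shouldRedirectTo() issues a single request with treq and does not follow
    # the redirect itself; it returns the Location header, so callers either
    # pass the exact expected target (asserted with assertEquals) or pass None
    # and inspect the returned location themselves, as the mkdir
    # redirect_to_result tests above do.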
|
2017-07-24 20:51:58 +00:00
|
|
|
|
|
|
|
@inlineCallbacks
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_GET_URI_form(self):
|
2017-07-24 20:51:58 +00:00
|
|
|
relbase = "/uri?uri=%s" % self._bar_txt_uri
|
|
|
|
base = self.webish_url + relbase
|
2007-07-08 03:06:58 +00:00
|
|
|
# this is supposed to give us a redirect to /uri/$URI, plus arguments
|
2017-07-24 20:51:58 +00:00
|
|
|
targetbase = self.webish_url + "/uri/%s" % urllib.quote(self._bar_txt_uri)
|
|
|
|
yield self.shouldRedirectTo(base, targetbase)
|
|
|
|
yield self.shouldRedirectTo(base+"&filename=bar.txt",
|
|
|
|
targetbase+"?filename=bar.txt")
|
|
|
|
yield self.shouldRedirectTo(base+"&t=json",
|
|
|
|
targetbase+"?t=json")
|
|
|
|
|
|
|
|
self.log(None, "about to get file by uri")
|
|
|
|
data = yield self.GET(relbase, followRedirect=True)
|
|
|
|
self.failUnlessIsBarDotTxt(data)
|
|
|
|
self.log(None, "got file by uri, about to get dir by uri")
|
|
|
|
data = yield self.GET("/uri?uri=%s&t=json" % self._foo_uri,
|
|
|
|
followRedirect=True)
|
|
|
|
self.failUnlessIsFooJSON(data)
|
|
|
|
self.log(None, "got dir by uri")
|
2007-07-08 03:06:58 +00:00
|
|
|
|
2008-05-20 06:28:52 +00:00
|
|
|
def test_GET_URI_form_bad(self):
|
|
|
|
d = self.shouldFail2(error.Error, "test_GET_URI_form_bad",
|
|
|
|
"400 Bad Request", "GET /uri requires uri=",
|
|
|
|
self.GET, "/uri")
|
|
|
|
return d
|
|
|
|
|
2019-12-23 01:33:42 +00:00
|
|
|
@inlineCallbacks
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_GET_rename_form(self):
|
2019-12-23 01:33:42 +00:00
|
|
|
data = yield self.GET(
|
|
|
|
self.public_url + "/foo?t=rename-form&name=bar.txt",
|
|
|
|
followRedirect=True
|
|
|
|
)
|
|
|
|
soup = BeautifulSoup(data, 'html5lib')
|
|
|
|
assert_soup_has_favicon(self, soup)
|
2019-12-23 01:38:22 +00:00
|
|
|
assert_soup_has_tag_with_attributes(
|
|
|
|
self, soup, u"input",
|
2019-12-23 01:33:42 +00:00
|
|
|
{u"name": u"when_done", u"value": u".", u"type": u"hidden"},
|
2019-12-23 01:38:22 +00:00
|
|
|
)
|
|
|
|
assert_soup_has_tag_with_attributes(
|
|
|
|
self, soup, u"input",
|
2019-12-23 01:33:42 +00:00
|
|
|
{u"readonly": u"true", u"name": u"from_name", u"value": u"bar.txt", u"type": u"text"},
|
|
|
|
)
|
2007-07-17 00:05:01 +00:00
|
|
|
|
2007-07-08 03:06:58 +00:00
|
|
|
def log(self, res, msg):
|
2007-07-08 03:11:30 +00:00
|
|
|
#print "MSG: %s RES: %s" % (msg, res)
|
2007-07-08 03:06:58 +00:00
|
|
|
log.msg(msg)
|
|
|
|
return res
|
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_GET_URI_URL(self):
|
2007-12-20 00:54:40 +00:00
|
|
|
base = "/uri/%s" % self._bar_txt_uri
|
2007-07-08 03:06:58 +00:00
|
|
|
d = self.GET(base)
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
d.addCallback(lambda res: self.GET(base+"?filename=bar.txt"))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
d.addCallback(lambda res: self.GET(base+"?filename=bar.txt&save=true"))
|
|
|
|
d.addCallback(self.failUnlessIsBarDotTxt)
|
|
|
|
return d
|
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_GET_URI_URL_dir(self):
|
2007-12-20 00:54:40 +00:00
|
|
|
base = "/uri/%s?t=json" % self._foo_uri
|
2007-07-08 03:06:58 +00:00
|
|
|
d = self.GET(base)
|
|
|
|
d.addCallback(self.failUnlessIsFooJSON)
|
|
|
|
return d
|
|
|
|
|
2017-07-25 00:53:48 +00:00
|
|
|
@inlineCallbacks
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_GET_URI_URL_missing(self):
|
2007-12-20 00:54:40 +00:00
|
|
|
base = "/uri/%s" % self._bad_file_uri
|
2017-07-25 00:53:48 +00:00
|
|
|
url = self.webish_url + base
|
|
|
|
yield self.assertHTTPError(url, http.GONE, "NotEnoughSharesError")
|
2007-07-17 19:16:45 +00:00
|
|
|
# TODO: how can we exercise both sides of WebDownloadTarget.fail
|
|
|
|
# here? we must arrange for a download to fail after target.open()
|
|
|
|
# has been called, and then inspect the response to see that it is
|
|
|
|
# shorter than we expected.
|
2007-07-08 05:06:52 +00:00
|
|
|
|
2008-10-29 04:54:46 +00:00
|
|
|
def test_PUT_DIRURL_uri(self):
|
2009-10-12 22:45:06 +00:00
|
|
|
d = self.s.create_dirnode()
|
2008-10-29 04:54:46 +00:00
|
|
|
def _made_dir(dn):
|
|
|
|
new_uri = dn.get_uri()
|
|
|
|
# replace /foo with a new (empty) directory
|
|
|
|
d = self.PUT(self.public_url + "/foo?t=uri", new_uri)
|
|
|
|
d.addCallback(lambda res:
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(res.strip(), new_uri))
|
2008-10-29 04:54:46 +00:00
|
|
|
d.addCallback(lambda res:
|
2010-01-27 06:44:30 +00:00
|
|
|
self.failUnlessRWChildURIIs(self.public_root,
|
|
|
|
u"foo",
|
|
|
|
new_uri))
|
2008-10-29 04:54:46 +00:00
|
|
|
return d
|
|
|
|
d.addCallback(_made_dir)
|
|
|
|
return d
|
|
|
|
|
2008-10-29 04:57:44 +00:00
|
|
|
def test_PUT_DIRURL_uri_noreplace(self):
|
2009-10-12 22:45:06 +00:00
|
|
|
d = self.s.create_dirnode()
|
2008-10-29 04:57:44 +00:00
|
|
|
def _made_dir(dn):
|
|
|
|
new_uri = dn.get_uri()
|
|
|
|
# replace /foo with a new (empty) directory, but ask that
|
|
|
|
# replace=false, so it should fail
|
|
|
|
d = self.shouldFail2(error.Error, "test_PUT_DIRURL_uri_noreplace",
|
|
|
|
"409 Conflict", "There was already a child by that name, and you asked me to not replace it",
|
|
|
|
self.PUT,
|
|
|
|
self.public_url + "/foo?t=uri&replace=false",
|
|
|
|
new_uri)
|
|
|
|
d.addCallback(lambda res:
|
2010-01-27 06:44:30 +00:00
|
|
|
self.failUnlessRWChildURIIs(self.public_root,
|
|
|
|
u"foo",
|
|
|
|
self._foo_uri))
|
2008-10-29 04:57:44 +00:00
|
|
|
return d
|
|
|
|
d.addCallback(_made_dir)
|
|
|
|
return d
|
|
|
|
|
2008-10-29 05:00:15 +00:00
|
|
|
def test_PUT_DIRURL_bad_t(self):
|
|
|
|
d = self.shouldFail2(error.Error, "test_PUT_DIRURL_bad_t",
|
2011-10-02 04:03:32 +00:00
|
|
|
"400 Bad Request", "PUT to a directory",
|
|
|
|
self.PUT, self.public_url + "/foo?t=BOGUS", "")
|
2008-10-29 05:00:15 +00:00
|
|
|
d.addCallback(lambda res:
|
2010-01-27 06:44:30 +00:00
|
|
|
self.failUnlessRWChildURIIs(self.public_root,
|
|
|
|
u"foo",
|
|
|
|
self._foo_uri))
|
2008-10-29 05:00:15 +00:00
|
|
|
return d
|
|
|
|
|
2007-07-25 03:16:21 +00:00
|
|
|
def test_PUT_NEWFILEURL_uri(self):
|
2007-12-05 06:01:37 +00:00
|
|
|
contents, n, new_uri = self.makefile(8)
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.PUT(self.public_url + "/foo/new.txt?t=uri", new_uri)
|
2010-07-11 20:02:52 +00:00
|
|
|
d.addCallback(lambda res: self.failUnlessReallyEqual(res.strip(), new_uri))
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failUnlessChildContentsAre(self._foo_node, u"new.txt",
|
2007-12-05 06:01:37 +00:00
|
|
|
contents))
|
2007-07-07 02:43:55 +00:00
|
|
|
return d
|
|
|
|
|
2011-08-07 00:43:48 +00:00
|
|
|
def test_PUT_NEWFILEURL_mdmf(self):
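# format=mdmf asks the webapi for an MDMF mutable file; the t=json
# metadata fetched below should report format=MDMF and MDMF caps.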
|
|
|
|
new_contents = self.NEWFILE_CONTENTS * 300000
|
|
|
|
d = self.PUT(self.public_url + \
|
2011-10-02 04:00:36 +00:00
|
|
|
"/foo/mdmf.txt?format=mdmf",
|
2011-08-07 00:43:48 +00:00
|
|
|
new_contents)
|
|
|
|
d.addCallback(lambda ignored:
|
|
|
|
self.GET(self.public_url + "/foo/mdmf.txt?t=json"))
|
2017-01-19 22:39:53 +00:00
|
|
|
def _got_json(raw):
|
|
|
|
data = json.loads(raw)
|
2011-08-07 00:43:48 +00:00
|
|
|
data = data[1]
|
2011-10-02 04:00:36 +00:00
|
|
|
self.failUnlessIn("format", data)
|
2011-10-13 16:31:43 +00:00
|
|
|
self.failUnlessEqual(data["format"], "MDMF")
|
2011-08-07 00:43:48 +00:00
|
|
|
self.failUnless(data['rw_uri'].startswith("URI:MDMF"))
|
|
|
|
self.failUnless(data['ro_uri'].startswith("URI:MDMF"))
|
|
|
|
d.addCallback(_got_json)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_PUT_NEWFILEURL_sdmf(self):
|
|
|
|
new_contents = self.NEWFILE_CONTENTS * 300000
|
|
|
|
d = self.PUT(self.public_url + \
|
2011-10-02 04:00:36 +00:00
|
|
|
"/foo/sdmf.txt?format=sdmf",
|
2011-08-07 00:43:48 +00:00
|
|
|
new_contents)
|
|
|
|
d.addCallback(lambda ignored:
|
|
|
|
self.GET(self.public_url + "/foo/sdmf.txt?t=json"))
|
2017-01-19 22:39:53 +00:00
|
|
|
def _got_json(raw):
|
|
|
|
data = json.loads(raw)
|
2011-08-07 00:43:48 +00:00
|
|
|
data = data[1]
|
2011-10-02 04:00:36 +00:00
|
|
|
self.failUnlessIn("format", data)
|
2011-10-13 16:31:43 +00:00
|
|
|
self.failUnlessEqual(data["format"], "SDMF")
|
2011-08-07 00:43:48 +00:00
|
|
|
d.addCallback(_got_json)
|
|
|
|
return d
|
|
|
|
|
2017-07-25 00:53:48 +00:00
|
|
|
@inlineCallbacks
|
2011-10-02 04:00:36 +00:00
|
|
|
def test_PUT_NEWFILEURL_bad_format(self):
|
2012-10-25 00:01:25 +00:00
|
|
|
new_contents = self.NEWFILE_CONTENTS * 300000
|
2017-07-25 00:53:48 +00:00
|
|
|
url = self.webish_url + self.public_url + "/foo/foo.txt?format=foo"
|
|
|
|
yield self.assertHTTPError(url, 400, "Unknown format: foo",
|
|
|
|
method="put", data=new_contents)
|
2011-08-07 00:43:48 +00:00
|
|
|
|
2007-08-15 20:22:23 +00:00
|
|
|
def test_PUT_NEWFILEURL_uri_replace(self):
|
2007-12-05 06:01:37 +00:00
|
|
|
contents, n, new_uri = self.makefile(8)
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.PUT(self.public_url + "/foo/bar.txt?t=uri", new_uri)
|
2010-07-11 20:02:52 +00:00
|
|
|
d.addCallback(lambda res: self.failUnlessReallyEqual(res.strip(), new_uri))
|
2007-12-05 06:01:37 +00:00
|
|
|
d.addCallback(lambda res:
|
2008-02-14 22:45:56 +00:00
|
|
|
self.failUnlessChildContentsAre(self._foo_node, u"bar.txt",
|
2007-12-05 06:01:37 +00:00
|
|
|
contents))
|
2007-08-15 20:22:23 +00:00
|
|
|
return d
|
|
|
|
|
|
|
|
def test_PUT_NEWFILEURL_uri_no_replace(self):
|
2007-12-05 06:01:37 +00:00
|
|
|
contents, n, new_uri = self.makefile(8)
|
2007-12-03 21:52:42 +00:00
|
|
|
d = self.PUT(self.public_url + "/foo/bar.txt?t=uri&replace=false", new_uri)
|
2011-10-02 04:03:32 +00:00
|
|
|
d.addBoth(self.shouldFail, error.Error,
|
|
|
|
"PUT_NEWFILEURL_uri_no_replace",
|
2007-08-15 20:22:23 +00:00
|
|
|
"409 Conflict",
|
|
|
|
"There was already a child by that name, and you asked me "
|
|
|
|
"to not replace it")
|
|
|
|
return d
|
|
|
|
|
2010-01-27 23:06:42 +00:00
|
|
|
def test_PUT_NEWFILEURL_uri_unknown_bad(self):
|
2010-05-19 05:51:46 +00:00
|
|
|
d = self.PUT(self.public_url + "/foo/put-future.txt?t=uri", unknown_rwcap)
|
2010-01-27 23:06:42 +00:00
|
|
|
d.addBoth(self.shouldFail, error.Error,
|
|
|
|
"POST_put_uri_unknown_bad",
|
|
|
|
"400 Bad Request",
|
|
|
|
"unknown cap in a write slot")
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_PUT_NEWFILEURL_uri_unknown_ro_good(self):
|
2010-05-19 05:51:46 +00:00
|
|
|
d = self.PUT(self.public_url + "/foo/put-future-ro.txt?t=uri", unknown_rocap)
|
2010-01-27 23:06:42 +00:00
|
|
|
d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
|
|
|
|
u"put-future-ro.txt")
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_PUT_NEWFILEURL_uri_unknown_imm_good(self):
|
2010-05-19 05:51:46 +00:00
|
|
|
d = self.PUT(self.public_url + "/foo/put-future-imm.txt?t=uri", unknown_immcap)
|
2010-01-27 23:06:42 +00:00
|
|
|
d.addCallback(self.failUnlessURIMatchesROChild, self._foo_node,
|
|
|
|
u"put-future-imm.txt")
|
|
|
|
return d
|
|
|
|
|
2007-09-06 00:12:27 +00:00
|
|
|
def test_PUT_NEWFILE_URI(self):
|
|
|
|
file_contents = "New file contents here\n"
|
|
|
|
d = self.PUT("/uri", file_contents)
|
2009-04-08 02:13:40 +00:00
|
|
|
def _check(uri):
|
|
|
|
assert isinstance(uri, str), uri
|
2012-05-22 22:18:26 +00:00
|
|
|
self.failUnlessIn(uri, self.get_all_contents())
|
|
|
|
self.failUnlessReallyEqual(self.get_all_contents()[uri],
|
2010-07-11 20:02:52 +00:00
|
|
|
file_contents)
|
2009-04-08 02:13:40 +00:00
|
|
|
return self.GET("/uri/%s" % uri)
|
|
|
|
d.addCallback(_check)
|
|
|
|
def _check2(res):
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(res, file_contents)
|
2009-04-08 02:13:40 +00:00
|
|
|
d.addCallback(_check2)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_PUT_NEWFILE_URI_not_mutable(self):
|
|
|
|
file_contents = "New file contents here\n"
|
|
|
|
d = self.PUT("/uri?mutable=false", file_contents)
|
2007-09-06 00:12:27 +00:00
|
|
|
def _check(uri):
|
2008-12-19 15:39:24 +00:00
|
|
|
assert isinstance(uri, str), uri
|
2012-05-22 22:18:26 +00:00
|
|
|
self.failUnlessIn(uri, self.get_all_contents())
|
|
|
|
self.failUnlessReallyEqual(self.get_all_contents()[uri],
|
2010-07-11 20:02:52 +00:00
|
|
|
file_contents)
|
2007-12-20 00:54:40 +00:00
|
|
|
return self.GET("/uri/%s" % uri)
|
2007-09-06 00:12:27 +00:00
|
|
|
d.addCallback(_check)
|
|
|
|
def _check2(res):
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(res, file_contents)
|
2007-09-06 00:12:27 +00:00
|
|
|
d.addCallback(_check2)
|
|
|
|
return d
|
|
|
|
|
2007-09-17 08:53:46 +00:00
|
|
|
def test_PUT_NEWFILE_URI_only_PUT(self):
|
|
|
|
d = self.PUT("/uri?t=bogus", "")
|
|
|
|
d.addBoth(self.shouldFail, error.Error,
|
|
|
|
"PUT_NEWFILE_URI_only_PUT",
|
|
|
|
"400 Bad Request",
|
2008-05-19 19:56:02 +00:00
|
|
|
"/uri accepts only PUT, PUT?t=mkdir, POST?t=upload, and POST?t=mkdir")
|
2007-09-17 08:53:46 +00:00
|
|
|
return d
|
|
|
|
|
2008-02-06 05:18:02 +00:00
|
|
|
def test_PUT_NEWFILE_URI_mutable(self):
|
|
|
|
file_contents = "New file contents here\n"
|
|
|
|
d = self.PUT("/uri?mutable=true", file_contents)
|
2009-08-15 11:02:56 +00:00
|
|
|
def _check1(filecap):
|
|
|
|
filecap = filecap.strip()
|
|
|
|
self.failUnless(filecap.startswith("URI:SSK:"), filecap)
|
|
|
|
self.filecap = filecap
|
|
|
|
u = uri.WriteableSSKFileURI.init_from_string(filecap)
|
2012-05-22 22:18:26 +00:00
|
|
|
self.failUnlessIn(u.get_storage_index(), self.get_all_contents())
|
2009-08-15 11:02:56 +00:00
|
|
|
n = self.s.create_node_from_uri(filecap)
|
2008-04-18 00:51:38 +00:00
|
|
|
return n.download_best_version()
|
2009-08-15 11:02:56 +00:00
|
|
|
d.addCallback(_check1)
|
|
|
|
def _check2(data):
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(data, file_contents)
|
2009-08-15 11:02:56 +00:00
|
|
|
return self.GET("/uri/%s" % urllib.quote(self.filecap))
|
2008-02-06 05:18:02 +00:00
|
|
|
d.addCallback(_check2)
|
2009-08-15 11:02:56 +00:00
|
|
|
def _check3(res):
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(res, file_contents)
|
2009-08-15 11:02:56 +00:00
|
|
|
d.addCallback(_check3)
|
2008-02-06 05:18:02 +00:00
|
|
|
return d
|
|
|
|
|
2007-12-20 19:58:17 +00:00
|
|
|
def test_PUT_mkdir(self):
|
2007-09-06 00:23:06 +00:00
|
|
|
d = self.PUT("/uri?t=mkdir", "")
|
|
|
|
def _check(uri):
|
2007-12-05 06:01:37 +00:00
|
|
|
n = self.s.create_node_from_uri(uri.strip())
|
|
|
|
d2 = self.failUnlessNodeKeysAre(n, [])
|
|
|
|
d2.addCallback(lambda res:
|
2007-12-20 00:54:40 +00:00
|
|
|
self.GET("/uri/%s?t=json" % uri))
|
2007-12-05 06:01:37 +00:00
|
|
|
return d2
|
2007-09-06 00:23:06 +00:00
|
|
|
d.addCallback(_check)
|
|
|
|
d.addCallback(self.failUnlessIsEmptyJSON)
|
|
|
|
return d
|
|
|
|
|
2011-08-07 00:43:48 +00:00
|
|
|
def test_PUT_mkdir_mdmf(self):
|
2011-10-02 04:00:36 +00:00
|
|
|
d = self.PUT("/uri?t=mkdir&format=mdmf", "")
|
2011-08-07 00:43:48 +00:00
|
|
|
def _got(res):
|
|
|
|
u = uri.from_string(res)
|
|
|
|
# Check that this is an MDMF writecap
|
|
|
|
self.failUnlessIsInstance(u, uri.MDMFDirectoryURI)
|
|
|
|
d.addCallback(_got)
|
|
|
|
return d
|
|
|
|
|
|
|
|
def test_PUT_mkdir_sdmf(self):
|
2011-10-02 04:00:36 +00:00
|
|
|
d = self.PUT("/uri?t=mkdir&format=sdmf", "")
|
2011-08-07 00:43:48 +00:00
|
|
|
def _got(res):
|
|
|
|
u = uri.from_string(res)
|
|
|
|
self.failUnlessIsInstance(u, uri.DirectoryURI)
|
|
|
|
d.addCallback(_got)
|
|
|
|
return d
|
|
|
|
|
2017-07-25 00:53:48 +00:00
|
|
|
@inlineCallbacks
|
2011-10-02 04:00:36 +00:00
|
|
|
def test_PUT_mkdir_bad_format(self):
|
2017-07-25 00:53:48 +00:00
|
|
|
url = self.webish_url + "/uri?t=mkdir&format=foo"
|
|
|
|
yield self.assertHTTPError(url, 400, "Unknown format: foo",
|
|
|
|
method="put", data="")
|
2011-08-07 00:43:48 +00:00
|
|
|
|
2007-12-05 06:49:38 +00:00
|
|
|
def test_POST_check(self):
|
|
|
|
d = self.POST(self.public_url + "/foo", t="check", name="bar.txt")
|
|
|
|
def _done(res):
|
|
|
|
# this returns a string form of the results, which are probably
|
|
|
|
# None since we're using fake filenodes.
|
|
|
|
# TODO: verify that the check actually happened, by changing
|
|
|
|
# FakeCHKFileNode to count how many times .check() has been
|
|
|
|
# called.
|
|
|
|
pass
|
|
|
|
d.addCallback(_done)
|
|
|
|
return d
|
2007-12-25 10:48:57 +00:00
|
|
|
|
2011-08-07 00:43:48 +00:00
|
|
|
|
|
|
|
def test_PUT_update_at_offset(self):
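# Exercise PUT /uri/$FILECAP?offset=N on a mutable file: replace data
# at offset 100, append by writing at offset == current length, and
# overwrite the beginning with offset=0.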
|
|
|
|
file_contents = "test file" * 100000 # about 900 KiB
|
|
|
|
d = self.PUT("/uri?mutable=true", file_contents)
|
|
|
|
def _then(filecap):
|
|
|
|
self.filecap = filecap
|
|
|
|
new_data = file_contents[:100]
|
|
|
|
new = "replaced and so on"
|
|
|
|
new_data += new
|
|
|
|
new_data += file_contents[len(new_data):]
|
|
|
|
assert len(new_data) == len(file_contents)
|
|
|
|
self.new_data = new_data
|
|
|
|
d.addCallback(_then)
|
|
|
|
d.addCallback(lambda ignored:
|
|
|
|
self.PUT("/uri/%s?replace=True&offset=100" % self.filecap,
|
|
|
|
"replaced and so on"))
|
|
|
|
def _get_data(filecap):
|
|
|
|
n = self.s.create_node_from_uri(filecap)
|
|
|
|
return n.download_best_version()
|
|
|
|
d.addCallback(_get_data)
|
|
|
|
d.addCallback(lambda results:
|
|
|
|
self.failUnlessEqual(results, self.new_data))
|
|
|
|
# Now try appending things to the file
|
|
|
|
d.addCallback(lambda ignored:
|
|
|
|
self.PUT("/uri/%s?offset=%d" % (self.filecap, len(self.new_data)),
|
|
|
|
"puppies" * 100))
|
|
|
|
d.addCallback(_get_data)
|
|
|
|
d.addCallback(lambda results:
|
|
|
|
self.failUnlessEqual(results, self.new_data + ("puppies" * 100)))
|
|
|
|
# and try replacing the beginning of the file
|
|
|
|
d.addCallback(lambda ignored:
|
|
|
|
self.PUT("/uri/%s?offset=0" % self.filecap, "begin"))
|
|
|
|
d.addCallback(_get_data)
|
|
|
|
d.addCallback(lambda results:
|
|
|
|
self.failUnlessEqual(results, "begin"+self.new_data[len("begin"):]+("puppies"*100)))
|
|
|
|
return d
|
|
|
|
|
2017-07-25 00:53:48 +00:00
|
|
|
@inlineCallbacks
|
2011-08-07 00:43:48 +00:00
|
|
|
def test_PUT_update_at_invalid_offset(self):
|
|
|
|
file_contents = "test file" * 100000 # about 900 KiB
|
2017-07-25 00:53:48 +00:00
|
|
|
filecap = yield self.PUT("/uri?mutable=true", file_contents)
|
2011-08-07 00:43:48 +00:00
|
|
|
# Negative offsets should cause an error.
|
2017-07-25 00:53:48 +00:00
|
|
|
url = self.webish_url + "/uri/%s?offset=-1" % filecap
|
|
|
|
yield self.assertHTTPError(url, 400, "Invalid offset",
|
|
|
|
method="put", data="foo")
|
2011-08-07 00:43:48 +00:00
|
|
|
|
2017-07-25 00:53:48 +00:00
|
|
|
@inlineCallbacks
|
2011-08-07 00:43:48 +00:00
|
|
|
def test_PUT_update_at_offset_immutable(self):
|
|
|
|
file_contents = "Test file" * 100000
|
2017-07-25 00:53:48 +00:00
|
|
|
filecap = yield self.PUT("/uri", file_contents)
|
|
|
|
url = self.webish_url + "/uri/%s?offset=50" % filecap
|
|
|
|
yield self.assertHTTPError(url, 400, "immutable",
|
|
|
|
method="put", data="foo")
|
2011-08-07 00:43:48 +00:00
|
|
|
|
2017-07-24 21:02:31 +00:00
|
|
|
@inlineCallbacks
|
2007-12-25 10:48:57 +00:00
|
|
|
def test_bad_method(self):
|
|
|
|
url = self.webish_url + self.public_url + "/foo/bar.txt"
|
2017-07-24 21:02:31 +00:00
|
|
|
yield self.assertHTTPError(url, 501,
|
|
|
|
"I don't know how to treat a BOGUS request.",
|
|
|
|
method="BOGUS")
|
2007-12-25 10:48:57 +00:00
|
|
|
|
2017-07-24 23:31:44 +00:00
|
|
|
@inlineCallbacks
|
2007-12-25 10:48:57 +00:00
|
|
|
def test_short_url(self):
|
|
|
|
url = self.webish_url + "/uri"
|
2017-07-24 23:31:44 +00:00
|
|
|
yield self.assertHTTPError(url, 501,
|
|
|
|
"I don't know how to treat a DELETE request.",
|
|
|
|
method="DELETE")
|
2008-05-20 22:21:46 +00:00
|
|
|
|
2017-07-24 23:31:44 +00:00
|
|
|
@inlineCallbacks
|
2008-10-22 05:13:54 +00:00
|
|
|
def test_ophandle_bad(self):
|
2008-10-22 00:52:56 +00:00
|
|
|
url = self.webish_url + "/operations/bogus?t=status"
|
2017-07-24 23:31:44 +00:00
|
|
|
yield self.assertHTTPError(url, 404,
|
|
|
|
"unknown/expired handle 'bogus'")
|
2008-10-22 00:52:56 +00:00
|
|
|
|
2017-07-25 00:53:48 +00:00
|
|
|
@inlineCallbacks
|
2008-10-22 05:13:54 +00:00
|
|
|
def test_ophandle_cancel(self):
|
2020-03-03 00:08:30 +00:00
|
|
|
url = self.webish_url + self.public_url + "/foo?t=start-manifest&ophandle=128"
|
2017-07-25 00:53:48 +00:00
|
|
|
yield do_http("post", url,
|
|
|
|
allow_redirects=True, browser_like_redirects=True)
|
|
|
|
res = yield self.GET("/operations/128?t=status&output=JSON")
|
|
|
|
data = json.loads(res)
|
|
|
|
self.failUnless("finished" in data, res)
|
2019-08-08 01:22:25 +00:00
|
|
|
monitor = self.ws.getServiceNamed("operations").handles["128"][0]
|
2017-07-25 00:53:48 +00:00
|
|
|
|
|
|
|
res = yield self.POST("/operations/128?t=cancel&output=JSON")
|
|
|
|
data = json.loads(res)
|
|
|
|
self.failUnless("finished" in data, res)
|
|
|
|
# t=cancel causes the handle to be forgotten
|
|
|
|
self.failUnless(monitor.is_cancelled())
|
|
|
|
|
|
|
|
url = self.webish_url + "/operations/128?t=status&output=JSON"
|
|
|
|
yield self.assertHTTPError(url, 404, "unknown/expired handle '128'")
|
2008-10-22 05:13:54 +00:00
|
|
|
|
2017-07-25 00:53:48 +00:00
|
|
|
@inlineCallbacks
|
2008-10-22 05:13:54 +00:00
|
|
|
def test_ophandle_retainfor(self):
|
2020-03-03 00:08:30 +00:00
|
|
|
url = self.webish_url + self.public_url + "/foo?t=start-manifest&ophandle=129&retain-for=60"
|
2017-07-25 00:53:48 +00:00
|
|
|
yield do_http("post", url,
|
|
|
|
allow_redirects=True, browser_like_redirects=True)
|
|
|
|
res = yield self.GET("/operations/129?t=status&output=JSON&retain-for=0")
|
|
|
|
data = json.loads(res)
|
|
|
|
self.failUnless("finished" in data, res)
|
|
|
|
|
2008-10-22 05:13:54 +00:00
|
|
|
# the retain-for=0 will cause the handle to be expired very soon
|
2017-07-25 00:53:48 +00:00
|
|
|
yield self.clock.advance(2.0)
|
|
|
|
url = self.webish_url + "/operations/129?t=status&output=JSON"
|
|
|
|
yield self.assertHTTPError(url, 404, "unknown/expired handle '129'")
|
2008-10-22 05:13:54 +00:00
|
|
|
|
2017-07-25 00:53:48 +00:00
|
|
|
@inlineCallbacks
|
2008-10-22 05:13:54 +00:00
|
|
|
def test_ophandle_release_after_complete(self):
|
2020-03-03 00:08:30 +00:00
|
|
|
url = self.webish_url + self.public_url + "/foo?t=start-manifest&ophandle=130"
|
2017-07-25 00:53:48 +00:00
|
|
|
yield do_http("post", url,
|
|
|
|
allow_redirects=True, browser_like_redirects=True)
|
|
|
|
yield self.wait_for_operation(None, "130")
|
|
|
|
yield self.GET("/operations/130?t=status&output=JSON&release-after-complete=true")
|
2008-10-22 05:13:54 +00:00
|
|
|
# the release-after-complete=true will cause the handle to be expired
|
2017-07-25 00:53:48 +00:00
|
|
|
op_url = self.webish_url + "/operations/130?t=status&output=JSON"
|
|
|
|
yield self.assertHTTPError(op_url, 404, "unknown/expired handle '130'")
|
2008-10-22 05:13:54 +00:00
|
|
|
|
2017-07-25 00:53:48 +00:00
|
|
|
@inlineCallbacks
|
2010-02-21 01:04:55 +00:00
|
|
|
def test_uncollected_ophandle_expiration(self):
|
|
|
|
# uncollected ophandles should expire after 4 days
|
|
|
|
def _make_uncollected_ophandle(ophandle):
|
2017-07-24 23:12:35 +00:00
|
|
|
url = (self.webish_url + self.public_url +
|
2020-03-03 00:08:30 +00:00
|
|
|
"/foo?t=start-manifest&ophandle=%d" % ophandle)
|
2017-07-24 23:12:35 +00:00
|
|
|
# When we start the operation, the webapi server will want to
|
|
|
|
# redirect us to the page for the ophandle, so we get
|
|
|
|
# confirmation that the operation has started. If the manifest
|
|
|
|
# operation has finished by the time we get there, following that
|
|
|
|
# redirect would have the side effect of collecting the ophandle
|
|
|
|
# that we've just created, which means that we can't use the
|
|
|
|
# ophandle to test the uncollected timeout anymore. So, instead,
|
|
|
|
# catch+ignore any 302 here and don't follow it.
|
|
|
|
d = treq.request("post", url, persistent=False)
|
|
|
|
def _ignore_redirect(f):
|
|
|
|
f.trap(client.ResponseFailed)
|
|
|
|
e = f.value
|
|
|
|
reasons = e.reasons
|
|
|
|
r0 = reasons[0]
|
|
|
|
r0.trap(error.PageRedirect)
|
|
|
|
d.addErrback(_ignore_redirect)
|
2010-02-21 01:04:55 +00:00
|
|
|
return d
|
|
|
|
# Create an ophandle, don't collect it, then advance the clock by
|
|
|
|
# 4 days - 1 second and make sure that the ophandle is still there.
|
2017-07-25 00:53:48 +00:00
|
|
|
yield _make_uncollected_ophandle(131)
|
|
|
|
yield self.clock.advance((96*60*60) - 1) # 96 hours = 4 days
|
|
|
|
res = yield self.GET("/operations/131?t=status&output=JSON")
|
|
|
|
data = json.loads(res)
|
|
|
|
self.failUnless("finished" in data, res)
|
|
|
|
|
2010-02-21 01:04:55 +00:00
|
|
|
# Create an ophandle, don't collect it, then try to collect it
|
|
|
|
# after 4 days. It should be gone.
|
2017-07-25 00:53:48 +00:00
|
|
|
yield _make_uncollected_ophandle(132)
|
|
|
|
yield self.clock.advance(96*60*60)
|
|
|
|
op_url = self.webish_url + "/operations/132?t=status&output=JSON"
|
|
|
|
yield self.assertHTTPError(op_url, 404, "unknown/expired handle '132'")
|
2010-02-21 01:04:55 +00:00
|
|
|
|
2017-07-25 00:53:48 +00:00
|
|
|
@inlineCallbacks
|
2010-02-21 01:04:55 +00:00
|
|
|
def test_collected_ophandle_expiration(self):
|
|
|
|
# collected ophandles should expire after 1 day
|
|
|
|
def _make_collected_ophandle(ophandle):
|
2017-07-24 23:12:35 +00:00
|
|
|
url = (self.webish_url + self.public_url +
|
2020-03-03 00:08:30 +00:00
|
|
|
"/foo?t=start-manifest&ophandle=%d" % ophandle)
|
2011-08-09 00:11:17 +00:00
|
|
|
# By following the initial redirect, we collect the ophandle
|
2010-02-21 01:04:55 +00:00
|
|
|
# we've just created.
|
2017-07-24 23:12:35 +00:00
|
|
|
return do_http("post", url,
|
|
|
|
allow_redirects=True, browser_like_redirects=True)
|
2011-08-09 00:11:17 +00:00
|
|
|
# Create a collected ophandle, then collect it after 23 hours
|
2010-02-21 01:04:55 +00:00
|
|
|
# 59 minutes and 59 seconds to make sure that it is still there.
|
2017-07-25 00:53:48 +00:00
|
|
|
yield _make_collected_ophandle(133)
|
|
|
|
yield self.clock.advance((24*60*60) - 1)
|
|
|
|
res = yield self.GET("/operations/133?t=status&output=JSON")
|
|
|
|
data = json.loads(res)
|
|
|
|
self.failUnless("finished" in data, res)
|
|
|
|
|
2010-02-21 01:04:55 +00:00
|
|
|
# Create another collected ophandle, then check its status
|
|
|
|
# after 24 hours to make sure that it is gone.
|
2017-07-25 00:53:48 +00:00
|
|
|
yield _make_collected_ophandle(134)
|
|
|
|
yield self.clock.advance(24*60*60)
|
|
|
|
op_url = self.webish_url + "/operations/134?t=status&output=JSON"
|
|
|
|
yield self.assertHTTPError(op_url, 404, "unknown/expired handle '134'")
|
2010-02-21 01:04:55 +00:00
|
|
|
|
2008-10-22 00:52:56 +00:00
|
|
|
def test_incident(self):
|
|
|
|
d = self.POST("/report_incident", details="eek")
|
|
|
|
def _done(res):
|
2011-12-17 04:31:30 +00:00
|
|
|
self.failIfIn("<html>", res)
|
2013-05-19 07:31:43 +00:00
|
|
|
self.failUnlessIn("An incident report has been saved", res)
|
2008-10-22 00:52:56 +00:00
|
|
|
d.addCallback(_done)
|
|
|
|
return d
|
|
|
|
|
2008-10-29 22:34:31 +00:00
|
|
|
def test_static(self):
|
|
|
|
webdir = os.path.join(self.staticdir, "subdir")
|
|
|
|
fileutil.make_dirs(webdir)
|
|
|
|
f = open(os.path.join(webdir, "hello.txt"), "wb")
|
|
|
|
f.write("hello")
|
|
|
|
f.close()
|
|
|
|
|
|
|
|
d = self.GET("/static/subdir/hello.txt")
|
|
|
|
def _check(res):
|
2010-07-11 20:02:52 +00:00
|
|
|
self.failUnlessReallyEqual(res, "hello")
|
2008-10-29 22:34:31 +00:00
|
|
|
d.addCallback(_check)
|
|
|
|
return d
|
|
|
|
|
2016-04-28 07:35:52 +00:00
|
|
|
def test_static_missing(self):
|
|
|
|
# self.staticdir does not exist yet, because we used self.mktemp()
|
|
|
|
d = self.assertFailure(self.GET("/static"), error.Error)
|
|
|
|
# nevow.static throws an exception when it tries to os.stat the
|
|
|
|
# missing directory, which gives the client a 500 Internal Server
|
|
|
|
# Error, and the traceback reveals the parent directory name. By
|
|
|
|
# switching to plain twisted.web.static, this gives a normal 404 that
|
|
|
|
# doesn't reveal anything. This addresses #1720.
|
|
|
|
d.addCallback(lambda e: self.assertEquals(str(e), "404 Not Found"))
|
|
|
|
return d
|