commit 92a4a5afcc
parent 45a2fcc3f2

    Closer to more passing tests, maybe.
@@ -230,16 +230,16 @@ class UseNode(object):
 
     def setUp(self):
         def format_config_items(config):
-            return b"\n".join(
-                b" = ".join((key, value))
+            return "\n".join(
+                " = ".join((key, value))
                 for (key, value)
                 in config.items()
             )
 
         if self.plugin_config is None:
-            plugin_config_section = b""
+            plugin_config_section = ""
         else:
-            plugin_config_section = b"""
+            plugin_config_section = """
 [storageclient.plugins.{storage_plugin}]
 {config}
 """.format(
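
The hunk above switches the UseNode fixture from assembling its configuration snippet out of bytes to assembling it out of native text. A minimal standalone sketch of the same pattern (illustrative values only, not taken from the project):

    def format_config_items(config):
        # Render {"abc": "xyz"} as "abc = xyz" lines using native str,
        # the type Python 3's configparser works with.
        return "\n".join(
            " = ".join((key, value))
            for (key, value) in config.items()
        )

    print(format_config_items({"abc": "xyz", "web": "1"}))
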
@@ -1,3 +1,5 @@
+from six import ensure_text
+
 import hashlib
 from mock import Mock
 from json import (
@@ -271,7 +273,7 @@ class PluginMatchedAnnouncement(SyncTestCase):
         """
         yield self.make_node(
             introducer_furl=SOME_FURL,
-            storage_plugin=b"tahoe-lafs-dummy-v1",
+            storage_plugin="tahoe-lafs-dummy-v1",
             plugin_config=None,
         )
         server_id = b"v0-abcdef"
@@ -295,9 +297,9 @@ class PluginMatchedAnnouncement(SyncTestCase):
         configuration is matched and the plugin's storage client is used.
         """
         plugin_config = {
-            b"abc": b"xyz",
+            "abc": "xyz",
         }
-        plugin_name = b"tahoe-lafs-dummy-v1"
+        plugin_name = "tahoe-lafs-dummy-v1"
         yield self.make_node(
             introducer_furl=SOME_FURL,
             storage_plugin=plugin_name,
@@ -348,7 +350,7 @@ class PluginMatchedAnnouncement(SyncTestCase):
         An announcement that could be matched by a plugin that is enabled with no
         configuration is matched and the plugin's storage client is used.
         """
-        plugin_name = b"tahoe-lafs-dummy-v1"
+        plugin_name = "tahoe-lafs-dummy-v1"
         yield self.make_node(
             introducer_furl=SOME_FURL,
             storage_plugin=plugin_name,
@@ -425,7 +427,7 @@ class StoragePluginWebPresence(AsyncTestCase):
         self.port_assigner = SameProcessStreamEndpointAssigner()
         self.port_assigner.setUp()
         self.addCleanup(self.port_assigner.tearDown)
-        self.storage_plugin = b"tahoe-lafs-dummy-v1"
+        self.storage_plugin = "tahoe-lafs-dummy-v1"
 
         from twisted.internet import reactor
         _, port_endpoint = self.port_assigner.assign(reactor)
@@ -436,15 +438,15 @@ class StoragePluginWebPresence(AsyncTestCase):
         self.basedir.child(u"private").makedirs()
         self.node_fixture = self.useFixture(UseNode(
             plugin_config={
-                b"web": b"1",
+                "web": "1",
             },
             node_config={
-                b"tub.location": b"127.0.0.1:1",
-                b"web.port": port_endpoint,
+                "tub.location": "127.0.0.1:1",
+                "web.port": ensure_text(port_endpoint),
             },
             storage_plugin=self.storage_plugin,
             basedir=self.basedir,
-            introducer_furl=SOME_FURL,
+            introducer_furl=ensure_text(SOME_FURL),
         ))
         self.node = yield self.node_fixture.create_node()
         self.webish = self.node.getServiceNamed(WebishServer.name)
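
ensure_text from six is what lets the fixture accept either bytes or text here: byte strings are decoded (UTF-8 by default) and text passes through unchanged. A tiny sketch with made-up values:

    from six import ensure_text

    # Both normalize to the same text value.
    assert ensure_text(b"tcp:0") == u"tcp:0"
    assert ensure_text(u"tcp:0") == u"tcp:0"
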
@@ -461,7 +463,7 @@ class StoragePluginWebPresence(AsyncTestCase):
             port=self.port,
             plugin_name=self.storage_plugin,
         ).encode("utf-8")
-        result = yield do_http(b"get", url)
+        result = yield do_http("get", url)
         self.assertThat(result, Equals(dumps({b"web": b"1"})))
 
     @inlineCallbacks
@@ -476,13 +478,13 @@ class StoragePluginWebPresence(AsyncTestCase):
             port=self.port,
             path=(
                 u"storage-plugins",
-                self.storage_plugin.decode("utf-8"),
+                self.storage_plugin,
                 u"counter",
             ),
         ).to_text().encode("utf-8")
         values = {
-            loads((yield do_http(b"get", url)))[u"value"],
-            loads((yield do_http(b"get", url)))[u"value"],
+            loads((yield do_http("get", url)))[u"value"],
+            loads((yield do_http("get", url)))[u"value"],
         }
         self.assertThat(
             values,
@@ -57,7 +57,7 @@ class TahoeLAFSRequest(Request, object):
 
         self.method, self.uri = command, path
         self.clientproto = version
-        x = self.uri.split('?', 1)
+        x = self.uri.split(b'?', 1)
 
         if len(x) == 1:
             self.path = self.uri
@@ -116,25 +116,25 @@ def _logFormatter(logDateTime, request):
     # match apache formatting. TODO: when we move to DSA dirnodes and
     # shorter caps, consider exposing a few characters of the cap, or
     # maybe a few characters of its hash.
-    x = request.uri.split("?", 1)
+    x = request.uri.split(b"?", 1)
     if len(x) == 1:
         # no query args
         path = request.uri
-        queryargs = ""
+        queryargs = b""
     else:
         path, queryargs = x
         # there is a form handler which redirects POST /uri?uri=FOO into
         # GET /uri/FOO so folks can paste in non-HTTP-prefixed uris. Make
         # sure we censor these too.
-        if queryargs.startswith("uri="):
-            queryargs = "uri=[CENSORED]"
+        if queryargs.startswith(b"uri="):
+            queryargs = b"uri=[CENSORED]"
         queryargs = "?" + queryargs
-    if path.startswith("/uri/"):
-        path = "/uri/[CENSORED]"
-    elif path.startswith("/file/"):
-        path = "/file/[CENSORED]"
-    elif path.startswith("/named/"):
-        path = "/named/[CENSORED]"
+    if path.startswith(b"/uri/"):
+        path = b"/uri/[CENSORED]"
+    elif path.startswith(b"/file/"):
+        path = b"/file/[CENSORED]"
+    elif path.startswith(b"/named/"):
+        path = b"/named/[CENSORED]"
 
     uri = path + queryargs
 
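
The web-server hunks move in the opposite direction: Twisted's Request.uri is a byte string, so on Python 3 the splitting and censoring must use byte literals or a TypeError results. A small illustration with a made-up URI:

    uri = b"/uri/URI:CHK:xxxx?uri=URI:CHK:yyyy"
    path, queryargs = uri.split(b"?", 1)
    assert path.startswith(b"/uri/")   # bytes prefix against bytes value: fine
    # path.startswith("/uri/") would raise TypeError on Python 3, because a
    # str prefix cannot be tested against a bytes value.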