Merge pull request #887 from tahoe-lafs/3465.storage_client-python-3-take-2

Port storage_client to Python 3

Fixes ticket:3465
Itamar Turner-Trauring 2020-11-06 11:18:31 -05:00 committed by GitHub
commit eab228f22a
13 changed files with 264 additions and 89 deletions

newsfragments/3465.minor (new, empty file)


@ -179,7 +179,7 @@ class IntroducerClient(service.Service, Referenceable):
self._subscriptions.add(service_name)
self._debug_outstanding += 1
d = self._publisher.callRemote("subscribe_v2",
self, service_name,
self, service_name.encode("utf-8"),
self._my_subscriber_info)
d.addBoth(self._debug_retired)
d.addErrback(log.err, facility="tahoe.introducer.client",


@ -2,7 +2,13 @@
"""
I contain the client-side code which speaks to storage servers, in particular
the foolscap-based server implemented in src/allmydata/storage/*.py .
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
# roadmap:
#
@ -28,7 +34,10 @@ the foolscap-based server implemented in src/allmydata/storage/*.py .
#
# 6: implement other sorts of IStorageClient classes: S3, etc
from past.builtins import unicode
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
import re, time, hashlib
@ -65,6 +74,8 @@ from allmydata.util.assertutil import precondition
from allmydata.util.observer import ObserverList
from allmydata.util.rrefutil import add_version_to_remote_reference
from allmydata.util.hashutil import permute_server_hash
from allmydata.util.dictutil import BytesKeyDict, UnicodeKeyDict
# who is responsible for de-duplication?
# both?
@ -92,7 +103,7 @@ class StorageClientConfig(object):
decreasing preference. See the *[client]peers.preferred*
documentation for details.
:ivar dict[unicode, dict[bytes, bytes]] storage_plugins: A mapping from
:ivar dict[unicode, dict[unicode, unicode]] storage_plugins: A mapping from
names of ``IFoolscapStoragePlugin`` configured in *tahoe.cfg* to the
respective configuration.
"""
@ -107,24 +118,24 @@ class StorageClientConfig(object):
:param _Config config: The loaded Tahoe-LAFS node configuration.
"""
ps = config.get_config("client", "peers.preferred", b"").split(b",")
preferred_peers = tuple([p.strip() for p in ps if p != b""])
ps = config.get_config("client", "peers.preferred", "").split(",")
preferred_peers = tuple([p.strip() for p in ps if p != ""])
enabled_storage_plugins = (
name.strip()
for name
in config.get_config(
b"client",
b"storage.plugins",
b"",
).decode("utf-8").split(u",")
"client",
"storage.plugins",
"",
).split(u",")
if name.strip()
)
storage_plugins = {}
for plugin_name in enabled_storage_plugins:
try:
plugin_config = config.items(b"storageclient.plugins." + plugin_name)
plugin_config = config.items("storageclient.plugins." + plugin_name)
except NoSectionError:
plugin_config = []
storage_plugins[plugin_name] = dict(plugin_config)
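
# A minimal, hedged sketch of what the hunk above now produces: plugin names
# and their settings are read as native str, so storage_plugins maps unicode
# names to dicts of unicode keys/values. This stand-in uses stock configparser
# instead of Tahoe's _Config wrapper; the plugin name reuses the dummy plugin
# from the tests below and the "web" option is just an illustrative value.
from configparser import ConfigParser, NoSectionError

cfg = ConfigParser()
cfg.read_string(u"""
[client]
storage.plugins = tahoe-lafs-dummy-v1

[storageclient.plugins.tahoe-lafs-dummy-v1]
web = 1
""")

storage_plugins = {}
for name in cfg.get("client", "storage.plugins", fallback=u"").split(u","):
    name = name.strip()
    if name:
        try:
            items = cfg.items(u"storageclient.plugins." + name)
        except NoSectionError:
            items = []
        storage_plugins[name] = dict(items)

assert storage_plugins == {u"tahoe-lafs-dummy-v1": {u"web": u"1"}}
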
@ -173,7 +184,7 @@ class StorageFarmBroker(service.MultiService):
# storage servers that we've heard about. Each descriptor manages its
# own Reconnector, and will give us a RemoteReference when we ask
# them for it.
self.servers = {}
self.servers = BytesKeyDict()
self._static_server_ids = set() # ignore announcements for these
self.introducer_client = None
self._threshold_listeners = [] # tuples of (threshold, Deferred)
@ -200,6 +211,8 @@ class StorageFarmBroker(service.MultiService):
# information.
pass
else:
if isinstance(server_id, str):
server_id = server_id.encode("utf-8")
self._static_server_ids.add(server_id)
self.servers[server_id] = storage_server
storage_server.setServiceParent(self)
@ -350,8 +363,8 @@ class StorageFarmBroker(service.MultiService):
so if we use more than one introducer it is possible for them to
deliver us stale announcements in some cases.
"""
precondition(isinstance(key_s, str), key_s)
precondition(key_s.startswith("v0-"), key_s)
precondition(isinstance(key_s, bytes), key_s)
precondition(key_s.startswith(b"v0-"), key_s)
precondition(ann["service-name"] == "storage", ann["service-name"])
server_id = key_s
@ -399,7 +412,7 @@ class StorageFarmBroker(service.MultiService):
# connections to only a subset of the servers, which would increase
# the chances that we'll put shares in weird places (and not update
# existing shares of mutable files). See #374 for more details.
for dsc in self.servers.values():
for dsc in list(self.servers.values()):
dsc.try_to_connect()
def get_servers_for_psi(self, peer_selection_index):
@ -439,7 +452,7 @@ class StorageFarmBroker(service.MultiService):
# Upload Results web page). If the Helper is running 1.12 or newer,
# it will send pubkeys, but if it's still running 1.11, it will send
# tubids. This clause maps the old tubids to our existing servers.
for s in self.servers.values():
for s in list(self.servers.values()):
if isinstance(s, NativeStorageServer):
if serverid == s.get_tubid():
return s
@ -552,10 +565,10 @@ class _FoolscapStorage(object):
tubid = base32.a2b(tubid_s)
if "permutation-seed-base32" in ann:
seed = ann["permutation-seed-base32"]
if isinstance(seed, unicode):
if isinstance(seed, str):
seed = seed.encode("utf-8")
ps = base32.a2b(seed)
elif re.search(r'^v0-[0-9a-zA-Z]{52}$', server_id):
elif re.search(br'^v0-[0-9a-zA-Z]{52}$', server_id):
ps = base32.a2b(server_id[3:])
else:
log.msg("unable to parse serverid '%(server_id)s as pubkey, "
@ -647,7 +660,7 @@ def _storage_from_foolscap_plugin(node_config, config, announcement, get_rref):
in getPlugins(IFoolscapStoragePlugin)
}
storage_options = announcement.get(u"storage-options", [])
for plugin_name, plugin_config in config.storage_plugins.items():
for plugin_name, plugin_config in list(config.storage_plugins.items()):
try:
plugin = plugins[plugin_name]
except KeyError:
@ -680,16 +693,16 @@ class NativeStorageServer(service.MultiService):
@ivar remote_host: the IAddress, if connected, otherwise None
"""
VERSION_DEFAULTS = {
b"http://allmydata.org/tahoe/protocols/storage/v1" :
{ b"maximum-immutable-share-size": 2**32 - 1,
b"maximum-mutable-share-size": 2*1000*1000*1000, # maximum prior to v1.9.2
b"tolerates-immutable-read-overrun": False,
b"delete-mutable-shares-with-zero-length-writev": False,
b"available-space": None,
},
b"application-version": "unknown: no get_version()",
}
VERSION_DEFAULTS = UnicodeKeyDict({
"http://allmydata.org/tahoe/protocols/storage/v1" :
UnicodeKeyDict({ "maximum-immutable-share-size": 2**32 - 1,
"maximum-mutable-share-size": 2*1000*1000*1000, # maximum prior to v1.9.2
"tolerates-immutable-read-overrun": False,
"delete-mutable-shares-with-zero-length-writev": False,
"available-space": None,
}),
"application-version": "unknown: no get_version()",
})
def __init__(self, server_id, ann, tub_maker, handler_overrides, node_config, config=StorageClientConfig()):
service.MultiService.__init__(self)
@ -754,7 +767,7 @@ class NativeStorageServer(service.MultiService):
# Nope
pass
else:
if isinstance(furl, unicode):
if isinstance(furl, str):
furl = furl.encode("utf-8")
# See comment above for the _storage_from_foolscap_plugin case
# about passing in get_rref.
@ -829,7 +842,7 @@ class NativeStorageServer(service.MultiService):
version = self.get_version()
if version is None:
return None
protocol_v1_version = version.get(b'http://allmydata.org/tahoe/protocols/storage/v1', {})
protocol_v1_version = version.get('http://allmydata.org/tahoe/protocols/storage/v1', UnicodeKeyDict())
available_space = protocol_v1_version.get('available-space')
if available_space is None:
available_space = protocol_v1_version.get('maximum-immutable-share-size', None)
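
# A small worked example of the fallback above, with a hand-built version
# dict in the shape the diff shows (the numbers are the VERSION_DEFAULTS
# values, not real server data): "available-space" is preferred, otherwise
# the lookup falls back to "maximum-immutable-share-size".
version = {
    "http://allmydata.org/tahoe/protocols/storage/v1": {
        "maximum-immutable-share-size": 2**32 - 1,
    },
    "application-version": "unknown: no get_version()",
}
v1 = version.get("http://allmydata.org/tahoe/protocols/storage/v1", {})
space = v1.get("available-space")
if space is None:
    space = v1.get("maximum-immutable-share-size", None)
assert space == 2**32 - 1
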


@ -230,16 +230,16 @@ class UseNode(object):
def setUp(self):
def format_config_items(config):
return b"\n".join(
b" = ".join((key, value))
return "\n".join(
" = ".join((key, value))
for (key, value)
in config.items()
)
if self.plugin_config is None:
plugin_config_section = b""
plugin_config_section = ""
else:
plugin_config_section = b"""
plugin_config_section = """
[storageclient.plugins.{storage_plugin}]
{config}
""".format(


@ -1,3 +1,4 @@
from six import ensure_str
__all__ = [
"do_http",
@ -36,6 +37,14 @@ from ..webish import (
TahoeLAFSRequest,
)
class VerboseError(Error):
"""Include the HTTP body response too."""
def __str__(self):
return Error.__str__(self) + " " + ensure_str(self.response)
@inlineCallbacks
def do_http(method, url, **kwargs):
response = yield treq.request(method, url, persistent=False, **kwargs)
@ -43,7 +52,7 @@ def do_http(method, url, **kwargs):
# TODO: replace this with response.fail_for_status when
# https://github.com/twisted/treq/pull/159 has landed
if 400 <= response.code < 600:
raise Error(response.code, response=body)
raise VerboseError(response.code, response=body)
returnValue(body)


@ -6,6 +6,7 @@ Tools aimed at the interaction between tests and Eliot.
# Can't use `builtins.str` because it's not JSON encodable:
# `exceptions.TypeError: <class 'future.types.newstr.newstr'> is not JSON-encodeable`
from past.builtins import unicode as str
from future.utils import PY3
__all__ = [
"RUN_TEST",
@ -164,6 +165,9 @@ class EliotLoggedRunTest(object):
@eliot_logged_test
def run(self, result=None):
# Workaround for https://github.com/itamarst/eliot/issues/456
if PY3:
self.case.eliot_logger._validate_message = lambda *args, **kwargs: None
return self._run_tests_with_factory(
self.case,
self.handlers,


@ -3,11 +3,8 @@ A storage server plugin the test suite can use to validate the
functionality.
"""
from future.utils import native_str
from json import (
dumps,
)
from future.utils import native_str, native_str_to_bytes
from six import ensure_str
import attr
@ -35,6 +32,9 @@ from allmydata.interfaces import (
from allmydata.client import (
AnnounceableStorageServer,
)
from allmydata.util.jsonbytes import (
dumps,
)
class RIDummy(RemoteInterface):
@ -84,8 +84,8 @@ class DummyStorage(object):
"""
items = configuration.items(self._client_section_name, [])
resource = Data(
dumps(dict(items)),
b"text/json",
native_str_to_bytes(dumps(dict(items))),
ensure_str("text/json"),
)
# Give it some dynamic stuff too.
resource.putChild(b"counter", GetCounter())
@ -102,7 +102,7 @@ class GetCounter(Resource, object):
value = 0
def render_GET(self, request):
self.value += 1
return dumps({"value": self.value})
return native_str_to_bytes(dumps({"value": self.value}))
@implementer(RIDummy)


@ -8,9 +8,12 @@ from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
from future.utils import PY2, PY3
if PY2:
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
# dict omitted to match dictutil.py.
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401
from unittest import skipIf
from twisted.trial import unittest
@ -88,3 +91,80 @@ class DictUtil(unittest.TestCase):
self.failUnlessEqual(sorted(d.keys()), ["one","two"])
self.failUnlessEqual(d["one"], 1)
self.failUnlessEqual(d.get_aux("one"), None)
class TypedKeyDict(unittest.TestCase):
"""Tests for dictionaries that limit keys."""
@skipIf(PY2, "Python 2 doesn't have issues mixing bytes and unicode.")
def setUp(self):
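# skipIf on setUp makes it raise SkipTest on Python 2, which skips every test in this class there.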
pass
def test_bytes(self):
"""BytesKeyDict is limited to just byte keys."""
self.assertRaises(TypeError, dictutil.BytesKeyDict, {u"hello": 123})
d = dictutil.BytesKeyDict({b"123": 200})
with self.assertRaises(TypeError):
d[u"hello"] = "blah"
with self.assertRaises(TypeError):
d[u"hello"]
with self.assertRaises(TypeError):
del d[u"hello"]
with self.assertRaises(TypeError):
d.setdefault(u"hello", "123")
with self.assertRaises(TypeError):
d.get(u"xcd")
# Byte keys are fine:
self.assertEqual(d, {b"123": 200})
d[b"456"] = 400
self.assertEqual(d[b"456"], 400)
del d[b"456"]
self.assertEqual(d.get(b"456", 50), 50)
self.assertEqual(d.setdefault(b"456", 300), 300)
self.assertEqual(d[b"456"], 300)
def test_unicode(self):
"""UnicodeKeyDict is limited to just unicode keys."""
self.assertRaises(TypeError, dictutil.UnicodeKeyDict, {b"hello": 123})
d = dictutil.UnicodeKeyDict({u"123": 200})
with self.assertRaises(TypeError):
d[b"hello"] = "blah"
with self.assertRaises(TypeError):
d[b"hello"]
with self.assertRaises(TypeError):
del d[b"hello"]
with self.assertRaises(TypeError):
d.setdefault(b"hello", "123")
with self.assertRaises(TypeError):
d.get(b"xcd")
# Unicode keys are fine:
self.assertEqual(d, {u"123": 200})
d[u"456"] = 400
self.assertEqual(d[u"456"], 400)
del d[u"456"]
self.assertEqual(d.get(u"456", 50), 50)
self.assertEqual(d.setdefault(u"456", 300), 300)
self.assertEqual(d[u"456"], 300)
class TypedKeyDictPython2(unittest.TestCase):
"""Tests for dictionaries that limit keys on Python 2."""
@skipIf(PY3, "Testing Python 2 behavior.")
def test_python2(self):
"""
On Python2, BytesKeyDict and UnicodeKeyDict are unnecessary, because
dicts can mix both without problem so you don't get confusing behavior
if you get the type wrong.
Eventually in a Python 3-only world mixing bytes and unicode will be
bad, thus the existence of these classes, but as we port there will be
situations where it's mixed on Python 2, which again is fine.
"""
self.assertIs(dictutil.UnicodeKeyDict, dict)
self.assertIs(dictutil.BytesKeyDict, dict)
# Demonstration of how bytes and unicode can be mixed:
d = {u"abc": 1}
self.assertEqual(d[b"abc"], 1)


@ -1,9 +1,23 @@
import hashlib
from mock import Mock
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import PY2
if PY2:
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
from six import ensure_text
from json import (
dumps,
loads,
)
import hashlib
from mock import Mock
from fixtures import (
TempDir,
)
@ -107,7 +121,7 @@ class TestNativeStorageServer(unittest.TestCase):
ann = {"anonymous-storage-FURL": "pb://w2hqnbaa25yw4qgcvghl5psa3srpfgw3@tcp:127.0.0.1:51309/vucto2z4fxment3vfxbqecblbf6zyp6x",
"permutation-seed-base32": "w2hqnbaa25yw4qgcvghl5psa3srpfgw3",
}
nss = NativeStorageServer("server_id", ann, None, {}, EMPTY_CLIENT_CONFIG)
nss = NativeStorageServer(b"server_id", ann, None, {}, EMPTY_CLIENT_CONFIG)
self.assertEqual(nss.get_nickname(), "")
@ -123,7 +137,7 @@ class GetConnectionStatus(unittest.TestCase):
"""
# Pretty hard to recognize anything from an empty announcement.
ann = {}
nss = NativeStorageServer("server_id", ann, Tub, {}, EMPTY_CLIENT_CONFIG)
nss = NativeStorageServer(b"server_id", ann, Tub, {}, EMPTY_CLIENT_CONFIG)
nss.start_connecting(lambda: None)
connection_status = nss.get_connection_status()
self.assertTrue(IConnectionStatus.providedBy(connection_status))
@ -271,7 +285,7 @@ class PluginMatchedAnnouncement(SyncTestCase):
"""
yield self.make_node(
introducer_furl=SOME_FURL,
storage_plugin=b"tahoe-lafs-dummy-v1",
storage_plugin="tahoe-lafs-dummy-v1",
plugin_config=None,
)
server_id = b"v0-abcdef"
@ -295,9 +309,9 @@ class PluginMatchedAnnouncement(SyncTestCase):
configuration is matched and the plugin's storage client is used.
"""
plugin_config = {
b"abc": b"xyz",
"abc": "xyz",
}
plugin_name = b"tahoe-lafs-dummy-v1"
plugin_name = "tahoe-lafs-dummy-v1"
yield self.make_node(
introducer_furl=SOME_FURL,
storage_plugin=plugin_name,
@ -348,7 +362,7 @@ class PluginMatchedAnnouncement(SyncTestCase):
An announcement that could be matched by a plugin that is enabled with no
configuration is matched and the plugin's storage client is used.
"""
plugin_name = b"tahoe-lafs-dummy-v1"
plugin_name = "tahoe-lafs-dummy-v1"
yield self.make_node(
introducer_furl=SOME_FURL,
storage_plugin=plugin_name,
@ -403,7 +417,7 @@ class FoolscapStorageServers(unittest.TestCase):
verifyObject(
IFoolscapStorageServer,
_FoolscapStorage.from_announcement(
u"server-id",
b"server-id",
SOME_FURL,
{u"permutation-seed-base32": base32.b2a(b"permutationseed")},
NotStorageServer(),
@ -425,7 +439,7 @@ class StoragePluginWebPresence(AsyncTestCase):
self.port_assigner = SameProcessStreamEndpointAssigner()
self.port_assigner.setUp()
self.addCleanup(self.port_assigner.tearDown)
self.storage_plugin = b"tahoe-lafs-dummy-v1"
self.storage_plugin = u"tahoe-lafs-dummy-v1"
from twisted.internet import reactor
_, port_endpoint = self.port_assigner.assign(reactor)
@ -436,15 +450,15 @@ class StoragePluginWebPresence(AsyncTestCase):
self.basedir.child(u"private").makedirs()
self.node_fixture = self.useFixture(UseNode(
plugin_config={
b"web": b"1",
"web": "1",
},
node_config={
b"tub.location": b"127.0.0.1:1",
b"web.port": port_endpoint,
"tub.location": "127.0.0.1:1",
"web.port": ensure_text(port_endpoint),
},
storage_plugin=self.storage_plugin,
basedir=self.basedir,
introducer_furl=SOME_FURL,
introducer_furl=ensure_text(SOME_FURL),
))
self.node = yield self.node_fixture.create_node()
self.webish = self.node.getServiceNamed(WebishServer.name)
@ -461,8 +475,8 @@ class StoragePluginWebPresence(AsyncTestCase):
port=self.port,
plugin_name=self.storage_plugin,
).encode("utf-8")
result = yield do_http(b"get", url)
self.assertThat(result, Equals(dumps({b"web": b"1"})))
result = yield do_http("get", url)
self.assertThat(loads(result), Equals({"web": "1"}))
@inlineCallbacks
def test_plugin_resource_persistent_across_requests(self):
@ -476,13 +490,13 @@ class StoragePluginWebPresence(AsyncTestCase):
port=self.port,
path=(
u"storage-plugins",
self.storage_plugin.decode("utf-8"),
self.storage_plugin,
u"counter",
),
).to_text().encode("utf-8")
values = {
loads((yield do_http(b"get", url)))[u"value"],
loads((yield do_http(b"get", url)))[u"value"],
loads((yield do_http("get", url)))[u"value"],
loads((yield do_http("get", url)))[u"value"],
}
self.assertThat(
values,
@ -504,16 +518,16 @@ class TestStorageFarmBroker(unittest.TestCase):
def test_static_servers(self):
broker = make_broker()
key_s = 'v0-1234-1'
servers_yaml = b"""\
key_s = b'v0-1234-1'
servers_yaml = """\
storage:
v0-1234-1:
ann:
anonymous-storage-FURL: {furl}
permutation-seed-base32: aaaaaaaaaaaaaaaaaaaaaaaa
""".format(furl=SOME_FURL)
""".format(furl=SOME_FURL.decode("utf-8"))
servers = yamlutil.safe_load(servers_yaml)
permseed = base32.a2b("aaaaaaaaaaaaaaaaaaaaaaaa")
permseed = base32.a2b(b"aaaaaaaaaaaaaaaaaaaaaaaa")
broker.set_static_servers(servers["storage"])
self.failUnlessEqual(len(broker._static_server_ids), 1)
s = broker.servers[key_s]
@ -527,7 +541,7 @@ storage:
ann2 = {
"service-name": "storage",
"anonymous-storage-FURL": "pb://{}@nowhere/fake2".format(base32.b2a(str(1))),
"anonymous-storage-FURL": "pb://{}@nowhere/fake2".format(base32.b2a(b"1")),
"permutation-seed-base32": "bbbbbbbbbbbbbbbbbbbbbbbb",
}
broker._got_announcement(key_s, ann2)
@ -537,8 +551,8 @@ storage:
def test_static_permutation_seed_pubkey(self):
broker = make_broker()
server_id = "v0-4uazse3xb6uu5qpkb7tel2bm6bpea4jhuigdhqcuvvse7hugtsia"
k = "4uazse3xb6uu5qpkb7tel2bm6bpea4jhuigdhqcuvvse7hugtsia"
server_id = b"v0-4uazse3xb6uu5qpkb7tel2bm6bpea4jhuigdhqcuvvse7hugtsia"
k = b"4uazse3xb6uu5qpkb7tel2bm6bpea4jhuigdhqcuvvse7hugtsia"
ann = {
"anonymous-storage-FURL": SOME_FURL,
}
@ -548,8 +562,8 @@ storage:
def test_static_permutation_seed_explicit(self):
broker = make_broker()
server_id = "v0-4uazse3xb6uu5qpkb7tel2bm6bpea4jhuigdhqcuvvse7hugtsia"
k = "w5gl5igiexhwmftwzhai5jy2jixn7yx7"
server_id = b"v0-4uazse3xb6uu5qpkb7tel2bm6bpea4jhuigdhqcuvvse7hugtsia"
k = b"w5gl5igiexhwmftwzhai5jy2jixn7yx7"
ann = {
"anonymous-storage-FURL": SOME_FURL,
"permutation-seed-base32": k,
@ -560,7 +574,7 @@ storage:
def test_static_permutation_seed_hashed(self):
broker = make_broker()
server_id = "unparseable"
server_id = b"unparseable"
ann = {
"anonymous-storage-FURL": SOME_FURL,
}
@ -591,10 +605,10 @@ storage:
}
def add_one_server(x):
data["anonymous-storage-FURL"] = "pb://{}@nowhere/fake".format(base32.b2a(str(x)))
data["anonymous-storage-FURL"] = b"pb://%s@nowhere/fake" % (base32.b2a(b"%d" % x),)
tub = Mock()
new_tubs.append(tub)
got_announcement('v0-1234-{}'.format(x), data)
got_announcement(b'v0-1234-%d' % x, data)
self.assertEqual(tub.mock_calls[-1][0], 'connectTo')
got_connection = tub.mock_calls[-1][1][1]
rref = Mock()


@ -54,6 +54,7 @@ PORTED_MODULES = [
"allmydata.introducer.interfaces",
"allmydata.monitor",
"allmydata.node",
"allmydata.storage_client",
"allmydata.storage.common",
"allmydata.storage.crawler",
"allmydata.storage.expirer",
@ -131,6 +132,7 @@ PORTED_TEST_MODULES = [
"allmydata.test.test_spans",
"allmydata.test.test_statistics",
"allmydata.test.test_storage",
"allmydata.test.test_storage_client",
"allmydata.test.test_storage_web",
"allmydata.test.test_time_format",
"allmydata.test.test_upload",


@ -14,6 +14,7 @@ if PY2:
# subclassing dict, so we'd end up exposing Python 3 dict APIs to lots of
# code that doesn't support it.
from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401
from six import ensure_str
class DictOfSets(dict):
@ -76,3 +77,54 @@ class AuxValueDict(dict):
have an auxvalue."""
super(AuxValueDict, self).__setitem__(key, value)
self.auxilliary[key] = auxilliary
class _TypedKeyDict(dict):
"""Dictionary that enforces key type.
Doesn't override everything, but probably good enough to catch most
problems.
Subclass and override KEY_TYPE.
"""
KEY_TYPE = object
def __init__(self, *args, **kwargs):
dict.__init__(self, *args, **kwargs)
for key in self:
if not isinstance(key, self.KEY_TYPE):
raise TypeError("{} must be of type {}".format(
repr(key), self.KEY_TYPE))
def _make_enforcing_override(K, method_name):
def f(self, key, *args, **kwargs):
if not isinstance(key, self.KEY_TYPE):
raise TypeError("{} must be of type {}".format(
repr(key), self.KEY_TYPE))
return getattr(dict, method_name)(self, key, *args, **kwargs)
f.__name__ = ensure_str(method_name)
setattr(K, method_name, f)
for _method_name in ["__setitem__", "__getitem__", "setdefault", "get",
"__delitem__"]:
_make_enforcing_override(_TypedKeyDict, _method_name)
del _method_name
if PY2:
# No need for enforcement, can use either bytes or unicode as keys and it's
# fine.
BytesKeyDict = UnicodeKeyDict = dict
else:
class BytesKeyDict(_TypedKeyDict):
"""Keys should be bytes."""
KEY_TYPE = bytes
class UnicodeKeyDict(_TypedKeyDict):
"""Keys should be unicode strings."""
KEY_TYPE = str
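
# Quick illustration of the enforcement (assumes Python 3 and an importable
# allmydata package; the keys and values are made up): a str key raises
# TypeError instead of silently creating a second, differently-typed entry.
from allmydata.util.dictutil import BytesKeyDict

servers = BytesKeyDict({b"v0-abcdef": "a server"})
servers[b"v0-ghijk"] = "another server"  # bytes keys are accepted
try:
    servers["v0-abcdef"]  # str key is rejected
except TypeError as e:
    print(e)  # 'v0-abcdef' must be of type <class 'bytes'>
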


@ -29,7 +29,8 @@ class StoragePlugins(Resource, object):
"""
resources = self._client.get_client_storage_plugin_web_resources()
try:
result = resources[segment]
# Technically client could be using some other encoding?
result = resources[segment.decode("utf-8")]
except KeyError:
result = NoResource()
self.putChild(segment, result)


@ -57,7 +57,7 @@ class TahoeLAFSRequest(Request, object):
self.method, self.uri = command, path
self.clientproto = version
x = self.uri.split('?', 1)
x = self.uri.split(b'?', 1)
if len(x) == 1:
self.path = self.uri
@ -116,25 +116,25 @@ def _logFormatter(logDateTime, request):
# match apache formatting. TODO: when we move to DSA dirnodes and
# shorter caps, consider exposing a few characters of the cap, or
# maybe a few characters of its hash.
x = request.uri.split("?", 1)
x = request.uri.split(b"?", 1)
if len(x) == 1:
# no query args
path = request.uri
queryargs = ""
queryargs = b""
else:
path, queryargs = x
# there is a form handler which redirects POST /uri?uri=FOO into
# GET /uri/FOO so folks can paste in non-HTTP-prefixed uris. Make
# sure we censor these too.
if queryargs.startswith("uri="):
queryargs = "uri=[CENSORED]"
if queryargs.startswith(b"uri="):
queryargs = b"uri=[CENSORED]"
queryargs = "?" + queryargs
if path.startswith("/uri/"):
path = "/uri/[CENSORED]"
elif path.startswith("/file/"):
path = "/file/[CENSORED]"
elif path.startswith("/named/"):
path = "/named/[CENSORED]"
if path.startswith(b"/uri/"):
path = b"/uri/[CENSORED]"
elif path.startswith(b"/file/"):
path = b"/file/[CENSORED]"
elif path.startswith(b"/named/"):
path = b"/named/[CENSORED]"
uri = path + queryargs
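
# Standalone re-implementation of the censoring logic above, purely for
# illustration (the censor() helper is hypothetical; the real code works on a
# Twisted request object): byte-string URIs keep working on Python 3 and
# capability strings never reach the access log.
def censor(uri):
    x = uri.split(b"?", 1)
    if len(x) == 1:
        path, queryargs = uri, b""
    else:
        path, queryargs = x
        if queryargs.startswith(b"uri="):
            queryargs = b"uri=[CENSORED]"
        queryargs = b"?" + queryargs
    if path.startswith(b"/uri/"):
        path = b"/uri/[CENSORED]"
    elif path.startswith(b"/file/"):
        path = b"/file/[CENSORED]"
    elif path.startswith(b"/named/"):
        path = b"/named/[CENSORED]"
    return path + queryargs

assert censor(b"/uri/URI:DIR2:abc?uri=URI:DIR2:abc") == b"/uri/[CENSORED]?uri=[CENSORED]"
assert censor(b"/status") == b"/status"
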