Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2025-01-12 07:52:38 +00:00)
Merge branch '3611.web-python-3-part-2' into 3615.web-python-3-part-3
Commit: debda0d21a
newsfragments/3611.minor (new empty file)
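Most of the hunks below make the same kind of change: log and error messages that interpolate bytes (storage indexes, caps, request fields) switch from %s to %r, or decode the bytes explicitly, because Python 3 pushes bytes through str() and renders them as "b'...'", and the -bb interpreter flag (see the tox.ini hunk at the end) turns that implicit conversion into an error. A minimal illustration of the behaviour being worked around — not repository code, and `si` is a made-up value:

    si = b"dmwlw3fazq5m"         # pretend base32-encoded storage index, as bytes
    "SI %s" % si                 # "SI b'dmwlw3fazq5m'" on Python 3; BytesWarning error under -bb
    "SI %r" % si                 # "SI b'dmwlw3fazq5m'" via repr(); explicit and safe under -bb
    "SI %s" % str(si, "utf-8")   # "SI dmwlw3fazq5m"; decode when readable text is wanted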
@@ -63,7 +63,7 @@ class Blacklist(object):
         reason = self.entries.get(si, None)
         if reason is not None:
             # log this to logs/twistd.log, since web logs go there too
-            twisted_log.msg("blacklist prohibited access to SI %s: %s" %
+            twisted_log.msg("blacklist prohibited access to SI %r: %r" %
                             (base32.b2a(si), reason))
         return reason
 
@@ -328,7 +328,7 @@ class DirectoryNode(object):
         return "<%s %s-%s %s>" % (self.__class__.__name__,
                                   self.is_readonly() and "RO" or "RW",
                                   self.is_mutable() and "MUT" or "IMM",
-                                  hasattr(self, '_uri') and self._uri.abbrev())
+                                  hasattr(self, '_uri') and str(self._uri.abbrev(), "utf-8"))
 
     def get_size(self):
         """Return the size of our backing mutable file, in bytes, if we've
@@ -430,8 +430,8 @@ class IncompleteHashTree(CompleteBinaryTreeMixin, list):
         for i,h in new_hashes.items():
             if self[i]:
                 if self[i] != h:
-                    raise BadHashError("new hash %s does not match "
-                                       "existing hash %s at %s"
+                    raise BadHashError("new hash %r does not match "
+                                       "existing hash %r at %r"
                                        % (base32.b2a(h),
                                           base32.b2a(self[i]),
                                           self._name_hash(i)))
@@ -477,7 +477,7 @@ class Checker(log.PrefixingLogMixin):
                  monitor):
         assert precondition(isinstance(verifycap, CHKFileVerifierURI), verifycap, type(verifycap))
 
-        prefix = "%s" % base32.b2a(verifycap.get_storage_index()[:8])[:12]
+        prefix = str(base32.b2a(verifycap.get_storage_index()[:8])[:12], "utf-8")
         log.PrefixingLogMixin.__init__(self, facility="tahoe.immutable.checker", prefix=prefix)
 
         self._verifycap = verifycap
@@ -63,7 +63,7 @@ class SegmentFetcher(object):
         self._running = True
 
     def stop(self):
-        log.msg("SegmentFetcher(%s).stop" % self._node._si_prefix,
+        log.msg("SegmentFetcher(%r).stop" % self._node._si_prefix,
                 level=log.NOISY, parent=self._lp, umid="LWyqpg")
         self._cancel_all_requests()
         self._running = False
@@ -241,7 +241,7 @@ class SegmentFetcher(object):
         # called by Shares, in response to our s.send_request() calls.
         if not self._running:
             return
-        log.msg("SegmentFetcher(%s)._block_request_activity: %s -> %s" %
+        log.msg("SegmentFetcher(%r)._block_request_activity: %s -> %r" %
                 (self._node._si_prefix, repr(share), state),
                 level=log.NOISY, parent=self._lp, umid="vilNWA")
         # COMPLETE, CORRUPT, DEAD, BADSEGNUM are terminal. Remove the share
@@ -125,7 +125,7 @@ class DownloadNode(object):
         self.ciphertext_hash_tree_leaves = self.guessed_num_segments
 
     def __repr__(self):
-        return "ImmutableDownloadNode(%s)" % (self._si_prefix,)
+        return "ImmutableDownloadNode(%r)" % (self._si_prefix,)
 
     def stop(self):
         # called by the Terminator at shutdown, mostly for tests
@@ -106,7 +106,7 @@ class Encoder(object):
 
     def __repr__(self):
         if hasattr(self, "_storage_index"):
-            return "<Encoder for %s>" % si_b2a(self._storage_index)[:5]
+            return "<Encoder for %r>" % si_b2a(self._storage_index)[:5]
         return "<Encoder for unknown storage index>"
 
     def log(self, *args, **kwargs):
@@ -437,7 +437,7 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
         self._reactor = reactor
 
     def __repr__(self):
-        return "<Tahoe2ServerSelector for upload %s>" % self.upload_id
+        return "<Tahoe2ServerSelector for upload %r>" % self.upload_id
 
     def _create_trackers(self, candidate_servers, allocated_size,
                          file_renewal_secret, file_cancel_secret, create_server_tracker):
@@ -1314,7 +1314,7 @@ class CHKUploader(object):
         storage_index = encoder.get_param("storage_index")
         self._storage_index = storage_index
         upload_id = si_b2a(storage_index)[:5]
-        self.log("using storage index %s" % upload_id)
+        self.log("using storage index %r" % upload_id)
         server_selector = Tahoe2ServerSelector(
             upload_id,
             self._log_number,
@@ -127,7 +127,7 @@ class Publish(object):
         self._servermap = servermap
         self._storage_index = self._node.get_storage_index()
         self._log_prefix = prefix = si_b2a(self._storage_index)[:5]
-        num = self.log("Publish(%s): starting" % prefix, parent=None)
+        num = self.log("Publish(%r): starting" % prefix, parent=None)
         self._log_number = num
         self._running = True
         self._first_write_error = None
@@ -122,7 +122,7 @@ class Retrieve(object):
         _assert(self._node.get_readkey())
         self._last_failure = None
         prefix = si_b2a(self._storage_index)[:5]
-        self._log_number = log.msg("Retrieve(%s): starting" % prefix)
+        self._log_number = log.msg("Retrieve(%r): starting" % prefix)
         self._running = True
         self._decoding = False
         self._bad_shares = set()
@@ -275,7 +275,7 @@ class ServerMap(object):
         """Take a versionid, return a string that describes it."""
         (seqnum, root_hash, IV, segsize, datalength, k, N, prefix,
          offsets_tuple) = verinfo
-        return "seq%d-%s" % (seqnum, base32.b2a(root_hash)[:4])
+        return "seq%d-%s" % (seqnum, unicode(base32.b2a(root_hash)[:4], "utf-8"))
 
     def summarize_versions(self):
         """Return a string describing which versions we know about."""
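This hunk and several below wrap bytes in unicode(..., "utf-8"). The name is assumed to be past.builtins.unicode (a later hunk adds `from past.builtins import unicode`), which is plain str on Python 3 and the unicode type on Python 2, so the call is just an explicit decode on both. A rough sketch of that equivalence, with a made-up value:

    from past.builtins import unicode          # str on Python 3, unicode on Python 2
    root_hash_b32 = b"yyyq"                    # pretend 4-character base32 prefix
    label = "seq%d-%s" % (7, unicode(root_hash_b32, "utf-8"))   # "seq7-yyyq" under either interpreter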
@@ -868,7 +868,7 @@ class ServermapUpdater(object):
         # ok, it's a valid verinfo. Add it to the list of validated
         # versions.
         self.log(" found valid version %d-%s from %s-sh%d: %d-%d/%d/%d"
-                 % (seqnum, base32.b2a(root_hash)[:4],
+                 % (seqnum, unicode(base32.b2a(root_hash)[:4], "utf-8"),
                     server.get_name(), shnum,
                     k, n, segsize, datalen),
                  parent=lp)
@@ -915,7 +915,7 @@ def create_main_tub(config, tub_options,
         tubport,
         location,
     )
-    log.msg("Tub location set to %s" % (location,))
+    log.msg("Tub location set to %r" % (location,))
     return tub
 
 
@@ -271,7 +271,7 @@ class StorageServer(service.MultiService, Referenceable):
         si_dir = storage_index_to_dir(storage_index)
         si_s = si_b2a(storage_index)
 
-        log.msg("storage: allocate_buckets %s" % si_s)
+        log.msg("storage: allocate_buckets %r" % si_s)
 
         # in this implementation, the lease information (including secrets)
         # goes into the share files themselves. It could also be put into a
@@ -397,7 +397,7 @@ class StorageServer(service.MultiService, Referenceable):
         start = time.time()
         self.count("get")
         si_s = si_b2a(storage_index)
-        log.msg("storage: get_buckets %s" % si_s)
+        log.msg("storage: get_buckets %r" % si_s)
         bucketreaders = {} # k: sharenum, v: BucketReader
         for shnum, filename in self._get_bucket_shares(storage_index):
             bucketreaders[shnum] = BucketReader(self, filename,
@@ -602,7 +602,7 @@ class StorageServer(service.MultiService, Referenceable):
         start = time.time()
         self.count("writev")
         si_s = si_b2a(storage_index)
-        log.msg("storage: slot_writev %s" % si_s)
+        log.msg("storage: slot_writev %r" % si_s)
         si_dir = storage_index_to_dir(storage_index)
         (write_enabler, renew_secret, cancel_secret) = secrets
         bucketdir = os.path.join(self.sharedir, si_dir)
@@ -669,7 +669,7 @@ class StorageServer(service.MultiService, Referenceable):
         start = time.time()
         self.count("readv")
         si_s = si_b2a(storage_index)
-        lp = log.msg("storage: slot_readv %s %s" % (si_s, shares),
+        lp = log.msg("storage: slot_readv %r %r" % (si_s, shares),
                      facility="tahoe.storage", level=log.OPERATIONAL)
         si_dir = storage_index_to_dir(storage_index)
         # shares exist if there is a file for them
@@ -703,7 +703,7 @@ class StorageServer(service.MultiService, Referenceable):
         si_s = si_b2a(storage_index)
         # windows can't handle colons in the filename
         fn = os.path.join(self.corruption_advisory_dir,
-                          "%s--%s-%d" % (now, si_s, shnum)).replace(":","")
+                          "%s--%s-%d" % (now, str(si_s, "utf-8"), shnum)).replace(":","")
         with open(fn, "w") as f:
             f.write("report: Share Corruption\n")
             f.write("type: %s\n" % bytes_to_native_str(share_type))
@@ -863,7 +863,7 @@ class WebErrorMixin(object):
             response_body = f.value.response
             if response_substring:
                 self.failUnless(response_substring in response_body,
-                                "%s: response substring '%s' not in '%s'"
+                                "%r: response substring %r not in %r"
                                 % (which, response_substring, response_body))
             return response_body
         d = defer.maybeDeferred(callable, *args, **kwargs)
@@ -56,7 +56,7 @@ def do_http(method, url, **kwargs):
     # https://github.com/twisted/treq/pull/159 has landed
     if 400 <= response.code < 600:
         raise VerboseError(
-            response.code, response="For request {} to {}, got: {}".format(
+            response.code, response="For request {!r} to {!r}, got: {!r}".format(
                 method, url, body))
     returnValue(body)
 
@@ -11,7 +11,7 @@ from __future__ import unicode_literals
 from future.utils import native_str, PY2, bytes_to_native_str
 if PY2:
     from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+from six import ensure_str
 
 import time
 import os.path
@@ -794,7 +794,7 @@ class Server(unittest.TestCase):
         reports = os.listdir(reportdir)
         self.failUnlessEqual(len(reports), 1)
         report_si0 = reports[0]
-        self.failUnlessIn(native_str(si0_s), report_si0)
+        self.failUnlessIn(ensure_str(si0_s), report_si0)
         f = open(os.path.join(reportdir, report_si0), "rb")
         report = f.read()
         f.close()
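ensure_str here is six.ensure_str (added by the import hunk just above). As I understand six, it coerces bytes or text to the native str type — decoding on Python 3, keeping or encoding on Python 2 — so the substring check works against the str filenames returned by os.listdir(). A small sketch under that assumption:

    from six import ensure_str
    ensure_str(b"si0")    # "si0" (bytes decoded to str on Python 3; already str on Python 2)
    ensure_str(u"si0")    # "si0" (text passed through on Python 3; encoded to str on Python 2)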
@@ -628,7 +628,7 @@ class WebMixin(TimezoneMixin):
                 if response_substring:
                     self.failUnlessIn(response_substring, res.value.response, which)
             else:
-                self.fail("%s was supposed to raise %s, not get '%s'" %
+                self.fail("%r was supposed to raise %s, not get %r" %
                           (which, expected_failure, res))
 
     def shouldFail2(self, expected_failure, which, substring,
@@ -642,7 +642,7 @@ class WebMixin(TimezoneMixin):
             res.trap(expected_failure)
             if substring:
                 self.failUnlessIn(substring, str(res),
-                                  "'%s' not in '%s' (response is '%s') for test '%s'" % \
+                                  "%r not in %r (response is %r) for test %r" % \
                                   (substring, str(res),
                                    getattr(res.value, "response", ""),
                                    which))
@@ -651,11 +651,11 @@ class WebMixin(TimezoneMixin):
                 if isinstance(response, bytes):
                     response = str(response, "utf-8")
                 self.failUnlessIn(response_substring, response,
-                                  "'%s' not in '%s' for test '%s'" % \
+                                  "%r not in %r for test %r" % \
                                   (response_substring, res.value.response,
                                    which))
             else:
-                self.fail("%s was supposed to raise %s, not get '%s'" %
+                self.fail("%r was supposed to raise %s, not get %r" %
                           (which, expected_failure, res))
         d.addBoth(done)
         return d
@@ -243,7 +243,7 @@ class WriteableSSKFileURI(_BaseURI):
     def init_from_string(cls, uri):
         mo = cls.STRING_RE.search(uri)
         if not mo:
-            raise BadURIError("'%s' doesn't look like a %s cap" % (uri, cls))
+            raise BadURIError("%r doesn't look like a %s cap" % (uri, cls))
         return cls(base32.a2b(mo.group(1)), base32.a2b(mo.group(2)))
 
     def to_string(self):
@@ -253,7 +253,7 @@ class WriteableSSKFileURI(_BaseURI):
                 base32.b2a(self.fingerprint))
 
     def __repr__(self):
-        return "<%s %s>" % (self.__class__.__name__, self.abbrev())
+        return "<%s %r>" % (self.__class__.__name__, self.abbrev())
 
     def abbrev(self):
         return base32.b2a(self.writekey[:5])
@@ -25,6 +25,7 @@ else:
     def backwardscompat_bytes(b):
         return b
     maketrans = bytes.maketrans
+from typing import Optional
 
 import base64
 
@@ -71,7 +72,7 @@ BASE32STR_3bytes = backwardscompat_bytes(BASE32CHAR+b'{4}'+BASE32CHAR_4bits)
 BASE32STR_4bytes = backwardscompat_bytes(BASE32CHAR+b'{6}'+BASE32CHAR_2bits)
 BASE32STR_anybytes = backwardscompat_bytes(bytes(b'((?:%s{8})*') % (BASE32CHAR,) + bytes(b"(?:|%s|%s|%s|%s))") % (BASE32STR_1byte, BASE32STR_2bytes, BASE32STR_3bytes, BASE32STR_4bytes))
 
-def b2a(os):
+def b2a(os): # type: (bytes) -> bytes
     """
     @param os the data to be encoded (as bytes)
 
@@ -79,9 +80,10 @@ def b2a(os):
     """
     return base64.b32encode(os).rstrip(b"=").lower()
 
-def b2a_or_none(os):
+def b2a_or_none(os): # type: (Optional[bytes]) -> Optional[bytes]
     if os is not None:
         return b2a(os)
+    return None
 
 # b2a() uses the minimal number of quintets sufficient to encode the binary
 # input. It just so happens that the relation is like this (everything is
@@ -129,7 +131,7 @@ def could_be_base32_encoded(s, s8=s8, tr=bytes.translate, identitytranstable=ide
     s = bytes(s) # On Python 2, make sure we're using modern bytes
     return s8[len(s)%8][s[-1]] and not tr(s, identitytranstable, chars)
 
-def a2b(cs):
+def a2b(cs): # type: (bytes) -> bytes
     """
     @param cs the base-32 encoded data (as bytes)
     """
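The annotations added to b2a, b2a_or_none, and a2b above are PEP 484 type comments rather than inline annotations, which keeps the module importable on Python 2 while still giving a type checker something to verify. A hypothetical helper in the same style (not part of the change):

    from typing import Optional

    def a2b_or_none(cs):  # type: (Optional[bytes]) -> Optional[bytes]
        # Decode when given a value, otherwise pass None through,
        # mirroring b2a_or_none from the hunk above.
        if cs is not None:
            return a2b(cs)
        return None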
@@ -1,27 +0,0 @@
-"""
-Implement a work-around for <https://github.com/crossbario/autobahn-python/issues/1151>.
-"""
-
-
-from __future__ import (
-    print_function,
-    unicode_literals,
-    absolute_import,
-    division,
-)
-
-
-from autobahn.websocket.protocol import WebSocketProtocol
-_originalConnectionLost = WebSocketProtocol._connectionLost
-
-def _connectionLost(self, reason):
-    if self.openHandshakeTimeoutCall is not None:
-        self.openHandshakeTimeoutCall.cancel()
-        self.openHandshakeTimeoutCall = None
-    return _originalConnectionLost(self, reason)
-
-def patch():
-    """
-    Monkey-patch the proposed fix into place.
-    """
-    WebSocketProtocol._connectionLost = _connectionLost
@@ -156,6 +156,9 @@ class ResultsBase(object):
             shares_on_server.add(s, shareid)
             shareid_s = ""
             if i == 0:
+                if isinstance(shareid, bytes):
+                    shareid_s = str(shareid, "utf-8")
+                else:
                     shareid_s = str(shareid)
             d = tags.tr(tags.td(shareid_s),
                         tags.td(tags.div(s.get_nickname(), class_="nickname"),
@@ -207,12 +210,12 @@ class ResultsBase(object):
         return [html.escape(w) for w in s]
 
     def _render_si_link(self, req, storage_index):
-        si_s = base32.b2a(storage_index)
-        ophandle = req.prepath[-1]
+        si_s = str(base32.b2a(storage_index), "utf-8")
+        ophandle = str(req.prepath[-1], "utf-8")
         target = "%s/operations/%s/%s" % (get_root(req), ophandle, si_s)
         output = get_arg(req, "output")
         if output:
-            target = target + "?output=%s" % output
+            target = target + "?output=" + str(output, "utf-8")
         return tags.a(si_s, href=target)
 
 
@@ -148,7 +148,7 @@ def get_format(req, default="CHK"):
     elif arg.upper() == b"MDMF":
         return "MDMF"
     else:
-        raise WebError("Unknown format: %s, I know CHK, SDMF, MDMF" % ensure_str(arg),
+        raise WebError("Unknown format: %s, I know CHK, SDMF, MDMF" % str(arg, "ascii"),
                        http.BAD_REQUEST)
 
 def get_mutable_type(file_format): # accepts result of get_format()
@@ -705,7 +705,7 @@ class DirectoryAsHTML(Element):
 
     @renderer
     def title(self, req, tag):
-        si_s = abbreviated_dirnode(self.node)
+        si_s = unicode(abbreviated_dirnode(self.node), "utf-8")
         header = ["Tahoe-LAFS - Directory SI=%s" % si_s]
         if self.node.is_unknown():
             header.append(" (unknown)")
@@ -719,7 +719,7 @@ class DirectoryAsHTML(Element):
 
     @renderer
     def header(self, req, tag):
-        si_s = abbreviated_dirnode(self.node)
+        si_s = unicode(abbreviated_dirnode(self.node), "utf-8")
         header = ["Tahoe-LAFS Directory SI=", tags.span(si_s, class_="data-chars")]
         if self.node.is_unknown():
             header.append(" (unknown)")
@@ -1077,13 +1077,13 @@ class RenameForm(Element, object):
 
     @renderer
     def title(self, req, tag):
-        return tag("Directory SI={}".format(abbreviated_dirnode(self.original)))
+        return tag("Directory SI={}".format(unicode(abbreviated_dirnode(self.original), "ascii")))
 
     @renderer
     def header(self, req, tag):
         header = [
             "Rename "
-            "in directory SI=%s" % abbreviated_dirnode(self.original),
+            "in directory SI=%s" % unicode(abbreviated_dirnode(self.original), "ascii"),
         ]
 
         if self.original.is_readonly():
@@ -1194,7 +1194,7 @@ class ManifestElement(ReloadableMonitorElement):
         si = self.monitor.origin_si
         if not si:
             return "<LIT>"
-        return base32.b2a(si)[:6]
+        return unicode(base32.b2a(si)[:6], "utf-8")
 
     @renderer
     def title(self, req, tag):
@@ -1,5 +1,4 @@
 from past.builtins import unicode, long
-from six import ensure_str
 
 from twisted.web import http, static
 from twisted.internet import defer
@@ -131,7 +130,7 @@ class PlaceHolderNodeHandler(Resource, ReplaceMeMixin):
         if t == b"uri":
             return self.replace_me_with_a_childcap(req, self.client, replace)
 
-        raise WebError("PUT to a file: bad t=%s" % ensure_str(t))
+        raise WebError("PUT to a file: bad t=%s" % unicode(t, "utf-8"))
 
     @render_exception
     def render_POST(self, req):
@@ -148,7 +147,7 @@ class PlaceHolderNodeHandler(Resource, ReplaceMeMixin):
             # t=mkdir is handled in DirectoryNodeHandler._POST_mkdir, so
             # there are no other t= values left to be handled by the
             # placeholder.
-            raise WebError("POST to a file: bad t=%s" % t)
+            raise WebError("POST to a file: bad t=%s" % unicode(t, "utf-8"))
 
         return handle_when_done(req, d)
 
@@ -288,7 +287,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
             assert self.parentnode and self.name
             return self.replace_me_with_a_childcap(req, self.client, replace)
 
-        raise WebError("PUT to a file: bad t=%s" % ensure_str(t))
+        raise WebError("PUT to a file: bad t=%s" % unicode(t, "utf-8"))
 
     @render_exception
     def render_POST(self, req):
@@ -1,3 +1,4 @@
+from past.builtins import unicode
 
 import os
 from urllib.parse import quote as urlquote
@@ -46,7 +47,7 @@ class MoreInfoElement(Element):
 
     def abbrev(self, storage_index_or_none):
         if storage_index_or_none:
-            return base32.b2a(storage_index_or_none)[:6]
+            return unicode(base32.b2a(storage_index_or_none)[:6], "ascii")
         return "LIT file"
 
     def get_type(self):
@@ -21,11 +21,6 @@ from twisted.web.resource import (
     Resource,
 )
 
-# Hotfix work-around https://github.com/crossbario/autobahn-python/issues/1151
-from . import _autobahn_1151
-_autobahn_1151.patch()
-del _autobahn_1151
-
 
 class TokenAuthenticatedWebSocketServerProtocol(WebSocketServerProtocol):
     """
@@ -185,10 +185,10 @@ class FileHandler(resource.Resource, object):
             node = self.client.create_node_from_uri(name)
         except (TypeError, AssertionError):
             # I think this can no longer be reached
-            raise WebError("'%s' is not a valid file- or directory- cap"
+            raise WebError("%r is not a valid file- or directory- cap"
                            % name)
         if not IFileNode.providedBy(node):
-            raise WebError("'%s' is not a file-cap" % name)
+            raise WebError("%r is not a file-cap" % name)
         return filenode.FileNodeDownloadHandler(self.client, node)
 
     @render_exception
@@ -197,8 +197,8 @@ def _logFormatter(logDateTime, request):
     template = "web: %(clientip)s %(method)s %(uri)s %(code)s %(length)s"
     return template % dict(
         clientip=_get_client_ip(request),
-        method=request.method,
-        uri=uri,
+        method=str(request.method, "utf-8"),
+        uri=str(uri, "utf-8"),
         code=request.code,
         length=(request.sentLength or "-"),
         facility="tahoe.webish",
tox.ini (4 lines changed)
@@ -71,6 +71,10 @@ commands =
     coverage: coverage xml
     coverage: coverage report
 
+    # We also run tests with -bb to catch bugs like `"%s" % (some_bytes,)`.
+    # Eventually everything should run with this, but so far only ported
+    # some of the modules.
+    python -bb -m twisted.trial --rterrors allmydata.test.web
 
 [testenv:integration]
 setenv =
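The -bb flag is what makes the bytes-formatting bugs visible: -b tells Python to emit BytesWarning for implicit str(bytes) conversions and bytes/str comparisons, and doubling it promotes the warning to an error, so a stray "%s" % (some_bytes,) fails the test run instead of silently logging b'...'. Illustrative only, run under python -bb:

    some_bytes = b"abc"
    "%s" % (some_bytes,)   # raises BytesWarning: str() on a bytes instance
    "%r" % (some_bytes,)   # fine: "b'abc'"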