Merge branch 'master' into 3603.scripts
commit 0838133006

.github/workflows/ci.yml (4 changes)
@@ -6,6 +6,10 @@ on:
       - "master"
   pull_request:
 
+env:
+  # Tell Hypothesis which configuration we want it to use.
+  TAHOE_LAFS_HYPOTHESIS_PROFILE: "ci"
+
 jobs:
 
   coverage:
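Note: the new TAHOE_LAFS_HYPOTHESIS_PROFILE variable names a Hypothesis settings profile for the test run. As a minimal sketch of how such a profile is typically registered and selected (the actual profile definitions live in the test suite; the "ci" settings shown here are illustrative assumptions):

    import os
    from hypothesis import settings, HealthCheck

    # Register a "ci" profile with CI-friendly limits.
    settings.register_profile("ci", deadline=None,
                              suppress_health_check=[HealthCheck.too_slow])

    # Select the profile named by the environment, defaulting to "default".
    settings.load_profile(os.environ.get("TAHOE_LAFS_HYPOTHESIS_PROFILE", "default"))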
@@ -30,7 +30,7 @@ class Root(rend.Page):
 
 def run(portnum):
     root = Root()
-    root.putChild("tahoe.css", static.File("tahoe.css"))
+    root.putChild(b"tahoe.css", static.File("tahoe.css"))
     site = appserver.NevowSite(root)
     s = strports.service("tcp:%d" % portnum, site)
     s.startService()
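Note: on Python 3, Twisted's resource tree keys children by bytes, so the path segment passed to putChild() must be a bytes object; a str segment would register a child that never matches an incoming request path. A self-contained sketch of the same pattern using plain twisted.web (the code above is the older Nevow equivalent):

    from twisted.web.resource import Resource
    from twisted.web.static import File

    root = Resource()
    # The first argument is a bytes path segment, matching the fix above.
    root.putChild(b"tahoe.css", File("tahoe.css"))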
newsfragments/3605.minor (new empty file)
newsfragments/3606.minor (new empty file)
newsfragments/3607.minor (new empty file)
newsfragments/3608.minor (new empty file)
newsfragments/3611.minor (new empty file)
newsfragments/3613.minor (new empty file)
newsfragments/3615.minor (new empty file)
newsfragments/3617.minor (new empty file)
newsfragments/3618.minor (new empty file)
newsfragments/3620.minor (new empty file)

newsfragments/3623.minor (new file, 1 line)
@@ -0,0 +1 @@

newsfragments/3624.minor (new empty file)
setup.py (4 changes)

@@ -152,9 +152,7 @@ tor_requires = [
 
 i2p_requires = [
-    # txi2p has Python 3 support, but it's unreleased: https://github.com/str4d/txi2p/issues/10.
-    # URL lookups are in PEP-508 (via https://stackoverflow.com/a/54794506).
-    # Also see the comment in tor_requires.
-    "txi2p @ git+https://github.com/str4d/txi2p@0611b9a86172cb70d2f5e415a88eee9f230590b3#egg=txi2p",
+    "txi2p; python_version < '3.0'",
 ]
 
 if len(sys.argv) > 1 and sys.argv[1] == '--fakedependency':
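Note: the removed pin uses a PEP 508 "direct reference" (the "name @ URL" form the comments point to), while the replacement uses a PEP 508 environment marker. A hedged illustration of the two requirement styles, with values copied from the diff above:

    # Direct reference: install one exact git commit.
    requires_pinned = ["txi2p @ git+https://github.com/str4d/txi2p@0611b9a86172cb70d2f5e415a88eee9f230590b3#egg=txi2p"]

    # Environment marker: only require the package at all on Python 2.
    requires_py2_only = ["txi2p; python_version < '3.0'"]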
@@ -63,7 +63,7 @@ class Blacklist(object):
         reason = self.entries.get(si, None)
         if reason is not None:
             # log this to logs/twistd.log, since web logs go there too
-            twisted_log.msg("blacklist prohibited access to SI %s: %s" %
+            twisted_log.msg("blacklist prohibited access to SI %r: %r" %
                             (base32.b2a(si), reason))
         return reason
 
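Note: a large share of this commit converts log and repr format strings from %s to %r. On Python 3, interpolating bytes with %s into a text string calls str() on them, which produces the b'...' repr anyway and triggers BytesWarning under python -b; %r makes the intent explicit and behaves identically for str and bytes. For example:

    si = b"ystgj2mri"
    "SI %s" % si    # "SI b'ystgj2mri'" on Python 3, plus a BytesWarning under -b
    "SI %r" % si    # "SI b'ystgj2mri'" everywhere, with no warning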
@@ -1,3 +1,14 @@
+"""Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 from past.builtins import unicode
 
 from zope.interface import implementer
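Note: this header is the project's standard Python 2/3 porting boilerplate and recurs in every newly ported module below. On Python 2 it shadows the builtins with the `future` library's backports, which follow Python 3 semantics; on Python 3 the `if PY2:` branch is skipped entirely. A minimal standalone illustration of the mechanism:

    from future.utils import PY2
    if PY2:
        # Python 2 only: replace str/bytes with Python-3-semantics versions.
        from future.builtins import bytes, str  # noqa: F401

    # After this, constructors like bytes("abc", "utf-8") and
    # str(b"abc", "utf-8") behave identically on both interpreters.
    assert str(bytes("abc", "utf-8"), "utf-8") == "abc"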
@@ -1,3 +1,13 @@
+"""Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import os, time, tempfile
 from zope.interface import implementer
@@ -13,17 +23,17 @@ from twisted.python import log
 
 def get_memory_usage():
     # this is obviously linux-specific
-    stat_names = ("VmPeak",
-                  "VmSize",
-                  #"VmHWM",
-                  "VmData")
+    stat_names = (b"VmPeak",
+                  b"VmSize",
+                  #b"VmHWM",
+                  b"VmData")
     stats = {}
     try:
-        with open("/proc/self/status", "r") as f:
+        with open("/proc/self/status", "rb") as f:
             for line in f:
-                name, right = line.split(":",2)
+                name, right = line.split(b":",2)
                 if name in stat_names:
-                    assert right.endswith(" kB\n")
+                    assert right.endswith(b" kB\n")
                     right = right[:-4]
                     stats[name] = int(right) * 1024
     except:
@@ -34,8 +44,8 @@ def get_memory_usage():
 
 def log_memory_usage(where=""):
     stats = get_memory_usage()
-    log.msg("VmSize: %9d VmPeak: %9d %s" % (stats["VmSize"],
-                                            stats["VmPeak"],
+    log.msg("VmSize: %9d VmPeak: %9d %s" % (stats[b"VmSize"],
+                                            stats[b"VmPeak"],
                                             where))
 
 @implementer(IConsumer)
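Note: once /proc/self/status is opened in binary mode, everything compared against or split out of its lines has to be bytes too, which is why the stat names, the separator, and the " kB\n" suffix all gained b prefixes. The failure mode this avoids on Python 3:

    line = b"VmPeak:  12345 kB\n"   # what a binary-mode file yields
    line.split(b":")                 # [b"VmPeak", b"  12345 kB\n"]
    line.split(":")                  # TypeError: a bytes-like object is required, not 'str'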
@@ -65,7 +75,7 @@ class ControlServer(Referenceable, service.Service):
         tempdir = tempfile.mkdtemp()
         filename = os.path.join(tempdir, "data")
         f = open(filename, "wb")
-        block = "a" * 8192
+        block = b"a" * 8192
         while size > 0:
             l = min(size, 8192)
             f.write(block[:l])
@@ -126,7 +136,7 @@ class ControlServer(Referenceable, service.Service):
         server_name = server.get_longname()
         storage_server = server.get_storage_server()
         start = time.time()
-        d = storage_server.get_buckets("\x00" * 16)
+        d = storage_server.get_buckets(b"\x00" * 16)
         def _done(ignored):
             stop = time.time()
             elapsed = stop - start
@@ -138,7 +148,7 @@ class ControlServer(Referenceable, service.Service):
         d.addCallback(self._do_one_ping, everyone_left, results)
         def _average(res):
             averaged = {}
-            for server_name,times in results.iteritems():
+            for server_name,times in results.items():
                 averaged[server_name] = sum(times) / len(times)
             return averaged
         d.addCallback(_average)
@@ -168,19 +178,19 @@ class SpeedTest(object):
             fn = os.path.join(self.basedir, str(i))
             if os.path.exists(fn):
                 os.unlink(fn)
-            f = open(fn, "w")
+            f = open(fn, "wb")
             f.write(os.urandom(8))
             s -= 8
             while s > 0:
                 chunk = min(s, 4096)
-                f.write("\x00" * chunk)
+                f.write(b"\x00" * chunk)
                 s -= chunk
             f.close()
 
     def do_upload(self):
         d = defer.succeed(None)
         def _create_slot(res):
-            d1 = self.parent.create_mutable_file("")
+            d1 = self.parent.create_mutable_file(b"")
             def _created(n):
                 self._n = n
             d1.addCallback(_created)
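Note: the iteritems() -> items() change in _average() is the standard dict-API port: dict.iteritems() does not exist on Python 3, while items() returns a lazy view there (and a list on Python 2), either of which is fine for a single pass. For example:

    results = {b"server-a": [0.01, 0.03], b"server-b": [0.02]}
    for server_name, times in results.items():   # valid on both Pythons
        print(server_name, sum(times) / len(times))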
@@ -30,5 +30,5 @@ def remove_prefix(s_bytes, prefix):
     if s_bytes.startswith(prefix):
         return s_bytes[len(prefix):]
     raise BadPrefixError(
-        "did not see expected '{}' prefix".format(prefix)
+        "did not see expected '{!r}' prefix".format(prefix)
     )
@@ -74,6 +74,13 @@ ADD_FILE = ActionType(
     u"Add a new file as a child of a directory.",
 )
 
+
+class _OnlyFiles(object):
+    """Marker for replacement option of only replacing files."""
+
+ONLY_FILES = _OnlyFiles()
+
+
 def update_metadata(metadata, new_metadata, now):
     """Updates 'metadata' in-place with the information in 'new_metadata'.
 
@@ -175,11 +182,16 @@ class MetadataSetter(object):
 
 class Adder(object):
     def __init__(self, node, entries=None, overwrite=True, create_readonly_node=None):
+        """
+        :param overwrite: Either True (allow overwriting anything existing),
+            False (don't allow overwriting), or ONLY_FILES (only files can be
+            overwritten).
+        """
         self.node = node
         if entries is None:
             entries = {}
         precondition(isinstance(entries, dict), entries)
-        precondition(overwrite in (True, False, "only-files"), overwrite)
+        precondition(overwrite in (True, False, ONLY_FILES), overwrite)
         # keys of 'entries' may not be normalized.
         self.entries = entries
         self.overwrite = overwrite
@@ -205,7 +217,7 @@ class Adder(object):
             if not self.overwrite:
                 raise ExistingChildError("child %s already exists" % quote_output(name, encoding='utf-8'))
 
-            if self.overwrite == "only-files" and IDirectoryNode.providedBy(children[name][0]):
+            if self.overwrite == ONLY_FILES and IDirectoryNode.providedBy(children[name][0]):
                 raise ExistingChildError("child %s already exists as a directory" % quote_output(name, encoding='utf-8'))
             metadata = children[name][1].copy()
 
@@ -316,7 +328,7 @@ class DirectoryNode(object):
         return "<%s %s-%s %s>" % (self.__class__.__name__,
                                   self.is_readonly() and "RO" or "RW",
                                   self.is_mutable() and "MUT" or "IMM",
-                                  hasattr(self, '_uri') and self._uri.abbrev())
+                                  hasattr(self, '_uri') and str(self._uri.abbrev(), "utf-8"))
 
     def get_size(self):
         """Return the size of our backing mutable file, in bytes, if we've
@@ -701,7 +713,7 @@ class DirectoryNode(object):
         'new_child_namex' and 'current_child_namex' need not be normalized.
 
         The overwrite parameter may be True (overwrite any existing child),
-        False (error if the new child link already exists), or "only-files"
+        False (error if the new child link already exists), or ONLY_FILES
         (error if the new child link exists and points to a directory).
         """
         if self.is_readonly() or new_parent.is_readonly():
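Note: replacing the "only-files" magic string with a module-level ONLY_FILES instance is the classic sentinel-object pattern: the marker is equal only to itself, so it cannot collide with caller-supplied strings (or with the bytes vs. str spellings of "only-files" that the Python 3 port has to worry about). A minimal standalone sketch of the pattern (names mirror the diff above):

    class _OnlyFiles(object):
        """Marker: only files may be overwritten."""

    ONLY_FILES = _OnlyFiles()

    def check_overwrite(overwrite, name):
        # Comparing against the sentinel is unambiguous; no string typing issues.
        if overwrite is ONLY_FILES:
            print("may overwrite %s only if it is a file" % name)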
@@ -164,8 +164,10 @@ class CompleteBinaryTreeMixin(object):
     def dump(self):
         lines = []
         for i,depth in self.depth_first():
-            lines.append("%s%3d: %s" % (" "*depth, i,
-                                        base32.b2a_or_none(self[i])))
+            value = base32.b2a_or_none(self[i])
+            if value is not None:
+                value = str(value, "utf-8")
+            lines.append("%s%3d: %s" % (" "*depth, i, value))
         return "\n".join(lines) + "\n"
 
     def get_leaf_index(self, leafnum):
@@ -430,8 +432,8 @@ class IncompleteHashTree(CompleteBinaryTreeMixin, list):
         for i,h in new_hashes.items():
             if self[i]:
                 if self[i] != h:
-                    raise BadHashError("new hash %s does not match "
-                                       "existing hash %s at %s"
+                    raise BadHashError("new hash %r does not match "
+                                       "existing hash %r at %r"
                                        % (base32.b2a(h),
                                           base32.b2a(self[i]),
                                           self._name_hash(i)))
@@ -1,3 +1,14 @@
+"""Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 
 import weakref
 
@@ -67,12 +67,12 @@ class ValidatedExtendedURIProxy(object):
         self.crypttext_hash = None
 
     def __str__(self):
-        return "<%s %s>" % (self.__class__.__name__, self._verifycap.to_string())
+        return "<%s %r>" % (self.__class__.__name__, self._verifycap.to_string())
 
     def _check_integrity(self, data):
         h = uri_extension_hash(data)
         if h != self._verifycap.uri_extension_hash:
-            msg = ("The copy of uri_extension we received from %s was bad: wanted %s, got %s" %
+            msg = ("The copy of uri_extension we received from %s was bad: wanted %r, got %r" %
                    (self._readbucketproxy,
                     base32.b2a(self._verifycap.uri_extension_hash),
                     base32.b2a(h)))
@@ -234,7 +234,7 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
         UEB"""
         precondition(share_hash_tree[0] is not None, share_hash_tree)
         prefix = "%d-%s-%s" % (sharenum, bucket,
-                               base32.b2a(share_hash_tree[0][:8])[:12])
+                               str(base32.b2a(share_hash_tree[0][:8])[:12], "ascii"))
         log.PrefixingLogMixin.__init__(self,
                                        facility="tahoe.immutable.download",
                                        prefix=prefix)
@@ -427,7 +427,7 @@ class ValidatedReadBucketProxy(log.PrefixingLogMixin):
                   received from the remote peer were bad.""")
         self.log(" have candidate_share_hash: %s" % bool(candidate_share_hash))
         self.log(" block length: %d" % len(blockdata))
-        self.log(" block hash: %s" % base32.b2a_or_none(blockhash))
+        self.log(" block hash: %r" % base32.b2a_or_none(blockhash))
         if len(blockdata) < 100:
             self.log(" block data: %r" % (blockdata,))
         else:
@@ -477,7 +477,7 @@ class Checker(log.PrefixingLogMixin):
                  monitor):
         assert precondition(isinstance(verifycap, CHKFileVerifierURI), verifycap, type(verifycap))
 
-        prefix = "%s" % base32.b2a(verifycap.get_storage_index()[:8])[:12]
+        prefix = str(base32.b2a(verifycap.get_storage_index()[:8])[:12], "utf-8")
         log.PrefixingLogMixin.__init__(self, facility="tahoe.immutable.checker", prefix=prefix)
 
         self._verifycap = verifycap
@@ -63,7 +63,7 @@ class SegmentFetcher(object):
         self._running = True
 
     def stop(self):
-        log.msg("SegmentFetcher(%s).stop" % self._node._si_prefix,
+        log.msg("SegmentFetcher(%r).stop" % self._node._si_prefix,
                 level=log.NOISY, parent=self._lp, umid="LWyqpg")
         self._cancel_all_requests()
         self._running = False
@@ -127,7 +127,7 @@ class SegmentFetcher(object):
             # we could have sent something if we'd been allowed to pull
             # more shares per server. Increase the limit and try again.
             self._max_shares_per_server += 1
-            log.msg("SegmentFetcher(%s) increasing diversity limit to %d"
+            log.msg("SegmentFetcher(%r) increasing diversity limit to %d"
                     % (self._node._si_prefix, self._max_shares_per_server),
                     level=log.NOISY, umid="xY2pBA")
             # Also ask for more shares, in the hopes of achieving better
@@ -241,7 +241,7 @@ class SegmentFetcher(object):
         # called by Shares, in response to our s.send_request() calls.
         if not self._running:
             return
-        log.msg("SegmentFetcher(%s)._block_request_activity: %s -> %s" %
+        log.msg("SegmentFetcher(%r)._block_request_activity: %s -> %r" %
                 (self._node._si_prefix, repr(share), state),
                 level=log.NOISY, parent=self._lp, umid="vilNWA")
         # COMPLETE, CORRUPT, DEAD, BADSEGNUM are terminal. Remove the share
@@ -125,7 +125,7 @@ class DownloadNode(object):
             self.ciphertext_hash_tree_leaves = self.guessed_num_segments
 
     def __repr__(self):
-        return "ImmutableDownloadNode(%s)" % (self._si_prefix,)
+        return "ImmutableDownloadNode(%r)" % (self._si_prefix,)
 
     def stop(self):
         # called by the Terminator at shutdown, mostly for tests
@@ -500,7 +500,7 @@ class DownloadNode(object):
             return (offset, segment, decodetime)
         except (BadHashError, NotEnoughHashesError):
             format = ("hash failure in ciphertext_hash_tree:"
-                      " segnum=%(segnum)d, SI=%(si)s")
+                      " segnum=%(segnum)d, SI=%(si)r")
             log.msg(format=format, segnum=segnum, si=self._si_prefix,
                     failure=Failure(),
                     level=log.WEIRD, parent=self._lp, umid="MTwNnw")
@@ -120,7 +120,7 @@ class Segmentation(object):
             # we didn't get the first byte, so we can't use this segment
             log.msg("Segmentation handed wrong data:"
                     " want [%d-%d), given [%d-%d), for segnum=%d,"
-                    " for si=%s"
+                    " for si=%r"
                     % (self._offset, self._offset+self._size,
                        segment_start, segment_start+len(segment),
                        wanted_segnum, self._node._si_prefix),
@@ -108,7 +108,7 @@ class Share(object):
         self.had_corruption = False # for unit tests
 
     def __repr__(self):
-        return "Share(sh%d-on-%s)" % (self._shnum, self._server.get_name())
+        return "Share(sh%d-on-%s)" % (self._shnum, str(self._server.get_name(), "utf-8"))
 
     def is_alive(self):
         # XXX: reconsider. If the share sees a single error, should it remain
@@ -106,7 +106,7 @@ class Encoder(object):
 
     def __repr__(self):
         if hasattr(self, "_storage_index"):
-            return "<Encoder for %s>" % si_b2a(self._storage_index)[:5]
+            return "<Encoder for %r>" % si_b2a(self._storage_index)[:5]
         return "<Encoder for unknown storage index>"
 
     def log(self, *args, **kwargs):
@@ -175,7 +175,7 @@ class WriteBucketProxy(object):
         self._offset_data = offset_data
 
     def __repr__(self):
-        return "<WriteBucketProxy for node %s>" % self._server.get_name()
+        return "<WriteBucketProxy for node %r>" % self._server.get_name()
 
     def put_header(self):
         return self._write(0, self._offset_data)
@@ -317,7 +317,7 @@ class ReadBucketProxy(object):
         return self._server.get_serverid()
 
     def __repr__(self):
-        return "<ReadBucketProxy %s to peer [%s] SI %s>" % \
+        return "<ReadBucketProxy %r to peer [%r] SI %r>" % \
               (id(self), self._server.get_name(), si_b2a(self._storage_index))
 
     def _start_if_needed(self):
@@ -81,7 +81,7 @@ class CHKCheckerAndUEBFetcher(object):
     def _got_response(self, buckets, server):
         # buckets is a dict: maps shum to an rref of the server who holds it
         shnums_s = ",".join([str(shnum) for shnum in buckets])
-        self.log("got_response: [%s] has %d shares (%s)" %
+        self.log("got_response: [%r] has %d shares (%s)" %
                  (server.get_name(), len(buckets), shnums_s),
                  level=log.NOISY)
         self._found_shares.update(buckets.keys())
@@ -167,7 +167,7 @@ class CHKUploadHelper(Referenceable, upload.CHKUploader): # type: ignore # warn
         self._upload_status.set_storage_index(storage_index)
         self._upload_status.set_status("fetching ciphertext")
         self._upload_status.set_progress(0, 1.0)
-        self._helper.log("CHKUploadHelper starting for SI %s" % self._upload_id,
+        self._helper.log("CHKUploadHelper starting for SI %r" % self._upload_id,
                          parent=log_number)
 
         self._storage_broker = storage_broker
@@ -278,7 +278,7 @@ class ServerTracker(object):
         self.cancel_secret = bucket_cancel_secret
 
     def __repr__(self):
-        return ("<ServerTracker for server %s and SI %s>"
+        return ("<ServerTracker for server %r and SI %r>"
                 % (self._server.get_name(), si_b2a(self.storage_index)[:5]))
 
     def get_server(self):
@@ -338,7 +338,7 @@
 
 
 def str_shareloc(shnum, bucketwriter):
-    return "%s: %s" % (shnum, bucketwriter.get_servername(),)
+    return "%s: %s" % (shnum, ensure_str(bucketwriter.get_servername()),)
 
 
 @implementer(IPeerSelector)
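Note: six.ensure_str(), newly used in str_shareloc(), normalizes either bytes or text to the native str type of the running interpreter, which makes it convenient at bytes/str seams like server names. For example:

    from six import ensure_str

    ensure_str(b"v0-abc123")   # "v0-abc123": bytes are decoded on Python 3
    ensure_str(u"v0-abc123")   # text passes through unchanged on both Pythons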
@@ -437,7 +437,7 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
         self._reactor = reactor
 
     def __repr__(self):
-        return "<Tahoe2ServerSelector for upload %s>" % self.upload_id
+        return "<Tahoe2ServerSelector for upload %r>" % self.upload_id
 
     def _create_trackers(self, candidate_servers, allocated_size,
                          file_renewal_secret, file_cancel_secret, create_server_tracker):
@@ -590,7 +590,7 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
             d = timeout_call(self._reactor, tracker.ask_about_existing_shares(), 15)
             d.addBoth(self._handle_existing_response, tracker)
             ds.append(d)
-            self.log("asking server %s for any existing shares" %
+            self.log("asking server %r for any existing shares" %
                      (tracker.get_name(),), level=log.NOISY)
 
         for tracker in write_trackers:
@@ -605,7 +605,7 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
             d.addErrback(timed_out, tracker)
             d.addBoth(self._handle_existing_write_response, tracker, set())
             ds.append(d)
-            self.log("asking server %s for any existing shares" %
+            self.log("asking server %r for any existing shares" %
                      (tracker.get_name(),), level=log.NOISY)
 
         trackers = set(write_trackers) | set(readonly_trackers)
@@ -749,7 +749,7 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
         buckets = res
         if buckets:
             self.serverids_with_shares.add(serverid)
-        self.log("response to get_buckets() from server %s: alreadygot=%s"
+        self.log("response to get_buckets() from server %r: alreadygot=%s"
                  % (tracker.get_name(), tuple(sorted(buckets))),
                  level=log.NOISY)
         for bucket in buckets:
@@ -818,7 +818,7 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
                 self.homeless_shares.remove(shnum)
 
         if self._status:
-            self._status.set_status("Contacting Servers [%s] (first query),"
+            self._status.set_status("Contacting Servers [%r] (first query),"
                                     " %d shares left.."
                                     % (tracker.get_name(),
                                        len(self.homeless_shares)))
@@ -845,7 +845,7 @@ class Tahoe2ServerSelector(log.PrefixingLogMixin):
 
         else:
             (alreadygot, allocated) = res
-            self.log("response to allocate_buckets() from server %s: alreadygot=%s, allocated=%s"
+            self.log("response to allocate_buckets() from server %r: alreadygot=%s, allocated=%s"
                      % (tracker.get_name(),
                         tuple(sorted(alreadygot)), tuple(sorted(allocated))),
                      level=log.NOISY)
@@ -1314,7 +1314,7 @@ class CHKUploader(object):
         storage_index = encoder.get_param("storage_index")
         self._storage_index = storage_index
         upload_id = si_b2a(storage_index)[:5]
-        self.log("using storage index %s" % upload_id)
+        self.log("using storage index %r" % upload_id)
         server_selector = Tahoe2ServerSelector(
             upload_id,
             self._log_number,
@@ -2858,7 +2858,7 @@ class RIControlClient(RemoteInterface):
         @return: a dictionary mapping peerid to a float (RTT time in seconds)
         """
 
-        return DictOf(str, float)
+        return DictOf(bytes, float)
 
 
 UploadResults = Any() #DictOf(bytes, bytes)
@@ -300,7 +300,7 @@ class IntroducerService(service.MultiService, Referenceable):
                  level=log.UNUSUAL, umid="jfGMXQ")
 
     def remote_subscribe_v2(self, subscriber, service_name, subscriber_info):
-        self.log("introducer: subscription[%s] request at %s"
+        self.log("introducer: subscription[%r] request at %r"
                  % (service_name, subscriber), umid="U3uzLg")
         service_name = ensure_text(service_name)
         subscriber_info = dictutil.UnicodeKeyDict({
@@ -9,6 +9,7 @@ from __future__ import unicode_literals
 from future.utils import PY2
 if PY2:
     from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+from six import ensure_str
 
 from allmydata.uri import from_string
 from allmydata.util import base32, log, dictutil
@@ -202,7 +203,7 @@ class MutableChecker(object):
             serverid = server.get_serverid()
             locator = (server, self._storage_index, shnum)
             corrupt_share_locators.append(locator)
-            s = "%s-sh%d" % (server.get_name(), shnum)
+            s = "%s-sh%d" % (ensure_str(server.get_name()), shnum)
             if f.check(CorruptShareError):
                 ft = f.value.reason
             else:
@@ -63,7 +63,7 @@ class CorruptShareError(BadShareError):
         self.shnum = shnum
         self.reason = reason
     def __str__(self):
-        return "<CorruptShareError server=%s shnum[%d]: %s" % \
+        return "<CorruptShareError server=%r shnum[%d]: %s" % \
               (self.server.get_name(), self.shnum, self.reason)
 
 class UnknownVersionError(BadShareError):
@@ -98,7 +98,7 @@ class MutableFileNode(object):
 
     def __repr__(self):
         if hasattr(self, '_uri'):
-            return "<%s %x %s %s>" % (self.__class__.__name__, id(self), self.is_readonly() and 'RO' or 'RW', self._uri.abbrev())
+            return "<%s %x %s %r>" % (self.__class__.__name__, id(self), self.is_readonly() and 'RO' or 'RW', self._uri.abbrev())
         else:
             return "<%s %x %s %s>" % (self.__class__.__name__, id(self), None, None)
@@ -127,7 +127,7 @@ class Publish(object):
         self._servermap = servermap
         self._storage_index = self._node.get_storage_index()
         self._log_prefix = prefix = si_b2a(self._storage_index)[:5]
-        num = self.log("Publish(%s): starting" % prefix, parent=None)
+        num = self.log("Publish(%r): starting" % prefix, parent=None)
         self._log_number = num
         self._running = True
         self._first_write_error = None
@@ -915,7 +915,7 @@ class Publish(object):
     def log_goal(self, goal, message=""):
         logmsg = [message]
         for (shnum, server) in sorted([(s,p) for (p,s) in goal], key=lambda t: (id(t[0]), id(t[1]))):
-            logmsg.append("sh%d to [%s]" % (shnum, server.get_name()))
+            logmsg.append("sh%d to [%r]" % (shnum, server.get_name()))
         self.log("current goal: %s" % (", ".join(logmsg)), level=log.NOISY)
         self.log("we are planning to push new seqnum=#%d" % self._new_seqnum,
                  level=log.NOISY)
@@ -999,7 +999,7 @@ class Publish(object):
             return
 
         server = writer.server
-        lp = self.log("_got_write_answer from %s, share %d" %
+        lp = self.log("_got_write_answer from %r, share %d" %
                       (server.get_name(), writer.shnum))
 
         now = time.time()
@@ -1135,14 +1135,14 @@ class Publish(object):
             (seqnum, root_hash, IV, segsize, datalength, k, N, prefix,
              offsets_tuple) = expected_version
             msg = ("somebody modified the share on us:"
-                   " shnum=%d: I thought they had #%d:R=%s," %
+                   " shnum=%d: I thought they had #%d:R=%r," %
                    (shnum,
                     seqnum, base32.b2a(root_hash)[:4]))
             if unknown_format:
                 msg += (" but I don't know how to read share"
                         " format %d" % version)
             else:
-                msg += " but testv reported #%d:R=%s" % \
+                msg += " but testv reported #%d:R=%r" % \
                        (other_seqnum, base32.b2a(other_roothash)[:4])
             self.log(msg, parent=lp, level=log.NOISY)
             # if expected_version==None, then we didn't expect to see a
@@ -122,7 +122,7 @@ class Retrieve(object):
         _assert(self._node.get_readkey())
         self._last_failure = None
         prefix = si_b2a(self._storage_index)[:5]
-        self._log_number = log.msg("Retrieve(%s): starting" % prefix)
+        self._log_number = log.msg("Retrieve(%r): starting" % prefix)
         self._running = True
         self._decoding = False
         self._bad_shares = set()
@@ -574,7 +574,7 @@ class Retrieve(object):
         remote server (with no guarantee of success) that its share is
         corrupt.
         """
-        self.log("marking share %d on server %s as bad" % \
+        self.log("marking share %d on server %r as bad" % \
                  (shnum, server.get_name()))
         prefix = self.verinfo[-2]
         self.servermap.mark_bad_share(server, shnum, prefix)
@@ -11,6 +11,7 @@ if PY2:
     # Doesn't import str to prevent API leakage on Python 2
     from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min  # noqa: F401
 from past.builtins import unicode
+from six import ensure_str
 
 import sys, time, copy
 from zope.interface import implementer
@@ -202,8 +203,8 @@ class ServerMap(object):
             (seqnum, root_hash, IV, segsize, datalength, k, N, prefix,
              offsets_tuple) = verinfo
             print("[%s]: sh#%d seq%d-%s %d-of-%d len%d" %
-                  (server.get_name(), shnum,
-                   seqnum, base32.b2a(root_hash)[:4], k, N,
+                  (unicode(server.get_name(), "utf-8"), shnum,
+                   seqnum, unicode(base32.b2a(root_hash)[:4], "utf-8"), k, N,
                    datalength), file=out)
         if self._problems:
             print("%d PROBLEMS" % len(self._problems), file=out)
@@ -275,7 +276,7 @@ class ServerMap(object):
         """Take a versionid, return a string that describes it."""
         (seqnum, root_hash, IV, segsize, datalength, k, N, prefix,
          offsets_tuple) = verinfo
-        return "seq%d-%s" % (seqnum, base32.b2a(root_hash)[:4])
+        return "seq%d-%s" % (seqnum, unicode(base32.b2a(root_hash)[:4], "utf-8"))
 
     def summarize_versions(self):
         """Return a string describing which versions we know about."""
@@ -868,8 +869,8 @@ class ServermapUpdater(object):
             # ok, it's a valid verinfo. Add it to the list of validated
             # versions.
             self.log(" found valid version %d-%s from %s-sh%d: %d-%d/%d/%d"
-                     % (seqnum, base32.b2a(root_hash)[:4],
-                        server.get_name(), shnum,
+                     % (seqnum, unicode(base32.b2a(root_hash)[:4], "utf-8"),
+                        ensure_str(server.get_name()), shnum,
                         k, n, segsize, datalen),
                      parent=lp)
             self._valid_versions.add(verinfo)
@@ -943,13 +944,13 @@ class ServermapUpdater(object):
         alleged_privkey_s = self._node._decrypt_privkey(enc_privkey)
         alleged_writekey = hashutil.ssk_writekey_hash(alleged_privkey_s)
         if alleged_writekey != self._node.get_writekey():
-            self.log("invalid privkey from %s shnum %d" %
+            self.log("invalid privkey from %r shnum %d" %
                      (server.get_name(), shnum),
                      parent=lp, level=log.WEIRD, umid="aJVccw")
             return
 
         # it's good
-        self.log("got valid privkey from shnum %d on serverid %s" %
+        self.log("got valid privkey from shnum %d on serverid %r" %
                  (shnum, server.get_name()),
                  parent=lp)
         privkey, _ = rsa.create_signing_keypair_from_string(alleged_privkey_s)
@@ -1213,7 +1214,7 @@ class ServermapUpdater(object):
 
         self.log(format="sending %(more)d more queries: %(who)s",
                  more=len(more_queries),
-                 who=" ".join(["[%s]" % s.get_name() for s in more_queries]),
+                 who=" ".join(["[%r]" % s.get_name() for s in more_queries]),
                  level=log.NOISY)
 
         for server in more_queries:
@@ -915,7 +915,7 @@ def create_main_tub(config, tub_options,
         tubport,
         location,
     )
-    log.msg("Tub location set to %s" % (location,))
+    log.msg("Tub location set to %r" % (location,))
    return tub
 
@@ -6,6 +6,7 @@ except ImportError:
     pass
 
 from future.utils import bchr
+from past.builtins import unicode
 
 # do not import any allmydata modules at this level. Do that from inside
 # individual functions instead.
@@ -90,27 +91,34 @@ def dump_immutable_chk_share(f, out, options):
              "crypttext_hash", "crypttext_root_hash",
              "share_root_hash", "UEB_hash")
     display_keys = {"size": "file_size"}
+
+    def to_string(v):
+        if isinstance(v, bytes):
+            return unicode(v, "utf-8")
+        else:
+            return str(v)
+
     for k in keys1:
         if k in unpacked:
             dk = display_keys.get(k, k)
-            print("%20s: %s" % (dk, unpacked[k]), file=out)
+            print("%20s: %s" % (dk, to_string(unpacked[k])), file=out)
     print(file=out)
     for k in keys2:
         if k in unpacked:
             dk = display_keys.get(k, k)
-            print("%20s: %s" % (dk, unpacked[k]), file=out)
+            print("%20s: %s" % (dk, to_string(unpacked[k])), file=out)
     print(file=out)
     for k in keys3:
         if k in unpacked:
             dk = display_keys.get(k, k)
-            print("%20s: %s" % (dk, unpacked[k]), file=out)
+            print("%20s: %s" % (dk, to_string(unpacked[k])), file=out)
 
     leftover = set(unpacked.keys()) - set(keys1 + keys2 + keys3)
     if leftover:
         print(file=out)
         print("LEFTOVER:", file=out)
         for k in sorted(leftover):
-            print("%20s: %s" % (k, unpacked[k]), file=out)
+            print("%20s: %s" % (k, to_string(unpacked[k])), file=out)
 
     # the storage index isn't stored in the share itself, so we depend upon
     # knowing the parent directory name to get it
@@ -197,7 +205,7 @@ def dump_mutable_share(options):
     print(file=out)
     print("Mutable slot found:", file=out)
     print(" share_type: %s" % share_type, file=out)
-    print(" write_enabler: %s" % base32.b2a(WE), file=out)
+    print(" write_enabler: %s" % unicode(base32.b2a(WE), "utf-8"), file=out)
     print(" WE for nodeid: %s" % idlib.nodeid_b2a(nodeid), file=out)
     print(" num_extra_leases: %d" % num_extra_leases, file=out)
     print(" container_size: %d" % container_size, file=out)
@@ -209,8 +217,8 @@ def dump_mutable_share(options):
             print(" ownerid: %d" % lease.owner_num, file=out)
             when = format_expiration_time(lease.expiration_time)
             print(" expires in %s" % when, file=out)
-            print(" renew_secret: %s" % base32.b2a(lease.renew_secret), file=out)
-            print(" cancel_secret: %s" % base32.b2a(lease.cancel_secret), file=out)
+            print(" renew_secret: %s" % unicode(base32.b2a(lease.renew_secret), "utf-8"), file=out)
+            print(" cancel_secret: %s" % unicode(base32.b2a(lease.cancel_secret), "utf-8"), file=out)
             print(" secrets are for nodeid: %s" % idlib.nodeid_b2a(lease.nodeid), file=out)
     else:
         print("No leases.", file=out)
@@ -258,8 +266,8 @@ def dump_SDMF_share(m, length, options):
 
     print(" SDMF contents:", file=out)
     print(" seqnum: %d" % seqnum, file=out)
-    print(" root_hash: %s" % base32.b2a(root_hash), file=out)
-    print(" IV: %s" % base32.b2a(IV), file=out)
+    print(" root_hash: %s" % unicode(base32.b2a(root_hash), "utf-8"), file=out)
+    print(" IV: %s" % unicode(base32.b2a(IV), "utf-8"), file=out)
     print(" required_shares: %d" % k, file=out)
     print(" total_shares: %d" % N, file=out)
     print(" segsize: %d" % segsize, file=out)
|
||||
|
||||
print(" MDMF contents:", file=out)
|
||||
print(" seqnum: %d" % seqnum, file=out)
|
||||
print(" root_hash: %s" % base32.b2a(root_hash), file=out)
|
||||
print(" root_hash: %s" % unicode(base32.b2a(root_hash), "utf-8"), file=out)
|
||||
#print(" IV: %s" % base32.b2a(IV), file=out)
|
||||
print(" required_shares: %d" % k, file=out)
|
||||
print(" total_shares: %d" % N, file=out)
|
||||
@ -645,7 +653,7 @@ def find_shares(options):
|
||||
from allmydata.util.encodingutil import listdir_unicode, quote_local_unicode_path
|
||||
|
||||
out = options.stdout
|
||||
sharedir = storage_index_to_dir(si_a2b(options.si_s))
|
||||
sharedir = storage_index_to_dir(si_a2b(options.si_s.encode("utf-8")))
|
||||
for d in options.nodedirs:
|
||||
d = os.path.join(d, "storage", "shares", sharedir)
|
||||
if os.path.exists(d):
|
||||
@ -745,7 +753,7 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
|
||||
|
||||
print("SDMF %s %d/%d %d #%d:%s %d %s" % \
|
||||
(si_s, k, N, datalen,
|
||||
seqnum, base32.b2a(root_hash),
|
||||
seqnum, unicode(base32.b2a(root_hash), "utf-8"),
|
||||
expiration, quote_output(abs_sharefile)), file=out)
|
||||
elif share_type == "MDMF":
|
||||
from allmydata.mutable.layout import MDMFSlotReadProxy
|
||||
@ -774,7 +782,7 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
|
||||
offsets) = verinfo
|
||||
print("MDMF %s %d/%d %d #%d:%s %d %s" % \
|
||||
(si_s, k, N, datalen,
|
||||
seqnum, base32.b2a(root_hash),
|
||||
seqnum, unicode(base32.b2a(root_hash), "utf-8"),
|
||||
expiration, quote_output(abs_sharefile)), file=out)
|
||||
else:
|
||||
print("UNKNOWN mutable %s" % quote_output(abs_sharefile), file=out)
|
||||
@ -808,8 +816,8 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
|
||||
ueb_hash = unpacked["UEB_hash"]
|
||||
|
||||
print("CHK %s %d/%d %d %s %d %s" % (si_s, k, N, filesize,
|
||||
ueb_hash, expiration,
|
||||
quote_output(abs_sharefile)), file=out)
|
||||
unicode(ueb_hash, "utf-8"), expiration,
|
||||
quote_output(abs_sharefile)), file=out)
|
||||
|
||||
else:
|
||||
print("UNKNOWN really-unknown %s" % quote_output(abs_sharefile), file=out)
|
||||
|
@@ -17,7 +17,7 @@ from twisted.application.internet import TimerService
 from zope.interface import implementer
 from foolscap.api import eventually
 
-from allmydata.util import log
+from allmydata.util import log, dictutil
 from allmydata.interfaces import IStatsProducer
 
 @implementer(IStatsProducer)
@@ -79,15 +79,13 @@ class StatsProvider(service.MultiService):
         service.MultiService.__init__(self)
         self.node = node
 
-        self.counters = {}
+        self.counters = dictutil.UnicodeKeyDict()
         self.stats_producers = []
         self.cpu_monitor = CPUUsageMonitor()
         self.cpu_monitor.setServiceParent(self)
         self.register_producer(self.cpu_monitor)
 
     def count(self, name, delta=1):
-        if isinstance(name, str):
-            name = name.encode("utf-8")
         val = self.counters.setdefault(name, 0)
         self.counters[name] = val + delta
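Note: the counters dict becomes a dictutil.UnicodeKeyDict and the manual encode-to-bytes in count() is dropped, so counter names stay text end to end. UnicodeKeyDict is tahoe's own helper in allmydata.util.dictutil; the sketch below only shows the general shape of such a key-type-enforcing dict (a simplification, not the project's implementation):

    class UnicodeKeyDict(dict):
        """A dict that refuses non-text keys, to catch bytes/str porting bugs."""
        def __setitem__(self, key, value):
            assert isinstance(key, str), repr(key)
            dict.__setitem__(self, key, value)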
@@ -271,7 +271,7 @@ class StorageServer(service.MultiService, Referenceable):
         si_dir = storage_index_to_dir(storage_index)
         si_s = si_b2a(storage_index)
 
-        log.msg("storage: allocate_buckets %s" % si_s)
+        log.msg("storage: allocate_buckets %r" % si_s)
 
         # in this implementation, the lease information (including secrets)
         # goes into the share files themselves. It could also be put into a
@@ -397,7 +397,7 @@ class StorageServer(service.MultiService, Referenceable):
         start = time.time()
         self.count("get")
         si_s = si_b2a(storage_index)
-        log.msg("storage: get_buckets %s" % si_s)
+        log.msg("storage: get_buckets %r" % si_s)
         bucketreaders = {} # k: sharenum, v: BucketReader
         for shnum, filename in self._get_bucket_shares(storage_index):
             bucketreaders[shnum] = BucketReader(self, filename,
@@ -602,7 +602,7 @@ class StorageServer(service.MultiService, Referenceable):
         start = time.time()
         self.count("writev")
         si_s = si_b2a(storage_index)
-        log.msg("storage: slot_writev %s" % si_s)
+        log.msg("storage: slot_writev %r" % si_s)
         si_dir = storage_index_to_dir(storage_index)
         (write_enabler, renew_secret, cancel_secret) = secrets
         bucketdir = os.path.join(self.sharedir, si_dir)
@@ -669,7 +669,7 @@ class StorageServer(service.MultiService, Referenceable):
         start = time.time()
         self.count("readv")
         si_s = si_b2a(storage_index)
-        lp = log.msg("storage: slot_readv %s %s" % (si_s, shares),
+        lp = log.msg("storage: slot_readv %r %r" % (si_s, shares),
                      facility="tahoe.storage", level=log.OPERATIONAL)
         si_dir = storage_index_to_dir(storage_index)
         # shares exist if there is a file for them
@@ -703,7 +703,7 @@ class StorageServer(service.MultiService, Referenceable):
         si_s = si_b2a(storage_index)
         # windows can't handle colons in the filename
         fn = os.path.join(self.corruption_advisory_dir,
-                          "%s--%s-%d" % (now, si_s, shnum)).replace(":","")
+                          "%s--%s-%d" % (now, str(si_s, "utf-8"), shnum)).replace(":","")
         with open(fn, "w") as f:
             f.write("report: Share Corruption\n")
             f.write("type: %s\n" % bytes_to_native_str(share_type))
@@ -38,7 +38,6 @@ from future.utils import PY2
 if PY2:
     from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
 
-
 import re, time, hashlib
 
 # On Python 2 this will be the backport.
@@ -237,11 +236,11 @@ class StorageFarmBroker(service.MultiService):
             for plugin
             in getPlugins(IFoolscapStoragePlugin)
         }
-        return {
+        return UnicodeKeyDict({
             name: plugins[name].get_client_resource(node_config)
             for (name, config)
             in self.storage_client_config.storage_plugins.items()
-        }
+        })
 
     @log_call(
         action_type=u"storage-client:broker:make-storage-server",
@@ -820,7 +819,7 @@ class NativeStorageServer(service.MultiService):
         return self
 
     def __repr__(self):
-        return "<NativeStorageServer for %s>" % self.get_name()
+        return "<NativeStorageServer for %r>" % self.get_name()
     def get_serverid(self):
         return self._server_id
     def get_version(self):
@@ -844,10 +843,10 @@ class NativeStorageServer(service.MultiService):
         version = self.get_version()
         if version is None:
             return None
-        protocol_v1_version = version.get('http://allmydata.org/tahoe/protocols/storage/v1', UnicodeKeyDict())
-        available_space = protocol_v1_version.get('available-space')
+        protocol_v1_version = version.get(b'http://allmydata.org/tahoe/protocols/storage/v1', BytesKeyDict())
+        available_space = protocol_v1_version.get(b'available-space')
         if available_space is None:
-            available_space = protocol_v1_version.get('maximum-immutable-share-size', None)
+            available_space = protocol_v1_version.get(b'maximum-immutable-share-size', None)
         return available_space
 
     def start_connecting(self, trigger_cb):
@@ -14,13 +14,23 @@ Rather than defining interesting APIs for other code to use, this just causes
 some side-effects which make things better when the test suite runs.
 """
 
+from future.utils import PY3
+
+import warnings
 from traceback import extract_stack, format_list
 
 from foolscap.pb import Listener
 from twisted.python.log import err
 from twisted.application import service
 
 from foolscap.logging.incident import IncidentQualifier
 
+if PY3:
+    # Error on BytesWarnings, to catch things like str(b""), but only for
+    # allmydata code.
+    warnings.filterwarnings("error", category=BytesWarning, module="allmydata.*")
+
 
 class NonQualifier(IncidentQualifier, object):
     def check_event(self, ev):
         return False
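Note: BytesWarning is only emitted when the interpreter runs with -b (or -bb), and it flags implicit bytes/str mixing such as str(b"...") or comparing bytes with str. Escalating it to an error for allmydata.* modules makes those porting bugs fail loudly in the test suite. A standalone demonstration (run with: python -b demo.py):

    import warnings
    warnings.simplefilter("error", BytesWarning)

    str(b"abc")         # raises BytesWarning: str() on a bytes instance
    # b"abc" == u"abc"  # would also raise: comparing bytes and str is always False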
@@ -863,7 +863,7 @@ class WebErrorMixin(object):
             response_body = f.value.response
             if response_substring:
                 self.failUnless(response_substring in response_body,
-                                "%s: response substring '%s' not in '%s'"
+                                "%r: response substring %r not in %r"
                                 % (which, response_substring, response_body))
             return response_body
         d = defer.maybeDeferred(callable, *args, **kwargs)
@@ -56,7 +56,7 @@ def do_http(method, url, **kwargs):
     # https://github.com/twisted/treq/pull/159 has landed
     if 400 <= response.code < 600:
         raise VerboseError(
-            response.code, response="For request {} to {}, got: {}".format(
+            response.code, response="For request {!r} to {!r}, got: {!r}".format(
                 method, url, body))
     returnValue(body)
@@ -114,9 +114,9 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
         # with problems and display them separately
         gotmods = [mo.span() for mo in re.finditer(b'([A-Z]+)', got)]
         expmods = [mo.span() for mo in re.finditer(b'([A-Z]+)', expected)]
-        gotspans = ["%d:%d=%s" % (start,end,got[start:end])
+        gotspans = ["%d:%d=%r" % (start,end,got[start:end])
                     for (start,end) in gotmods]
-        expspans = ["%d:%d=%s" % (start,end,expected[start:end])
+        expspans = ["%d:%d=%r" % (start,end,expected[start:end])
                     for (start,end) in expmods]
         #print("expecting: %s" % expspans)
 
@@ -86,7 +86,7 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
         def _debug(n):
             fso = debug.FindSharesOptions()
             storage_index = base32.b2a(n.get_storage_index())
-            fso.si_s = storage_index
+            fso.si_s = str(storage_index, "utf-8")  # command-line options are unicode on Python 3
             fso.nodedirs = [os.path.dirname(abspath_expanduser_unicode(str(storedir)))
                             for (i,ss,storedir)
                             in self.iterate_servers()]
@@ -200,7 +200,8 @@ class NoNetworkServer(object):
         return self.serverid
 
     def get_name(self):
-        return idlib.shortnodeid_b2a(self.serverid)
+        # Other implementations return bytes.
+        return idlib.shortnodeid_b2a(self.serverid).encode("utf-8")
     def get_longname(self):
         return idlib.nodeid_b2a(self.serverid)
     def get_nickname(self):
@@ -1978,12 +1978,12 @@ class Adder(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
                                              overwrite=False))
         d.addCallback(lambda res:
                       root_node.set_node(u'file1', filenode,
-                                         overwrite="only-files"))
+                                         overwrite=dirnode.ONLY_FILES))
         d.addCallback(lambda res:
                       self.shouldFail(ExistingChildError, "set_node",
                                       "child 'dir1' already exists",
                                       root_node.set_node, u'dir1', filenode,
-                                      overwrite="only-files"))
+                                      overwrite=dirnode.ONLY_FILES))
         return d
 
     d.addCallback(_test_adder)
@@ -11,7 +11,7 @@ from __future__ import unicode_literals
 from future.utils import native_str, PY2, bytes_to_native_str
 if PY2:
     from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
-
+from six import ensure_str
 
 import time
 import os.path
@@ -794,7 +794,7 @@ class Server(unittest.TestCase):
         reports = os.listdir(reportdir)
         self.failUnlessEqual(len(reports), 1)
         report_si0 = reports[0]
-        self.failUnlessIn(native_str(si0_s), report_si0)
+        self.failUnlessIn(ensure_str(si0_s), report_si0)
         f = open(os.path.join(reportdir, report_si0), "rb")
         report = f.read()
         f.close()
@@ -118,17 +118,17 @@ class NativeStorageServerWithVersion(NativeStorageServer): # type: ignore # ta
 class TestNativeStorageServer(unittest.TestCase):
     def test_get_available_space_new(self):
         nss = NativeStorageServerWithVersion(
-            { "http://allmydata.org/tahoe/protocols/storage/v1":
-                { "maximum-immutable-share-size": 111,
-                  "available-space": 222,
+            { b"http://allmydata.org/tahoe/protocols/storage/v1":
+                { b"maximum-immutable-share-size": 111,
+                  b"available-space": 222,
                 }
             })
         self.failUnlessEqual(nss.get_available_space(), 222)
 
     def test_get_available_space_old(self):
         nss = NativeStorageServerWithVersion(
-            { "http://allmydata.org/tahoe/protocols/storage/v1":
-                { "maximum-immutable-share-size": 111,
+            { b"http://allmydata.org/tahoe/protocols/storage/v1":
+                { b"maximum-immutable-share-size": 111,
                 }
             })
         self.failUnlessEqual(nss.get_available_space(), 111)
@@ -1072,7 +1072,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         d.addCallback(_do_upload)
         def _upload_done(results):
             theuri = results.get_uri()
-            log.msg("upload finished: uri is %s" % (theuri,))
+            log.msg("upload finished: uri is %r" % (theuri,))
             self.uri = theuri
             assert isinstance(self.uri, bytes), self.uri
             self.cap = uri.from_string(self.uri)
@@ -1324,9 +1324,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
             s = stats["stats"]
             self.failUnlessEqual(s["storage_server.accepting_immutable_shares"], 1)
             c = stats["counters"]
-            # Probably this should be Unicode eventually? But we haven't ported
-            # stats code yet.
-            self.failUnless(b"storage_server.allocate" in c)
+            self.failUnless("storage_server.allocate" in c)
         d.addCallback(_grab_stats)
 
         return d
@@ -1631,7 +1629,6 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         # the key, which should cause the download to fail the post-download
         # plaintext_hash check.
 
-    @skipIf(PY3, "Python 3 web support hasn't happened yet.")
     def test_filesystem(self):
         self.basedir = "system/SystemTest/test_filesystem"
         self.data = LARGE_DATA
@@ -1669,7 +1666,9 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         d.addCallback(self.log, "did _check_publish_private")
         d.addCallback(self._test_web)
         d.addCallback(self._test_control)
-        d.addCallback(self._test_cli)
+        if PY2:
+            # TODO when CLI is ported to Python 3, reenable.
+            d.addCallback(self._test_cli)
         # P now has four top-level children:
         # P/personal/sekrit data
         # P/s2-ro/
@@ -1923,9 +1922,9 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
             if isinstance(value, tuple):
                 filename, value = value
                 form.append(b'Content-Disposition: form-data; name="%s"; '
-                            b'filename="%s"' % (name, filename.encode("utf-8")))
+                            b'filename="%s"' % (name.encode("utf-8"), filename.encode("utf-8")))
             else:
-                form.append(b'Content-Disposition: form-data; name="%s"' % name)
+                form.append(b'Content-Disposition: form-data; name="%s"' % name.encode("utf-8"))
             form.append(b'')
             form.append(b"%s" % (value,))
             form.append(sep)
@@ -1982,22 +1981,22 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
         d.addCallback(self.log, "done with _got_subdir1")
         d.addCallback(lambda res: self.GET(public + "/subdir1/mydata567"))
         def _got_data(page):
-            self.failUnlessEqual(page, self.data)
+            self.failUnlessEqual(page.encode("utf-8"), self.data)
         d.addCallback(_got_data)
 
         # download from a URI embedded in a URL
         d.addCallback(self.log, "_get_from_uri")
         def _get_from_uri(res):
-            return self.GET("uri/%s?filename=%s" % (self.uri, "mydata567"))
+            return self.GET("uri/%s?filename=%s" % (str(self.uri, "utf-8"), "mydata567"))
         d.addCallback(_get_from_uri)
         def _got_from_uri(page):
-            self.failUnlessEqual(page, self.data)
+            self.failUnlessEqual(page.encode("utf-8"), self.data)
         d.addCallback(_got_from_uri)
 
         # download from a URI embedded in a URL, second form
         d.addCallback(self.log, "_get_from_uri2")
         def _get_from_uri2(res):
-            return self.GET("uri?uri=%s" % (self.uri,))
+            return self.GET("uri?uri=%s" % (str(self.uri, "utf-8"),))
        d.addCallback(_get_from_uri2)
        d.addCallback(_got_from_uri)
 
@@ -2006,9 +2005,9 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
        @defer.inlineCallbacks
        def _get_from_bogus_uri(res):
            d1 = self.GET("uri/%s?filename=%s"
-                          % (self.mangle_uri(self.uri), "mydata567"))
+                          % (str(self.mangle_uri(self.uri), "utf-8"), "mydata567"))
            e = yield self.assertFailure(d1, Error)
-            self.assertEquals(e.status, "410")
+            self.assertEquals(e.status, b"410")
        d.addCallback(_get_from_bogus_uri)
        d.addCallback(self.log, "_got_from_bogus_uri", level=log.UNUSUAL)
 
@@ -2092,14 +2091,14 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
            workdir = os.path.join(self.getdir("client0"), "helper")
            incfile = os.path.join(workdir, "CHK_incoming", "spurious")
            f = open(incfile, "wb")
-            f.write("small file")
+            f.write(b"small file")
            f.close()
            then = time.time() - 86400*3
            now = time.time()
            os.utime(incfile, (now, then))
            encfile = os.path.join(workdir, "CHK_encoding", "spurious")
            f = open(encfile, "wb")
-            f.write("less small file")
+            f.write(b"less small file")
            f.close()
            os.utime(encfile, (now, then))
        d.addCallback(_got_helper_status)
@@ -2140,7 +2139,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
        d.addCallback(lambda res: self.GET("statistics"))
        def _got_stats(res):
            self.failUnlessIn("Operational Statistics", res)
-            self.failUnlessIn(" 'downloader.files_downloaded': 5,", res)
+            self.failUnlessIn(' "downloader.files_downloaded": 5,', res)
        d.addCallback(_got_stats)
        d.addCallback(lambda res: self.GET("statistics?t=json"))
        def _got_stats_json(res):
@@ -2348,7 +2347,7 @@ class SystemTest(SystemTestMixin, RunBinTahoeMixin, unittest.TestCase):
        for i in range(10):
            fn = os.path.join(self.basedir, "file%d" % i)
            files.append(fn)
-            data = "data to be uploaded: file%d\n" % i
+            data = b"data to be uploaded: file%d\n" % i
            datas.append(data)
            with open(fn, "wb") as f:
                f.write(data)
@@ -12,17 +12,18 @@ if PY2:
 
 from twisted.trial import unittest
 from allmydata.web import status, common
+from allmydata.dirnode import ONLY_FILES
 from ..common import ShouldFailMixin
 from .. import common_util as testutil
 
 class Util(ShouldFailMixin, testutil.ReallyEqualMixin, unittest.TestCase):
 
     def test_parse_replace_arg(self):
-        self.failUnlessReallyEqual(common.parse_replace_arg("true"), True)
-        self.failUnlessReallyEqual(common.parse_replace_arg("false"), False)
-        self.failUnlessReallyEqual(common.parse_replace_arg("only-files"),
-                                   "only-files")
-        self.failUnlessRaises(common.WebError, common.parse_replace_arg, "only_fles")
+        self.failUnlessReallyEqual(common.parse_replace_arg(b"true"), True)
+        self.failUnlessReallyEqual(common.parse_replace_arg(b"false"), False)
+        self.failUnlessReallyEqual(common.parse_replace_arg(b"only-files"),
+                                   ONLY_FILES)
+        self.failUnlessRaises(common.WebError, common.parse_replace_arg, b"only_fles")
 
     def test_abbreviate_time(self):
         self.failUnlessReallyEqual(common.abbreviate_time(None), "")
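Note: the test now feeds bytes arguments and expects the ONLY_FILES marker back for b"only-files". As a hedged sketch of an implementation consistent with this test (the real function lives in allmydata.web.common; this is illustrative, not the project's code):

    from allmydata.dirnode import ONLY_FILES
    from allmydata.web.common import WebError

    def parse_replace_arg(replace):
        """Map the web API's replace= argument to True/False/ONLY_FILES."""
        assert isinstance(replace, bytes)
        if replace.lower() == b"true":
            return True
        if replace.lower() == b"false":
            return False
        if replace.lower() == b"only-files":
            return ONLY_FILES
        raise WebError("invalid replace= argument: %r" % (replace,))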
@@ -628,7 +628,7 @@ class WebMixin(TimezoneMixin):
             if response_substring:
                 self.failUnlessIn(response_substring, res.value.response, which)
         else:
-            self.fail("%s was supposed to raise %s, not get '%s'" %
+            self.fail("%r was supposed to raise %s, not get %r" %
                       (which, expected_failure, res))
 
     def shouldFail2(self, expected_failure, which, substring,
@@ -642,7 +642,7 @@ class WebMixin(TimezoneMixin):
                 res.trap(expected_failure)
                 if substring:
                     self.failUnlessIn(substring, str(res),
-                                      "'%s' not in '%s' (response is '%s') for test '%s'" % \
+                                      "%r not in %r (response is %r) for test %r" % \
                                       (substring, str(res),
                                        getattr(res.value, "response", ""),
                                        which))
@@ -651,11 +651,11 @@ class WebMixin(TimezoneMixin):
                 if isinstance(response, bytes):
                     response = str(response, "utf-8")
                 self.failUnlessIn(response_substring, response,
-                                  "'%s' not in '%s' for test '%s'" % \
+                                  "%r not in %r for test %r" % \
                                   (response_substring, res.value.response,
                                    which))
             else:
-                self.fail("%s was supposed to raise %s, not get '%s'" %
+                self.fail("%r was supposed to raise %s, not get %r" %
                           (which, expected_failure, res))
         d.addBoth(done)
         return d
@@ -1760,7 +1760,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
     def test_PUT_NEWFILEURL_unlinked_bad_format(self):
         contents = self.NEWFILE_CONTENTS * 300000
         yield self.assertHTTPError(self.webish_url + "/uri?format=foo", 400,
-                                   "Unknown format:",
+                                   "Unknown format: foo",
                                    method="put", data=contents)
 
     def test_PUT_NEWFILEURL_range_bad(self):
@@ -1813,7 +1813,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
     def test_PUT_NEWFILEURL_bad_t(self):
         d = self.shouldFail2(error.Error, "PUT_bad_t", "400 Bad Request",
-                             "PUT to a file: bad t=",
+                             "PUT to a file: bad t=bogus",
                              self.PUT, self.public_url + "/foo/bar.txt?t=bogus",
                              b"contents")
         return d
@@ -2344,7 +2344,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
     def test_PUT_NEWDIRURL_bad_format(self):
         url = (self.webish_url + self.public_url +
                "/foo/newdir=?t=mkdir&format=foo")
-        yield self.assertHTTPError(url, 400, "Unknown format:",
+        yield self.assertHTTPError(url, 400, "Unknown format: foo",
                                    method="put", data="")
 
     def test_POST_NEWDIRURL(self):
@@ -2377,7 +2377,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
     def test_POST_NEWDIRURL_bad_format(self):
         url = (self.webish_url + self.public_url +
                "/foo/newdir?t=mkdir&format=foo")
-        yield self.assertHTTPError(url, 400, "Unknown format:",
+        yield self.assertHTTPError(url, 400, "Unknown format: foo",
                                    method="post", data="")
 
     def test_POST_NEWDIRURL_emptyname(self):
@@ -2454,7 +2454,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
         (newkids, caps) = self._create_initial_children()
         url = (self.webish_url + self.public_url +
                "/foo/newdir?t=mkdir-with-children&format=foo")
-        yield self.assertHTTPError(url, 400, "Unknown format:",
+        yield self.assertHTTPError(url, 400, "Unknown format: foo",
                                    method="post", data=json.dumps(newkids).encode("utf-8"))
 
     def test_POST_NEWDIRURL_immutable(self):
@@ -2578,7 +2578,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
     def test_PUT_NEWDIRURL_mkdirs_bad_format(self):
         url = (self.webish_url + self.public_url +
                "/foo/subdir/newdir?t=mkdir&format=foo")
-        yield self.assertHTTPError(url, 400, "Unknown format:",
+        yield self.assertHTTPError(url, 400, "Unknown format: foo",
                                    method="put", data="")
 
     def test_DELETE_DIRURL(self):
@@ -2857,7 +2857,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
         url = self.webish_url + "/uri?t=upload&format=foo"
         body, headers = self.build_form(file=("foo.txt", self.NEWFILE_CONTENTS * 300000))
         yield self.assertHTTPError(url, 400,
-                                   "Unknown format:",
+                                   "Unknown format: foo",
                                    method="post", data=body, headers=headers)
 
     def test_POST_upload_format(self):
@@ -2892,7 +2892,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
     def test_POST_upload_bad_format(self):
         url = self.webish_url + self.public_url + "/foo?t=upload&format=foo"
         body, headers = self.build_form(file=("foo.txt", self.NEWFILE_CONTENTS * 300000))
-        yield self.assertHTTPError(url, 400, "Unknown format:",
+        yield self.assertHTTPError(url, 400, "Unknown format: foo",
                                    method="post", data=body, headers=headers)
 
     def test_POST_upload_mutable(self):
@@ -3388,7 +3388,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
     def test_POST_mkdir_bad_format(self):
         url = (self.webish_url + self.public_url +
                "/foo?t=mkdir&name=newdir&format=foo")
-        yield self.assertHTTPError(url, 400, "Unknown format:",
+        yield self.assertHTTPError(url, 400, "Unknown format: foo",
                                    method="post")
 
     def test_POST_mkdir_initial_children(self):
@@ -3440,7 +3440,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
         (newkids, caps) = self._create_initial_children()
         url = (self.webish_url + self.public_url +
                "/foo?t=mkdir-with-children&name=newdir&format=foo")
-        yield self.assertHTTPError(url, 400, "Unknown format:",
+        yield self.assertHTTPError(url, 400, "Unknown format: foo",
                                    method="post", data=json.dumps(newkids).encode("utf-8"))
 
     def test_POST_mkdir_immutable(self):
@@ -3519,7 +3519,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
     @inlineCallbacks
     def test_POST_mkdir_no_parentdir_noredirect_bad_format(self):
         url = self.webish_url + self.public_url + "/uri?t=mkdir&format=foo"
|
||||
yield self.assertHTTPError(url, 400, "Unknown format:",
|
||||
yield self.assertHTTPError(url, 400, "Unknown format: foo",
|
||||
method="post")
|
||||
|
||||
def test_POST_mkdir_no_parentdir_noredirect2(self):
|
||||
@ -4462,7 +4462,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
|
||||
def test_PUT_NEWFILEURL_bad_format(self):
|
||||
new_contents = self.NEWFILE_CONTENTS * 300000
|
||||
url = self.webish_url + self.public_url + "/foo/foo.txt?format=foo"
|
||||
yield self.assertHTTPError(url, 400, "Unknown format:",
|
||||
yield self.assertHTTPError(url, 400, "Unknown format: foo",
|
||||
method="put", data=new_contents)
|
||||
|
||||
def test_PUT_NEWFILEURL_uri_replace(self):
|
||||
@ -4595,7 +4595,7 @@ class Web(WebMixin, WebErrorMixin, testutil.StallMixin, testutil.ReallyEqualMixi
|
||||
@inlineCallbacks
|
||||
def test_PUT_mkdir_bad_format(self):
|
||||
url = self.webish_url + "/uri?t=mkdir&format=foo"
|
||||
yield self.assertHTTPError(url, 400, "Unknown format:",
|
||||
yield self.assertHTTPError(url, 400, "Unknown format: foo",
|
||||
method="put", data=b"")
|
||||
|
||||
def test_POST_check(self):
|
||||
|
@ -99,7 +99,7 @@ class CHKFileURI(_BaseURI):
    def init_from_string(cls, uri):
        mo = cls.STRING_RE.search(uri)
        if not mo:
            raise BadURIError("'%s' doesn't look like a %s cap" % (uri, cls))
            raise BadURIError("%r doesn't look like a %s cap" % (uri, cls))
        return cls(base32.a2b(mo.group(1)), base32.a2b(mo.group(2)),
                   int(mo.group(3)), int(mo.group(4)), int(mo.group(5)))

@ -243,7 +243,7 @@ class WriteableSSKFileURI(_BaseURI):
    def init_from_string(cls, uri):
        mo = cls.STRING_RE.search(uri)
        if not mo:
            raise BadURIError("'%s' doesn't look like a %s cap" % (uri, cls))
            raise BadURIError("%r doesn't look like a %s cap" % (uri, cls))
        return cls(base32.a2b(mo.group(1)), base32.a2b(mo.group(2)))

    def to_string(self):
@ -253,7 +253,7 @@ class WriteableSSKFileURI(_BaseURI):
                base32.b2a(self.fingerprint))

    def __repr__(self):
        return "<%s %s>" % (self.__class__.__name__, self.abbrev())
        return "<%s %r>" % (self.__class__.__name__, self.abbrev())

    def abbrev(self):
        return base32.b2a(self.writekey[:5])
@ -290,7 +290,7 @@ class ReadonlySSKFileURI(_BaseURI):
    def init_from_string(cls, uri):
        mo = cls.STRING_RE.search(uri)
        if not mo:
            raise BadURIError("'%s' doesn't look like a %s cap" % (uri, cls))
            raise BadURIError("%r doesn't look like a %s cap" % (uri, cls))
        return cls(base32.a2b(mo.group(1)), base32.a2b(mo.group(2)))

    def to_string(self):
@ -300,7 +300,7 @@ class ReadonlySSKFileURI(_BaseURI):
                base32.b2a(self.fingerprint))

    def __repr__(self):
        return "<%s %s>" % (self.__class__.__name__, self.abbrev())
        return "<%s %r>" % (self.__class__.__name__, self.abbrev())

    def abbrev(self):
        return base32.b2a(self.readkey[:5])
@ -336,7 +336,7 @@ class SSKVerifierURI(_BaseURI):
    def init_from_string(cls, uri):
        mo = cls.STRING_RE.search(uri)
        if not mo:
            raise BadURIError("'%s' doesn't look like a %s cap" % (uri, cls))
            raise BadURIError("%r doesn't look like a %s cap" % (uri, cls))
        return cls(si_a2b(mo.group(1)), base32.a2b(mo.group(2)))

    def to_string(self):
@ -375,7 +375,7 @@ class WriteableMDMFFileURI(_BaseURI):
    def init_from_string(cls, uri):
        mo = cls.STRING_RE.search(uri)
        if not mo:
            raise BadURIError("'%s' doesn't look like a %s cap" % (uri, cls))
            raise BadURIError("%r doesn't look like a %s cap" % (uri, cls))
        return cls(base32.a2b(mo.group(1)), base32.a2b(mo.group(2)))

    def to_string(self):
@ -386,7 +386,7 @@ class WriteableMDMFFileURI(_BaseURI):
        return ret

    def __repr__(self):
        return "<%s %s>" % (self.__class__.__name__, self.abbrev())
        return "<%s %r>" % (self.__class__.__name__, self.abbrev())

    def abbrev(self):
        return base32.b2a(self.writekey[:5])
@ -423,7 +423,7 @@ class ReadonlyMDMFFileURI(_BaseURI):
    def init_from_string(cls, uri):
        mo = cls.STRING_RE.search(uri)
        if not mo:
            raise BadURIError("'%s' doesn't look like a %s cap" % (uri, cls))
            raise BadURIError("%r doesn't look like a %s cap" % (uri, cls))

        return cls(base32.a2b(mo.group(1)), base32.a2b(mo.group(2)))

@ -435,7 +435,7 @@ class ReadonlyMDMFFileURI(_BaseURI):
        return ret

    def __repr__(self):
        return "<%s %s>" % (self.__class__.__name__, self.abbrev())
        return "<%s %r>" % (self.__class__.__name__, self.abbrev())

    def abbrev(self):
        return base32.b2a(self.readkey[:5])
@ -471,7 +471,7 @@ class MDMFVerifierURI(_BaseURI):
    def init_from_string(cls, uri):
        mo = cls.STRING_RE.search(uri)
        if not mo:
            raise BadURIError("'%s' doesn't look like a %s cap" % (uri, cls))
            raise BadURIError("%r doesn't look like a %s cap" % (uri, cls))
        return cls(si_a2b(mo.group(1)), base32.a2b(mo.group(2)))

    def to_string(self):
@ -500,13 +500,13 @@ class _DirectoryBaseURI(_BaseURI):
        self._filenode_uri = filenode_uri

    def __repr__(self):
        return "<%s %s>" % (self.__class__.__name__, self.abbrev())
        return "<%s %r>" % (self.__class__.__name__, self.abbrev())

    @classmethod
    def init_from_string(cls, uri):
        mo = cls.BASE_STRING_RE.search(uri)
        if not mo:
            raise BadURIError("'%s' doesn't look like a %s cap" % (uri, cls))
            raise BadURIError("%r doesn't look like a %s cap" % (uri, cls))
        bits = uri[mo.end():]
        fn = cls.INNER_URI_CLASS.init_from_string(
            cls.INNER_URI_CLASS.BASE_STRING+bits)

@ -28,7 +28,9 @@ PORTED_MODULES = [
    "allmydata._auto_deps",
    "allmydata._monkeypatch",
    "allmydata.blacklist",
    "allmydata.check_results",
    "allmydata.codec",
    "allmydata.control",
    "allmydata.crypto",
    "allmydata.crypto.aes",
    "allmydata.crypto.ed25519",
@ -39,6 +41,7 @@ PORTED_MODULES = [
    "allmydata.dirnode",
    "allmydata.frontends.sftpd",
    "allmydata.hashtree",
    "allmydata.history",
    "allmydata.immutable.checker",
    "allmydata.immutable.downloader",
    "allmydata.immutable.downloader.common",
@ -117,7 +120,20 @@ PORTED_MODULES = [
    "allmydata.util.spans",
    "allmydata.util.statistics",
    "allmydata.util.time_format",
    "allmydata.web.check_results",
    "allmydata.web.common",
    "allmydata.web.directory",
    "allmydata.web.filenode",
    "allmydata.web.info",
    "allmydata.web.introweb",
    "allmydata.web.logs",
    "allmydata.web.operations",
    "allmydata.web.private",
    "allmydata.web.root",
    "allmydata.web.status",
    "allmydata.web.storage",
    "allmydata.web.storage_plugins",
    "allmydata.web.unlinked",
    "allmydata.webish",
]

@ -182,9 +198,9 @@ PORTED_TEST_MODULES = [
    "allmydata.test.test_storage_client",
    "allmydata.test.test_storage_web",

    # Only partially ported, test_filesystem_with_cli_in_subprocess and
    # test_filesystem methods aren't ported yet, should be done once CLI and
    # web are ported respectively.
    # Only partially ported, test_filesystem_with_cli_in_subprocess isn't
    # ported yet, nor is part of test_filesystem (the call to _test_cli). This
    # should be done once CLI is ported.
    "allmydata.test.test_system",

    "allmydata.test.test_time_format",

@ -25,6 +25,7 @@ else:
    def backwardscompat_bytes(b):
        return b
    maketrans = bytes.maketrans
    from typing import Optional

import base64

@ -71,7 +72,7 @@ BASE32STR_3bytes = backwardscompat_bytes(BASE32CHAR+b'{4}'+BASE32CHAR_4bits)
BASE32STR_4bytes = backwardscompat_bytes(BASE32CHAR+b'{6}'+BASE32CHAR_2bits)
BASE32STR_anybytes = backwardscompat_bytes(bytes(b'((?:%s{8})*') % (BASE32CHAR,) + bytes(b"(?:|%s|%s|%s|%s))") % (BASE32STR_1byte, BASE32STR_2bytes, BASE32STR_3bytes, BASE32STR_4bytes))

def b2a(os):
def b2a(os): # type: (bytes) -> bytes
    """
    @param os the data to be encoded (as bytes)

@ -79,9 +80,10 @@ def b2a(os):
    """
    return base64.b32encode(os).rstrip(b"=").lower()

def b2a_or_none(os):
def b2a_or_none(os): # type: (Optional[bytes]) -> Optional[bytes]
    if os is not None:
        return b2a(os)
    return None

# b2a() uses the minimal number of quintets sufficient to encode the binary
# input. It just so happens that the relation is like this (everything is
@ -129,7 +131,7 @@ def could_be_base32_encoded(s, s8=s8, tr=bytes.translate, identitytranstable=ide
        s = bytes(s) # On Python 2, make sure we're using modern bytes
    return s8[len(s)%8][s[-1]] and not tr(s, identitytranstable, chars)

def a2b(cs):
def a2b(cs): # type: (bytes) -> bytes
    """
    @param cs the base-32 encoded data (as bytes)
    """

@ -14,14 +14,19 @@ if PY2:

from past.builtins import long

try:
    from typing import Optional, Tuple, List  # noqa: F401
except ImportError:
    pass

def netstring(s):

def netstring(s): # type: (bytes) -> bytes
    assert isinstance(s, bytes), s # no unicode here
    return b"%d:%s," % (len(s), s,)

def split_netstring(data, numstrings,
                    position=0,
                    required_trailer=None):
                    required_trailer=None): # type: (bytes, int, int, Optional[bytes]) -> Tuple[List[bytes], int]
    """like string.split(), but extracts netstrings. Ignore all bytes of data
    before the 'position' byte. Return a tuple of (list of elements (numstrings
    in length), new position index). The new position index points to the first

@ -1,27 +0,0 @@
"""
Implement a work-around for <https://github.com/crossbario/autobahn-python/issues/1151>.
"""


from __future__ import (
    print_function,
    unicode_literals,
    absolute_import,
    division,
)


from autobahn.websocket.protocol import WebSocketProtocol
_originalConnectionLost = WebSocketProtocol._connectionLost

def _connectionLost(self, reason):
    if self.openHandshakeTimeoutCall is not None:
        self.openHandshakeTimeoutCall.cancel()
        self.openHandshakeTimeoutCall = None
    return _originalConnectionLost(self, reason)

def patch():
    """
    Monkey-patch the proposed fix into place.
    """
    WebSocketProtocol._connectionLost = _connectionLost

@ -1,4 +1,14 @@
from future.builtins import str
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

import time

@ -156,7 +166,10 @@ class ResultsBase(object):
                shares_on_server.add(s, shareid)
                shareid_s = ""
                if i == 0:
                    shareid_s = str(shareid)
                    if isinstance(shareid, bytes):
                        shareid_s = str(shareid, "utf-8")
                    else:
                        shareid_s = str(shareid)
                d = tags.tr(tags.td(shareid_s),
                            tags.td(tags.div(s.get_nickname(), class_="nickname"),
                                    tags.div(tags.tt(s.get_name()), class_="nodeid")))
@ -207,12 +220,12 @@ class ResultsBase(object):
        return [html.escape(w) for w in s]

    def _render_si_link(self, req, storage_index):
        si_s = base32.b2a(storage_index)
        ophandle = req.prepath[-1]
        si_s = str(base32.b2a(storage_index), "utf-8")
        ophandle = str(req.prepath[-1], "utf-8")
        target = "%s/operations/%s/%s" % (get_root(req), ophandle, si_s)
        output = get_arg(req, "output")
        if output:
            target = target + "?output=%s" % output
            target = target + "?output=" + str(output, "utf-8")
        return tags.a(si_s, href=target)

@ -1,5 +1,22 @@
from past.builtins import unicode
from six import ensure_text, ensure_str
"""
Ported to Python 3.
"""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min  # noqa: F401
    from past.builtins import unicode as str  # prevent leaking newbytes/newstr into code that can't handle it

from six import ensure_str

try:
    from typing import Optional, Union, Tuple, Any
except ImportError:
    pass

import time
import json
@ -51,6 +68,7 @@ from twisted.web.resource import (
    IResource,
)

from allmydata.dirnode import ONLY_FILES, _OnlyFiles
from allmydata import blacklist
from allmydata.interfaces import (
    EmptyPathnameComponentError,
@ -74,11 +92,13 @@ from allmydata.util.encodingutil import (
    quote_output,
    to_bytes,
)
from allmydata.util import abbreviate

# Originally part of this module, so still part of its API:
from .common_py3 import ( # noqa: F401
    get_arg, abbreviate_time, MultiFormatResource, WebError,
)

class WebError(Exception):
    def __init__(self, text, code=http.BAD_REQUEST):
        self.text = text
        self.code = code


def get_filenode_metadata(filenode):
@ -98,17 +118,17 @@ def get_filenode_metadata(filenode):
        metadata['size'] = size
    return metadata

def boolean_of_arg(arg):
    # TODO: ""
    arg = ensure_text(arg)
    if arg.lower() not in ("true", "t", "1", "false", "f", "0", "on", "off"):
def boolean_of_arg(arg): # type: (bytes) -> bool
    assert isinstance(arg, bytes)
    if arg.lower() not in (b"true", b"t", b"1", b"false", b"f", b"0", b"on", b"off"):
        raise WebError("invalid boolean argument: %r" % (arg,), http.BAD_REQUEST)
    return arg.lower() in ("true", "t", "1", "on")
    return arg.lower() in (b"true", b"t", b"1", b"on")

def parse_replace_arg(replace):
    replace = ensure_text(replace)
    if replace.lower() == "only-files":
        return replace

def parse_replace_arg(replace): # type: (bytes) -> Union[bool,_OnlyFiles]
    assert isinstance(replace, bytes)
    if replace.lower() == b"only-files":
        return ONLY_FILES
    try:
        return boolean_of_arg(replace)
    except WebError:
@ -128,7 +148,7 @@ def get_format(req, default="CHK"):
    elif arg.upper() == b"MDMF":
        return "MDMF"
    else:
        raise WebError("Unknown format: %s, I know CHK, SDMF, MDMF" % arg,
        raise WebError("Unknown format: %s, I know CHK, SDMF, MDMF" % str(arg, "ascii"),
                       http.BAD_REQUEST)

def get_mutable_type(file_format): # accepts result of get_format()
@ -145,19 +165,19 @@ def get_mutable_type(file_format): # accepts result of get_format()
        return None


def parse_offset_arg(offset):
def parse_offset_arg(offset): # type: (bytes) -> Union[int,None]
    # XXX: This will raise a ValueError when invoked on something that
    # is not an integer. Is that okay? Or do we want a better error
    # message? Since this call is going to be used by programmers and
    # their tools rather than users (through the wui), it is not
    # inconsistent to return that, I guess.
    if offset is not None:
        offset = int(offset)
        return int(offset)

    return offset


def get_root(req):
def get_root(req): # type: (IRequest) -> str
    """
    Get a relative path with parent directory segments that refers to the root
    location known to the given request. This seems a lot like the constant
@ -186,8 +206,8 @@ def convert_children_json(nodemaker, children_json):
    children = {}
    if children_json:
        data = json.loads(children_json)
        for (namex, (ctype, propdict)) in data.items():
            namex = unicode(namex)
        for (namex, (ctype, propdict)) in list(data.items()):
            namex = str(namex)
            writecap = to_bytes(propdict.get("rw_uri"))
            readcap = to_bytes(propdict.get("ro_uri"))
            metadata = propdict.get("metadata", {})
@ -208,7 +228,8 @@ def compute_rate(bytes, seconds):
    assert bytes > -1
    assert seconds > 0

    return 1.0 * bytes / seconds
    return bytes / seconds


def abbreviate_rate(data):
    """
@ -229,6 +250,7 @@ def abbreviate_rate(data):
        return u"%.1fkBps" % (r/1000)
    return u"%.0fBps" % r


def abbreviate_size(data):
    """
    Convert number of bytes into human readable strings (unicode).
@ -265,7 +287,7 @@ def text_plain(text, req):
    return text

def spaces_to_nbsp(text):
    return unicode(text).replace(u' ', u'\u00A0')
    return str(text).replace(u' ', u'\u00A0')

def render_time_delta(time_1, time_2):
    return spaces_to_nbsp(format_delta(time_1, time_2))
@ -283,7 +305,7 @@ def render_time_attr(t):
# actual exception). The latter is growing increasingly annoying.

def should_create_intermediate_directories(req):
    t = unicode(get_arg(req, "t", "").strip(), "ascii")
    t = str(get_arg(req, "t", "").strip(), "ascii")
    return bool(req.method in (b"PUT", b"POST") and
                t not in ("delete", "rename", "rename-form", "check"))

@ -565,7 +587,7 @@ def _finish(result, render, request):
            resource=fullyQualifiedName(type(result)),
        )
        result.render(request)
    elif isinstance(result, unicode):
    elif isinstance(result, str):
        Message.log(
            message_type=u"allmydata:web:common-render:unicode",
        )
@ -647,7 +669,7 @@ def _renderHTTP_exception(request, failure):
def _renderHTTP_exception_simple(request, text, code):
    request.setResponseCode(code)
    request.setHeader("content-type", "text/plain;charset=utf-8")
    if isinstance(text, unicode):
    if isinstance(text, str):
        text = text.encode("utf-8")
    request.setHeader("content-length", b"%d" % len(text))
    return text
@ -689,3 +711,124 @@ def url_for_string(req, url_string):
            port=port,
        )
    return url


def get_arg(req, argname, default=None, multiple=False):  # type: (IRequest, Union[bytes,str], Any, bool) -> Union[bytes,Tuple[bytes],Any]
    """Extract an argument from either the query args (req.args) or the form
    body fields (req.fields). If multiple=False, this returns a single value
    (or the default, which defaults to None), and the query args take
    precedence. If multiple=True, this returns a tuple of arguments (possibly
    empty), starting with all those in the query args.

    :param TahoeLAFSRequest req: The request to consider.

    :return: Either bytes or tuple of bytes.
    """
    if isinstance(argname, str):
        argname = argname.encode("utf-8")
    if isinstance(default, str):
        default = default.encode("utf-8")
    results = []
    if argname in req.args:
        results.extend(req.args[argname])
    argname_unicode = str(argname, "utf-8")
    if req.fields and argname_unicode in req.fields:
        value = req.fields[argname_unicode].value
        if isinstance(value, str):
            value = value.encode("utf-8")
        results.append(value)
    if multiple:
        return tuple(results)
    if results:
        return results[0]
    return default


class MultiFormatResource(resource.Resource, object):
    """
    ``MultiFormatResource`` is a ``resource.Resource`` that can be rendered in
    a number of different formats.

    Rendered format is controlled by a query argument (given by
    ``self.formatArgument``). Different resources may support different
    formats but ``json`` is a pretty common one. ``html`` is the default
    format if nothing else is given as the ``formatDefault``.
    """
    formatArgument = "t"
    formatDefault = None  # type: Optional[str]

    def render(self, req):
        """
        Dispatch to a renderer for a particular format, as selected by a query
        argument.

        A renderer for the format given by the query argument matching
        ``formatArgument`` will be selected and invoked. render_HTML will be
        used as a default if no format is selected (either by query arguments
        or by ``formatDefault``).

        :return: The result of the selected renderer.
        """
        t = get_arg(req, self.formatArgument, self.formatDefault)
        # It's either bytes or None.
        if isinstance(t, bytes):
            t = str(t, "ascii")
        renderer = self._get_renderer(t)
        result = renderer(req)
        # On Python 3, json.dumps() returns Unicode for example, but
        # twisted.web expects bytes. Instead of updating every single render
        # method, just handle Unicode one time here.
        if isinstance(result, str):
            result = result.encode("utf-8")
        return result

    def _get_renderer(self, fmt):
        """
        Get the renderer for the indicated format.

        :param str fmt: The format. If a method with a prefix of ``render_``
            and a suffix of this format (upper-cased) is found, it will be
            used.

        :return: A callable which takes a twisted.web Request and renders a
            response.
        """
        renderer = None

        if fmt is not None:
            try:
                renderer = getattr(self, "render_{}".format(fmt.upper()))
            except AttributeError:
                return resource.ErrorPage(
                    http.BAD_REQUEST,
                    "Bad Format",
                    "Unknown {} value: {!r}".format(self.formatArgument, fmt),
                ).render

        if renderer is None:
            renderer = self.render_HTML

        return renderer


def abbreviate_time(data):
    """
    Convert number of seconds into human readable string.

    :param data: Either ``None`` or integer or float, seconds.

    :return: Unicode string.
    """
    # 1.23s, 790ms, 132us
    if data is None:
        return u""
    s = float(data)
    if s >= 10:
        return abbreviate.abbreviate_time(data)
    if s >= 1.0:
        return u"%.2fs" % s
    if s >= 0.01:
        return u"%.0fms" % (1000*s)
    if s >= 0.001:
        return u"%.1fms" % (1000*s)
    return u"%.0fus" % (1000000*s)

@ -1,143 +0,0 @@
"""
Common utilities that are available from Python 3.

Can eventually be merged back into allmydata.web.common.
"""

from past.builtins import unicode

try:
    from typing import Optional
except ImportError:
    pass

from twisted.web import resource, http

from allmydata.util import abbreviate


class WebError(Exception):
    def __init__(self, text, code=http.BAD_REQUEST):
        self.text = text
        self.code = code


def get_arg(req, argname, default=None, multiple=False):
    """Extract an argument from either the query args (req.args) or the form
    body fields (req.fields). If multiple=False, this returns a single value
    (or the default, which defaults to None), and the query args take
    precedence. If multiple=True, this returns a tuple of arguments (possibly
    empty), starting with all those in the query args.

    :param TahoeLAFSRequest req: The request to consider.

    :return: Either bytes or tuple of bytes.
    """
    if isinstance(argname, unicode):
        argname = argname.encode("utf-8")
    if isinstance(default, unicode):
        default = default.encode("utf-8")
    results = []
    if argname in req.args:
        results.extend(req.args[argname])
    argname_unicode = unicode(argname, "utf-8")
    if req.fields and argname_unicode in req.fields:
        value = req.fields[argname_unicode].value
        if isinstance(value, unicode):
            value = value.encode("utf-8")
        results.append(value)
    if multiple:
        return tuple(results)
    if results:
        return results[0]
    return default


class MultiFormatResource(resource.Resource, object):
    """
    ``MultiFormatResource`` is a ``resource.Resource`` that can be rendered in
    a number of different formats.

    Rendered format is controlled by a query argument (given by
    ``self.formatArgument``). Different resources may support different
    formats but ``json`` is a pretty common one. ``html`` is the default
    format if nothing else is given as the ``formatDefault``.
    """
    formatArgument = "t"
    formatDefault = None  # type: Optional[str]

    def render(self, req):
        """
        Dispatch to a renderer for a particular format, as selected by a query
        argument.

        A renderer for the format given by the query argument matching
        ``formatArgument`` will be selected and invoked. render_HTML will be
        used as a default if no format is selected (either by query arguments
        or by ``formatDefault``).

        :return: The result of the selected renderer.
        """
        t = get_arg(req, self.formatArgument, self.formatDefault)
        # It's either bytes or None.
        if isinstance(t, bytes):
            t = unicode(t, "ascii")
        renderer = self._get_renderer(t)
        result = renderer(req)
        # On Python 3, json.dumps() returns Unicode for example, but
        # twisted.web expects bytes. Instead of updating every single render
        # method, just handle Unicode one time here.
        if isinstance(result, unicode):
            result = result.encode("utf-8")
        return result

    def _get_renderer(self, fmt):
        """
        Get the renderer for the indicated format.

        :param str fmt: The format. If a method with a prefix of ``render_``
            and a suffix of this format (upper-cased) is found, it will be
            used.

        :return: A callable which takes a twisted.web Request and renders a
            response.
        """
        renderer = None

        if fmt is not None:
            try:
                renderer = getattr(self, "render_{}".format(fmt.upper()))
            except AttributeError:
                return resource.ErrorPage(
                    http.BAD_REQUEST,
                    "Bad Format",
                    "Unknown {} value: {!r}".format(self.formatArgument, fmt),
                ).render

        if renderer is None:
            renderer = self.render_HTML

        return renderer


def abbreviate_time(data):
    """
    Convert number of seconds into human readable string.

    :param data: Either ``None`` or integer or float, seconds.

    :return: Unicode string.
    """
    # 1.23s, 790ms, 132us
    if data is None:
        return u""
    s = float(data)
    if s >= 10:
        return abbreviate.abbreviate_time(data)
    if s >= 1.0:
        return u"%.2fs" % s
    if s >= 0.01:
        return u"%.0fms" % (1000*s)
    if s >= 0.001:
        return u"%.1fms" % (1000*s)
    return u"%.0fus" % (1000000*s)

@ -1,12 +1,16 @@
"""
TODO: When porting to Python 3, the filename handling logic seems wrong. On
Python 3 filename will _already_ be correctly decoded. So only decode if it's
bytes.

Also there's a lot of code duplication I think.
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from past.builtins import unicode
from future.utils import PY2
if PY2:
    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min  # noqa: F401
    # Don't use Future's str so that we don't get leaks into bad byte formatting
    from past.builtins import unicode as str

from urllib.parse import quote as url_quote
from datetime import timedelta
@ -143,7 +147,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
        terminal = (req.prepath + req.postpath)[-1].decode('utf8') == name
        nonterminal = not terminal #len(req.postpath) > 0

        t = unicode(get_arg(req, b"t", b"").strip(), "ascii")
        t = str(get_arg(req, b"t", b"").strip(), "ascii")
        if isinstance(node_or_failure, Failure):
            f = node_or_failure
            f.trap(NoSuchChildError)
@ -225,7 +229,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
    @render_exception
    def render_GET(self, req):
        # This is where all of the directory-related ?t=* code goes.
        t = unicode(get_arg(req, b"t", b"").strip(), "ascii")
        t = str(get_arg(req, b"t", b"").strip(), "ascii")

        # t=info contains variable ophandles, t=rename-form contains the name
        # of the child being renamed. Neither is allowed an ETag.
@ -263,7 +267,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):

    @render_exception
    def render_PUT(self, req):
        t = unicode(get_arg(req, b"t", b"").strip(), "ascii")
        t = str(get_arg(req, b"t", b"").strip(), "ascii")
        replace = parse_replace_arg(get_arg(req, "replace", "true"))

        if t == "mkdir":
@ -283,7 +287,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):

    @render_exception
    def render_POST(self, req):
        t = unicode(get_arg(req, b"t", b"").strip(), "ascii")
        t = str(get_arg(req, b"t", b"").strip(), "ascii")

        if t == "mkdir":
            d = self._POST_mkdir(req)
@ -372,11 +376,17 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
        return d

    def _POST_upload(self, req):
        charset = unicode(get_arg(req, "_charset", b"utf-8"), "utf-8")
        charset = str(get_arg(req, "_charset", b"utf-8"), "utf-8")
        contents = req.fields["file"]
        assert contents.filename is None or isinstance(contents.filename, str)
        name = get_arg(req, "name")
        name = name or contents.filename

        # The filename embedded in the MIME file upload will be bytes on Python
        # 2, Unicode on Python 3, or missing (i.e. None). The "name" field in
        # the upload will be bytes on Python 2, Unicode on Python 3, or missing
        # (i.e. None). We go through all these variations until we have a name
        # that is Unicode.
        assert contents.filename is None or isinstance(contents.filename, (bytes, str))
        name = get_arg(req, "name")  # returns bytes or None
        name = name or contents.filename  # unicode, bytes or None
        if name is not None:
            name = name.strip()
        if not name:
@ -384,9 +394,9 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
            raise WebError("upload requires a name")
        if isinstance(name, bytes):
            name = name.decode(charset)
        assert isinstance(name, str)
        if "/" in name:
            raise WebError("name= may not contain a slash", http.BAD_REQUEST)
        assert isinstance(name, unicode)

        # since POST /uri/path/file?t=upload is equivalent to
        # POST /uri/path/dir?t=upload&name=foo, just do the same thing that
@ -421,7 +431,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
        name = get_arg(req, "name")
        if not name:
            raise WebError("set-uri requires a name")
        charset = unicode(get_arg(req, "_charset", b"utf-8"), "ascii")
        charset = str(get_arg(req, "_charset", b"utf-8"), "ascii")
        name = name.decode(charset)
        replace = parse_replace_arg(get_arg(req, "replace", "true"))

@ -445,7 +455,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
            # without a name= field. For our own HTML this isn't a big
            # deal, because we create the 'unlink' POST buttons ourselves.
            name = b''
        charset = unicode(get_arg(req, "_charset", b"utf-8"), "ascii")
        charset = str(get_arg(req, "_charset", b"utf-8"), "ascii")
        name = name.decode(charset)
        d = self.node.delete(name)
        d.addCallback(lambda res: "thing unlinked")
@ -461,14 +471,14 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
            return self._POST_relink(req)

    def _POST_relink(self, req):
        charset = unicode(get_arg(req, "_charset", b"utf-8"), "ascii")
        charset = str(get_arg(req, "_charset", b"utf-8"), "ascii")
        replace = parse_replace_arg(get_arg(req, "replace", "true"))

        from_name = get_arg(req, "from_name")
        if from_name is not None:
            from_name = from_name.strip()
            from_name = from_name.decode(charset)
            assert isinstance(from_name, unicode)
            assert isinstance(from_name, str)
        else:
            raise WebError("from_name= is required")

@ -476,7 +486,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
        if to_name is not None:
            to_name = to_name.strip()
            to_name = to_name.decode(charset)
            assert isinstance(to_name, unicode)
            assert isinstance(to_name, str)
        else:
            to_name = from_name

@ -493,7 +503,7 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
        if to_dir is not None and to_dir != self.node.get_write_uri():
            to_dir = to_dir.strip()
            to_dir = to_dir.decode(charset)
            assert isinstance(to_dir, unicode)
            assert isinstance(to_dir, str)
            to_path = to_dir.split(u"/")
            to_root = self.client.nodemaker.create_from_cap(to_bytes(to_path[0]))
            if not IDirectoryNode.providedBy(to_root):
@ -632,8 +642,8 @@ class DirectoryNodeHandler(ReplaceMeMixin, Resource, object):
            # TODO test handling of bad JSON
            raise
        cs = {}
        for name, (file_or_dir, mddict) in children.items():
            name = unicode(name) # json returns str *or* unicode
        for name, (file_or_dir, mddict) in list(children.items()):
            name = str(name) # json returns str *or* unicode
            writecap = mddict.get('rw_uri')
            if writecap is not None:
                writecap = writecap.encode("utf-8")
@ -705,7 +715,7 @@ class DirectoryAsHTML(Element):

    @renderer
    def title(self, req, tag):
        si_s = abbreviated_dirnode(self.node)
        si_s = str(abbreviated_dirnode(self.node), "utf-8")
        header = ["Tahoe-LAFS - Directory SI=%s" % si_s]
        if self.node.is_unknown():
            header.append(" (unknown)")
@ -719,7 +729,7 @@ class DirectoryAsHTML(Element):

    @renderer
    def header(self, req, tag):
        si_s = abbreviated_dirnode(self.node)
        si_s = str(abbreviated_dirnode(self.node), "utf-8")
        header = ["Tahoe-LAFS Directory SI=", tags.span(si_s, class_="data-chars")]
        if self.node.is_unknown():
            header.append(" (unknown)")
@ -1013,7 +1023,7 @@ def _directory_json_metadata(req, dirnode):
    d = dirnode.list()
    def _got(children):
        kids = {}
        for name, (childnode, metadata) in children.items():
        for name, (childnode, metadata) in list(children.items()):
            assert IFilesystemNode.providedBy(childnode), childnode
            rw_uri = childnode.get_write_uri()
            ro_uri = childnode.get_readonly_uri()
@ -1077,13 +1087,13 @@ class RenameForm(Element, object):

    @renderer
    def title(self, req, tag):
        return tag("Directory SI={}".format(abbreviated_dirnode(self.original)))
        return tag("Directory SI={}".format(str(abbreviated_dirnode(self.original), "ascii")))

    @renderer
    def header(self, req, tag):
        header = [
            "Rename "
            "in directory SI=%s" % abbreviated_dirnode(self.original),
            "in directory SI=%s" % str(abbreviated_dirnode(self.original), "ascii"),
        ]

        if self.original.is_readonly():
@ -1194,7 +1204,7 @@ class ManifestElement(ReloadableMonitorElement):
        si = self.monitor.origin_si
        if not si:
            return "<LIT>"
        return base32.b2a(si)[:6]
        return str(base32.b2a(si)[:6], "utf-8")

    @renderer
    def title(self, req, tag):
@ -1472,7 +1482,7 @@ class UnknownNodeHandler(Resource, object):

    @render_exception
    def render_GET(self, req):
        t = unicode(get_arg(req, "t", "").strip(), "ascii")
        t = str(get_arg(req, "t", "").strip(), "ascii")
        if t == "info":
            return MoreInfo(self.node)
        if t == "json":

@ -1,4 +1,18 @@
from past.builtins import unicode, long
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, max, min  # noqa: F401
    # Use native unicode() as str() to prevent leaking futurebytes in ways that
    # break string formatting.
    from past.builtins import unicode as str
    from past.builtins import long

from twisted.web import http, static
from twisted.internet import defer
@ -130,7 +144,7 @@ class PlaceHolderNodeHandler(Resource, ReplaceMeMixin):
        if t == b"uri":
            return self.replace_me_with_a_childcap(req, self.client, replace)

        raise WebError("PUT to a file: bad t=%s" % t)
        raise WebError("PUT to a file: bad t=%s" % str(t, "utf-8"))

    @render_exception
    def render_POST(self, req):
@ -147,7 +161,7 @@ class PlaceHolderNodeHandler(Resource, ReplaceMeMixin):
            # t=mkdir is handled in DirectoryNodeHandler._POST_mkdir, so
            # there are no other t= values left to be handled by the
            # placeholder.
            raise WebError("POST to a file: bad t=%s" % t)
            raise WebError("POST to a file: bad t=%s" % str(t, "utf-8"))

        return handle_when_done(req, d)

@ -180,7 +194,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):

    @render_exception
    def render_GET(self, req):
        t = unicode(get_arg(req, b"t", b"").strip(), "ascii")
        t = str(get_arg(req, b"t", b"").strip(), "ascii")

        # t=info contains variable ophandles, so is not allowed an ETag.
        FIXED_OUTPUT_TYPES = ["", "json", "uri", "readonly-uri"]
@ -287,7 +301,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
            assert self.parentnode and self.name
            return self.replace_me_with_a_childcap(req, self.client, replace)

        raise WebError("PUT to a file: bad t=%s" % t)
        raise WebError("PUT to a file: bad t=%s" % str(t, "utf-8"))

    @render_exception
    def render_POST(self, req):
@ -309,7 +323,7 @@ class FileNodeHandler(Resource, ReplaceMeMixin, object):
            assert self.parentnode and self.name
            d = self.replace_me_with_a_formpost(req, self.client, replace)
        else:
            raise WebError("POST to file: bad t=%s" % unicode(t, "ascii"))
            raise WebError("POST to file: bad t=%s" % str(t, "ascii"))

        return handle_when_done(req, d)

@ -374,7 +388,7 @@ class FileDownloader(Resource, object):
        self.filenode = filenode
        self.filename = filename

    def parse_range_header(self, range):
    def parse_range_header(self, range_header):
        # Parse byte ranges according to RFC 2616 "14.35.1 Byte
        # Ranges". Returns None if the range doesn't make sense so it
        # can be ignored (per the spec). When successful, returns a
@ -385,7 +399,7 @@ class FileDownloader(Resource, object):

        try:
            # byte-ranges-specifier
            units, rangeset = range.split('=', 1)
            units, rangeset = range_header.split('=', 1)
            if units != 'bytes':
                return None # nothing else supported

@ -1,3 +1,14 @@
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

import os
from urllib.parse import quote as urlquote
@ -46,7 +57,7 @@ class MoreInfoElement(Element):

    def abbrev(self, storage_index_or_none):
        if storage_index_or_none:
            return base32.b2a(storage_index_or_none)[:6]
            return str(base32.b2a(storage_index_or_none)[:6], "ascii")
        return "LIT file"

    def get_type(self):

@ -1,3 +1,14 @@
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

import time, os
from pkg_resources import resource_filename

@ -21,11 +21,6 @@ from twisted.web.resource import (
    Resource,
)

# Hotfix work-around https://github.com/crossbario/autobahn-python/issues/1151
from . import _autobahn_1151
_autobahn_1151.patch()
del _autobahn_1151


class TokenAuthenticatedWebSocketServerProtocol(WebSocketServerProtocol):
    """

@ -1,4 +1,14 @@
from past.builtins import unicode
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2
if PY2:
    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

import time
from hyperlink import (
@ -91,7 +101,7 @@ class OphandleTable(resource.Resource, service.Service):
        """
        ophandle = get_arg(req, "ophandle").decode("utf-8")
        assert ophandle
        here = DecodedURL.from_text(unicode(URLPath.fromRequest(req)))
        here = DecodedURL.from_text(str(URLPath.fromRequest(req)))
        target = here.click(u"/").child(u"operations", ophandle)
        output = get_arg(req, "output")
        if output:
@ -102,7 +112,7 @@ class OphandleTable(resource.Resource, service.Service):
    def getChild(self, name, req):
        ophandle = name
        if ophandle not in self.handles:
            raise WebError("unknown/expired handle '%s'" % escape(unicode(ophandle, "utf-8")),
            raise WebError("unknown/expired handle '%s'" % escape(str(ophandle, "utf-8")),
                           NOT_FOUND)
        (monitor, renderer, when_added) = self.handles[ophandle]

@ -152,7 +162,7 @@ class ReloadMixin(object):
    @renderer
    def refresh(self, req, tag):
        if self.monitor.is_finished():
            return b""
            return ""
        tag.attributes["http-equiv"] = "refresh"
        tag.attributes["content"] = str(self.REFRESH_TIME)
        return tag

@ -1,10 +1,14 @@
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from __future__ import (
    print_function,
    unicode_literals,
    absolute_import,
    division,
)
from future.utils import PY2
if PY2:
    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

import attr

@ -1,5 +1,14 @@
from future.utils import PY3
from past.builtins import unicode
"""
Ported to Python 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from future.utils import PY2, PY3
if PY2:
    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401

import os
import time
@ -98,7 +107,7 @@ class URIHandler(resource.Resource, object):
        either "PUT /uri" to create an unlinked file, or
        "PUT /uri?t=mkdir" to create an unlinked directory
        """
        t = unicode(get_arg(req, "t", "").strip(), "utf-8")
        t = str(get_arg(req, "t", "").strip(), "utf-8")
        if t == "":
            file_format = get_format(req, "CHK")
            mutable_type = get_mutable_type(file_format)
@ -121,7 +130,7 @@ class URIHandler(resource.Resource, object):
        unlinked file or "POST /uri?t=mkdir" to create a
        new directory
        """
        t = unicode(get_arg(req, "t", "").strip(), "ascii")
        t = str(get_arg(req, "t", "").strip(), "ascii")
        if t in ("", "upload"):
            file_format = get_format(req)
            mutable_type = get_mutable_type(file_format)
@ -185,10 +194,10 @@ class FileHandler(resource.Resource, object):
            node = self.client.create_node_from_uri(name)
        except (TypeError, AssertionError):
            # I think this can no longer be reached
            raise WebError("'%s' is not a valid file- or directory- cap"
            raise WebError("%r is not a valid file- or directory- cap"
                           % name)
        if not IFileNode.providedBy(node):
            raise WebError("'%s' is not a file-cap" % name)
            raise WebError("%r is not a file-cap" % name)
        return filenode.FileNodeDownloadHandler(self.client, node)

    @render_exception

@ -1,6 +1,17 @@
|
||||
from past.builtins import long, unicode
|
||||
"""
|
||||
Ported to Python 3.
|
||||
"""
|
||||
|
||||
from __future__ import division
|
||||
from __future__ import absolute_import
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from future.utils import PY2
|
||||
if PY2:
|
||||
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
|
||||
from past.builtins import long
|
||||
|
||||
import pprint
|
||||
import itertools
|
||||
import hashlib
|
||||
from twisted.internet import defer
|
||||
@ -61,7 +72,7 @@ class UploadResultsRendererMixin(Element):
|
||||
return "None"
|
||||
ul = tags.ul()
|
||||
for shnum, servers in sorted(sharemap.items()):
|
||||
server_names = ', '.join([s.get_name() for s in servers])
|
||||
server_names = ', '.join([str(s.get_name(), "utf-8") for s in servers])
|
||||
ul(tags.li("%d -> placed on [%s]" % (shnum, server_names)))
|
||||
return ul
|
||||
d.addCallback(_render)
|
||||
@ -75,9 +86,9 @@ class UploadResultsRendererMixin(Element):
|
||||
if servermap is None:
|
||||
return "None"
|
||||
ul = tags.ul()
|
||||
for server, shnums in sorted(servermap.items()):
|
||||
for server, shnums in sorted(servermap.items(), key=id):
|
||||
shares_s = ",".join(["#%d" % shnum for shnum in shnums])
|
||||
ul(tags.li("[%s] got share%s: %s" % (server.get_name(),
|
||||
ul(tags.li("[%s] got share%s: %s" % (str(server.get_name(), "utf-8"),
|
||||
plural(shnums), shares_s)))
|
||||
return ul
|
||||
d.addCallback(_render)
|
||||
@ -231,7 +242,9 @@ class UploadStatusElement(UploadResultsRendererMixin):
|
||||
si_s = base32.b2a_or_none(self._upload_status.get_storage_index())
|
||||
if si_s is None:
|
||||
si_s = "(None)"
|
||||
return tag(str(si_s))
|
||||
else:
|
||||
si_s = str(si_s, "utf-8")
|
||||
return tag(si_s)
|
||||
|
||||
@renderer
|
||||
def helper(self, req, tag):
|
||||
@ -466,10 +479,10 @@ class DownloadStatusElement(Element):
|
||||
return ""
|
||||
return "+%.6fs" % t
|
||||
|
||||
def _rate_and_time(self, bytes, seconds):
|
||||
def _rate_and_time(self, bytes_count, seconds):
|
||||
time_s = abbreviate_time(seconds)
|
||||
if seconds != 0:
|
||||
rate = abbreviate_rate(1.0 * bytes / seconds)
|
||||
rate = abbreviate_rate(bytes_count / seconds)
|
||||
return tags.span(time_s, title=rate)
|
||||
return tags.span(time_s)
|
||||
|
||||
@ -534,14 +547,14 @@ class DownloadStatusElement(Element):
|
||||
for r_ev in self._download_status.read_events:
|
||||
start = r_ev["start"]
|
||||
length = r_ev["length"]
|
||||
bytes = r_ev["bytes_returned"]
|
||||
bytes_returned = r_ev["bytes_returned"]
|
||||
decrypt_time = ""
|
||||
if bytes:
|
||||
decrypt_time = self._rate_and_time(bytes, r_ev["decrypt_time"])
|
||||
decrypt_time = self._rate_and_time(bytes_returned, r_ev["decrypt_time"])
|
||||
speed, rtt = "",""
|
||||
if r_ev["finish_time"] is not None:
|
||||
rtt = r_ev["finish_time"] - r_ev["start_time"] - r_ev["paused_time"]
|
||||
speed = abbreviate_rate(compute_rate(bytes, rtt))
|
||||
speed = abbreviate_rate(compute_rate(bytes_returned, rtt))
|
||||
rtt = abbreviate_time(rtt)
|
||||
paused = abbreviate_time(r_ev["paused_time"])
|
||||
|
||||
@ -549,7 +562,7 @@ class DownloadStatusElement(Element):
|
||||
tags.td("[%d:+%d]" % (start, length)),
|
||||
tags.td(srt(r_ev["start_time"])),
|
||||
tags.td(srt(r_ev["finish_time"])),
|
||||
tags.td(str(bytes)),
|
||||
tags.td(str(bytes_returned)),
|
||||
tags.td(rtt),
|
||||
tags.td(decrypt_time),
|
||||
tags.td(paused),
|
||||
@ -918,10 +931,10 @@ class RetrieveStatusElement(Element):
|
||||
if not per_server:
|
||||
return tag("")
|
||||
l = tags.ul()
|
||||
for server in sorted(per_server.keys(), key=lambda s: s.get_name()):
|
||||
for server in sorted(list(per_server.keys()), key=lambda s: s.get_name()):
|
||||
times_s = ", ".join([abbreviate_time(t)
|
||||
for t in per_server[server]])
|
||||
l(tags.li("[%s]: %s" % (server.get_name(), times_s)))
|
||||
l(tags.li("[%s]: %s" % (str(server.get_name(), "utf-8"), times_s)))
|
||||
return tags.li("Per-Server Fetch Response Times: ", l)
|
||||
|
||||
|
||||
@ -959,7 +972,9 @@ class PublishStatusElement(Element):
|
||||
si_s = base32.b2a_or_none(self._publish_status.get_storage_index())
|
||||
if si_s is None:
|
||||
si_s = "(None)"
|
||||
return tag(str(si_s))
|
||||
else:
|
||||
si_s = str(si_s, "utf-8")
|
||||
return tag(si_s)
|
||||
|
||||
@renderer
|
||||
def helper(self, req, tag):
|
||||
@ -997,7 +1012,7 @@ class PublishStatusElement(Element):
|
||||
sharemap = servermap.make_sharemap()
|
||||
for shnum in sorted(sharemap.keys()):
|
||||
l(tags.li("%d -> Placed on " % shnum,
|
||||
", ".join(["[%s]" % server.get_name()
|
||||
", ".join(["[%s]" % str(server.get_name(), "utf-8")
|
||||
for server in sharemap[shnum]])))
|
||||
return tag("Sharemap:", l)
|
||||
|
||||
@@ -1076,10 +1091,10 @@ class PublishStatusElement(Element):
         if not per_server:
             return tag()
         l = tags.ul()
-        for server in sorted(per_server.keys(), key=lambda s: s.get_name()):
+        for server in sorted(list(per_server.keys()), key=lambda s: s.get_name()):
             times_s = ", ".join([abbreviate_time(t)
                                  for t in per_server[server]])
-            l(tags.li("[%s]: %s" % (server.get_name(), times_s)))
+            l(tags.li("[%s]: %s" % (str(server.get_name(), "utf-8"), times_s)))
         return tags.li("Per-Server Response Times: ", l)
 
 
@@ -1205,7 +1220,7 @@ class MapupdateStatusElement(Element):
             else:
                 times.append("privkey(" + abbreviate_time(t) + ")")
             times_s = ", ".join(times)
-            l(tags.li("[%s]: %s" % (server.get_name(), times_s)))
+            l(tags.li("[%s]: %s" % (str(server.get_name(), "utf-8"), times_s)))
         return tags.li("Per-Server Response Times: ", l)
 
 
@@ -1295,9 +1310,9 @@ class Status(MultiFormatResource):
         try:
             stype, count_s = path.split(b"-")
         except ValueError:
-            raise WebError("no '-' in '{}'".format(unicode(path, "utf-8")))
+            raise WebError("no '-' in '{}'".format(str(path, "utf-8")))
         count = int(count_s)
-        stype = unicode(stype, "ascii")
+        stype = str(stype, "ascii")
         if stype == "up":
             for s in itertools.chain(h.list_all_upload_statuses(),
                                      h.list_all_helper_statuses()):
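`path` arrives from twisted.web as bytes, so the split needs a bytes separator; the pieces are then decoded, while `int()` accepts ASCII-digit bytes directly. A standalone sketch with an invented path segment:

    path = b"up-17"  # e.g. a /status/up-17 child lookup
    stype, count_s = path.split(b"-")
    count = int(count_s)           # int() parses ASCII digits from bytes
    stype = str(stype, "ascii")    # b"up" -> "up"
    assert (stype, count) == ("up", 17)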
@@ -1595,5 +1610,5 @@ class StatisticsElement(Element):
 
     @renderer
     def raw(self, req, tag):
-        raw = pprint.pformat(self._stats)
+        raw = json.dumps(self._stats, sort_keys=True, indent=4)
         return tag(raw)
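Replacing `pprint.pformat` with `json.dumps(..., sort_keys=True, indent=4)` turns the raw statistics view into stable, machine-parseable JSON. For example, with invented stats keys:

    import json

    stats = {"node.uptime": 93, "cpu_monitor.total": 12.5}  # invented sample
    print(json.dumps(stats, sort_keys=True, indent=4))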
src/allmydata/web/storage.py
@@ -1,4 +1,14 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import time
 from twisted.python.filepath import FilePath
@@ -9,7 +19,7 @@ from twisted.web.template import (
     renderer,
     renderElement
 )
-from allmydata.web.common_py3 import (
+from allmydata.web.common import (
     abbreviate_time,
     MultiFormatResource
 )
@@ -318,7 +328,4 @@ class StorageStatus(MultiFormatResource):
             "lease-checker": self._storage.lease_checker.get_state(),
             "lease-checker-progress": self._storage.lease_checker.get_progress(),
         }
-        result = json.dumps(d, indent=1) + "\n"
-        if PY2:
-            result = result.decode("utf-8")
-        return result.encode("utf-8")
+        return json.dumps(d, indent=1) + "\n"
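The deleted branch covered Python 2, where `json.dumps()` (with its default `ensure_ascii=True`) returns the byte-oriented `str` type; on Python 3 it always returns text, so the decode/encode round-trip is no longer needed:

    import json

    result = json.dumps({"bucket-counter": None}, indent=1) + "\n"
    assert isinstance(result, str)  # json.dumps() is always text on Python 3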
src/allmydata/web/storage_plugins.py
@@ -1,7 +1,17 @@
 """
 This module implements a resource which has as children the web resources
 of all enabled storage client plugins.
 
+Ported to Python 3.
 """
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 from twisted.web.resource import (
     Resource,
@ -1,4 +1,14 @@
|
||||
from past.builtins import unicode
|
||||
"""
|
||||
Ported to Python 3.
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from future.utils import PY2
|
||||
if PY2:
|
||||
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
|
||||
|
||||
from urllib.parse import quote as urlquote
|
||||
|
||||
@@ -119,8 +129,8 @@ class UploadResultsElement(status.UploadResultsRendererMixin):
     def download_link(self, req, tag):
         d = self.upload_results()
         d.addCallback(lambda res:
-                      tags.a("/uri/" + unicode(res.get_uri(), "utf-8"),
-                             href="/uri/" + urlquote(unicode(res.get_uri(), "utf-8"))))
+                      tags.a("/uri/" + str(res.get_uri(), "utf-8"),
+                             href="/uri/" + urlquote(str(res.get_uri(), "utf-8"))))
         return d
 
 
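In `download_link` the capability URI comes back from `get_uri()` as bytes; decoding it first lets both the link text and the quoted href be built as str. A sketch with a shortened, made-up capability:

    from urllib.parse import quote as urlquote

    uri = b"URI:CHK:abc:def"  # made-up, truncated capability string
    href = "/uri/" + urlquote(str(uri, "utf-8"))
    assert href == "/uri/URI%3ACHK%3Aabc%3Adef"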
src/allmydata/webish.py
@@ -197,8 +197,8 @@ def _logFormatter(logDateTime, request):
     template = "web: %(clientip)s %(method)s %(uri)s %(code)s %(length)s"
     return template % dict(
         clientip=_get_client_ip(request),
-        method=request.method,
-        uri=uri,
+        method=str(request.method, "utf-8"),
+        uri=str(uri, "utf-8"),
         code=request.code,
         length=(request.sentLength or "-"),
         facility="tahoe.webish",
13
tox.ini
@@ -62,16 +62,19 @@ commands =
 
     tahoe --version
 
-    !coverage: trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:{env:TEST_SUITE}}
+    # Run tests with -b to catch bugs like `"%s" % (some_bytes,)`. -b makes
+    # Python emit BytesWarnings, and warnings configuration in
+    # src/allmydata/tests/__init__.py turns allmydata's BytesWarnings into
+    # exceptions.
+    !coverage: python -b -m twisted.trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors} {posargs:{env:TEST_SUITE}}
 
     # measuring coverage is somewhat slower than not measuring coverage
     # so only do it on request.
-    coverage: coverage run -m twisted.trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors --reporter=timing} {posargs:{env:TEST_SUITE}}
+    coverage: python -b -m coverage run -m twisted.trial {env:TAHOE_LAFS_TRIAL_ARGS:--rterrors --reporter=timing} {posargs:{env:TEST_SUITE}}
     coverage: coverage combine
     coverage: coverage xml
     coverage: coverage report
 
 
 [testenv:integration]
 setenv =
     COVERAGE_PROCESS_START=.coveragerc
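The `-b` flag the new comments describe makes CPython emit `BytesWarning` whenever bytes are implicitly coerced to str, and a warnings filter (the comments point at one in `src/allmydata/tests/__init__.py`) can escalate those warnings to exceptions. A minimal reproduction, runnable outside the test suite:

    # demo.py -- run as:  python -b -W error::BytesWarning demo.py
    some_bytes = b"abc"
    print("%s" % (some_bytes,))  # without -b this silently prints "b'abc'";
                                 # with -b it warns, and the error filter
                                 # turns the warning into an exception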
@@ -120,6 +123,10 @@ deps =
     mypy
     git+https://github.com/Shoobx/mypy-zope
     git+https://github.com/warner/foolscap
+    # Twisted 21.2.0 introduces some type hints which we are not yet
+    # compatible with.
+    # https://tahoe-lafs.org/trac/tahoe-lafs/ticket/3622
+    twisted<21.2.0
 commands = mypy src
 
 