commit a403d25146
Merge remote-tracking branch 'origin/master' into 3468.offloaded-test-coverage
.circleci/config.yml

@@ -154,6 +154,8 @@ jobs:
   # we maintain.
   WHEELHOUSE_PATH: &WHEELHOUSE_PATH "/tmp/wheelhouse"
   PIP_FIND_LINKS: "file:///tmp/wheelhouse"
+  # Upload the coverage report.
+  UPLOAD_COVERAGE: "yes"
 
   # pip cannot install packages if the working directory is not readable.
   # We want to run a lot of steps as nobody instead of as root.
@@ -202,7 +204,9 @@ jobs:
   - run: &SUBMIT_COVERAGE
       name: "Submit coverage results"
       command: |
+        if [ -n "${UPLOAD_COVERAGE}" ]; then
         /tmp/venv/bin/codecov
+        fi
 
 
   debian-8:
@@ -222,6 +226,8 @@ jobs:
     <<: *UTF_8_ENVIRONMENT
     # We don't do coverage since it makes PyPy far too slow:
     TAHOE_LAFS_TOX_ENVIRONMENT: "pypy27"
+    # Since we didn't collect it, don't upload it.
+    UPLOAD_COVERAGE: ""
 
 
   c-locale:
@@ -250,6 +256,8 @@ jobs:
     TAHOE_LAFS_TOX_ENVIRONMENT: "deprecations,upcoming-deprecations"
     # Put the logs somewhere we can report them.
     TAHOE_LAFS_WARNINGS_LOG: "/tmp/artifacts/deprecation-warnings.log"
+    # The deprecations tox environments don't do coverage measurement.
+    UPLOAD_COVERAGE: ""
 
 
   integration:
.coveragerc (11 additions)

@@ -14,3 +14,14 @@ branch = True
 [report]
 show_missing = True
 skip_covered = True
 
+[paths]
+source =
+    # It looks like this in the checkout
+    src/
+    # It looks like this in the Windows build environment
+    D:/a/tahoe-lafs/tahoe-lafs/.tox/py*-coverage/Lib/site-packages/
+    # Although sometimes it looks like this instead. Also it looks like this on macOS.
+    .tox/py*-coverage/lib/python*/site-packages/
+    # On some Linux CI jobs it looks like this
+    /tmp/tahoe-lafs.tox/py*-coverage/lib/python*/site-packages/
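The new [paths] section tells coverage.py that all of those site-packages variants are aliases for src/ when data files from different environments are merged. A minimal sketch of the merge step under this config (not part of the commit; it assumes the standard coverage.py API is used to combine per-environment data files):

    import coverage

    cov = coverage.Coverage()      # reads .coveragerc, including [paths]
    cov.combine()                  # merges .coverage.* data files, remapping
                                   # each site-packages prefix back to src/
    cov.report(show_missing=True)  # matches the [report] settings above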
newsfragments/3454.minor (new empty file)
newsfragments/3459.minor (new empty file)
newsfragments/3460.minor (new empty file)
newsfragments/3467.minor (new empty file)
newsfragments/3471.minor (new empty file)
newsfragments/3472.minor (new empty file)
newsfragments/3473.minor (new empty file)
src/allmydata/check_results.py

@@ -1,3 +1,4 @@
+from past.builtins import unicode
 
 from zope.interface import implementer
 from allmydata.interfaces import ICheckResults, ICheckAndRepairResults, \
@@ -56,7 +57,11 @@ class CheckResults(object):
         self._list_incompatible_shares = list_incompatible_shares
         self._count_incompatible_shares = count_incompatible_shares
 
-        assert isinstance(summary, str) # should be a single string
+        # On Python 2, we can mix bytes and Unicode. On Python 3, we want
+        # unicode.
+        if isinstance(summary, bytes):
+            summary = unicode(summary, "utf-8")
+        assert isinstance(summary, unicode) # should be a single string
         self._summary = summary
         assert not isinstance(report, str) # should be list of strings
         self._report = report
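This hunk is the commit's recurring normalize-to-text pattern: accept bytes for backward compatibility, but store a single type on both Pythons. The same check in isolation (the helper name is hypothetical; past.builtins.unicode aliases str on Python 3):

    from past.builtins import unicode  # str on Python 3

    def normalize_summary(summary):
        # Accept bytes, but always store text so the attribute has one
        # type under Python 2 and Python 3 alike.
        if isinstance(summary, bytes):
            summary = unicode(summary, "utf-8")
        assert isinstance(summary, unicode)
        return summary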
src/allmydata/immutable/checker.py

@@ -616,7 +616,7 @@ class Checker(log.PrefixingLogMixin):
         d.addCallback(_got_ueb)
 
         def _discard_result(r):
-            assert isinstance(r, str), r
+            assert isinstance(r, bytes), r
             # to free up the RAM
             return None
 
src/allmydata/immutable/filenode.py

@@ -152,7 +152,6 @@ class CiphertextFileNode(object):
             for server in servers:
                 sm.add(shnum, server)
                 servers_responding.add(server)
-        servers_responding = sorted(servers_responding)
 
         good_hosts = len(reduce(set.union, sm.values(), set()))
         is_healthy = bool(len(sm) >= verifycap.total_shares)
src/allmydata/mutable/filenode.py

@@ -147,9 +147,9 @@ class MutableFileNode(object):
 
     def _get_initial_contents(self, contents):
         if contents is None:
-            return MutableData("")
+            return MutableData(b"")
 
-        if isinstance(contents, str):
+        if isinstance(contents, bytes):
             return MutableData(contents)
 
         if IMutableUploadable.providedBy(contents):
@@ -884,9 +884,9 @@ class MutableFileVersion(object):
         d = self._try_to_download_data()
         def _apply(old_contents):
             new_contents = modifier(old_contents, self._servermap, first_time)
-            precondition((isinstance(new_contents, str) or
+            precondition((isinstance(new_contents, bytes) or
                           new_contents is None),
-                         "Modifier function must return a string "
+                         "Modifier function must return bytes "
                          "or None")
 
             if new_contents is None or new_contents == old_contents:
@@ -960,7 +960,7 @@ class MutableFileVersion(object):
         c = consumer.MemoryConsumer()
         # modify will almost certainly write, so we need the privkey.
         d = self._read(c, fetch_privkey=True)
-        d.addCallback(lambda mc: "".join(mc.chunks))
+        d.addCallback(lambda mc: b"".join(mc.chunks))
         return d
 
 
@@ -1076,7 +1076,7 @@ class MutableFileVersion(object):
             start = offset
             rest = offset + data.get_size()
             new = old[:start]
-            new += "".join(data.read(data.get_size()))
+            new += b"".join(data.read(data.get_size()))
             new += old[rest:]
             return new
         return self._modify(m, None)
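The tightened precondition pins down the modifier contract: bytes in, bytes (or None) out. A hypothetical modifier that satisfies it:

    def appending_modifier(old_contents, servermap, first_time):
        # old_contents arrives as bytes; return bytes to publish a new
        # version, or None to leave the file unchanged.
        if old_contents.endswith(b"!"):
            return None
        return old_contents + b"!"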
src/allmydata/mutable/publish.py

@@ -1,5 +1,5 @@
 import os, time
-from six.moves import cStringIO as StringIO
+from io import BytesIO
 from itertools import count
 from zope.interface import implementer
 from twisted.internet import defer
@@ -46,7 +46,7 @@ class PublishStatus(object):
         self.size = None
         self.status = "Not started"
         self.progress = 0.0
-        self.counter = self.statusid_counter.next()
+        self.counter = next(self.statusid_counter)
         self.started = time.time()
 
     def add_per_server_time(self, server, elapsed):
@@ -305,7 +305,7 @@ class Publish(object):
             # Our update process fetched these for us. We need to update
             # them in place as publishing happens.
             self.blockhashes = {} # (shnum, [blochashes])
-            for (i, bht) in blockhashes.iteritems():
+            for (i, bht) in list(blockhashes.items()):
                 # We need to extract the leaves from our old hash tree.
                 old_segcount = mathutil.div_ceil(version[4],
                                                  version[3])
@@ -313,7 +313,7 @@ class Publish(object):
                 bht = dict(enumerate(bht))
                 h.set_hashes(bht)
                 leaves = h[h.get_leaf_index(0):]
-                for j in xrange(self.num_segments - len(leaves)):
+                for j in range(self.num_segments - len(leaves)):
                     leaves.append(None)
 
                 assert len(leaves) >= self.num_segments
@@ -509,10 +509,10 @@ class Publish(object):
         # This will eventually hold the block hash chain for each share
         # that we publish. We define it this way so that empty publishes
        # will still have something to write to the remote slot.
-        self.blockhashes = dict([(i, []) for i in xrange(self.total_shares)])
-        for i in xrange(self.total_shares):
+        self.blockhashes = dict([(i, []) for i in range(self.total_shares)])
+        for i in range(self.total_shares):
             blocks = self.blockhashes[i]
-            for j in xrange(self.num_segments):
+            for j in range(self.num_segments):
                 blocks.append(None)
         self.sharehash_leaves = None # eventually [sharehashes]
         self.sharehashes = {} # shnum -> [sharehash leaves necessary to
@@ -526,7 +526,7 @@ class Publish(object):
         return self.done_deferred
 
     def _get_some_writer(self):
-        return list(self.writers.values()[0])[0]
+        return list(list(self.writers.values())[0])[0]
 
     def _update_status(self):
         self._status.set_status("Sending Shares: %d placed out of %d, "
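The extra list() in _get_some_writer works around a classic Python 3 pitfall: dict.values() is now a view that cannot be indexed. A toy illustration (names invented):

    writers = {0: ["w0a", "w0b"], 1: ["w1a"]}

    # Python 2: writers.values()[0] indexes the list directly.
    # Python 3: values() is a view, so materialize it before indexing;
    # the outer list() then copies the first writer list, as before.
    some_writer = list(list(writers.values())[0])[0]
    assert some_writer == "w0a"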
@@ -684,7 +684,7 @@ class Publish(object):
         salt = os.urandom(16)
         assert self._version == SDMF_VERSION
 
-        for shnum, writers in self.writers.iteritems():
+        for shnum, writers in self.writers.items():
             for writer in writers:
                 writer.put_salt(salt)
 
@@ -702,9 +702,9 @@ class Publish(object):
 
 
         self.log("Pushing segment %d of %d" % (segnum + 1, self.num_segments))
+        # XXX: Why does this return a list?
         data = self.data.read(segsize)
-        # XXX: This is dumb. Why return a list?
-        data = "".join(data)
+        data = b"".join(data)
 
         assert len(data) == segsize, len(data)
 
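As the XXX comment notes, the uploadable's read() hands back a list of chunks rather than a single string, so after the port every caller joins with b"" instead of "". For example (toy chunks):

    chunks = [b"Test ", b"Data"]   # the shape of what read(9) returns
    data = b"".join(chunks)
    assert data == b"Test Data"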
@@ -732,7 +732,7 @@ class Publish(object):
         for i in range(len(crypttext_pieces)):
             offset = i * piece_size
             piece = crypttext[offset:offset+piece_size]
-            piece = piece + "\x00"*(piece_size - len(piece)) # padding
+            piece = piece + b"\x00"*(piece_size - len(piece)) # padding
             crypttext_pieces[i] = piece
             assert len(piece) == piece_size
         d = fec.encode(crypttext_pieces)
@@ -751,7 +751,7 @@ class Publish(object):
         results, salt = encoded_and_salt
         shares, shareids = results
         self._status.set_status("Pushing segment")
-        for i in xrange(len(shares)):
+        for i in range(len(shares)):
             sharedata = shares[i]
             shareid = shareids[i]
             if self._version == MDMF_VERSION:
@@ -786,7 +786,7 @@ class Publish(object):
     def push_encprivkey(self):
         encprivkey = self._encprivkey
         self._status.set_status("Pushing encrypted private key")
-        for shnum, writers in self.writers.iteritems():
+        for shnum, writers in self.writers.items():
             for writer in writers:
                 writer.put_encprivkey(encprivkey)
 
@@ -794,7 +794,7 @@ class Publish(object):
     def push_blockhashes(self):
         self.sharehash_leaves = [None] * len(self.blockhashes)
         self._status.set_status("Building and pushing block hash tree")
-        for shnum, blockhashes in self.blockhashes.iteritems():
+        for shnum, blockhashes in list(self.blockhashes.items()):
             t = hashtree.HashTree(blockhashes)
             self.blockhashes[shnum] = list(t)
             # set the leaf for future use.
@@ -808,7 +808,7 @@ class Publish(object):
     def push_sharehashes(self):
         self._status.set_status("Building and pushing share hash chain")
         share_hash_tree = hashtree.HashTree(self.sharehash_leaves)
-        for shnum in xrange(len(self.sharehash_leaves)):
+        for shnum in range(len(self.sharehash_leaves)):
             needed_indices = share_hash_tree.needed_hashes(shnum)
             self.sharehashes[shnum] = dict( [ (i, share_hash_tree[i])
                                               for i in needed_indices] )
@@ -824,7 +824,7 @@ class Publish(object):
         # - Get the checkstring of the resulting layout; sign that.
         # - Push the signature
         self._status.set_status("Pushing root hashes and signature")
-        for shnum in xrange(self.total_shares):
+        for shnum in range(self.total_shares):
             writers = self.writers[shnum]
             for writer in writers:
                 writer.put_root_hash(self.root_hash)
@@ -852,7 +852,7 @@ class Publish(object):
         signable = self._get_some_writer().get_signable()
         self.signature = rsa.sign_data(self._privkey, signable)
 
-        for (shnum, writers) in self.writers.iteritems():
+        for (shnum, writers) in self.writers.items():
             for writer in writers:
                 writer.put_signature(self.signature)
         self._status.timings['sign'] = time.time() - started
@@ -867,7 +867,7 @@ class Publish(object):
         ds = []
         verification_key = rsa.der_string_from_verifying_key(self._pubkey)
 
-        for (shnum, writers) in self.writers.copy().iteritems():
+        for (shnum, writers) in list(self.writers.copy().items()):
             for writer in writers:
                 writer.put_verification_key(verification_key)
                 self.num_outstanding += 1
@@ -1003,7 +1003,7 @@ class Publish(object):
 
         # TODO: Precompute this.
         shares = []
-        for shnum, writers in self.writers.iteritems():
+        for shnum, writers in self.writers.items():
             shares.extend([x.shnum for x in writers if x.server == server])
         known_shnums = set(shares)
         surprise_shares -= known_shnums
@@ -1198,7 +1198,7 @@ class Publish(object):
 class MutableFileHandle(object):
     """
     I am a mutable uploadable built around a filehandle-like object,
-    usually either a StringIO instance or a handle to an actual file.
+    usually either a BytesIO instance or a handle to an actual file.
     """
 
     def __init__(self, filehandle):
@@ -1268,14 +1268,14 @@ class MutableFileHandle(object):
 class MutableData(MutableFileHandle):
     """
     I am a mutable uploadable built around a string, which I then cast
-    into a StringIO and treat as a filehandle.
+    into a BytesIO and treat as a filehandle.
     """
 
     def __init__(self, s):
         # Take a string and return a file-like uploadable.
-        assert isinstance(s, str)
+        assert isinstance(s, bytes)
 
-        MutableFileHandle.__init__(self, StringIO(s))
+        MutableFileHandle.__init__(self, BytesIO(s))
 
 
 @implementer(IMutableUploadable)
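After this change MutableData insists on bytes and wraps them in a BytesIO. Usage under the new contract:

    from allmydata.mutable.publish import MutableData

    uploadable = MutableData(b"initial contents")   # ok: stored via BytesIO
    # MutableData(u"initial contents") now trips the isinstance(s, bytes)
    # assertion on Python 3 (and on Python 2, where bytes is str).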
@@ -1361,7 +1361,7 @@ class TransformingUploadable(object):
 
         self.log("reading %d bytes of new data" % length)
         new_data = self._newdata.read(length)
-        new_data = "".join(new_data)
+        new_data = b"".join(new_data)
 
         self._read_marker += len(old_start_data + new_data + old_end_data)
 
src/allmydata/storage_client.py

@@ -189,7 +189,10 @@ class StorageFarmBroker(service.MultiService):
         # this sorted order).
         for (server_id, server) in sorted(servers.items()):
             try:
-                storage_server = self._make_storage_server(server_id, server)
+                storage_server = self._make_storage_server(
+                    server_id.encode("utf-8"),
+                    server,
+                )
             except Exception:
                 # TODO: The _make_storage_server failure is logged but maybe
                 # we should write a traceback here. Notably, tests don't
@@ -232,8 +235,19 @@ class StorageFarmBroker(service.MultiService):
         include_result=False,
     )
     def _make_storage_server(self, server_id, server):
-        assert isinstance(server_id, unicode) # from YAML
-        server_id = server_id.encode("ascii")
+        """
+        Create a new ``IServer`` for the given storage server announcement.
+
+        :param bytes server_id: The unique identifier for the server.
+
+        :param dict server: The server announcement.  See ``Static Server
+            Definitions`` in the configuration documentation for details about
+            the structure and contents.
+
+        :return IServer: The object-y representation of the server described
+            by the given announcement.
+        """
+        assert isinstance(server_id, bytes)
         handler_overrides = server.get("connections", {})
         s = NativeStorageServer(
             server_id,
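With the docstring pinning server_id to bytes, encoding now happens at the call sites: YAML-sourced static definitions arrive as text and are encoded, while announcement keys are passed through. A hypothetical normalizer showing that intent (not code from the commit):

    def to_server_id(key_s):
        # Static YAML definitions yield text; _make_storage_server now
        # always expects bytes.
        if isinstance(key_s, bytes):
            return key_s
        return key_s.encode("utf-8")

    assert to_server_id(u"v0-abcdefgh") == b"v0-abcdefgh"
    assert to_server_id(b"v0-abcdefgh") == b"v0-abcdefgh"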
@@ -260,7 +274,7 @@ class StorageFarmBroker(service.MultiService):
     # these two are used in unit tests
     def test_add_rref(self, serverid, rref, ann):
         s = self._make_storage_server(
-            serverid.decode("ascii"),
+            serverid,
             {"ann": ann.copy()},
         )
         s._rref = rref
@@ -292,28 +306,71 @@ class StorageFarmBroker(service.MultiService):
             remaining.append( (threshold, d) )
         self._threshold_listeners = remaining
 
-    def _got_announcement(self, key_s, ann):
-        precondition(isinstance(key_s, str), key_s)
-        precondition(key_s.startswith("v0-"), key_s)
-        precondition(ann["service-name"] == "storage", ann["service-name"])
-        server_id = key_s
+    def _should_ignore_announcement(self, server_id, ann):
+        """
+        Determine whether a new storage announcement should be discarded or used
+        to update our collection of storage servers.
+
+        :param bytes server_id: The unique identifier for the storage server
+            which made the announcement.
+
+        :param dict ann: The announcement.
+
+        :return bool: ``True`` if the announcement should be ignored,
+            ``False`` if it should be used to update our local storage server
+            state.
+        """
+        # Let local static configuration always override any announcement for
+        # a particular server.
         if server_id in self._static_server_ids:
             log.msg(format="ignoring announcement for static server '%(id)s'",
                     id=server_id,
                     facility="tahoe.storage_broker", umid="AlxzqA",
                     level=log.UNUSUAL)
+            return True
+
+        try:
+            old = self.servers[server_id]
+        except KeyError:
+            # We don't know anything about this server.  Let's use the
+            # announcement to change that.
+            return False
+        else:
+            # Determine if this announcement is at all difference from the
+            # announcement we already have for the server.  If it is the same,
+            # we don't need to change anything.
+            return old.get_announcement() == ann
+
+    def _got_announcement(self, key_s, ann):
+        """
+        This callback is given to the introducer and called any time an
+        announcement is received which has a valid signature and does not have
+        a sequence number less than or equal to a previous sequence number
+        seen for that server by that introducer.
+
+        Note sequence numbers are not considered between different introducers
+        so if we use more than one introducer it is possible for them to
+        deliver us stale announcements in some cases.
+        """
+        precondition(isinstance(key_s, str), key_s)
+        precondition(key_s.startswith("v0-"), key_s)
+        precondition(ann["service-name"] == "storage", ann["service-name"])
+        server_id = key_s
+
+        if self._should_ignore_announcement(server_id, ann):
             return
 
         s = self._make_storage_server(
-            server_id.decode("utf-8"),
+            server_id,
             {u"ann": ann},
         )
-        server_id = s.get_serverid()
-        old = self.servers.get(server_id)
-        if old:
-            if old.get_announcement() == ann:
-                return # duplicate
-            # replacement
-            del self.servers[server_id]
+
+        try:
+            old = self.servers.pop(server_id)
+        except KeyError:
+            pass
+        else:
+            # It's a replacement, get rid of the old one.
             old.stop_connecting()
             old.disownServiceParent()
             # NOTE: this disownServiceParent() returns a Deferred that
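The replacement bookkeeping switches from get-and-test to the pop/except/else idiom, which looks up and removes the old entry in one step. The shape of the idiom with toy values:

    servers = {b"v0-aaaa": "old server object"}

    def install(server_id, new_server):
        try:
            old = servers.pop(server_id)
        except KeyError:
            pass   # first sighting of this server; nothing to replace
        else:
            # a replacement: shut the old instance down first
            print("stopping", old)
        servers[server_id] = new_server

    install(b"v0-aaaa", "new server object")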
@@ -328,6 +385,7 @@ class StorageFarmBroker(service.MultiService):
             # until they have fired (but hopefully don't keep reference
             # cycles around when they fire earlier than that, which will
             # almost always be the case for normal runtime).
+
         # now we forget about them and start using the new one
         s.setServiceParent(self)
         self.servers[server_id] = s
src/allmydata/test/mutable/test_datahandle.py

@@ -1,17 +1,29 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 from twisted.trial import unittest
 from allmydata.mutable.publish import MutableData
 
 class DataHandle(unittest.TestCase):
     def setUp(self):
-        self.test_data = "Test Data" * 50000
+        self.test_data = b"Test Data" * 50000
         self.uploadable = MutableData(self.test_data)
 
 
     def test_datahandle_read(self):
         chunk_size = 10
-        for i in xrange(0, len(self.test_data), chunk_size):
+        for i in range(0, len(self.test_data), chunk_size):
             data = self.uploadable.read(chunk_size)
-            data = "".join(data)
+            data = b"".join(data)
             start = i
             end = i + chunk_size
             self.failUnlessEqual(data, self.test_data[start:end])
@@ -28,7 +40,7 @@ class DataHandle(unittest.TestCase):
         # disturbing the location of the seek pointer.
         chunk_size = 100
         data = self.uploadable.read(chunk_size)
-        self.failUnlessEqual("".join(data), self.test_data[:chunk_size])
+        self.failUnlessEqual(b"".join(data), self.test_data[:chunk_size])
 
         # Now get the size.
         size = self.uploadable.get_size()
@@ -38,4 +50,4 @@ class DataHandle(unittest.TestCase):
         more_data = self.uploadable.read(chunk_size)
         start = chunk_size
         end = chunk_size * 2
-        self.failUnlessEqual("".join(more_data), self.test_data[start:end])
+        self.failUnlessEqual(b"".join(more_data), self.test_data[start:end])
src/allmydata/test/mutable/test_different_encoding.py

@@ -1,3 +1,15 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 from twisted.trial import unittest
 from .util import FakeStorage, make_nodemaker
 
@@ -10,7 +22,7 @@ class DifferentEncoding(unittest.TestCase):
         # create a file with 3-of-20, then modify it with a client configured
         # to do 3-of-10. #1510 tracks a failure here
         self.nodemaker.default_encoding_parameters["n"] = 20
-        d = self.nodemaker.create_mutable_file("old contents")
+        d = self.nodemaker.create_mutable_file(b"old contents")
         def _created(n):
             filecap = n.get_cap().to_string()
             del n # we want a new object, not the cached one
@@ -19,6 +31,6 @@ class DifferentEncoding(unittest.TestCase):
             return n2
         d.addCallback(_created)
         def modifier(old_contents, servermap, first_time):
-            return "new contents"
+            return b"new contents"
         d.addCallback(lambda n: n.modify(modifier))
         return d
src/allmydata/test/mutable/test_filehandle.py

@@ -1,21 +1,33 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import os
-from six.moves import cStringIO as StringIO
+from io import BytesIO
 from twisted.trial import unittest
 from allmydata.mutable.publish import MutableFileHandle
 
 class FileHandle(unittest.TestCase):
     def setUp(self):
-        self.test_data = "Test Data" * 50000
-        self.sio = StringIO(self.test_data)
+        self.test_data = b"Test Data" * 50000
+        self.sio = BytesIO(self.test_data)
         self.uploadable = MutableFileHandle(self.sio)
 
 
     def test_filehandle_read(self):
         self.basedir = "mutable/FileHandle/test_filehandle_read"
         chunk_size = 10
-        for i in xrange(0, len(self.test_data), chunk_size):
+        for i in range(0, len(self.test_data), chunk_size):
             data = self.uploadable.read(chunk_size)
-            data = "".join(data)
+            data = b"".join(data)
             start = i
             end = i + chunk_size
             self.failUnlessEqual(data, self.test_data[start:end])
@@ -33,7 +45,7 @@ class FileHandle(unittest.TestCase):
         # disturbing the location of the seek pointer.
         chunk_size = 100
         data = self.uploadable.read(chunk_size)
-        self.failUnlessEqual("".join(data), self.test_data[:chunk_size])
+        self.failUnlessEqual(b"".join(data), self.test_data[:chunk_size])
 
         # Now get the size.
         size = self.uploadable.get_size()
@@ -43,26 +55,26 @@ class FileHandle(unittest.TestCase):
         more_data = self.uploadable.read(chunk_size)
         start = chunk_size
         end = chunk_size * 2
-        self.failUnlessEqual("".join(more_data), self.test_data[start:end])
+        self.failUnlessEqual(b"".join(more_data), self.test_data[start:end])
 
 
     def test_filehandle_file(self):
         # Make sure that the MutableFileHandle works on a file as well
-        # as a StringIO object, since in some cases it will be asked to
+        # as a BytesIO object, since in some cases it will be asked to
         # deal with files.
         self.basedir = self.mktemp()
         # necessary? What am I doing wrong here?
         os.mkdir(self.basedir)
         f_path = os.path.join(self.basedir, "test_file")
-        f = open(f_path, "w")
+        f = open(f_path, "wb")
         f.write(self.test_data)
         f.close()
-        f = open(f_path, "r")
+        f = open(f_path, "rb")
 
         uploadable = MutableFileHandle(f)
 
         data = uploadable.read(len(self.test_data))
-        self.failUnlessEqual("".join(data), self.test_data)
+        self.failUnlessEqual(b"".join(data), self.test_data)
         size = uploadable.get_size()
         self.failUnlessEqual(size, len(self.test_data))
 
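Opening the file in binary mode is what lets the bytes round-trip unchanged on Python 3. The pattern in isolation:

    with open("test_file", "wb") as f:   # binary write: accepts bytes
        f.write(b"Test Data")
    with open("test_file", "rb") as f:   # binary read: returns bytes
        assert f.read() == b"Test Data"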
src/allmydata/test/mutable/util.py

@@ -1,4 +1,6 @@
-from six.moves import cStringIO as StringIO
+from past.builtins import long
+
+from io import BytesIO
 import attr
 from twisted.internet import defer, reactor
 from foolscap.api import eventually, fireEventually
@@ -75,8 +77,8 @@ class FakeStorage(object):
         if peerid not in self._peers:
             self._peers[peerid] = {}
         shares = self._peers[peerid]
-        f = StringIO()
-        f.write(shares.get(shnum, ""))
+        f = BytesIO()
+        f.write(shares.get(shnum, b""))
         f.seek(offset)
         f.write(data)
         shares[shnum] = f.getvalue()
@@ -129,7 +131,7 @@ class FakeStorageServer(object):
         readv = {}
         for shnum, (testv, writev, new_length) in tw_vectors.items():
             for (offset, length, op, specimen) in testv:
-                assert op in ("le", "eq", "ge")
+                assert op in (b"le", b"eq", b"ge")
             # TODO: this isn't right, the read is controlled by read_vector,
             # not by testv
             readv[shnum] = [ specimen
@@ -222,10 +224,10 @@ def make_peer(s, i):
 
     :rtype: ``Peer``
     """
-    peerid = base32.b2a(tagged_hash("peerid", "%d" % i)[:20])
+    peerid = base32.b2a(tagged_hash(b"peerid", b"%d" % i)[:20])
     fss = FakeStorageServer(peerid, s)
     ann = {
-        "anonymous-storage-FURL": "pb://%s@nowhere/fake" % (peerid,),
+        "anonymous-storage-FURL": b"pb://%s@nowhere/fake" % (peerid,),
        "permutation-seed-base32": peerid,
    }
     return Peer(peerid=peerid, storage_server=fss, announcement=ann)
@@ -297,7 +299,7 @@ def make_nodemaker_with_storage_broker(storage_broker, keysize):
 
     :param StorageFarmBroker peers: The storage broker to use.
     """
-    sh = client.SecretHolder("lease secret", "convergence secret")
+    sh = client.SecretHolder(b"lease secret", b"convergence secret")
     keygen = client.KeyGenerator()
     if keysize:
         keygen.set_default_keysize(keysize)
src/allmydata/test/no_network.py

@@ -26,13 +26,19 @@ if PY2:
     from past.builtins import unicode
 
 import os
+from base64 import b32encode
+from functools import (
+    partial,
+)
 from zope.interface import implementer
 from twisted.application import service
 from twisted.internet import defer
 from twisted.python.failure import Failure
 from twisted.web.error import Error
 from foolscap.api import Referenceable, fireEventually, RemoteException
-from base64 import b32encode
+from foolscap.ipb import (
+    IRemoteReference,
+)
 import treq
 
 from allmydata.util.assertutil import _assert
@@ -59,14 +65,29 @@ class IntentionalError(Exception):
 class Marker(object):
     pass
 
+fireNow = partial(defer.succeed, None)
+
+@implementer(IRemoteReference)
 class LocalWrapper(object):
-    def __init__(self, original):
+    """
+    A ``LocalWrapper`` presents the remote reference interface to a local
+    object which implements a ``RemoteInterface``.
+    """
+    def __init__(self, original, fireEventually=fireEventually):
+        """
+        :param Callable[[], Deferred[None]] fireEventually: Get a Deferred
+            that will fire at some point.  This is used to control when
+            ``callRemote`` calls the remote method.  The default value allows
+            the reactor to iterate before the call happens.  Use ``fireNow``
+            to call the remote method synchronously.
+        """
         self.original = original
         self.broken = False
         self.hung_until = None
         self.post_call_notifier = None
         self.disconnectors = {}
         self.counter_by_methname = {}
+        self._fireEventually = fireEventually
 
     def _clear_counters(self):
         self.counter_by_methname = {}
@@ -82,7 +103,7 @@ class LocalWrapper(object):
         # selected return values.
         def wrap(a):
             if isinstance(a, Referenceable):
-                return LocalWrapper(a)
+                return self._wrap(a)
             else:
                 return a
         args = tuple([wrap(a) for a in args])
@@ -110,7 +131,7 @@ class LocalWrapper(object):
                 return d2
             return _really_call()
 
-        d = fireEventually()
+        d = self._fireEventually()
         d.addCallback(lambda res: _call())
         def _wrap_exception(f):
             return Failure(RemoteException(f))
@@ -124,10 +145,10 @@ class LocalWrapper(object):
             if methname == "allocate_buckets":
                 (alreadygot, allocated) = res
                 for shnum in allocated:
-                    allocated[shnum] = LocalWrapper(allocated[shnum])
+                    allocated[shnum] = self._wrap(allocated[shnum])
             if methname == "get_buckets":
                 for shnum in res:
-                    res[shnum] = LocalWrapper(res[shnum])
+                    res[shnum] = self._wrap(res[shnum])
             return res
         d.addCallback(_return_membrane)
         if self.post_call_notifier:
@@ -141,6 +162,10 @@ class LocalWrapper(object):
     def dontNotifyOnDisconnect(self, marker):
         del self.disconnectors[marker]
 
+    def _wrap(self, value):
+        return LocalWrapper(value, self._fireEventually)
+
 
 def wrap_storage_server(original):
     # Much of the upload/download code uses rref.version (which normally
     # comes from rrefutil.add_version_to_remote_reference). To avoid using a
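fireNow gives tests a way to make LocalWrapper.callRemote run its method synchronously instead of after a reactor turn. A sketch of the two modes, using the names this diff introduces (the wrapped object below is a stand-in, not a real Referenceable):

    from allmydata.test.no_network import LocalWrapper, fireNow

    original = object()   # stands in for a server-side Referenceable

    eventual = LocalWrapper(original)                # default: next reactor turn
    synchronous = LocalWrapper(original, fireEventually=fireNow)  # fires at once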
@ -1,3 +1,16 @@
|
|||||||
|
"""
|
||||||
|
Ported to Python 3.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import absolute_import
|
||||||
|
from __future__ import division
|
||||||
|
from __future__ import print_function
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
from future.utils import PY2
|
||||||
|
if PY2:
|
||||||
|
from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
|
||||||
|
|
||||||
|
|
||||||
import json
|
import json
|
||||||
import os.path, shutil
|
import os.path, shutil
|
||||||
@ -7,7 +20,14 @@ from bs4 import BeautifulSoup
|
|||||||
from twisted.trial import unittest
|
from twisted.trial import unittest
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
|
# We need to use `nevow.inevow.IRequest` for now for compatibility
|
||||||
|
# with the code in web/common.py. Once nevow bits are gone from
|
||||||
|
# web/common.py, we can use `twisted.web.iweb.IRequest` here.
|
||||||
|
if PY2:
|
||||||
from nevow.inevow import IRequest
|
from nevow.inevow import IRequest
|
||||||
|
else:
|
||||||
|
from twisted.web.iweb import IRequest
|
||||||
|
|
||||||
from zope.interface import implementer
|
from zope.interface import implementer
|
||||||
from twisted.web.server import Request
|
from twisted.web.server import Request
|
||||||
from twisted.web.test.requesthelper import DummyChannel
|
from twisted.web.test.requesthelper import DummyChannel
|
||||||
@ -102,7 +122,7 @@ class FakeCheckResults(object):
|
|||||||
|
|
||||||
def get_corrupt_shares(self):
|
def get_corrupt_shares(self):
|
||||||
# returns a list of (IServer, storage_index, sharenum)
|
# returns a list of (IServer, storage_index, sharenum)
|
||||||
return [(FakeServer(), "<fake-si>", 0)]
|
return [(FakeServer(), b"<fake-si>", 0)]
|
||||||
|
|
||||||
|
|
||||||
@implementer(ICheckAndRepairResults)
|
@implementer(ICheckAndRepairResults)
|
||||||
@ -141,18 +161,18 @@ class WebResultsRendering(unittest.TestCase):
|
|||||||
sb = StorageFarmBroker(True, None, EMPTY_CLIENT_CONFIG)
|
sb = StorageFarmBroker(True, None, EMPTY_CLIENT_CONFIG)
|
||||||
# s.get_name() (the "short description") will be "v0-00000000".
|
# s.get_name() (the "short description") will be "v0-00000000".
|
||||||
# s.get_longname() will include the -long suffix.
|
# s.get_longname() will include the -long suffix.
|
||||||
servers = [("v0-00000000-long", "\x00"*20, "peer-0"),
|
servers = [(b"v0-00000000-long", b"\x00"*20, "peer-0"),
|
||||||
("v0-ffffffff-long", "\xff"*20, "peer-f"),
|
(b"v0-ffffffff-long", b"\xff"*20, "peer-f"),
|
||||||
("v0-11111111-long", "\x11"*20, "peer-11")]
|
(b"v0-11111111-long", b"\x11"*20, "peer-11")]
|
||||||
for (key_s, binary_tubid, nickname) in servers:
|
for (key_s, binary_tubid, nickname) in servers:
|
||||||
server_id = key_s
|
server_id = key_s
|
||||||
tubid_b32 = base32.b2a(binary_tubid)
|
tubid_b32 = base32.b2a(binary_tubid)
|
||||||
furl = "pb://%s@nowhere/fake" % tubid_b32
|
furl = b"pb://%s@nowhere/fake" % tubid_b32
|
||||||
ann = { "version": 0,
|
ann = { "version": 0,
|
||||||
"service-name": "storage",
|
"service-name": "storage",
|
||||||
"anonymous-storage-FURL": furl,
|
"anonymous-storage-FURL": furl,
|
||||||
"permutation-seed-base32": "",
|
"permutation-seed-base32": "",
|
||||||
"nickname": unicode(nickname),
|
"nickname": str(nickname),
|
||||||
"app-versions": {}, # need #466 and v2 introducer
|
"app-versions": {}, # need #466 and v2 introducer
|
||||||
"my-version": "ver",
|
"my-version": "ver",
|
||||||
"oldest-supported": "oldest",
|
"oldest-supported": "oldest",
|
||||||
@ -174,11 +194,11 @@ class WebResultsRendering(unittest.TestCase):
|
|||||||
lcr = web_check_results.LiteralCheckResultsRendererElement()
|
lcr = web_check_results.LiteralCheckResultsRendererElement()
|
||||||
|
|
||||||
html = self.render_element(lcr)
|
html = self.render_element(lcr)
|
||||||
self.failUnlessIn("Literal files are always healthy", html)
|
self.failUnlessIn(b"Literal files are always healthy", html)
|
||||||
|
|
||||||
html = self.render_element(lcr, args={"return_to": ["FOOURL"]})
|
html = self.render_element(lcr, args={"return_to": ["FOOURL"]})
|
||||||
self.failUnlessIn("Literal files are always healthy", html)
|
self.failUnlessIn(b"Literal files are always healthy", html)
|
||||||
self.failUnlessIn('<a href="FOOURL">Return to file.</a>', html)
|
self.failUnlessIn(b'<a href="FOOURL">Return to file.</a>', html)
|
||||||
|
|
||||||
c = self.create_fake_client()
|
c = self.create_fake_client()
|
||||||
lcr = web_check_results.LiteralCheckResultsRenderer(c)
|
lcr = web_check_results.LiteralCheckResultsRenderer(c)
|
||||||
@ -192,11 +212,11 @@ class WebResultsRendering(unittest.TestCase):
|
|||||||
def test_check(self):
|
def test_check(self):
|
||||||
c = self.create_fake_client()
|
c = self.create_fake_client()
|
||||||
sb = c.storage_broker
|
sb = c.storage_broker
|
||||||
serverid_1 = "\x00"*20
|
serverid_1 = b"\x00"*20
|
||||||
serverid_f = "\xff"*20
|
serverid_f = b"\xff"*20
|
||||||
server_1 = sb.get_stub_server(serverid_1)
|
server_1 = sb.get_stub_server(serverid_1)
|
||||||
server_f = sb.get_stub_server(serverid_f)
|
server_f = sb.get_stub_server(serverid_f)
|
||||||
u = uri.CHKFileURI("\x00"*16, "\x00"*32, 3, 10, 1234)
|
u = uri.CHKFileURI(b"\x00"*16, b"\x00"*32, 3, 10, 1234)
|
||||||
data = { "count_happiness": 8,
|
data = { "count_happiness": 8,
|
||||||
"count_shares_needed": 3,
|
"count_shares_needed": 3,
|
||||||
"count_shares_expected": 9,
|
"count_shares_expected": 9,
|
||||||
@ -260,7 +280,7 @@ class WebResultsRendering(unittest.TestCase):
|
|||||||
self.failUnlessIn("Not Recoverable! : rather dead", s)
|
self.failUnlessIn("Not Recoverable! : rather dead", s)
|
||||||
|
|
||||||
html = self.render_element(w, args={"return_to": ["FOOURL"]})
|
html = self.render_element(w, args={"return_to": ["FOOURL"]})
|
||||||
self.failUnlessIn('<a href="FOOURL">Return to file/directory.</a>',
|
self.failUnlessIn(b'<a href="FOOURL">Return to file/directory.</a>',
|
||||||
html)
|
html)
|
||||||
|
|
||||||
w = web_check_results.CheckResultsRenderer(c, cr)
|
w = web_check_results.CheckResultsRenderer(c, cr)
|
||||||
@ -301,9 +321,9 @@ class WebResultsRendering(unittest.TestCase):
|
|||||||
def test_check_and_repair(self):
|
def test_check_and_repair(self):
|
||||||
c = self.create_fake_client()
|
c = self.create_fake_client()
|
||||||
sb = c.storage_broker
|
sb = c.storage_broker
|
||||||
serverid_1 = "\x00"*20
|
serverid_1 = b"\x00"*20
|
||||||
serverid_f = "\xff"*20
|
serverid_f = b"\xff"*20
|
||||||
u = uri.CHKFileURI("\x00"*16, "\x00"*32, 3, 10, 1234)
|
u = uri.CHKFileURI(b"\x00"*16, b"\x00"*32, 3, 10, 1234)
|
||||||
|
|
||||||
data = { "count_happiness": 5,
|
data = { "count_happiness": 5,
|
||||||
"count_shares_needed": 3,
|
"count_shares_needed": 3,
|
||||||
@ -419,21 +439,21 @@ class WebResultsRendering(unittest.TestCase):
|
|||||||
|
|
||||||
|
|
||||||
def test_deep_check_renderer(self):
|
def test_deep_check_renderer(self):
|
||||||
status = check_results.DeepCheckResults("fake-root-si")
|
status = check_results.DeepCheckResults(b"fake-root-si")
|
||||||
status.add_check(
|
status.add_check(
|
||||||
FakeCheckResults("<unhealthy/unrecoverable>", False, False),
|
FakeCheckResults(b"<unhealthy/unrecoverable>", False, False),
|
||||||
(u"fake", u"unhealthy", u"unrecoverable")
|
(u"fake", u"unhealthy", u"unrecoverable")
|
||||||
)
|
)
|
||||||
status.add_check(
|
status.add_check(
|
||||||
FakeCheckResults("<healthy/recoverable>", True, True),
|
FakeCheckResults(b"<healthy/recoverable>", True, True),
|
||||||
(u"fake", u"healthy", u"recoverable")
|
(u"fake", u"healthy", u"recoverable")
|
||||||
)
|
)
|
||||||
status.add_check(
|
status.add_check(
|
||||||
FakeCheckResults("<healthy/unrecoverable>", True, False),
|
FakeCheckResults(b"<healthy/unrecoverable>", True, False),
|
||||||
(u"fake", u"healthy", u"unrecoverable")
|
(u"fake", u"healthy", u"unrecoverable")
|
||||||
)
|
)
|
||||||
status.add_check(
|
status.add_check(
|
||||||
FakeCheckResults("<unhealthy/unrecoverable>", False, True),
|
FakeCheckResults(b"<unhealthy/unrecoverable>", False, True),
|
||||||
(u"fake", u"unhealthy", u"recoverable")
|
(u"fake", u"unhealthy", u"recoverable")
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -512,18 +532,18 @@ class WebResultsRendering(unittest.TestCase):
|
|||||||
)
|
)
|
||||||
|
|
||||||
def test_deep_check_and_repair_renderer(self):
|
def test_deep_check_and_repair_renderer(self):
|
||||||
status = check_results.DeepCheckAndRepairResults("")
|
status = check_results.DeepCheckAndRepairResults(b"")
|
||||||
|
|
||||||
status.add_check_and_repair(
|
status.add_check_and_repair(
|
||||||
FakeCheckAndRepairResults("attempted/success", True, True),
|
FakeCheckAndRepairResults(b"attempted/success", True, True),
|
||||||
(u"attempted", u"success")
|
(u"attempted", u"success")
|
||||||
)
|
)
|
||||||
status.add_check_and_repair(
|
status.add_check_and_repair(
|
||||||
FakeCheckAndRepairResults("attempted/failure", True, False),
|
FakeCheckAndRepairResults(b"attempted/failure", True, False),
|
||||||
(u"attempted", u"failure")
|
(u"attempted", u"failure")
|
||||||
)
|
)
|
||||||
status.add_check_and_repair(
|
status.add_check_and_repair(
|
||||||
FakeCheckAndRepairResults("unattempted/failure", False, False),
|
FakeCheckAndRepairResults(b"unattempted/failure", False, False),
|
||||||
(u"unattempted", u"failure")
|
(u"unattempted", u"failure")
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -662,7 +682,7 @@ class BalancingAct(GridTestMixin, unittest.TestCase):
|
|||||||
"This little printing function is only meant for < 26 servers"
|
"This little printing function is only meant for < 26 servers"
|
||||||
shares_chart = {}
|
shares_chart = {}
|
||||||
names = dict(zip([ss.my_nodeid
|
names = dict(zip([ss.my_nodeid
|
||||||
for _,ss in self.g.servers_by_number.iteritems()],
|
for _,ss in self.g.servers_by_number.items()],
|
||||||
letters))
|
letters))
|
||||||
for shnum, serverid, _ in self.find_uri_shares(uri):
|
for shnum, serverid, _ in self.find_uri_shares(uri):
|
||||||
shares_chart.setdefault(shnum, []).append(names[serverid])
|
shares_chart.setdefault(shnum, []).append(names[serverid])
|
||||||
@ -676,8 +696,8 @@ class BalancingAct(GridTestMixin, unittest.TestCase):
|
|||||||
c0.encoding_params['n'] = 4
|
c0.encoding_params['n'] = 4
|
||||||
c0.encoding_params['k'] = 3
|
c0.encoding_params['k'] = 3
|
||||||
|
|
||||||
DATA = "data" * 100
|
DATA = b"data" * 100
|
||||||
d = c0.upload(Data(DATA, convergence=""))
|
d = c0.upload(Data(DATA, convergence=b""))
|
||||||
def _stash_immutable(ur):
|
def _stash_immutable(ur):
|
||||||
self.imm = c0.create_node_from_uri(ur.get_uri())
|
self.imm = c0.create_node_from_uri(ur.get_uri())
|
||||||
self.uri = self.imm.get_uri()
|
self.uri = self.imm.get_uri()
|
||||||
@ -742,13 +762,13 @@ class AddLease(GridTestMixin, unittest.TestCase):
|
|||||||
c0 = self.g.clients[0]
|
c0 = self.g.clients[0]
|
||||||
c0.encoding_params['happy'] = 1
|
c0.encoding_params['happy'] = 1
|
||||||
self.uris = {}
|
self.uris = {}
|
||||||
DATA = "data" * 100
|
DATA = b"data" * 100
|
||||||
d = c0.upload(Data(DATA, convergence=""))
|
d = c0.upload(Data(DATA, convergence=b""))
|
||||||
def _stash_immutable(ur):
|
def _stash_immutable(ur):
|
||||||
self.imm = c0.create_node_from_uri(ur.get_uri())
|
self.imm = c0.create_node_from_uri(ur.get_uri())
|
||||||
d.addCallback(_stash_immutable)
|
d.addCallback(_stash_immutable)
|
||||||
d.addCallback(lambda ign:
|
d.addCallback(lambda ign:
|
||||||
c0.create_mutable_file(MutableData("contents")))
|
c0.create_mutable_file(MutableData(b"contents")))
|
||||||
def _stash_mutable(node):
|
def _stash_mutable(node):
|
||||||
self.mut = node
|
self.mut = node
|
||||||
d.addCallback(_stash_mutable)
|
d.addCallback(_stash_mutable)
|
||||||
@@ -834,8 +854,8 @@ class TooParallel(GridTestMixin, unittest.TestCase):
                 "max_segment_size": 5,
             }
             self.uris = {}
-            DATA = "data" * 100 # 400/5 = 80 blocks
-            return self.c0.upload(Data(DATA, convergence=""))
+            DATA = b"data" * 100 # 400/5 = 80 blocks
+            return self.c0.upload(Data(DATA, convergence=b""))
         d.addCallback(_start)
         def _do_check(ur):
             n = self.c0.create_node_from_uri(ur.get_uri())
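The comment's arithmetic checks out: 400 bytes of data cut into 5-byte segments is 80 blocks. A worked version:

DATA = b"data" * 100
max_segment_size = 5

assert len(DATA) == 400
assert len(DATA) // max_segment_size == 80  # the "80 blocks" in the comment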
@@ -8,7 +8,7 @@ from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
 
-from future.utils import PY2
+from future.utils import PY2, native_str
 if PY2:
     from builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
 
@@ -44,10 +44,9 @@ class Python3PortingEffortTests(SynchronousTestCase):
             ),
         ),
     )
-    if PY2:
-        test_finished_porting.skip = "For some reason todo isn't working on Python 2 now"
-    else:
-        test_finished_porting.todo = "https://tahoe-lafs.org/trac/tahoe-lafs/milestone/Support%20Python%203 should be completed"
+    test_finished_porting.todo = native_str(
+        "https://tahoe-lafs.org/trac/tahoe-lafs/milestone/Support%20Python%203 should be completed",
+    )
 
     def test_ported_modules_exist(self):
         """
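Under "from __future__ import unicode_literals" every literal in the module is text, but trial's .todo attribute apparently wants the platform's native str on Python 2 (the old branch skipped the test for exactly that reason). future.utils.native_str is the built-in str on both interpreters, so wrapping the URL sidesteps the branching. A sketch with an illustrative URL:

from __future__ import unicode_literals
from future.utils import native_str

todo = native_str("https://example.invalid/milestone")  # hypothetical URL
print(type(todo))  # <type 'str'> on Python 2, <class 'str'> on Python 3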
@@ -11,7 +11,7 @@ from __future__ import unicode_literals
 
 from future.utils import PY2
 if PY2:
-    from future.builtins import filter, map, zip, ascii, chr, dict, hex, input, next, oct, open, pow, round, super, bytes, int, list, object, range, str, max, min  # noqa: F401
+    from future.builtins import filter, map, zip, ascii, chr, dict, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min  # noqa: F401
 
 import os
 from twisted.trial import unittest
@@ -9,20 +9,24 @@ from __future__ import unicode_literals
 
 from future.utils import PY2
 if PY2:
+    # open is not here because we want to use native strings on Py2
     from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
 import six
 import os, time, sys
 import yaml
+import json
 
 from twisted.trial import unittest
 
 from allmydata.util import idlib, mathutil
 from allmydata.util import fileutil
+from allmydata.util import jsonbytes
 from allmydata.util import pollmixin
 from allmydata.util import yamlutil
 from allmydata.util.fileutil import EncryptedTemporaryFile
 from allmydata.test.common_util import ReallyEqualMixin
 
 
 if six.PY3:
     long = int
 
@@ -469,3 +473,29 @@ class YAML(unittest.TestCase):
         self.assertIsInstance(back[0], str)
         self.assertIsInstance(back[1], str)
         self.assertIsInstance(back[2], str)
+
+
+class JSONBytes(unittest.TestCase):
+    """Tests for BytesJSONEncoder."""
+
+    def test_encode_bytes(self):
+        """BytesJSONEncoder can encode bytes."""
+        data = {
+            b"hello": [1, b"cd"],
+        }
+        expected = {
+            u"hello": [1, u"cd"],
+        }
+        # Bytes get passed through as if they were UTF-8 Unicode:
+        encoded = jsonbytes.dumps(data)
+        self.assertEqual(json.loads(encoded), expected)
+        self.assertEqual(jsonbytes.loads(encoded), expected)
+
+
+    def test_encode_unicode(self):
+        """BytesJSONEncoder encodes Unicode string as usual."""
+        expected = {
+            u"hello": [1, u"cd"],
+        }
+        encoded = jsonbytes.dumps(expected)
+        self.assertEqual(json.loads(encoded), expected)
@@ -76,6 +76,7 @@ PORTED_MODULES = [
     "allmydata.util.hashutil",
     "allmydata.util.humanreadable",
     "allmydata.util.iputil",
+    "allmydata.util.jsonbytes",
     "allmydata.util.log",
     "allmydata.util.mathutil",
     "allmydata.util.namespace",
@@ -89,9 +90,13 @@ PORTED_MODULES = [
 ]
 
 PORTED_TEST_MODULES = [
+    "allmydata.test.mutable.test_datahandle",
+    "allmydata.test.mutable.test_different_encoding",
+    "allmydata.test.mutable.test_filehandle",
     "allmydata.test.test_abbreviate",
     "allmydata.test.test_base32",
     "allmydata.test.test_base62",
+    "allmydata.test.test_checker",
     "allmydata.test.test_codec",
     "allmydata.test.test_common_util",
     "allmydata.test.test_configutil",
src/allmydata/util/jsonbytes.py (new file, 51 lines)
@@ -0,0 +1,51 @@
+"""
+A JSON encoder that can serialize bytes.
+
+Ported to Python 3.
+"""
+
+from __future__ import unicode_literals
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
+
+import json
+
+
+class BytesJSONEncoder(json.JSONEncoder):
+    """
+    A JSON encoder that can also encode bytes.
+
+    The bytes are assumed to be UTF-8 encoded Unicode strings.
+    """
+    def default(self, o):
+        if isinstance(o, bytes):
+            return o.decode("utf-8")
+        return json.JSONEncoder.default(self, o)
+
+
+def dumps(obj, *args, **kwargs):
+    """Encode to JSON, supporting bytes as keys or values.
+
+    The bytes are assumed to be UTF-8 encoded Unicode strings.
+    """
+    if isinstance(obj, dict):
+        new_obj = {}
+        for k, v in obj.items():
+            if isinstance(k, bytes):
+                k = k.decode("utf-8")
+            new_obj[k] = v
+        obj = new_obj
+    return json.dumps(obj, cls=BytesJSONEncoder, *args, **kwargs)
+
+
+# To make this module drop-in compatible with json module:
+loads = json.loads
+
+
+__all__ = ["dumps", "loads"]
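One limitation worth knowing about, implied by the decode("utf-8") calls above: bytes that are not valid UTF-8 cannot be serialized. A small sketch of the failure mode, with hypothetical input:

from allmydata.util import jsonbytes

try:
    jsonbytes.dumps({b"\xff\xfe": 1})  # not valid UTF-8
except UnicodeDecodeError as e:
    print("non-UTF-8 bytes are rejected:", e)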
@@ -1,6 +1,6 @@
+from future.builtins import str
 
 import time
-import json
 
 from twisted.web import (
     http,
@@ -31,6 +31,7 @@ from allmydata.interfaces import (
 from allmydata.util import (
     base32,
     dictutil,
+    jsonbytes as json,  # Supporting dumping bytes
 )
 
 
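Importing jsonbytes under the name json is the whole trick here: every existing json.dumps() call site in the module gains bytes support without being rewritten. A sketch of the effect, with illustrative values:

from allmydata.util import jsonbytes as json

# Bytes keys and values come out as ordinary JSON strings:
print(json.dumps({b"storage-index": b"2kgkfwyjmvkwyuhki3aonxm4oe"}))
# -> {"storage-index": "2kgkfwyjmvkwyuhki3aonxm4oe"}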
@@ -200,7 +201,7 @@ class ResultsBase(object):
         return tags.ul(r)
 
     def _html(self, s):
-        if isinstance(s, (str, unicode)):
+        if isinstance(s, (bytes, str)):
             return html.escape(s)
         assert isinstance(s, (list, tuple))
         return [html.escape(w) for w in s]
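The name unicode no longer exists on Python 3, where text is plain str and raw strings are bytes; checking (bytes, str) keeps _html accepting either kind of string on both interpreters. A standalone illustration:

def is_stringish(s):
    return isinstance(s, (bytes, str))

assert is_stringish(b"raw summary")
assert is_stringish(u"text summary")
assert not is_stringish(["a", "list"])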
@@ -522,7 +523,7 @@ class DeepCheckResultsRendererElement(Element, ResultsBase, ReloadMixin):
             summary = cr.get_summary()
             if summary:
                 summary_text = ": " + summary
-            summary_text += " [SI: %s]" % cr.get_storage_index_string()
+            summary_text += " [SI: %s]" % cr.get_storage_index_string().decode("ascii")
             problems.append({
                 # Not sure self._join_pathstring(path) is the
                 # right thing to use here.
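The added decode("ascii") matters because interpolating bytes into a text string on Python 3 produces the repr rather than the contents. An illustration (the storage index value is made up):

si = b"2kgkfwyjmvkwyuhki3aonxm4oe"

print(" [SI: %s]" % si)                  # " [SI: b'2kgk...']" on Python 3 (wrong)
print(" [SI: %s]" % si.decode("ascii"))  # " [SI: 2kgkfwyjmvkwyuhki3aonxm4oe]"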
@@ -1,3 +1,5 @@
+from future.utils import PY2
+from past.builtins import unicode
 
 import time
 import json
@@ -9,9 +11,17 @@ from twisted.web import (
     server,
     template,
 )
+from twisted.web.iweb import IRequest as ITwistedRequest
 from twisted.python import log
-from nevow import appserver
-from nevow.inevow import IRequest
+if PY2:
+    from nevow.appserver import DefaultExceptionHandler
+    from nevow.inevow import IRequest as INevowRequest
+else:
+    class DefaultExceptionHandler:
+        def __init__(self, *args, **kwargs):
+            raise NotImplementedError("Still not ported to Python 3")
+    INevowRequest = None
 
 from allmydata import blacklist
 from allmydata.interfaces import (
     EmptyPathnameComponentError,
@@ -118,7 +128,10 @@ def parse_offset_arg(offset):
 
 
 def get_root(ctx_or_req):
-    req = IRequest(ctx_or_req)
+    if PY2:
+        req = INevowRequest(ctx_or_req)
+    else:
+        req = ITwistedRequest(ctx_or_req)
     depth = len(req.prepath) + len(req.postpath)
     link = "/".join([".."] * depth)
     return link
@@ -319,9 +332,9 @@ def humanize_failure(f):
     return humanize_exception(f.value)
 
 
-class MyExceptionHandler(appserver.DefaultExceptionHandler, object):
+class MyExceptionHandler(DefaultExceptionHandler, object):
     def simple(self, ctx, text, code=http.BAD_REQUEST):
-        req = IRequest(ctx)
+        req = INevowRequest(ctx)
         req.setResponseCode(code)
         #req.responseHeaders.setRawHeaders("content-encoding", [])
         #req.responseHeaders.setRawHeaders("content-disposition", [])
@@ -347,17 +360,17 @@ class MyExceptionHandler(appserver.DefaultExceptionHandler, object):
         # twisted.web.server.Request.render() has support for transforming
         # this into an appropriate 501 NOT_IMPLEMENTED or 405 NOT_ALLOWED
         # return code, but nevow does not.
-        req = IRequest(ctx)
+        req = INevowRequest(ctx)
         method = req.method
         return self.simple(ctx,
                            "I don't know how to treat a %s request." % method,
                            http.NOT_IMPLEMENTED)
-        req = IRequest(ctx)
+        req = INevowRequest(ctx)
         accept = req.getHeader("accept")
         if not accept:
             accept = "*/*"
         if "*/*" in accept or "text/*" in accept or "text/html" in accept:
-            super = appserver.DefaultExceptionHandler
+            super = DefaultExceptionHandler
             return super.renderHTTP_exception(self, ctx, f)
         # use plain text
         traceback = f.getTraceback()
@@ -1,6 +1,10 @@
+from future.utils import PY2
 import time
-from nevow import url
+if PY2:
+    from nevow import url
+else:
+    # This module still needs porting to Python 3
+    url = None
 from twisted.web.template import (
     renderer,
     tags as T,
@@ -160,12 +164,12 @@ class ReloadMixin(object):
     @renderer
     def reload(self, req, tag):
         if self.monitor.is_finished():
-            return ""
+            return b""
         # url.gethere would break a proxy, so the correct thing to do is
         # req.path[-1] + queryargs
         ophandle = req.prepath[-1]
-        reload_target = ophandle + "?output=html"
-        cancel_target = ophandle + "?t=cancel"
+        reload_target = ophandle + b"?output=html"
+        cancel_target = ophandle + b"?t=cancel"
         cancel_button = T.form(T.input(type="submit", value="Cancel"),
                                action=cancel_target,
                                method="POST",
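Twisted's request prepath segments are byte strings, and bytes + str raises TypeError on Python 3, so the query-string literals have to be bytes as well. A sketch (the ophandle value is made up):

ophandle = b"128-abcd"  # e.g. req.prepath[-1]
reload_target = ophandle + b"?output=html"
assert reload_target == b"128-abcd?output=html"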