Merge pull request #892 from tahoe-lafs/3500.mutable-tests-python-3

Port mutable tests to Python 3, part 4/4

Fixes ticket:3500
Itamar Turner-Trauring 2020-11-11 14:46:27 -05:00 committed by GitHub
commit 4f10ecd9f7
12 changed files with 151 additions and 82 deletions
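The hunks below apply a small, repeating set of Python 2-to-3 changes: bytes literals (b"...") for mutable-file contents, floor division (//) for size arithmetic, next(it) instead of it.next(), and list(...) around dict views and range() wherever the result is indexed, sliced, or the dict is mutated during iteration. A minimal standalone sketch of those patterns (illustrative only, not code from this commit):

    # Illustrative only -- not code from this commit.
    shares = {0: b"share-data-0", 1: b"share-data-1"}

    # dict.items() is a lazy view on Python 3; materialize it with list()
    # before mutating the dict (or before indexing/slicing the result).
    for shnum, data in list(shares.items()):
        shares.pop(shnum, None)

    # Mutable-file contents are bytes, so fixtures switch "" -> b"" and
    # "".join(chunks) -> b"".join(chunks).
    chunks = [b"test ", b"data"]
    assert b"".join(chunks) == b"test data"

    # "/" is true division on Python 3; size arithmetic uses "//" instead.
    assert (3 * 131072) // len(b"testdata ") == 43690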

newsfragments/3500.minor (new file, 0 lines)


@@ -1141,7 +1141,7 @@ class MutableFileVersion(object):
 start_segments = {} # shnum -> start segment
 end_segments = {} # shnum -> end segment
 blockhashes = {} # shnum -> blockhash tree
-for (shnum, original_data) in update_data.iteritems():
+for (shnum, original_data) in update_data.items():
     data = [d[1] for d in original_data if d[0] == self._version]
     # data is [(blockhashes,start,end)..]


@@ -702,9 +702,10 @@ class Publish(object):
 self.log("Pushing segment %d of %d" % (segnum + 1, self.num_segments))
-# XXX: Why does this return a list?
 data = self.data.read(segsize)
-data = b"".join(data)
+if not isinstance(data, bytes):
+    # XXX: Why does this return a list?
+    data = b"".join(data)
 assert len(data) == segsize, len(data)
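For context on the hunk above: the uploadable's read() can hand back a list of byte chunks rather than a single bytes object, which is what the XXX comment is questioning; the new guard only joins when needed. A hypothetical uploadable illustrating the shape of that return value (names are illustrative, not from the tahoe-lafs codebase):

    # Hypothetical stand-in for an uploadable whose read() returns a list
    # of chunks; illustrative only.
    class ChunkedUploadable(object):
        def __init__(self, data):
            self._data = data
            self._pos = 0

        def read(self, length):
            chunk = self._data[self._pos:self._pos + length]
            self._pos += length
            return [chunk]  # a one-element list, not bytes

    data = ChunkedUploadable(b"abcdef").read(4)
    if not isinstance(data, bytes):
        data = b"".join(data)  # normalize, mirroring push_segment above
    assert data == b"abcd"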
@@ -1327,7 +1328,7 @@ class TransformingUploadable(object):
 # are we in state 0?
 self.log("reading %d bytes" % length)
-old_start_data = ""
+old_start_data = b""
 old_data_length = self._first_segment_offset - self._read_marker
 if old_data_length > 0:
     if old_data_length > length:
@@ -1345,7 +1346,7 @@ class TransformingUploadable(object):
 # to pad the end of the data with data from our last segment.
 old_end_length = length - \
     (self._newdata.get_size() - self._newdata.pos())
-old_end_data = ""
+old_end_data = b""
 if old_end_length > 0:
     self.log("reading %d bytes of old end data" % old_end_length)


@@ -158,6 +158,7 @@ class ServerMap(object):
 corrupted or badly signed) so that a repair operation can do the
 test-and-set using it as a reference.
 """
+assert isinstance(checkstring, bytes)
 key = (server, shnum) # record checkstring
 self._bad_shares[key] = checkstring
 self._known_shares.pop(key, None)


@@ -180,10 +180,10 @@ def dump_mutable_share(options):
 share_type = "unknown"
 f.seek(m.DATA_OFFSET)
 version = f.read(1)
-if version == "\x00":
+if version == b"\x00":
     # this slot contains an SMDF share
     share_type = "SDMF"
-elif version == "\x01":
+elif version == b"\x01":
     share_type = "MDMF"
 f.close()
@@ -714,10 +714,10 @@ def describe_share(abs_sharefile, si_s, shnum_s, now, out):
 share_type = "unknown"
 f.seek(m.DATA_OFFSET)
 version = f.read(1)
-if version == "\x00":
+if version == b"\x00":
     # this slot contains an SMDF share
     share_type = "SDMF"
-elif version == "\x01":
+elif version == b"\x01":
     share_type = "MDMF"
 if share_type == "SDMF":
@@ -920,7 +920,7 @@ def corrupt_share(options):
 f.seek(m.DATA_OFFSET)
 data = f.read(2000)
 # make sure this slot contains an SMDF share
-assert data[0] == "\x00", "non-SDMF mutable shares not supported"
+assert data[0] == b"\x00", "non-SDMF mutable shares not supported"
 f.close()
 (version, ig_seqnum, ig_roothash, ig_IV, ig_k, ig_N, ig_segsize,


@@ -1,3 +1,15 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
 from twisted.trial import unittest
 from allmydata.interfaces import IRepairResults, ICheckAndRepairResults
 from allmydata.monitor import Monitor
@@ -239,7 +251,7 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin):
 def _get_readcap(res):
     self._fn3 = self._fn.get_readonly()
     # also delete some shares
-    for peerid,shares in self._storage._peers.items():
+    for peerid,shares in list(self._storage._peers.items()):
         shares.pop(0, None)
 d.addCallback(_get_readcap)
 d.addCallback(lambda res: self._fn3.check_and_repair(Monitor()))
@@ -258,7 +270,7 @@ class Repair(unittest.TestCase, PublishMixin, ShouldFailMixin):
 # In the buggy version, the check that precedes the retrieve+publish
 # cycle uses MODE_READ, instead of MODE_REPAIR, and fails to get the
 # privkey that repair needs.
-d = self.publish_sdmf("")
+d = self.publish_sdmf(b"")
 def _delete_one_share(ign):
     shares = self._storage._peers
     for peerid in shares:


@@ -1,4 +1,14 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import print_function
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import unicode_literals
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
 from six.moves import cStringIO as StringIO
 from twisted.trial import unittest
@@ -35,7 +45,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
 def abbrev_verinfo_dict(self, verinfo_d):
     output = {}
-    for verinfo,value in verinfo_d.items():
+    for verinfo,value in list(verinfo_d.items()):
         (seqnum, root_hash, IV, segsize, datalength, k, N, prefix,
          offsets_tuple) = verinfo
         output["%d-%s" % (seqnum, base32.b2a(root_hash)[:4])] = value
@@ -54,7 +64,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
 r = Retrieve(self._fn, self._storage_broker, servermap, version)
 c = consumer.MemoryConsumer()
 d = r.download(consumer=c)
-d.addCallback(lambda mc: "".join(mc.chunks))
+d.addCallback(lambda mc: b"".join(mc.chunks))
 return d
@@ -88,7 +98,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
 def test_all_shares_vanished(self):
     d = self.make_servermap()
     def _remove_shares(servermap):
-        for shares in self._storage._peers.values():
+        for shares in list(self._storage._peers.values()):
             shares.clear()
         d1 = self.shouldFail(NotEnoughSharesError,
                              "test_all_shares_vanished",
@@ -103,7 +113,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
 d = self.make_servermap()
 def _remove_shares(servermap):
     self._version = servermap.best_recoverable_version()
-    for shares in self._storage._peers.values()[2:]:
+    for shares in list(self._storage._peers.values())[2:]:
         shares.clear()
     return self.make_servermap(servermap)
 d.addCallback(_remove_shares)
@@ -317,7 +327,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
 N = self._fn.get_total_shares()
 d = defer.succeed(None)
 d.addCallback(corrupt, self._storage, "pubkey",
-              shnums_to_corrupt=range(0, N-k))
+              shnums_to_corrupt=list(range(0, N-k)))
 d.addCallback(lambda res: self.make_servermap())
 def _do_retrieve(servermap):
     self.failUnless(servermap.get_problems())
@@ -328,7 +338,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
     c = consumer.MemoryConsumer()
     return r.download(c)
 d.addCallback(_do_retrieve)
-d.addCallback(lambda mc: "".join(mc.chunks))
+d.addCallback(lambda mc: b"".join(mc.chunks))
 d.addCallback(lambda new_contents:
               self.failUnlessEqual(new_contents, self.CONTENTS))
 return d
@@ -340,7 +350,7 @@ class Roundtrip(unittest.TestCase, testutil.ShouldFailMixin, PublishMixin):
 else:
     d = defer.succeed(None)
 d.addCallback(lambda ignored:
-              corrupt(None, self._storage, offset, range(5)))
+              corrupt(None, self._storage, offset, list(range(5))))
 d.addCallback(lambda ignored:
               self.make_servermap())
 def _do_retrieve(servermap):


@@ -1,3 +1,16 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
 from twisted.trial import unittest
 from twisted.internet import defer
 from allmydata.monitor import Monitor
@@ -36,7 +49,7 @@ class Servermap(unittest.TestCase, PublishMixin):
 self.failUnlessEqual(sm.recoverable_versions(), set([best]))
 self.failUnlessEqual(len(sm.shares_available()), 1)
 self.failUnlessEqual(sm.shares_available()[best], (num_shares, 3, 10))
-shnum, servers = sm.make_sharemap().items()[0]
+shnum, servers = list(sm.make_sharemap().items())[0]
 server = list(servers)[0]
 self.failUnlessEqual(sm.version_on_server(server, shnum), best)
 self.failUnlessEqual(sm.version_on_server(server, 666), None)
@@ -83,7 +96,7 @@ class Servermap(unittest.TestCase, PublishMixin):
 # create a new file, which is large enough to knock the privkey out
 # of the early part of the file
-LARGE = "These are Larger contents" * 200 # about 5KB
+LARGE = b"These are Larger contents" * 200 # about 5KB
 LARGE_uploadable = MutableData(LARGE)
 d.addCallback(lambda res: self._nodemaker.create_mutable_file(LARGE_uploadable))
 def _created(large_fn):
@@ -112,7 +125,7 @@ class Servermap(unittest.TestCase, PublishMixin):
 for (shnum, server, timestamp) in shares:
     if shnum < 5:
         self._corrupted.add( (server, shnum) )
-        sm.mark_bad_share(server, shnum, "")
+        sm.mark_bad_share(server, shnum, b"")
 return self.update_servermap(sm, MODE_WRITE)
 d.addCallback(_made_map)
 def _check_map(sm):
@@ -160,7 +173,7 @@ class Servermap(unittest.TestCase, PublishMixin):
 best = sm.best_recoverable_version()
 self.failUnlessEqual(best, None)
 self.failUnlessEqual(len(sm.shares_available()), 1)
-self.failUnlessEqual(sm.shares_available().values()[0], (2,3,10) )
+self.failUnlessEqual(list(sm.shares_available().values())[0], (2,3,10) )
 return sm

 def test_not_quite_enough_shares(self):
@@ -218,7 +231,7 @@ class Servermap(unittest.TestCase, PublishMixin):
 # 10 shares
 self.failUnlessEqual(len(sm.update_data), 10)
 # one version
-for data in sm.update_data.itervalues():
+for data in sm.update_data.values():
     self.failUnlessEqual(len(data), 1)
 d.addCallback(_check_servermap)
 return d


@@ -1,4 +1,14 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import print_function
+from __future__ import division
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
 import re
 from twisted.trial import unittest
@@ -23,10 +33,10 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
 self.c = self.g.clients[0]
 self.nm = self.c.nodemaker
 # self.data should be at least three segments long.
-td = "testdata "
-self.data = td*(int(3*SEGSIZE/len(td))+10) # currently about 400kB
+td = b"testdata "
+self.data = td*(int(3*SEGSIZE//len(td))+10) # currently about 400kB
 assert len(self.data) > 3*SEGSIZE
-self.small_data = "test data" * 10 # 90 B; SDMF
+self.small_data = b"test data" * 10 # 90 B; SDMF

 def do_upload_sdmf(self):
@@ -68,42 +78,42 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
 def test_append(self):
     # We should be able to append data to a mutable file and get
     # what we expect.
-    return self._test_replace(len(self.data), "appended")
+    return self._test_replace(len(self.data), b"appended")

 def test_replace_middle(self):
     # We should be able to replace data in the middle of a mutable
     # file and get what we expect back.
-    return self._test_replace(100, "replaced")
+    return self._test_replace(100, b"replaced")

 def test_replace_beginning(self):
     # We should be able to replace data at the beginning of the file
     # without truncating the file
-    return self._test_replace(0, "beginning")
+    return self._test_replace(0, b"beginning")

 def test_replace_segstart1(self):
-    return self._test_replace(128*1024+1, "NNNN")
+    return self._test_replace(128*1024+1, b"NNNN")

 def test_replace_zero_length_beginning(self):
-    return self._test_replace(0, "")
+    return self._test_replace(0, b"")

 def test_replace_zero_length_middle(self):
-    return self._test_replace(50, "")
+    return self._test_replace(50, b"")

 def test_replace_zero_length_segstart1(self):
-    return self._test_replace(128*1024+1, "")
+    return self._test_replace(128*1024+1, b"")

 def test_replace_and_extend(self):
     # We should be able to replace data in the middle of a mutable
     # file and extend that mutable file and get what we expect.
-    return self._test_replace(100, "modified " * 100000)
+    return self._test_replace(100, b"modified " * 100000)

 def _check_differences(self, got, expected):
     # displaying arbitrary file corruption is tricky for a
     # 1MB file of repeating data,, so look for likely places
     # with problems and display them separately
-    gotmods = [mo.span() for mo in re.finditer('([A-Z]+)', got)]
-    expmods = [mo.span() for mo in re.finditer('([A-Z]+)', expected)]
+    gotmods = [mo.span() for mo in re.finditer(b'([A-Z]+)', got)]
+    expmods = [mo.span() for mo in re.finditer(b'([A-Z]+)', expected)]
     gotspans = ["%d:%d=%s" % (start,end,got[start:end])
                 for (start,end) in gotmods]
     expspans = ["%d:%d=%s" % (start,end,expected[start:end])
@@ -131,14 +141,15 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
 def test_replace_locations(self):
     # exercise fencepost conditions
-    suspects = range(SEGSIZE-3, SEGSIZE+1)+range(2*SEGSIZE-3, 2*SEGSIZE+1)
+    suspects = list(range(SEGSIZE-3, SEGSIZE+1)) + list(
+        range(2*SEGSIZE-3, 2*SEGSIZE+1))
     letters = iter("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
     d0 = self.do_upload_mdmf()
     def _run(ign):
         expected = self.data
         d = defer.succeed(None)
         for offset in suspects:
-            new_data = letters.next()*2 # "AA", then "BB", etc
+            new_data = next(letters).encode("ascii") * 2 # "AA", then "BB", etc
             expected = expected[:offset]+new_data+expected[offset+2:]
             d.addCallback(lambda ign:
                           self.mdmf_node.get_best_mutable_version())
@@ -164,7 +175,7 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
 # long -- this is 7 segments in the default segment size. So we
 # need to add 2 segments worth of data to push it over a
 # power-of-two boundary.
-segment = "a" * DEFAULT_MAX_SEGMENT_SIZE
+segment = b"a" * DEFAULT_MAX_SEGMENT_SIZE
 new_data = self.data + (segment * 2)
 d0 = self.do_upload_mdmf()
 def _run(ign):
@@ -181,12 +192,12 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
 def test_update_sdmf(self):
     # Running update on a single-segment file should still work.
-    new_data = self.small_data + "appended"
+    new_data = self.small_data + b"appended"
     d0 = self.do_upload_sdmf()
     def _run(ign):
         d = defer.succeed(None)
         d.addCallback(lambda ign: self.sdmf_node.get_best_mutable_version())
-        d.addCallback(lambda mv: mv.update(MutableData("appended"),
+        d.addCallback(lambda mv: mv.update(MutableData(b"appended"),
                                            len(self.small_data)))
         d.addCallback(lambda ign: self.sdmf_node.download_best_version())
         d.addCallback(lambda results:
@@ -199,14 +210,14 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
 # The wrapper should know how to handle the tail segment
 # appropriately.
 replace_offset = len(self.data) - 100
-new_data = self.data[:replace_offset] + "replaced"
-rest_offset = replace_offset + len("replaced")
+new_data = self.data[:replace_offset] + b"replaced"
+rest_offset = replace_offset + len(b"replaced")
 new_data += self.data[rest_offset:]
 d0 = self.do_upload_mdmf()
 def _run(ign):
     d = defer.succeed(None)
     d.addCallback(lambda ign: self.mdmf_node.get_best_mutable_version())
-    d.addCallback(lambda mv: mv.update(MutableData("replaced"),
+    d.addCallback(lambda mv: mv.update(MutableData(b"replaced"),
                                        replace_offset))
     d.addCallback(lambda ign: self.mdmf_node.download_best_version())
     d.addCallback(lambda results:
@@ -218,16 +229,16 @@ class Update(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin):
 def test_multiple_segment_replace(self):
     replace_offset = 2 * DEFAULT_MAX_SEGMENT_SIZE
     new_data = self.data[:replace_offset]
-    new_segment = "a" * DEFAULT_MAX_SEGMENT_SIZE
+    new_segment = b"a" * DEFAULT_MAX_SEGMENT_SIZE
     new_data += 2 * new_segment
-    new_data += "replaced"
+    new_data += b"replaced"
     rest_offset = len(new_data)
     new_data += self.data[rest_offset:]
     d0 = self.do_upload_mdmf()
     def _run(ign):
         d = defer.succeed(None)
         d.addCallback(lambda ign: self.mdmf_node.get_best_mutable_version())
-        d.addCallback(lambda mv: mv.update(MutableData((2 * new_segment) + "replaced"),
+        d.addCallback(lambda mv: mv.update(MutableData((2 * new_segment) + b"replaced"),
                                            replace_offset))
         d.addCallback(lambda ignored: self.mdmf_node.download_best_version())
         d.addCallback(lambda results:


@@ -1,11 +1,16 @@
+"""
+Ported to Python 3.
+"""
 from __future__ import print_function
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import unicode_literals
-import os
-# Python 2 compatibility
 from future.utils import PY2
 if PY2:
-    from future.builtins import str  # noqa: F401
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+import os
 from six.moves import cStringIO as StringIO
 from twisted.internet import defer
@@ -32,8 +37,8 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
 self.set_up_grid()
 self.c = self.g.clients[0]
 self.nm = self.c.nodemaker
-self.data = "test data" * 100000 # about 900 KiB; MDMF
-self.small_data = "test data" * 10 # 90 B; SDMF
+self.data = b"test data" * 100000 # about 900 KiB; MDMF
+self.small_data = b"test data" * 10 # 90 B; SDMF

 def do_upload_mdmf(self, data=None):
@@ -62,7 +67,7 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
 return d

 def do_upload_empty_sdmf(self):
-    d = self.nm.create_mutable_file(MutableData(""))
+    d = self.nm.create_mutable_file(MutableData(b""))
     def _then(n):
         assert isinstance(n, MutableFileNode)
         self.sdmf_zero_length_node = n
@@ -107,9 +112,8 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
 self.failUnless(" total_shares: 10" in lines, output)
 self.failUnless(" segsize: 131073" in lines, output)
 self.failUnless(" datalen: %d" % len(self.data) in lines, output)
-vcap = n.get_verify_cap().to_string()
+vcap = str(n.get_verify_cap().to_string(), "utf-8")
 self.failUnless(" verify-cap: %s" % vcap in lines, output)
 cso = debug.CatalogSharesOptions()
 cso.nodedirs = fso.nodedirs
 cso.stdout = StringIO()
@@ -121,7 +125,7 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
 self.failUnless(oneshare.startswith("MDMF"), oneshare)
 fields = oneshare.split()
 self.failUnlessEqual(fields[0], "MDMF")
-self.failUnlessEqual(fields[1], storage_index)
+self.failUnlessEqual(fields[1].encode("ascii"), storage_index)
 self.failUnlessEqual(fields[2], "3/10")
 self.failUnlessEqual(fields[3], "%d" % len(self.data))
 self.failUnless(fields[4].startswith("#1:"), fields[3])
@@ -144,8 +148,8 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
 # Now update. The sequence number in both cases should be 1 in
 # both cases.
 def _do_update(ignored):
-    new_data = MutableData("foo bar baz" * 100000)
-    new_small_data = MutableData("foo bar baz" * 10)
+    new_data = MutableData(b"foo bar baz" * 100000)
+    new_small_data = MutableData(b"foo bar baz" * 10)
     d1 = self.mdmf_node.overwrite(new_data)
     d2 = self.sdmf_node.overwrite(new_small_data)
     dl = gatherResults([d1, d2])
@@ -221,38 +225,38 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
 def test_toplevel_overwrite(self):
-    new_data = MutableData("foo bar baz" * 100000)
-    new_small_data = MutableData("foo bar baz" * 10)
+    new_data = MutableData(b"foo bar baz" * 100000)
+    new_small_data = MutableData(b"foo bar baz" * 10)
     d = self.do_upload()
     d.addCallback(lambda ign: self.mdmf_node.overwrite(new_data))
     d.addCallback(lambda ignored:
                   self.mdmf_node.download_best_version())
     d.addCallback(lambda data:
-                  self.failUnlessEqual(data, "foo bar baz" * 100000))
+                  self.failUnlessEqual(data, b"foo bar baz" * 100000))
     d.addCallback(lambda ignored:
                   self.sdmf_node.overwrite(new_small_data))
     d.addCallback(lambda ignored:
                   self.sdmf_node.download_best_version())
     d.addCallback(lambda data:
-                  self.failUnlessEqual(data, "foo bar baz" * 10))
+                  self.failUnlessEqual(data, b"foo bar baz" * 10))
     return d

 def test_toplevel_modify(self):
     d = self.do_upload()
     def modifier(old_contents, servermap, first_time):
-        return old_contents + "modified"
+        return old_contents + b"modified"
     d.addCallback(lambda ign: self.mdmf_node.modify(modifier))
     d.addCallback(lambda ignored:
                   self.mdmf_node.download_best_version())
     d.addCallback(lambda data:
-                  self.failUnlessIn("modified", data))
+                  self.failUnlessIn(b"modified", data))
     d.addCallback(lambda ignored:
                   self.sdmf_node.modify(modifier))
     d.addCallback(lambda ignored:
                   self.sdmf_node.download_best_version())
     d.addCallback(lambda data:
-                  self.failUnlessIn("modified", data))
+                  self.failUnlessIn(b"modified", data))
     return d
@@ -262,18 +266,18 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
 # test to see that the best recoverable version is that.
 d = self.do_upload()
 def modifier(old_contents, servermap, first_time):
-    return old_contents + "modified"
+    return old_contents + b"modified"
 d.addCallback(lambda ign: self.mdmf_node.modify(modifier))
 d.addCallback(lambda ignored:
               self.mdmf_node.download_best_version())
 d.addCallback(lambda data:
-              self.failUnlessIn("modified", data))
+              self.failUnlessIn(b"modified", data))
 d.addCallback(lambda ignored:
               self.sdmf_node.modify(modifier))
 d.addCallback(lambda ignored:
               self.sdmf_node.download_best_version())
 d.addCallback(lambda data:
-              self.failUnlessIn("modified", data))
+              self.failUnlessIn(b"modified", data))
 return d
@@ -337,10 +341,10 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
 def _read_data(version):
     c = consumer.MemoryConsumer()
     d2 = defer.succeed(None)
-    for i in xrange(0, len(expected), step):
+    for i in range(0, len(expected), step):
         d2.addCallback(lambda ignored, i=i: version.read(c, i, step))
     d2.addCallback(lambda ignored:
-                   self.failUnlessEqual(expected, "".join(c.chunks)))
+                   self.failUnlessEqual(expected, b"".join(c.chunks)))
     return d2
 d.addCallback(_read_data)
 return d
@@ -352,7 +356,7 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
     expected_range = expected[offset:]
 else:
     expected_range = expected[offset:offset+length]
-d.addCallback(lambda ignored: "".join(c.chunks))
+d.addCallback(lambda ignored: b"".join(c.chunks))
 def _check(results):
     if results != expected_range:
         print("read([%d]+%s) got %d bytes, not %d" % \
@@ -365,7 +369,7 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
 return d

 def test_partial_read_mdmf_0(self):
-    data = ""
+    data = b""
     d = self.do_upload_mdmf(data=data)
     modes = [("all1", 0,0),
              ("all2", 0,None),
@@ -388,7 +392,7 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
 return d

 def test_partial_read_sdmf_0(self):
-    data = ""
+    data = b""
     modes = [("all1", 0,0),
              ("all2", 0,None),
             ]
@@ -397,7 +401,7 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
 return d

 def test_partial_read_sdmf_2(self):
-    data = "hi"
+    data = b"hi"
     modes = [("one_byte", 0, 1),
              ("last_byte", 1, 1),
              ("last_byte2", 1, None),
@@ -422,7 +426,7 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
 return d

 def test_partial_read_sdmf_100(self):
-    data = "test data "*10
+    data = b"test data "*10
     modes = [("start_at_middle", 50, 50),
              ("start_at_middle2", 50, None),
              ("zero_length_at_start", 0, 0),
@@ -443,12 +447,12 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
 d2 = defer.succeed(None)
 d2.addCallback(lambda ignored: version.read(c))
 d2.addCallback(lambda ignored:
-               self.failUnlessEqual(expected, "".join(c.chunks)))
+               self.failUnlessEqual(expected, b"".join(c.chunks)))
 d2.addCallback(lambda ignored: version.read(c2, offset=0,
                                             size=len(expected)))
 d2.addCallback(lambda ignored:
-               self.failUnlessEqual(expected, "".join(c2.chunks)))
+               self.failUnlessEqual(expected, b"".join(c2.chunks)))
 return d2
 d.addCallback(_read_data)
 d.addCallback(lambda ignored: node.download_best_version())
@@ -467,5 +471,5 @@ class Version(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin, \
 def test_read_and_download_sdmf_zero_length(self):
     d = self.do_upload_empty_sdmf()
-    d.addCallback(self._test_read_and_download, "")
+    d.addCallback(self._test_read_and_download, b"")
     return d


@@ -1,4 +1,15 @@
-from future.utils import bchr
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+from future.utils import PY2, bchr
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
 from past.builtins import long
 from io import BytesIO
@@ -130,7 +141,7 @@ class FakeStorageServer(object):
                    tw_vectors, read_vector):
     # always-pass: parrot the test vectors back to them.
     readv = {}
-    for shnum, (testv, writev, new_length) in tw_vectors.items():
+    for shnum, (testv, writev, new_length) in list(tw_vectors.items()):
         for (offset, length, op, specimen) in testv:
             assert op in (b"le", b"eq", b"ge")
         # TODO: this isn't right, the read is controlled by read_vector,


@@ -64,6 +64,7 @@ PORTED_MODULES = [
     "allmydata.storage.server",
     "allmydata.storage.shares",
     "allmydata.test.no_network",
+    "allmydata.test.mutable.util",
     "allmydata.uri",
     "allmydata.util._python3",
     "allmydata.util.abbreviate",
@@ -105,6 +106,11 @@ PORTED_TEST_MODULES = [
     "allmydata.test.mutable.test_multiple_encodings",
     "allmydata.test.mutable.test_multiple_versions",
     "allmydata.test.mutable.test_problems",
+    "allmydata.test.mutable.test_repair",
+    "allmydata.test.mutable.test_roundtrip",
+    "allmydata.test.mutable.test_servermap",
+    "allmydata.test.mutable.test_update",
+    "allmydata.test.mutable.test_version",
     "allmydata.test.test_abbreviate",
     "allmydata.test.test_base32",
    "allmydata.test.test_base62",