Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git
Merge remote-tracking branch 'origin/master' into 3459.test-checker-python-3

Commit: b658a66e7f
New files:
    newsfragments/3467.minor (empty)
    newsfragments/3473.minor (empty)
@@ -147,9 +147,9 @@ class MutableFileNode(object):
 
     def _get_initial_contents(self, contents):
         if contents is None:
-            return MutableData("")
+            return MutableData(b"")
 
-        if isinstance(contents, str):
+        if isinstance(contents, bytes):
             return MutableData(contents)
 
         if IMutableUploadable.providedBy(contents):
@@ -884,9 +884,9 @@ class MutableFileVersion(object):
         d = self._try_to_download_data()
         def _apply(old_contents):
             new_contents = modifier(old_contents, self._servermap, first_time)
-            precondition((isinstance(new_contents, str) or
+            precondition((isinstance(new_contents, bytes) or
                           new_contents is None),
-                         "Modifier function must return a string "
+                         "Modifier function must return bytes "
                          "or None")
 
             if new_contents is None or new_contents == old_contents:
@@ -960,7 +960,7 @@ class MutableFileVersion(object):
         c = consumer.MemoryConsumer()
         # modify will almost certainly write, so we need the privkey.
         d = self._read(c, fetch_privkey=True)
-        d.addCallback(lambda mc: "".join(mc.chunks))
+        d.addCallback(lambda mc: b"".join(mc.chunks))
         return d
 
 
@@ -1076,7 +1076,7 @@ class MutableFileVersion(object):
             start = offset
             rest = offset + data.get_size()
             new = old[:start]
-            new += "".join(data.read(data.get_size()))
+            new += b"".join(data.read(data.get_size()))
             new += old[rest:]
             return new
         return self._modify(m, None)
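Note: the hunks above follow the porting convention used throughout this branch: mutable-file contents are treated as bytes, so empty defaults become b"" and the type check moves from str to bytes. A minimal standalone sketch (not Tahoe-LAFS code) of why the check had to change:

    # On Python 3, text (str) and raw bytes are distinct types, so contents
    # handled as share/wire data only satisfy an isinstance(..., bytes) check.
    contents = b"raw share data"
    assert isinstance(contents, bytes)
    assert not isinstance(contents, str)        # "..." would have matched str on Python 2
    assert b"".join([b"ab", b"cd"]) == b"abcd"  # joining byte chunks needs a bytes separator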
@@ -305,7 +305,7 @@ class Publish(object):
         # Our update process fetched these for us. We need to update
         # them in place as publishing happens.
         self.blockhashes = {} # (shnum, [blochashes])
-        for (i, bht) in blockhashes.items():
+        for (i, bht) in list(blockhashes.items()):
             # We need to extract the leaves from our old hash tree.
             old_segcount = mathutil.div_ceil(version[4],
                                              version[3])
@@ -794,7 +794,7 @@ class Publish(object):
     def push_blockhashes(self):
         self.sharehash_leaves = [None] * len(self.blockhashes)
         self._status.set_status("Building and pushing block hash tree")
-        for shnum, blockhashes in self.blockhashes.items():
+        for shnum, blockhashes in list(self.blockhashes.items()):
             t = hashtree.HashTree(blockhashes)
             self.blockhashes[shnum] = list(t)
             # set the leaf for future use.
@@ -867,7 +867,7 @@ class Publish(object):
         ds = []
         verification_key = rsa.der_string_from_verifying_key(self._pubkey)
 
-        for (shnum, writers) in self.writers.copy().items():
+        for (shnum, writers) in list(self.writers.copy().items()):
             for writer in writers:
                 writer.put_verification_key(verification_key)
                 self.num_outstanding += 1
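Note: wrapping .items() in list() is the usual defensive pattern when porting loops that might mutate the dictionary, since on Python 3 items() returns a live view rather than a list. A small illustrative sketch (hypothetical dict, not Publish state):

    blockhashes = {0: ["h0"], 1: ["h1"]}

    try:
        for shnum, leaves in blockhashes.items():
            blockhashes[shnum + 10] = leaves    # resizing the dict while iterating its view
    except RuntimeError:
        pass  # Python 3: "dictionary changed size during iteration"

    for shnum, leaves in list(blockhashes.items()):  # snapshot first, then mutate freely
        blockhashes.pop(shnum)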
@@ -1,17 +1,29 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 from twisted.trial import unittest
 from allmydata.mutable.publish import MutableData
 
 class DataHandle(unittest.TestCase):
     def setUp(self):
-        self.test_data = "Test Data" * 50000
+        self.test_data = b"Test Data" * 50000
         self.uploadable = MutableData(self.test_data)
 
 
     def test_datahandle_read(self):
         chunk_size = 10
-        for i in xrange(0, len(self.test_data), chunk_size):
+        for i in range(0, len(self.test_data), chunk_size):
             data = self.uploadable.read(chunk_size)
-            data = "".join(data)
+            data = b"".join(data)
             start = i
             end = i + chunk_size
             self.failUnlessEqual(data, self.test_data[start:end])
@@ -28,7 +40,7 @@ class DataHandle(unittest.TestCase):
         # disturbing the location of the seek pointer.
         chunk_size = 100
         data = self.uploadable.read(chunk_size)
-        self.failUnlessEqual("".join(data), self.test_data[:chunk_size])
+        self.failUnlessEqual(b"".join(data), self.test_data[:chunk_size])
 
         # Now get the size.
         size = self.uploadable.get_size()
@@ -38,4 +50,4 @@ class DataHandle(unittest.TestCase):
         more_data = self.uploadable.read(chunk_size)
         start = chunk_size
         end = chunk_size * 2
-        self.failUnlessEqual("".join(more_data), self.test_data[start:end])
+        self.failUnlessEqual(b"".join(more_data), self.test_data[start:end])
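Note: besides the str-to-bytes literals, the loops switch from xrange() to range(); xrange does not exist on Python 3, where range() is already lazy. A tiny standard-library-only sketch of the iteration this test relies on:

    test_data = b"Test Data" * 50000
    chunk_size = 10
    offsets = range(0, len(test_data), chunk_size)  # lazy on Python 3, no list is built
    assert offsets[0] == 0 and offsets[1] == chunk_size
    assert len(offsets) == len(test_data) // chunk_size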
@@ -1,3 +1,15 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 from twisted.trial import unittest
 from .util import FakeStorage, make_nodemaker
 
@@ -10,7 +22,7 @@ class DifferentEncoding(unittest.TestCase):
         # create a file with 3-of-20, then modify it with a client configured
         # to do 3-of-10. #1510 tracks a failure here
         self.nodemaker.default_encoding_parameters["n"] = 20
-        d = self.nodemaker.create_mutable_file("old contents")
+        d = self.nodemaker.create_mutable_file(b"old contents")
         def _created(n):
             filecap = n.get_cap().to_string()
             del n # we want a new object, not the cached one
@@ -19,6 +31,6 @@ class DifferentEncoding(unittest.TestCase):
             return n2
         d.addCallback(_created)
         def modifier(old_contents, servermap, first_time):
-            return "new contents"
+            return b"new contents"
         d.addCallback(lambda n: n.modify(modifier))
         return d
@@ -1,21 +1,33 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 import os
-from six.moves import cStringIO as StringIO
+from io import BytesIO
 from twisted.trial import unittest
 from allmydata.mutable.publish import MutableFileHandle
 
 class FileHandle(unittest.TestCase):
     def setUp(self):
-        self.test_data = "Test Data" * 50000
-        self.sio = StringIO(self.test_data)
+        self.test_data = b"Test Data" * 50000
+        self.sio = BytesIO(self.test_data)
         self.uploadable = MutableFileHandle(self.sio)
 
 
     def test_filehandle_read(self):
         self.basedir = "mutable/FileHandle/test_filehandle_read"
         chunk_size = 10
-        for i in xrange(0, len(self.test_data), chunk_size):
+        for i in range(0, len(self.test_data), chunk_size):
             data = self.uploadable.read(chunk_size)
-            data = "".join(data)
+            data = b"".join(data)
             start = i
             end = i + chunk_size
             self.failUnlessEqual(data, self.test_data[start:end])
@@ -33,7 +45,7 @@ class FileHandle(unittest.TestCase):
         # disturbing the location of the seek pointer.
         chunk_size = 100
         data = self.uploadable.read(chunk_size)
-        self.failUnlessEqual("".join(data), self.test_data[:chunk_size])
+        self.failUnlessEqual(b"".join(data), self.test_data[:chunk_size])
 
         # Now get the size.
         size = self.uploadable.get_size()
@@ -43,26 +55,26 @@ class FileHandle(unittest.TestCase):
         more_data = self.uploadable.read(chunk_size)
         start = chunk_size
         end = chunk_size * 2
-        self.failUnlessEqual("".join(more_data), self.test_data[start:end])
+        self.failUnlessEqual(b"".join(more_data), self.test_data[start:end])
 
 
     def test_filehandle_file(self):
         # Make sure that the MutableFileHandle works on a file as well
-        # as a StringIO object, since in some cases it will be asked to
+        # as a BytesIO object, since in some cases it will be asked to
         # deal with files.
         self.basedir = self.mktemp()
         # necessary? What am I doing wrong here?
         os.mkdir(self.basedir)
         f_path = os.path.join(self.basedir, "test_file")
-        f = open(f_path, "w")
+        f = open(f_path, "wb")
         f.write(self.test_data)
         f.close()
-        f = open(f_path, "r")
+        f = open(f_path, "rb")
 
         uploadable = MutableFileHandle(f)
 
         data = uploadable.read(len(self.test_data))
-        self.failUnlessEqual("".join(data), self.test_data)
+        self.failUnlessEqual(b"".join(data), self.test_data)
         size = uploadable.get_size()
         self.failUnlessEqual(size, len(self.test_data))
 
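Note: two distinct Python 3 fixes appear above: byte payloads move from cStringIO to io.BytesIO, and the temporary file is opened in binary mode so read() yields bytes. A minimal standard-library sketch of both (hypothetical path, not the test's basedir):

    import os, tempfile
    from io import BytesIO

    payload = b"Test Data" * 3
    assert BytesIO(payload).read() == payload   # BytesIO round-trips bytes unchanged

    path = os.path.join(tempfile.mkdtemp(), "test_file")
    with open(path, "wb") as f:                 # "w" would demand str on Python 3
        f.write(payload)
    with open(path, "rb") as f:                 # "r" would decode to str
        assert f.read() == payload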
@@ -1,4 +1,6 @@
-from six.moves import cStringIO as StringIO
+from past.builtins import long
+
+from io import BytesIO
 import attr
 from twisted.internet import defer, reactor
 from foolscap.api import eventually, fireEventually
@@ -75,8 +77,8 @@ class FakeStorage(object):
         if peerid not in self._peers:
             self._peers[peerid] = {}
         shares = self._peers[peerid]
-        f = StringIO()
-        f.write(shares.get(shnum, ""))
+        f = BytesIO()
+        f.write(shares.get(shnum, b""))
         f.seek(offset)
         f.write(data)
         shares[shnum] = f.getvalue()
@@ -129,7 +131,7 @@ class FakeStorageServer(object):
         readv = {}
         for shnum, (testv, writev, new_length) in tw_vectors.items():
             for (offset, length, op, specimen) in testv:
-                assert op in ("le", "eq", "ge")
+                assert op in (b"le", b"eq", b"ge")
             # TODO: this isn't right, the read is controlled by read_vector,
             # not by testv
             readv[shnum] = [ specimen
@@ -222,10 +224,10 @@ def make_peer(s, i):
 
     :rtype: ``Peer``
     """
-    peerid = base32.b2a(tagged_hash("peerid", "%d" % i)[:20])
+    peerid = base32.b2a(tagged_hash(b"peerid", b"%d" % i)[:20])
     fss = FakeStorageServer(peerid, s)
     ann = {
-        "anonymous-storage-FURL": "pb://%s@nowhere/fake" % (peerid,),
+        "anonymous-storage-FURL": b"pb://%s@nowhere/fake" % (peerid,),
         "permutation-seed-base32": peerid,
     }
     return Peer(peerid=peerid, storage_server=fss, announcement=ann)
@@ -297,7 +299,7 @@ def make_nodemaker_with_storage_broker(storage_broker, keysize):
 
     :param StorageFarmBroker peers: The storage broker to use.
     """
-    sh = client.SecretHolder("lease secret", "convergence secret")
+    sh = client.SecretHolder(b"lease secret", b"convergence secret")
     keygen = client.KeyGenerator()
    if keysize:
        keygen.set_default_keysize(keysize)
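Note: the ported make_peer() interpolates directly into bytes (b"%d" % i, b"pb://%s@nowhere/fake" % peerid); %-formatting of bytes was added in Python 3.5 (PEP 461), which this port assumes. A short sketch with a made-up peer id:

    i = 7
    peerid = b"fakepeeridbase32"
    assert b"%d" % i == b"7"
    assert b"pb://%s@nowhere/fake" % (peerid,) == b"pb://fakepeeridbase32@nowhere/fake"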
@@ -11,7 +11,7 @@ from __future__ import unicode_literals
 
 from future.utils import PY2
 if PY2:
-    from future.builtins import filter, map, zip, ascii, chr, dict, hex, input, next, oct, open, pow, round, super, bytes, int, list, object, range, str, max, min # noqa: F401
+    from future.builtins import filter, map, zip, ascii, chr, dict, hex, input, next, oct, open, pow, round, super, bytes, list, object, range, str, max, min # noqa: F401
 
 import os
 from twisted.trial import unittest
@@ -9,6 +9,7 @@ from __future__ import unicode_literals
 
 from future.utils import PY2
 if PY2:
+    # open is not here because we want to use native strings on Py2
     from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, pow, round, super, bytes, dict, list, object, range, str, max, min # noqa: F401
 import six
 import os, time, sys
@@ -90,6 +90,9 @@ PORTED_MODULES = [
 ]
 
 PORTED_TEST_MODULES = [
+    "allmydata.test.mutable.test_datahandle",
+    "allmydata.test.mutable.test_different_encoding",
+    "allmydata.test.mutable.test_filehandle",
     "allmydata.test.test_abbreviate",
     "allmydata.test.test_base32",
     "allmydata.test.test_base62",
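Note: adding the three new test modules to PORTED_TEST_MODULES marks them as part of the Python 3 test suite. A hypothetical sanity check (requires an installed allmydata; not part of the diff) that the listed modules import cleanly under Python 3:

    import importlib

    for name in [
        "allmydata.test.mutable.test_datahandle",
        "allmydata.test.mutable.test_different_encoding",
        "allmydata.test.mutable.test_filehandle",
    ]:
        importlib.import_module(name)  # raises ImportError if the port is incomplete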