Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2024-12-23 23:02:25 +00:00)
Merge branch '3475.mutable-tests-part-2-python-3' into 3496.mutable-tests-python-3-part-3
commit 6017b11910
newsfragments/3475.minor (empty newsfragment, 0 lines changed)
@@ -187,7 +187,7 @@ class MutableChecker(object):
         if self.bad_shares:
             report.append("Corrupt Shares:")
             summary.append("Corrupt Shares:")
-        for (server, shnum, f) in sorted(self.bad_shares):
+        for (server, shnum, f) in self.bad_shares:
             serverid = server.get_serverid()
             locator = (server, self._storage_index, shnum)
             corrupt_share_locators.append(locator)
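Note: the `sorted()` call is dropped here (and again in `Publish.log_goal` below) presumably because the items being sorted contain server objects and Failures; Python 2 fell back to an arbitrary but stable ordering for unlike objects, while Python 3 refuses to order them at all. A minimal sketch of the failure mode (the class name is made up for the sketch, not taken from the patch):

    class StorageServer(object):              # stand-in for the real server object
        pass

    bad_shares = [(StorageServer(), 1, Exception("corrupt")),
                  (StorageServer(), 0, Exception("corrupt"))]

    try:
        sorted(bad_shares)                    # worked (arbitrarily) on Python 2
    except TypeError as e:
        print("Python 3 refuses:", e)         # '<' not supported between instances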
@@ -901,7 +901,7 @@ class Publish(object):

    def log_goal(self, goal, message=""):
        logmsg = [message]
-        for (shnum, server) in sorted([(s,p) for (p,s) in goal]):
+        for (shnum, server) in [(s,p) for (p,s) in goal]:
            logmsg.append("sh%d to [%s]" % (shnum, server.get_name()))
        self.log("current goal: %s" % (", ".join(logmsg)), level=log.NOISY)
        self.log("we are planning to push new seqnum=#%d" % self._new_seqnum,
@@ -1,3 +1,15 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 from twisted.trial import unittest
 from foolscap.api import flushEventualQueue
 from allmydata.monitor import Monitor
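Note: this header is the compatibility boilerplate used across the Tahoe-LAFS Python 3 port: the `__future__` imports give Python 3 semantics for division, printing and string literals, and on Python 2 the `future.builtins` import swaps in backported Python-3-style builtins so the module behaves the same under both interpreters. A tiny sketch of the effect (illustrative, not part of the patch):

    from __future__ import division, print_function, unicode_literals
    from future.utils import PY2
    if PY2:
        from future.builtins import str, bytes, dict, range  # noqa: F401  (Python-3-style builtins)

    print(1 / 2)          # 0.5 under both interpreters (true division)
    print(repr("text"))   # a text (unicode) literal under both, thanks to unicode_literals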
@@ -22,7 +34,7 @@ class Checker(unittest.TestCase, CheckerMixin, PublishMixin):
         return d

     def test_check_no_shares(self):
-        for shares in self._storage._peers.values():
+        for shares in list(self._storage._peers.values()):
             shares.clear()
         d = self._fn.check(Monitor())
         d.addCallback(self.check_bad, "test_check_no_shares")
@@ -31,7 +43,7 @@ class Checker(unittest.TestCase, CheckerMixin, PublishMixin):
     def test_check_mdmf_no_shares(self):
         d = self.publish_mdmf()
         def _then(ignored):
-            for share in self._storage._peers.values():
+            for share in list(self._storage._peers.values()):
                 share.clear()
         d.addCallback(_then)
         d.addCallback(lambda ignored:
@@ -40,8 +52,8 @@ class Checker(unittest.TestCase, CheckerMixin, PublishMixin):
         return d

     def test_check_not_enough_shares(self):
-        for shares in self._storage._peers.values():
-            for shnum in shares.keys():
+        for shares in list(self._storage._peers.values()):
+            for shnum in list(shares.keys()):
                 if shnum > 0:
                     del shares[shnum]
         d = self._fn.check(Monitor())
@@ -51,8 +63,8 @@ class Checker(unittest.TestCase, CheckerMixin, PublishMixin):
     def test_check_mdmf_not_enough_shares(self):
         d = self.publish_mdmf()
         def _then(ignored):
-            for shares in self._storage._peers.values():
-                for shnum in shares.keys():
+            for shares in list(self._storage._peers.values()):
+                for shnum in list(shares.keys()):
                     if shnum > 0:
                         del shares[shnum]
         d.addCallback(_then)
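Note: the `list(...)` wrappers in these tests are there because Python 3's `.keys()` and `.values()` return live views; deleting entries while iterating (as `del shares[shnum]` does here) raises `RuntimeError: dictionary changed size during iteration`. Taking a `list()` snapshot first is safe on both Pythons. A minimal sketch (illustrative):

    shares = {0: "sh0", 1: "sh1", 2: "sh2"}

    for shnum in list(shares.keys()):   # snapshot; a bare shares.keys() would
        if shnum > 0:                   # blow up on Python 3 once we delete
            del shares[shnum]

    print(shares)                       # {0: 'sh0'}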
@@ -83,7 +95,7 @@ class Checker(unittest.TestCase, CheckerMixin, PublishMixin):
         # On 8 of the shares, corrupt the beginning of the share data.
         # The signature check during the servermap update won't catch this.
         d.addCallback(lambda ignored:
-            corrupt(None, self._storage, "share_data", range(8)))
+            corrupt(None, self._storage, "share_data", list(range(8))))
         # On 2 of the shares, corrupt the end of the share data.
         # The signature check during the servermap update won't catch
         # this either, and the retrieval process will have to process
@@ -242,14 +254,14 @@ class Checker(unittest.TestCase, CheckerMixin, PublishMixin):
         return d

     def test_verify_sdmf_empty(self):
-        d = self.publish_sdmf("")
+        d = self.publish_sdmf(b"")
         d.addCallback(lambda ignored: self._fn.check(Monitor(), verify=True))
         d.addCallback(self.check_good, "test_verify_sdmf")
         d.addCallback(flushEventualQueue)
         return d

     def test_verify_mdmf_empty(self):
-        d = self.publish_mdmf("")
+        d = self.publish_mdmf(b"")
         d.addCallback(lambda ignored: self._fn.check(Monitor(), verify=True))
         d.addCallback(self.check_good, "test_verify_mdmf")
         d.addCallback(flushEventualQueue)
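Note: most of the remaining churn in this diff is a single pattern: with `unicode_literals` in effect a bare "..." literal is text, but mutable-file contents, caps and write keys are byte strings, so the tests have to publish and compare `b"..."` values. On Python 3 the two types never compare equal, which is what the port guards against:

    downloaded = b"contents 1"           # what download_best_version() yields
    print(downloaded == "contents 1")    # False on Python 3: bytes != text
    print(downloaded == b"contents 1")   # True, so expected values must be bytes too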
@@ -1,3 +1,16 @@
+"""
+Ported to Python 3.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 from twisted.trial import unittest
 from allmydata.mutable.common import NeedMoreDataError, UncoordinatedWriteError

@@ -1,3 +1,15 @@
+"""
+Ported to Python 3.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from future.utils import PY2
+if PY2:
+    from future.builtins import filter, map, zip, ascii, chr, hex, input, next, oct, open, pow, round, super, bytes, dict, list, object, range, str, max, min  # noqa: F401
+
 from six.moves import cStringIO as StringIO
 from twisted.internet import defer, reactor
 from twisted.trial import unittest
@@ -73,11 +85,11 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
             return n
         d.addCallback(_created)
         d.addCallback(lambda n:
-            n.overwrite(MutableData("Contents" * 50000)))
+            n.overwrite(MutableData(b"Contents" * 50000)))
         d.addCallback(lambda ignored:
             self._node.download_best_version())
         d.addCallback(lambda contents:
-            self.failUnlessEqual(contents, "Contents" * 50000))
+            self.failUnlessEqual(contents, b"Contents" * 50000))
         return d

     def test_max_shares(self):
@@ -95,13 +107,13 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
         d.addCallback(_created)
         # Now we upload some contents
         d.addCallback(lambda n:
-            n.overwrite(MutableData("contents" * 50000)))
+            n.overwrite(MutableData(b"contents" * 50000)))
         # ...then download contents
         d.addCallback(lambda ignored:
             self._node.download_best_version())
         # ...and check to make sure everything went okay.
         d.addCallback(lambda contents:
-            self.failUnlessEqual("contents" * 50000, contents))
+            self.failUnlessEqual(b"contents" * 50000, contents))
         return d

     def test_max_shares_mdmf(self):
@@ -119,11 +131,11 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
             return n
         d.addCallback(_created)
         d.addCallback(lambda n:
-            n.overwrite(MutableData("contents" * 50000)))
+            n.overwrite(MutableData(b"contents" * 50000)))
         d.addCallback(lambda ignored:
             self._node.download_best_version())
         d.addCallback(lambda contents:
-            self.failUnlessEqual(contents, "contents" * 50000))
+            self.failUnlessEqual(contents, b"contents" * 50000))
         return d

     def test_mdmf_filenode_cap(self):
@@ -148,7 +160,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
         def _created(n):
             self.failUnless(isinstance(n, MutableFileNode))
             s = n.get_uri()
-            self.failUnless(s.startswith("URI:MDMF"))
+            self.failUnless(s.startswith(b"URI:MDMF"))
             n2 = self.nodemaker.create_from_cap(s)
             self.failUnless(isinstance(n2, MutableFileNode))
             self.failUnlessEqual(n.get_storage_index(), n2.get_storage_index())
@@ -216,33 +228,33 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
             d.addCallback(lambda smap: smap.dump(StringIO()))
             d.addCallback(lambda sio:
                           self.failUnless("3-of-10" in sio.getvalue()))
-            d.addCallback(lambda res: n.overwrite(MutableData("contents 1")))
+            d.addCallback(lambda res: n.overwrite(MutableData(b"contents 1")))
             d.addCallback(lambda res: self.failUnlessIdentical(res, None))
             d.addCallback(lambda res: n.download_best_version())
-            d.addCallback(lambda res: self.failUnlessEqual(res, "contents 1"))
+            d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 1"))
             d.addCallback(lambda res: n.get_size_of_best_version())
             d.addCallback(lambda size:
-                          self.failUnlessEqual(size, len("contents 1")))
-            d.addCallback(lambda res: n.overwrite(MutableData("contents 2")))
+                          self.failUnlessEqual(size, len(b"contents 1")))
+            d.addCallback(lambda res: n.overwrite(MutableData(b"contents 2")))
             d.addCallback(lambda res: n.download_best_version())
-            d.addCallback(lambda res: self.failUnlessEqual(res, "contents 2"))
+            d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 2"))
             d.addCallback(lambda res: n.get_servermap(MODE_WRITE))
-            d.addCallback(lambda smap: n.upload(MutableData("contents 3"), smap))
+            d.addCallback(lambda smap: n.upload(MutableData(b"contents 3"), smap))
             d.addCallback(lambda res: n.download_best_version())
-            d.addCallback(lambda res: self.failUnlessEqual(res, "contents 3"))
+            d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 3"))
             d.addCallback(lambda res: n.get_servermap(MODE_ANYTHING))
             d.addCallback(lambda smap:
                           n.download_version(smap,
                                              smap.best_recoverable_version()))
-            d.addCallback(lambda res: self.failUnlessEqual(res, "contents 3"))
+            d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 3"))
             # test a file that is large enough to overcome the
             # mapupdate-to-retrieve data caching (i.e. make the shares larger
             # than the default readsize, which is 2000 bytes). A 15kB file
             # will have 5kB shares.
-            d.addCallback(lambda res: n.overwrite(MutableData("large size file" * 1000)))
+            d.addCallback(lambda res: n.overwrite(MutableData(b"large size file" * 1000)))
             d.addCallback(lambda res: n.download_best_version())
             d.addCallback(lambda res:
-                          self.failUnlessEqual(res, "large size file" * 1000))
+                          self.failUnlessEqual(res, b"large size file" * 1000))
             return d
         d.addCallback(_created)
         return d
@@ -261,7 +273,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
             # Now overwrite the contents with some new contents. We want
             # to make them big enough to force the file to be uploaded
             # in more than one segment.
-            big_contents = "contents1" * 100000 # about 900 KiB
+            big_contents = b"contents1" * 100000 # about 900 KiB
             big_contents_uploadable = MutableData(big_contents)
             d.addCallback(lambda ignored:
                 n.overwrite(big_contents_uploadable))
@@ -273,7 +285,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
             # before, they need to be big enough to force multiple
             # segments, so that we make the downloader deal with
             # multiple segments.
-            bigger_contents = "contents2" * 1000000 # about 9MiB
+            bigger_contents = b"contents2" * 1000000 # about 9MiB
             bigger_contents_uploadable = MutableData(bigger_contents)
             d.addCallback(lambda ignored:
                 n.overwrite(bigger_contents_uploadable))
@@ -289,7 +301,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
     def test_retrieve_producer_mdmf(self):
         # We should make sure that the retriever is able to pause and stop
         # correctly.
-        data = "contents1" * 100000
+        data = b"contents1" * 100000
         d = self.nodemaker.create_mutable_file(MutableData(data),
                                                version=MDMF_VERSION)
         d.addCallback(lambda node: node.get_best_mutable_version())
@@ -300,7 +312,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
     # after-the-first-write() trick to pause or stop the download.
     # Disabled until we find a better approach.
     def OFF_test_retrieve_producer_sdmf(self):
-        data = "contents1" * 100000
+        data = b"contents1" * 100000
         d = self.nodemaker.create_mutable_file(MutableData(data),
                                                version=SDMF_VERSION)
         d.addCallback(lambda node: node.get_best_mutable_version())
@@ -347,15 +359,15 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
         def _created(node):
             self.uri = node.get_uri()
             # also confirm that the cap has no extension fields
-            pieces = self.uri.split(":")
+            pieces = self.uri.split(b":")
             self.failUnlessEqual(len(pieces), 4)

-            return node.overwrite(MutableData("contents1" * 100000))
+            return node.overwrite(MutableData(b"contents1" * 100000))
         def _then(ignored):
             node = self.nodemaker.create_from_cap(self.uri)
             return node.download_best_version()
         def _downloaded(data):
-            self.failUnlessEqual(data, "contents1" * 100000)
+            self.failUnlessEqual(data, b"contents1" * 100000)
         d.addCallback(_created)
         d.addCallback(_then)
         d.addCallback(_downloaded)
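Note: the cap string returned by `node.get_uri()` is a byte string in this port (see the `b"URI:MDMF"` check above), so splitting it needs a bytes separator; mixing a text `":"` into `bytes.split()` is a TypeError on Python 3. Sketch (the cap value here is a made-up placeholder, not a real cap):

    cap = b"URI:SSK:writekey:fingerprint"    # illustrative placeholder
    pieces = cap.split(b":")                 # a text ":" separator would raise TypeError on Python 3
    print(len(pieces))                       # 4, which is what the test asserts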
@@ -368,7 +380,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
        be published. Otherwise, we introduce undesirable semantics that are a
        regression from SDMF.
        """
-        upload = MutableData("MDMF" * 100000) # about 400 KiB
+        upload = MutableData(b"MDMF" * 100000) # about 400 KiB
         d = self.nodemaker.create_mutable_file(upload,
                                                version=MDMF_VERSION)
         def _check_server_write_counts(ignored):
@@ -381,22 +393,22 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):


     def test_create_with_initial_contents(self):
-        upload1 = MutableData("contents 1")
+        upload1 = MutableData(b"contents 1")
         d = self.nodemaker.create_mutable_file(upload1)
         def _created(n):
             d = n.download_best_version()
-            d.addCallback(lambda res: self.failUnlessEqual(res, "contents 1"))
-            upload2 = MutableData("contents 2")
+            d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 1"))
+            upload2 = MutableData(b"contents 2")
             d.addCallback(lambda res: n.overwrite(upload2))
             d.addCallback(lambda res: n.download_best_version())
-            d.addCallback(lambda res: self.failUnlessEqual(res, "contents 2"))
+            d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 2"))
             return d
         d.addCallback(_created)
         return d


     def test_create_mdmf_with_initial_contents(self):
-        initial_contents = "foobarbaz" * 131072 # 900KiB
+        initial_contents = b"foobarbaz" * 131072 # 900KiB
         initial_contents_uploadable = MutableData(initial_contents)
         d = self.nodemaker.create_mutable_file(initial_contents_uploadable,
                                                version=MDMF_VERSION)
@@ -404,24 +416,24 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
             d = n.download_best_version()
             d.addCallback(lambda data:
                 self.failUnlessEqual(data, initial_contents))
-            uploadable2 = MutableData(initial_contents + "foobarbaz")
+            uploadable2 = MutableData(initial_contents + b"foobarbaz")
             d.addCallback(lambda ignored:
                 n.overwrite(uploadable2))
             d.addCallback(lambda ignored:
                 n.download_best_version())
             d.addCallback(lambda data:
                 self.failUnlessEqual(data, initial_contents +
-                                     "foobarbaz"))
+                                     b"foobarbaz"))
             return d
         d.addCallback(_created)
         return d

     def test_create_with_initial_contents_function(self):
-        data = "initial contents"
+        data = b"initial contents"
         def _make_contents(n):
             self.failUnless(isinstance(n, MutableFileNode))
             key = n.get_writekey()
-            self.failUnless(isinstance(key, str), key)
+            self.failUnless(isinstance(key, bytes), key)
             self.failUnlessEqual(len(key), 16) # AES key size
             return MutableData(data)
         d = self.nodemaker.create_mutable_file(_make_contents)
@@ -433,11 +445,11 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):


     def test_create_mdmf_with_initial_contents_function(self):
-        data = "initial contents" * 100000
+        data = b"initial contents" * 100000
         def _make_contents(n):
             self.failUnless(isinstance(n, MutableFileNode))
             key = n.get_writekey()
-            self.failUnless(isinstance(key, str), key)
+            self.failUnless(isinstance(key, bytes), key)
             self.failUnlessEqual(len(key), 16)
             return MutableData(data)
         d = self.nodemaker.create_mutable_file(_make_contents,
@@ -450,7 +462,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):


     def test_create_with_too_large_contents(self):
-        BIG = "a" * (self.OLD_MAX_SEGMENT_SIZE + 1)
+        BIG = b"a" * (self.OLD_MAX_SEGMENT_SIZE + 1)
         BIG_uploadable = MutableData(BIG)
         d = self.nodemaker.create_mutable_file(BIG_uploadable)
         def _created(n):
@@ -469,7 +481,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):

     def test_modify(self):
         def _modifier(old_contents, servermap, first_time):
-            new_contents = old_contents + "line2"
+            new_contents = old_contents + b"line2"
             return new_contents
         def _non_modifier(old_contents, servermap, first_time):
             return old_contents
@@ -478,7 +490,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
         def _error_modifier(old_contents, servermap, first_time):
             raise ValueError("oops")
         def _toobig_modifier(old_contents, servermap, first_time):
-            new_content = "b" * (self.OLD_MAX_SEGMENT_SIZE + 1)
+            new_content = b"b" * (self.OLD_MAX_SEGMENT_SIZE + 1)
             return new_content
         calls = []
         def _ucw_error_modifier(old_contents, servermap, first_time):
@@ -486,7 +498,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
             calls.append(1)
             if len(calls) <= 1:
                 raise UncoordinatedWriteError("simulated")
-            new_contents = old_contents + "line3"
+            new_contents = old_contents + b"line3"
             return new_contents
         def _ucw_error_non_modifier(old_contents, servermap, first_time):
             # simulate an UncoordinatedWriteError once, and don't actually
@@ -496,41 +508,41 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
                 raise UncoordinatedWriteError("simulated")
             return old_contents

-        initial_contents = "line1"
+        initial_contents = b"line1"
         d = self.nodemaker.create_mutable_file(MutableData(initial_contents))
         def _created(n):
             d = n.modify(_modifier)
             d.addCallback(lambda res: n.download_best_version())
-            d.addCallback(lambda res: self.failUnlessEqual(res, "line1line2"))
+            d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2"))
             d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "m"))

             d.addCallback(lambda res: n.modify(_non_modifier))
             d.addCallback(lambda res: n.download_best_version())
-            d.addCallback(lambda res: self.failUnlessEqual(res, "line1line2"))
+            d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2"))
             d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "non"))

             d.addCallback(lambda res: n.modify(_none_modifier))
             d.addCallback(lambda res: n.download_best_version())
-            d.addCallback(lambda res: self.failUnlessEqual(res, "line1line2"))
+            d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2"))
             d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "none"))

             d.addCallback(lambda res:
                           self.shouldFail(ValueError, "error_modifier", None,
                                           n.modify, _error_modifier))
             d.addCallback(lambda res: n.download_best_version())
-            d.addCallback(lambda res: self.failUnlessEqual(res, "line1line2"))
+            d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2"))
             d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "err"))


             d.addCallback(lambda res: n.download_best_version())
-            d.addCallback(lambda res: self.failUnlessEqual(res, "line1line2"))
+            d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2"))
             d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "big"))

             d.addCallback(lambda res: n.modify(_ucw_error_modifier))
             d.addCallback(lambda res: self.failUnlessEqual(len(calls), 2))
             d.addCallback(lambda res: n.download_best_version())
             d.addCallback(lambda res: self.failUnlessEqual(res,
-                                                           "line1line2line3"))
+                                                           b"line1line2line3"))
             d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 3, "ucw"))

             def _reset_ucw_error_modifier(res):
@@ -548,7 +560,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
             d.addCallback(lambda res: self.failUnlessEqual(len(calls), 2))
             d.addCallback(lambda res: n.download_best_version())
             d.addCallback(lambda res: self.failUnlessEqual(res,
-                                                           "line1line2line3"))
+                                                           b"line1line2line3"))
             d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 4, "ucw"))
             d.addCallback(lambda res: n.modify(_toobig_modifier))
             return d
@@ -558,14 +570,14 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):

     def test_modify_backoffer(self):
         def _modifier(old_contents, servermap, first_time):
-            return old_contents + "line2"
+            return old_contents + b"line2"
         calls = []
         def _ucw_error_modifier(old_contents, servermap, first_time):
             # simulate an UncoordinatedWriteError once
             calls.append(1)
             if len(calls) <= 1:
                 raise UncoordinatedWriteError("simulated")
-            return old_contents + "line3"
+            return old_contents + b"line3"
         def _always_ucw_error_modifier(old_contents, servermap, first_time):
             raise UncoordinatedWriteError("simulated")
         def _backoff_stopper(node, f):
@@ -580,11 +592,11 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
         giveuper._delay = 0.1
         giveuper.factor = 1

-        d = self.nodemaker.create_mutable_file(MutableData("line1"))
+        d = self.nodemaker.create_mutable_file(MutableData(b"line1"))
         def _created(n):
             d = n.modify(_modifier)
             d.addCallback(lambda res: n.download_best_version())
-            d.addCallback(lambda res: self.failUnlessEqual(res, "line1line2"))
+            d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2"))
             d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "m"))

             d.addCallback(lambda res:
@@ -593,7 +605,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
                                             n.modify, _ucw_error_modifier,
                                             _backoff_stopper))
             d.addCallback(lambda res: n.download_best_version())
-            d.addCallback(lambda res: self.failUnlessEqual(res, "line1line2"))
+            d.addCallback(lambda res: self.failUnlessEqual(res, b"line1line2"))
             d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 2, "stop"))

             def _reset_ucw_error_modifier(res):
@@ -604,7 +616,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
                                             _backoff_pauser))
             d.addCallback(lambda res: n.download_best_version())
             d.addCallback(lambda res: self.failUnlessEqual(res,
-                                                           "line1line2line3"))
+                                                           b"line1line2line3"))
             d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 3, "pause"))

             d.addCallback(lambda res:
@@ -614,7 +626,7 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
                                             giveuper.delay))
             d.addCallback(lambda res: n.download_best_version())
             d.addCallback(lambda res: self.failUnlessEqual(res,
-                                                           "line1line2line3"))
+                                                           b"line1line2line3"))
             d.addCallback(lambda res: self.failUnlessCurrentSeqnumIs(n, 3, "giveup"))

             return d
@@ -630,22 +642,22 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
             d.addCallback(lambda smap: smap.dump(StringIO()))
             d.addCallback(lambda sio:
                           self.failUnless("3-of-10" in sio.getvalue()))
-            d.addCallback(lambda res: n.overwrite(MutableData("contents 1")))
+            d.addCallback(lambda res: n.overwrite(MutableData(b"contents 1")))
             d.addCallback(lambda res: self.failUnlessIdentical(res, None))
             d.addCallback(lambda res: n.download_best_version())
-            d.addCallback(lambda res: self.failUnlessEqual(res, "contents 1"))
-            d.addCallback(lambda res: n.overwrite(MutableData("contents 2")))
+            d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 1"))
+            d.addCallback(lambda res: n.overwrite(MutableData(b"contents 2")))
             d.addCallback(lambda res: n.download_best_version())
-            d.addCallback(lambda res: self.failUnlessEqual(res, "contents 2"))
+            d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 2"))
             d.addCallback(lambda res: n.get_servermap(MODE_WRITE))
-            d.addCallback(lambda smap: n.upload(MutableData("contents 3"), smap))
+            d.addCallback(lambda smap: n.upload(MutableData(b"contents 3"), smap))
             d.addCallback(lambda res: n.download_best_version())
-            d.addCallback(lambda res: self.failUnlessEqual(res, "contents 3"))
+            d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 3"))
             d.addCallback(lambda res: n.get_servermap(MODE_ANYTHING))
             d.addCallback(lambda smap:
                           n.download_version(smap,
                                              smap.best_recoverable_version()))
-            d.addCallback(lambda res: self.failUnlessEqual(res, "contents 3"))
+            d.addCallback(lambda res: self.failUnlessEqual(res, b"contents 3"))
             return d
         d.addCallback(_created)
         return d
@@ -663,11 +675,11 @@ class Filenode(unittest.TestCase, testutil.ShouldFailMixin):
         d.addCallback(lambda ignored:
             self.failUnlessEqual(self.n.get_size(), 0))
         d.addCallback(lambda ignored:
-            self.n.overwrite(MutableData("foobarbaz")))
+            self.n.overwrite(MutableData(b"foobarbaz")))
         d.addCallback(lambda ignored:
             self.failUnlessEqual(self.n.get_size(), 9))
         d.addCallback(lambda ignored:
-            self.nodemaker.create_mutable_file(MutableData("foobarbaz")))
+            self.nodemaker.create_mutable_file(MutableData(b"foobarbaz")))
         d.addCallback(_created)
         d.addCallback(lambda ignored:
             self.failUnlessEqual(self.n.get_size(), 9))
@@ -1,3 +1,4 @@
+from future.utils import bchr
 from past.builtins import long

 from io import BytesIO
@@ -146,14 +147,14 @@ class FakeStorageServer(object):

 def flip_bit(original, byte_offset):
     return (original[:byte_offset] +
-            chr(ord(original[byte_offset]) ^ 0x01) +
+            bchr(ord(original[byte_offset:byte_offset+1]) ^ 0x01) +
             original[byte_offset+1:])

 def add_two(original, byte_offset):
     # It isn't enough to simply flip the bit for the version number,
     # because 1 is a valid version number. So we add two instead.
     return (original[:byte_offset] +
-            chr(ord(original[byte_offset]) ^ 0x02) +
+            bchr(ord(original[byte_offset:byte_offset+1]) ^ 0x02) +
             original[byte_offset+1:])

 def corrupt(res, s, offset, shnums_to_corrupt=None, offset_offset=0):
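Note: flip_bit and add_two switch from `chr(ord(original[i]) ^ x)` to slicing plus `future.utils.bchr` because on Python 3 indexing a bytes object yields an int and `chr()` yields text, so the old idiom would try to splice str into bytes. Slicing keeps a length-1 bytes value (so `ord()` works on both interpreters) and `bchr()` turns the XOR'd integer back into a single byte on either Python. A minimal sketch (illustrative):

    from future.utils import bchr

    share = b"\x00\x01\x02"
    # Python 3: share[1] is the int 1, and chr(1) would be text, not bytes.
    flipped = share[:1] + bchr(ord(share[1:2]) ^ 0x01) + share[2:]
    print(flipped)    # b'\x00\x00\x02', one bit flipped and still bytes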
@@ -313,7 +314,7 @@ class PublishMixin(object):
     def publish_one(self):
         # publish a file and create shares, which can then be manipulated
         # later.
-        self.CONTENTS = "New contents go here" * 1000
+        self.CONTENTS = b"New contents go here" * 1000
         self.uploadable = MutableData(self.CONTENTS)
         self._storage = FakeStorage()
         self._nodemaker = make_nodemaker(self._storage)
@@ -330,7 +331,7 @@ class PublishMixin(object):
         # an MDMF file.
         # self.CONTENTS should have more than one segment.
         if data is None:
-            data = "This is an MDMF file" * 100000
+            data = b"This is an MDMF file" * 100000
         self.CONTENTS = data
         self.uploadable = MutableData(self.CONTENTS)
         self._storage = FakeStorage()
@@ -348,7 +349,7 @@ class PublishMixin(object):
         # like publish_one, except that the result is guaranteed to be
         # an SDMF file
         if data is None:
-            data = "This is an SDMF file" * 1000
+            data = b"This is an SDMF file" * 1000
         self.CONTENTS = data
         self.uploadable = MutableData(self.CONTENTS)
         self._storage = FakeStorage()
@@ -363,11 +364,11 @@ class PublishMixin(object):


     def publish_multiple(self, version=0):
-        self.CONTENTS = ["Contents 0",
-                         "Contents 1",
-                         "Contents 2",
-                         "Contents 3a",
-                         "Contents 3b"]
+        self.CONTENTS = [b"Contents 0",
+                         b"Contents 1",
+                         b"Contents 2",
+                         b"Contents 3a",
+                         b"Contents 3b"]
         self.uploadables = [MutableData(d) for d in self.CONTENTS]
         self._copied_shares = {}
         self._storage = FakeStorage()
@@ -95,9 +95,12 @@ PORTED_MODULES = [
 ]

 PORTED_TEST_MODULES = [
+    "allmydata.test.mutable.test_checker",
     "allmydata.test.mutable.test_datahandle",
     "allmydata.test.mutable.test_different_encoding",
+    "allmydata.test.mutable.test_exceptions",
     "allmydata.test.mutable.test_filehandle",
+    "allmydata.test.mutable.test_filenode",
     "allmydata.test.test_abbreviate",
     "allmydata.test.test_base32",
     "allmydata.test.test_base62",
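Note: PORTED_TEST_MODULES appears to be the allowlist that the project's Python 3 test run consumes, so adding test_checker, test_exceptions and test_filenode here is what actually puts the newly ported modules under Python 3 CI. Conceptually the runner does something like the sketch below (an illustrative command, not the project's real invocation):

    import subprocess
    # excerpt of PORTED_TEST_MODULES as extended by this commit
    ported = ["allmydata.test.mutable.test_checker",
              "allmydata.test.mutable.test_exceptions",
              "allmydata.test.mutable.test_filenode"]
    subprocess.call(["trial"] + ported)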