test_mutable: make test-multiple-encodings work

Brian Warner 2008-03-10 23:16:28 -07:00
parent 57bd23f35f
commit 9ca55b8b79

@@ -5,6 +5,7 @@ from twisted.trial import unittest
 from twisted.internet import defer, reactor
 from twisted.python import failure
 from allmydata import mutable, uri, dirnode, download
+from allmydata.util.idlib import shortnodeid_b2a
 from allmydata.util.hashutil import tagged_hash
 from allmydata.encode import NotEnoughPeersError
 from allmydata.interfaces import IURI, INewDirectoryURI, \
@ -88,7 +89,7 @@ class FakeStorage:
if peerid in pending: if peerid in pending:
d, shares = pending.pop(peerid) d, shares = pending.pop(peerid)
eventually(d.callback, shares) eventually(d.callback, shares)
for (d, shares) in pending.items(): for (d, shares) in pending.values():
eventually(d.callback, shares) eventually(d.callback, shares)
def write(self, peerid, storage_index, shnum, offset, data): def write(self, peerid, storage_index, shnum, offset, data):
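The one-line change above matters because pending maps a peerid to a (deferred, shares) tuple: iterating .items() yields (peerid, (deferred, shares)) pairs, so the unpacking in the loop header would bind d to a peerid rather than to the deferred. A minimal sketch of the difference, using hypothetical placeholder values rather than the test's real objects:

pending = {"peer1": ("deferred-1", ["share-a"]),
           "peer2": ("deferred-2", ["share-b"])}

# .items() yields (key, value) pairs, so this unpacking binds d to the
# peerid and shares to the whole (deferred, shares) tuple:
for (d, shares) in pending.items():
    assert d in ("peer1", "peer2")

# .values() yields just the stored tuples, which is what the loop wants:
for (d, shares) in pending.values():
    assert d.startswith("deferred")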
@ -704,7 +705,7 @@ class Roundtrip(unittest.TestCase):
d.addCallback(_retrieved) d.addCallback(_retrieved)
return d return d
def OFF_test_multiple_encodings(self): # not finished yet def test_multiple_encodings(self):
# we encode the same file in two different ways (3-of-10 and 4-of-9), # we encode the same file in two different ways (3-of-10 and 4-of-9),
# then mix up the shares, to make sure that download survives seeing # then mix up the shares, to make sure that download survives seeing
# a variety of encodings. This is actually kind of tricky to set up. # a variety of encodings. This is actually kind of tricky to set up.
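For context on the two encodings named in that comment: with k-of-N erasure coding, any k distinct shares are enough to reconstruct the file, so the 3-of-10 version needs three shares and the 4-of-9 version needs four. A tiny illustrative sketch of that recoverability rule (not Tahoe's actual API):

def recoverable(shnums_seen, k):
    # with k-of-N erasure coding, any k distinct shares suffice
    return len(set(shnums_seen)) >= k

assert recoverable([0, 1, 2], k=3)      # enough for the 3-of-10 version
assert not recoverable([0, 1, 2], k=4)  # one share short of the 4-of-9 version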
@@ -763,45 +764,63 @@ class Roundtrip(unittest.TestCase):
         d.addCallback(_published_2)
         def _merge(res):
             log.msg("merging sharelists")
-            print len(self._shares1), len(self._shares2)
-            from allmydata.util import idlib
-            # we rearrange the shares, removing them from their original
-            # homes.
-            shares1 = self._shares1.values()
-            shares2 = self._shares2.values()
-            print len(shares1), len(shares2)
-            # then we place shares in the following order:
+            # we merge the shares from the two sets, leaving each shnum in
+            # its original location, but using a share from set1 or set2
+            # according to the following sequence:
+            #
             #  4-of-9  a  s2
             #  4-of-9  b  s2
             #  4-of-9  c  s2
             #  3-of-9  d  s1
             #  3-of-9  e  s1
-            #  4-of-9  f  s2
-            #  3-of-9  g  s1
-            # so that neither form can be recovered until fetch [f]. Later,
-            # when we implement code that handles multiple versions, we can
-            # use this framework to assert that all recoverable versions are
-            # retrieved, and test that 'epsilon' does its job
-            places = [2, 2, 2, 1, 1, 2, 1]
+            #  3-of-9  f  s1
+            #  4-of-9  g  s2
+            #
+            # so that neither form can be recovered until fetch [f], at which
+            # point version-s1 (the 3-of-10 form) should be recoverable. If
+            # the implementation latches on to the first version it sees,
+            # then s2 will be recoverable at fetch [g].
+            # Later, when we implement code that handles multiple versions,
+            # we can use this framework to assert that all recoverable
+            # versions are retrieved, and test that 'epsilon' does its job
+            places = [2, 2, 2, 1, 1, 1, 2]
+            sharemap = {}
             for i in range(len(s._sequence)):
                 peerid = s._sequence[i]
-                if not places:
-                    print idlib.shortnodeid_b2a(peerid), "-", "-"
-                    break
-                which = places.pop(0)
-                if which == 1:
-                    print idlib.shortnodeid_b2a(peerid), "1", "-"
-                    s._peers[peerid] = shares1.pop(0)
-                else:
-                    print idlib.shortnodeid_b2a(peerid), "-", "2"
-                    s._peers[peerid] = shares2.pop(0)
+                peerid_s = shortnodeid_b2a(peerid)
+                for shnum in self._shares1.get(peerid, {}):
+                    sharemap[shnum] = peerid
+                    if shnum < len(places):
+                        which = places[shnum]
+                    else:
+                        which = "x"
+                    s._peers[peerid] = peers = {}
+                    in_1 = shnum in self._shares1[peerid]
+                    in_2 = shnum in self._shares2.get(peerid, {})
+                    #print peerid_s, shnum, which, in_1, in_2
+                    if which == 1:
+                        if in_1:
+                            peers[shnum] = self._shares1[peerid][shnum]
+                    elif which == 2:
+                        if in_2:
+                            peers[shnum] = self._shares2[peerid][shnum]
             # we don't bother placing any other shares
+            # now sort the sequence so that share 0 is returned first
+            new_sequence = [sharemap[shnum]
+                            for shnum in sorted(sharemap.keys())]
+            s._sequence = new_sequence
             log.msg("merge done")
         d.addCallback(_merge)
         d.addCallback(lambda res: r3.retrieve())
         def _retrieved(new_contents):
-            # the current specified behavior is "first version recoverable"
+            ## the current specified behavior is "first version recoverable"
+            #self.failUnlessEqual(new_contents, contents1)
+            # the current behavior is "first version seen is sticky"
             self.failUnlessEqual(new_contents, contents2)
         d.addCallback(_retrieved)
         return d
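As a cross-check on the comments in the hunk above, here is a small standalone walk-through of the placement sequence; it is an illustrative sketch, not part of the commit, and assumes shares are fetched one per peer in shnum order as the test arranges, with k=3 for the set1 (3-of-10) encoding and k=4 for the set2 (4-of-9) encoding:

places = [2, 2, 2, 1, 1, 1, 2]  # which set supplies shnum 0..6 (fetches a..g)
k1, k2 = 3, 4                   # set1 is 3-of-10, set2 is 4-of-9

seen1 = seen2 = 0
recoverable_at = {}
for fetch, which in enumerate(places):
    if which == 1:
        seen1 += 1
    else:
        seen2 += 1
    if seen1 >= k1 and 1 not in recoverable_at:
        recoverable_at[1] = fetch
    if seen2 >= k2 and 2 not in recoverable_at:
        recoverable_at[2] = fetch

# neither version is recoverable before fetch 5 ([f]); set1 (3-of-10) becomes
# recoverable at fetch [f], and set2 (4-of-9) only at fetch [g]. A retriever
# with "first version seen is sticky" behavior returns contents2, while a
# "first version recoverable" retriever would return contents1.
assert recoverable_at == {1: 5, 2: 6}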