SDMF: update filenode with correct k/N after Retrieve. Fixes #1510.

Without this, we get a regression when modifying a mutable file that was
created with more shares (larger N) than our current tahoe.cfg specifies. The
modification attempt creates new versions of the (0,1,..,newN-1) shares, but
leaves the old versions of the (newN,..,oldN-1) shares alone (and throws an
assertion error in SDMFSlotWriteProxy.finish_publishing in the process).

The mixed versions that result (some shares with e.g. N=10, some with N=20,
such that both versions are recoverable) cause problems for the Publish code,
even before MDMF landed. Might be related to refs #1390 and refs #1042.
Brian Warner, 2011-08-27 15:50:31 -07:00
commit 370e6f271e (parent b8c90d24fc)
3 changed files with 29 additions and 1 deletion
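
The scenario the commit message describes can be illustrated numerically. This
sketch is editorial, not part of the patch; the 3-of-20 / 3-of-10 numbers simply
mirror the regression test added below, and the variable names are made up for
illustration.

    # Illustration only: the mixed-version share layout left behind when a
    # 3-of-20 mutable file is modified by a client configured for 3-of-10.
    k, old_n, new_n = 3, 20, 10
    updated_shares = set(range(new_n))        # publish rewrites shares 0..newN-1
    stale_shares = set(range(new_n, old_n))   # shares newN..oldN-1 keep the old version
    # Both the new and the old version remain recoverable, since each side
    # still holds at least k shares -- the mixed state that confuses Publish.
    assert len(updated_shares) >= k and len(stale_shares) >= k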

@@ -499,7 +499,7 @@ class SDMFSlotWriteProxy:
         """
         for k in ["sharedata", "encprivkey", "signature", "verification_key",
                   "share_hash_chain", "block_hash_tree"]:
-            assert k in self._share_pieces, (k, self._share_pieces.keys())
+            assert k in self._share_pieces, (self.shnum, k, self._share_pieces.keys())
         # This is the only method that actually writes something to the
         # remote server.
         # First, we need to pack the share into data that we can write

@@ -1080,6 +1080,12 @@ class Retrieve:
         self._status.timings['total'] = now - self._started
         self._status.timings['fetch'] = now - self._started_fetching
+        # remember the encoding parameters, use them again next time
+        (seqnum, root_hash, IV, segsize, datalength, k, N, prefix,
+         offsets_tuple) = self.verinfo
+        self._node._populate_required_shares(k)
+        self._node._populate_total_shares(N)
+
         if self._verify:
             ret = list(self._bad_shares)
             self.log("done verifying, found %d bad shares" % len(ret))

@@ -3577,3 +3577,25 @@ class Interoperability(GridTestMixin, unittest.TestCase, testutil.ShouldFailMixin
         d = n.download_best_version()
         d.addCallback(self.failUnlessEqual, self.sdmf_old_contents)
         return d
+
+class DifferentEncoding(unittest.TestCase):
+    def setUp(self):
+        self._storage = s = FakeStorage()
+        self.nodemaker = make_nodemaker(s)
+
+    def test_filenode(self):
+        # create a file with 3-of-20, then modify it with a client configured
+        # to do 3-of-10. #1510 tracks a failure here
+        self.nodemaker.default_encoding_parameters["n"] = 20
+        d = self.nodemaker.create_mutable_file("old contents")
+        def _created(n):
+            filecap = n.get_cap().to_string()
+            del n # we want a new object, not the cached one
+            self.nodemaker.default_encoding_parameters["n"] = 10
+            n2 = self.nodemaker.create_from_cap(filecap)
+            return n2
+        d.addCallback(_created)
+        def modifier(old_contents, servermap, first_time):
+            return "new contents"
+        d.addCallback(lambda n: n.modify(modifier))
+        return d
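
A note on the test's shape: the "del n" and create_from_cap(filecap) steps
matter because the nodemaker caches filenodes (as the inline comment says), so
re-creating the node from its cap forces it to learn k/N from an actual
Retrieve rather than from its creation-time parameters, which is exactly the
path this commit fixes. Before the fix, the modify() call would end in the
finish_publishing assertion touched in the first hunk. Assuming the usual
Twisted trial runner and that this test lands in allmydata.test.test_mutable
(a guess from the surrounding classes), something like
"trial allmydata.test.test_mutable.DifferentEncoding" should exercise just
this case.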