Mirror of https://github.com/tahoe-lafs/tahoe-lafs.git
mutable.py: add share-unpacking code, use it for more tests
commit 6e5b799d46
parent 70bd92f24d
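The new layout is a fixed-size header followed by a table of offsets into the variable-length sections of the share (signature, share hash chain, block hash tree, IV, share data, encrypted private key), with an EOF offset marking the end of the share. As a quick orientation before reading the diff, here is a standalone sketch (not part of the commit) that round-trips that header using the same struct format string; the field names come from the diff, while the sample values and offset numbers are arbitrary:

import struct

# Header layout introduced below (names taken from the diff):
#   version(B) seqnum(Q) root_hash(32s) k(B) N(B) segsize(Q) datalen(Q)
# followed by seven offsets into the variable-length tail of the share:
#   signature(L) share_hash_chain(L) block_hash_tree(L) IV(L) share_data(L)
#   enc_privkey(Q) EOF(Q)
HEADER = ">BQ32s BBQQ LLLLLQQ"
HEADER_LENGTH = struct.calcsize(HEADER)   # 95 bytes

# Pack a header with arbitrary sample values, then unpack it again: this is
# the agreement that the pack side (Publish) and the unpack side
# (Retrieve._unpack_share) must share.
header = struct.pack(HEADER,
                     0,              # version byte
                     3,              # seqnum
                     b"\x00" * 32,   # root_hash
                     3, 10,          # k, N
                     21, 21,         # segsize, datalen
                     # sample offsets of the variable-length sections, then EOF:
                     95, 135, 271, 303, 319, 326, 338)
assert len(header) == HEADER_LENGTH

(version, seqnum, root_hash, k, N, segsize, datalen,
 off_signature, off_share_hash_chain, off_block_hash_tree,
 off_IV, off_share_data, off_enc_privkey, off_EOF) = struct.unpack(HEADER, header)
assert (version, seqnum, k, N, segsize, datalen) == (0, 3, 3, 10, 21, 21)

In the diff, Retrieve._unpack_share performs the unpack half against real share data, and Publish._pack_offsets produces the offsets portion of the header.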
src/allmydata/mutable.py

@@ -8,6 +8,17 @@ from allmydata.uri import WriteableSSKFileURI
 from allmydata.Crypto.Cipher import AES
 from allmydata import hashtree, codec
 
+
+HEADER_LENGTH = struct.calcsize(">BQ32s BBQQ LLLLLQQ")
+
+class NeedMoreDataError(Exception):
+    def __init__(self, needed_bytes):
+        Exception.__init__(self)
+        self.needed_bytes = needed_bytes
+
+
+# use client.create_mutable_file() to make one of these
+
 class MutableFileNode:
     implements(IMutableFileNode)
 
@@ -79,28 +90,57 @@ class MutableFileNode:
     def replace(self, newdata):
         return defer.succeed(None)
 
-    def unpack_data(self, data):
-        offsets = {}
+class Retrieve:
+    def __init__(self, filenode):
+        self._node = filenode
+
+    def _unpack_share(self, data):
+        assert len(data) >= HEADER_LENGTH
+        o = {}
         (version,
          seqnum,
          root_hash,
         k, N, segsize, datalen,
-         offsets['signature'],
-         offsets['share_hash_chain'],
-         offsets['block_hash_tree'],
-         offsets['IV'],
-         offsets['share_data'],
-         offsets['enc_privkey']) = struct.unpack(">BQ32s" + "BBQQ" + "LLLLLQ")
+         o['signature'],
+         o['share_hash_chain'],
+         o['block_hash_tree'],
+         o['IV'],
+         o['share_data'],
+         o['enc_privkey'],
+         o['EOF']) = struct.unpack(">BQ32s" + "BBQQ" + "LLLLLQQ",
+                                   data[:HEADER_LENGTH])
 
         assert version == 0
-        signature = data[offsets['signature']:offsets['share_hash_chain']]
-        share_hash_chain = data[offsets['share_hash_chain']:offsets['block_hash_tree']]
-        block_hash_tree = data[offsets['block_hash_tree']:offsets['IV']]
-        IV = data[offsets['IV']:offsets['share_data']]
-        share_data = data[offsets['share_data']:offsets['share_data']+datalen]
-        enc_privkey = data[offsets['enc_privkey']:]
+        if len(data) < o['EOF']:
+            raise NeedMoreDataError(o['EOF'])
+
+        pubkey = data[HEADER_LENGTH:o['signature']]
+        signature = data[o['signature']:o['share_hash_chain']]
+        share_hash_chain_s = data[o['share_hash_chain']:o['block_hash_tree']]
+        share_hash_format = ">H32s"
+        hsize = struct.calcsize(share_hash_format)
+        assert len(share_hash_chain_s) % hsize == 0, len(share_hash_chain_s)
+        share_hash_chain = []
+        for i in range(0, len(share_hash_chain_s), hsize):
+            chunk = share_hash_chain_s[i:i+hsize]
+            (hid, h) = struct.unpack(share_hash_format, chunk)
+            share_hash_chain.append( (hid, h) )
+        block_hash_tree_s = data[o['block_hash_tree']:o['IV']]
+        assert len(block_hash_tree_s) % 32 == 0, len(block_hash_tree_s)
+        block_hash_tree = []
+        for i in range(0, len(block_hash_tree_s), 32):
+            block_hash_tree.append(block_hash_tree_s[i:i+32])
+
+        IV = data[o['IV']:o['share_data']]
+        share_data = data[o['share_data']:o['enc_privkey']]
+        enc_privkey = data[o['enc_privkey']:o['EOF']]
+
+        return (seqnum, root_hash, k, N, segsize, datalen,
+                pubkey, signature, share_hash_chain, block_hash_tree,
+                IV, share_data, enc_privkey)
+
 
-# use client.create_mutable_file() to make one of these
 
 class Publish:
     """I represent a single act of publishing the mutable file to the grid."""
@@ -237,10 +277,11 @@ class Publish:
             share_data = all_shares[shnum]
            offsets = self._pack_offsets(len(verification_key),
                                          len(signature),
-                                         len(share_hash_chain),
-                                         len(block_hash_tree),
+                                         len(share_hash_chain_s),
+                                         len(block_hash_tree_s),
                                          len(IV),
-                                         len(share_data))
+                                         len(share_data),
+                                         len(encprivkey))
 
             final_shares[shnum] = "".join([prefix,
                                            offsets,
@@ -271,8 +312,8 @@ class Publish:
 
     def _pack_offsets(self, verification_key_length, signature_length,
                       share_hash_chain_length, block_hash_tree_length,
-                      IV_length, share_data_length):
-        post_offset = struct.calcsize(">BQ32s" + "BBQQ" + "LLLLLQ")
+                      IV_length, share_data_length, encprivkey_length):
+        post_offset = HEADER_LENGTH
         offsets = {}
         o1 = offsets['signature'] = post_offset + verification_key_length
         o2 = offsets['share_hash_chain'] = o1 + signature_length
@@ -281,49 +322,14 @@ class Publish:
         o4 = offsets['IV'] = o3 + block_hash_tree_length
         o5 = offsets['share_data'] = o4 + IV_length
         o6 = offsets['enc_privkey'] = o5 + share_data_length
+        o7 = offsets['EOF'] = o6 + encprivkey_length
 
-        return struct.pack(">LLLLLQ",
+        return struct.pack(">LLLLLQQ",
                            offsets['signature'],
                            offsets['share_hash_chain'],
                            offsets['block_hash_tree'],
                            offsets['IV'],
                            offsets['share_data'],
-                           offsets['enc_privkey'])
+                           offsets['enc_privkey'],
+                           offsets['EOF'])
 
-    def OFF_pack_data(self):
-        # dummy values to satisfy pyflakes until we wire this all up
-        seqnum, root_hash, k, N, segsize, datalen = 0,0,0,0,0,0
-        (verification_key, signature, share_hash_chain, block_hash_tree,
-         IV, share_data, enc_privkey) = ["0"*16] * 7
-        seqnum += 1
-        newbuf = [struct.pack(">BQ32s" + "BBQQ",
-                              0, # version byte
-                              seqnum,
-                              root_hash,
-                              k, N, segsize, datalen)]
-        post_offset = struct.calcsize(">BQ32s" + "BBQQ" + "LLLLLQ")
-        offsets = {}
-        o1 = offsets['signature'] = post_offset + len(verification_key)
-        o2 = offsets['share_hash_chain'] = o1 + len(signature)
-        o3 = offsets['block_hash_tree'] = o2 + len(share_hash_chain)
-        assert len(IV) == 16
-        o4 = offsets['IV'] = o3 + len(block_hash_tree)
-        o5 = offsets['share_data'] = o4 + len(IV)
-        o6 = offsets['enc_privkey'] = o5 + len(share_data)
-
-        newbuf.append(struct.pack(">LLLLLQ",
-                                  offsets['signature'],
-                                  offsets['share_hash_chain'],
-                                  offsets['block_hash_tree'],
-                                  offsets['IV'],
-                                  offsets['share_data'],
-                                  offsets['enc_privkey']))
-        newbuf.extend([verification_key,
-                       signature,
-                       share_hash_chain,
-                       block_hash_tree,
-                       IV,
-                       share_data,
-                       enc_privkey])
-        return "".join(newbuf)
-
 
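Nothing in this commit catches NeedMoreDataError yet; the new code only raises it when the speculative read handed to _unpack_share stops short of the share's EOF offset. Below is a caller-side sketch (not part of the commit) of how the needed_bytes hint could be used; read_share stands in for a hypothetical callable(offset, length) that returns raw share bytes:

# Sketch, not part of this commit: one plausible way a downloader could drive
# Retrieve._unpack_share together with NeedMoreDataError.
from allmydata import mutable

def fetch_and_unpack(filenode, read_share, first_read=2000):
    r = mutable.Retrieve(filenode)
    data = read_share(0, first_read)
    try:
        return r._unpack_share(data)
    except mutable.NeedMoreDataError as e:
        # The share extends past our speculative first read: fetch the number
        # of bytes the unpacker reported via needed_bytes, then try once more.
        data = read_share(0, e.needed_bytes)
        return r._unpack_share(data)

The test changes below take the simpler route and feed _unpack_share a complete in-memory share produced by Publish._generate_shares.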
src/allmydata/test/test_mutable.py

@@ -1,5 +1,5 @@
 
-import itertools
+import itertools, struct
 from twisted.trial import unittest
 from twisted.internet import defer
 
@@ -152,6 +152,7 @@ class Publish(unittest.TestCase):
         CONTENTS = "some initial contents"
         fn.create(CONTENTS)
         p = mutable.Publish(fn)
+        r = mutable.Retrieve(fn)
         # make some fake shares
         shares_and_ids = ( ["%07d" % i for i in range(10)], range(10) )
         d = defer.maybeDeferred(p._generate_shares,
@@ -171,7 +172,35 @@ class Publish(unittest.TestCase):
             self.failUnlessEqual(sorted(final_shares.keys()), range(10))
             for i,sh in final_shares.items():
                 self.failUnless(isinstance(sh, str))
-                self.failUnlessEqual(len(sh), 359)
+                self.failUnlessEqual(len(sh), 367)
+                # feed the share through the unpacker as a sanity-check
+                pieces = r._unpack_share(sh)
+                (u_seqnum, u_root_hash, k, N, segsize, datalen,
+                 pubkey, signature, share_hash_chain, block_hash_tree,
+                 IV, share_data, enc_privkey) = pieces
+                self.failUnlessEqual(u_seqnum, 3)
+                self.failUnlessEqual(u_root_hash, root_hash)
+                self.failUnlessEqual(k, 3)
+                self.failUnlessEqual(N, 10)
+                self.failUnlessEqual(segsize, 21)
+                self.failUnlessEqual(datalen, len(CONTENTS))
+                self.failUnlessEqual(pubkey, FakePubKey().serialize())
+                sig_material = struct.pack(">BQ32s BBQQ", 0, seqnum, root_hash,
+                                           k, N, segsize, datalen)
+                self.failUnlessEqual(signature,
+                                     FakePrivKey().sign(sig_material))
+                self.failUnless(isinstance(share_hash_chain, list))
+                self.failUnlessEqual(len(share_hash_chain), 4) # ln2(10)++
+                for i in share_hash_chain:
+                    self.failUnless(isinstance(i, tuple))
+                    self.failUnless(isinstance(i[0], int))
+                    self.failUnless(isinstance(i[1], str))
+                    self.failUnlessEqual(len(i[1]), 32)
+                self.failUnless(isinstance(block_hash_tree, list))
+                self.failUnlessEqual(len(block_hash_tree), 1) # very small tree
+                self.failUnlessEqual(IV, "IV"*8)
+                self.failUnlessEqual(len(share_data), len("%07d" % 1))
+                self.failUnlessEqual(enc_privkey, "encprivkey")
         d.addCallback(_done)
         return d
 