diff --git a/src/allmydata/mutable.py b/src/allmydata/mutable.py
index aa44dcc6f..d97e76fd8 100644
--- a/src/allmydata/mutable.py
+++ b/src/allmydata/mutable.py
@@ -18,6 +18,8 @@ class NeedMoreDataError(Exception):
     def __init__(self, needed_bytes):
         Exception.__init__(self)
         self.needed_bytes = needed_bytes
+    def __str__(self):
+        return "<NeedMoreDataError (%d bytes)>" % self.needed_bytes
 
 class UncoordinatedWriteError(Exception):
     pass
@@ -372,7 +374,7 @@ class Retrieve:
                 return
             if f.check(NeedMoreDataError):
                 # ah, just re-send the query then.
-                self._read_size = max(self._read_size, f.needed_bytes)
+                self._read_size = max(self._read_size, f.value.needed_bytes)
                 (conn, storage_index, peer_storage_servers) = stuff
                 self._do_query(conn, peerid, storage_index, self._read_size,
                                peer_storage_servers)
diff --git a/src/allmydata/scripts/debug.py b/src/allmydata/scripts/debug.py
index 936e84a6e..5ec8e11c5 100644
--- a/src/allmydata/scripts/debug.py
+++ b/src/allmydata/scripts/debug.py
@@ -181,7 +181,16 @@ def dump_SDMF_share(offset, length, config, out, err):
     data = f.read(min(length, 2000))
     f.close()
 
-    pieces = mutable.unpack_share(data)
+    try:
+        pieces = mutable.unpack_share(data)
+    except mutable.NeedMoreDataError, e:
+        # retry once with the larger size
+        size = e.needed_bytes
+        f = open(config['filename'], "rb")
+        f.seek(offset)
+        data = f.read(min(length, size))
+        f.close()
+        pieces = mutable.unpack_share(data)
 
     (seqnum, root_hash, IV, k, N, segsize, datalen,
      pubkey, signature, share_hash_chain, block_hash_tree,
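
The Retrieve change above is needed because an errback receives a twisted.python.failure.Failure, not the raised exception itself; the exception's attributes live on the Failure's .value. A minimal standalone sketch of that behavior, not part of the patch (the NeedMoreDataError definition is repeated here only for illustration):

from twisted.python.failure import Failure

class NeedMoreDataError(Exception):
    def __init__(self, needed_bytes):
        Exception.__init__(self)
        self.needed_bytes = needed_bytes

try:
    raise NeedMoreDataError(4096)
except NeedMoreDataError:
    f = Failure()  # captures the active exception, like an errback argument

assert f.check(NeedMoreDataError)    # .check() matches the wrapped exception type
assert f.value.needed_bytes == 4096  # attributes live on the wrapped exception,
                                     # not on the Failure itself, which is why the
                                     # old f.needed_bytes lookup in Retrieve failed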