encode: make MAX_SEGMENT_SIZE controllable, to support tests which force the use of multiple segments. Also, remove not-very-useful upload-side debug messages

Brian Warner 2007-04-16 19:29:57 -07:00
parent b9624502c9
commit ff8cb4d32e
3 changed files with 14 additions and 15 deletions
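
Before the diff itself, a quick sketch of the pattern the first Encoder hunk introduces: MAX_SEGMENT_SIZE stays as a class-level default, and a caller-supplied options dict can override it per instance. This is a standalone toy, not the real Encoder; only the names that appear in the diff are taken from it.

# Minimal sketch of the override pattern from the first hunk (not the real class).
MiB = 1024*1024

class Encoder:
    MAX_SEGMENT_SIZE = 2*MiB    # class-level default, as in the diff

    def __init__(self, options={}):
        # per-instance override, falling back to the class default when the
        # caller does not supply "max_segment_size"
        self.MAX_SEGMENT_SIZE = options.get("max_segment_size",
                                            self.MAX_SEGMENT_SIZE)

assert Encoder().MAX_SEGMENT_SIZE == 2*MiB
assert Encoder({"max_segment_size": 25}).MAX_SEGMENT_SIZE == 25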

@@ -79,6 +79,12 @@ class Encoder(object):
     implements(IEncoder)
     NEEDED_SHARES = 25
     TOTAL_SHARES = 100
+    MAX_SEGMENT_SIZE = 2*MiB
+
+    def __init__(self, options={}):
+        object.__init__(self)
+        self.MAX_SEGMENT_SIZE = options.get("max_segment_size",
+                                            self.MAX_SEGMENT_SIZE)
 
     def setup(self, infile):
         self.infile = infile
@@ -89,7 +95,7 @@ class Encoder(object):
         self.num_shares = self.TOTAL_SHARES
         self.required_shares = self.NEEDED_SHARES
-        self.segment_size = min(2*MiB, self.file_size)
+        self.segment_size = min(self.MAX_SEGMENT_SIZE, self.file_size)
         # this must be a multiple of self.required_shares
         self.segment_size = mathutil.next_multiple(self.segment_size,
                                                    self.required_shares)
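
To make the segment-size computation in the hunk above concrete, here is a small worked example using the 25-byte override from the test below and its 76-byte input. next_multiple is sketched inline with the round-up-to-a-multiple behaviour the code expects from mathutil, and required_shares = 25 is just the NEEDED_SHARES default shown above; the test itself may use different share counts.

# Worked example (stand-alone sketch; next_multiple stands in for mathutil.next_multiple).
def next_multiple(n, k):
    # smallest multiple of k that is >= n
    return ((n + k - 1) // k) * k

max_segment_size = 25                          # the test's override
required_shares = 25                           # assumed: the NEEDED_SHARES default
file_size = len("happy happy joy joy" * 4)     # 76 bytes of test data

segment_size = min(max_segment_size, file_size)               # 25
segment_size = next_multiple(segment_size, required_shares)   # still 25 here

num_segments = next_multiple(file_size, segment_size) // segment_size   # 4 segments
assert (num_segments-1)*segment_size < file_size <= num_segments*segment_size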

@@ -147,12 +147,12 @@ class Roundtrip(unittest.TestCase):
                          bucket_modes={}):
         if AVAILABLE_SHARES is None:
             AVAILABLE_SHARES = NUM_SHARES
-        e = encode.Encoder()
+        options = {"max_segment_size": 25} # force use of multiple segments
+        e = encode.Encoder(options)
         data = "happy happy joy joy" * 4
         e.setup(StringIO(data))
         assert e.num_shares == NUM_SHARES # else we'll be completely confused
-        e.segment_size = 25 # force use of multiple segments
-        e.setup_codec() # need to rebuild the codec for that change
         assert (NUM_SEGMENTS-1)*e.segment_size < len(data) <= NUM_SEGMENTS*e.segment_size

@@ -56,10 +56,10 @@ class PeerTracker:
         return (alreadygot, set(buckets.keys()))
 
 class FileUploader:
-    debug = False
 
-    def __init__(self, client):
+    def __init__(self, client, options={}):
         self._client = client
+        self._options = options
 
     def set_params(self, needed_shares, shares_of_happiness, total_shares):
         self.needed_shares = needed_shares
@@ -87,12 +87,10 @@ class FileUploader:
         string)."""
         log.msg("starting upload [%s]" % (idlib.b2a(self._verifierid),))
-        if self.debug:
-            print "starting upload"
         assert self.needed_shares
 
         # create the encoder, so we can know how large the shares will be
-        self._encoder = encode.Encoder()
+        self._encoder = encode.Encoder(self._options)
         self._encoder.setup(self._filehandle)
         share_size = self._encoder.get_share_size()
         block_size = self._encoder.get_block_size()
@@ -279,7 +277,6 @@ class Uploader(service.MultiService):
     implements(IUploader)
     name = "uploader"
     uploader_class = FileUploader
-    debug = False
     needed_shares = 25 # Number of shares required to reconstruct a file.
     desired_shares = 75 # We will abort an upload unless we can allocate space for at least this many.
@@ -294,18 +291,14 @@ class Uploader(service.MultiService):
         # note: this is only of the plaintext data, no encryption yet
         return hasher.digest()
 
-    def upload(self, f):
+    def upload(self, f, options={}):
         # this returns the URI
         assert self.parent
         assert self.running
         f = IUploadable(f)
         fh = f.get_filehandle()
-        u = self.uploader_class(self.parent)
-        if self.debug:
-            u.debug = True
+        u = self.uploader_class(self.parent, options)
         u.set_filehandle(fh)
-        # push two shares, require that we get two back. TODO: this is
-        # temporary, of course.
         u.set_params(self.needed_shares, self.desired_shares, self.total_shares)
         u.set_verifierid(self._compute_verifierid(fh))
         d = u.start()
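
Taken together, the three files now thread a single options dict through the whole path: Uploader.upload(f, options) hands it to FileUploader, which hands it to encode.Encoder. A self-contained toy of that hand-off follows; these are stand-ins rather than the real classes, and the real upload() goes on to return the URI (asynchronously, judging by the d = u.start() above) rather than a plain value.

# Toy version of the plumbing this commit sets up (not the real classes).
MiB = 1024*1024

class Encoder:
    MAX_SEGMENT_SIZE = 2*MiB
    def __init__(self, options={}):
        self.MAX_SEGMENT_SIZE = options.get("max_segment_size",
                                            self.MAX_SEGMENT_SIZE)

class FileUploader:
    def __init__(self, client, options={}):
        self._client = client
        self._options = options
    def start(self):
        # same hand-off as in the diff: the uploader builds its Encoder
        # from whatever options it was given
        return Encoder(self._options).MAX_SEGMENT_SIZE

class Uploader:
    uploader_class = FileUploader
    def upload(self, f, options={}):
        u = self.uploader_class(None, options)
        return u.start()

# a test can now force multiple segments without reaching into Encoder internals
assert Uploader().upload(None, {"max_segment_size": 25}) == 25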