encode.py: update comments, max_segment_size is now 2MiB
parent 9da1d70676
commit 6160af5f50
@@ -286,8 +286,8 @@ class Encoder(object):
 # memory footprint: we only hold a tiny piece of the plaintext at any
 # given time. We build up a segment's worth of crypttext, then hand
 # it to the encoder. Assuming 3-of-10 encoding (3.3x expansion) and
-# 1MiB max_segment_size, we get a peak memory footprint of 4.3*1MiB =
-# 4.3MiB. Lowering max_segment_size to, say, 100KiB would drop the
+# 2MiB max_segment_size, we get a peak memory footprint of 4.3*2MiB =
+# 8.6MiB. Lowering max_segment_size to, say, 100KiB would drop the
 # footprint to 430KiB at the expense of more hash-tree overhead.

 d = self._gather_data(self.required_shares, input_piece_size,
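For reference, the arithmetic behind the updated comment: with k-of-n erasure coding the shares expand one segment by roughly n/k, and holding the segment of crypttext alongside its encoded shares gives a peak of about (1 + n/k) * max_segment_size, i.e. ~4.3x for 3-of-10. The sketch below is not Tahoe-LAFS code; the function name and defaults are illustrative only.

# Minimal sketch (hypothetical helper, not from encode.py): estimate peak
# upload memory for one segment, assuming the segment's crypttext is held
# in memory at the same time as its n/k-expanded shares.
def peak_memory_footprint(max_segment_size, required_shares=3, total_shares=10):
    expansion = total_shares / required_shares   # 3-of-10 -> ~3.33x
    return (1 + expansion) * max_segment_size    # segment + its shares

MiB = 1024 * 1024
KiB = 1024

print(peak_memory_footprint(2 * MiB) / MiB)    # ~8.7 MiB with a 2MiB segment
print(peak_memory_footprint(100 * KiB) / KiB)  # ~433 KiB with a 100KiB segment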