mirror of https://github.com/tahoe-lafs/tahoe-lafs.git (synced 2025-04-13 22:03:04 +00:00)
zfec: add benchmark utility
This commit is contained in:
parent 279561b61b
commit d1c40a7ab5
src/zfec/bench/bench_zfec.py (new file, 46 lines)
@@ -0,0 +1,46 @@
from zfec import filefec

import os

from pyutil import benchutil

FNAME="benchrandom.data"

def _make_new_rand_file(size):
    open(FNAME, "wb").write(os.urandom(size))

def donothing(results, reslenthing):
    pass

import sha
hashers = [ sha.new() for i in range(100) ]
def hashem(results, reslenthing):
    for i, result in enumerate(results):
        hashers[i].update(result)

def _encode_file(N):
    filefec.encode_file(open(FNAME, "rb"), donothing, 25, 100)

def _encode_file_stringy(N):
    filefec.encode_file_stringy(open(FNAME, "rb"), donothing, 25, 100)

def _encode_file_stringy_easyfec(N):
    filefec.encode_file_stringy_easyfec(open(FNAME, "rb"), donothing, 25, 100)

def _encode_file_not_really(N):
    filefec.encode_file_not_really(open(FNAME, "rb"), donothing, 25, 100)

def _encode_file_not_really_and_hash(N):
    filefec.encode_file_not_really_and_hash(open(FNAME, "rb"), donothing, 25, 100)

def _encode_file_and_hash(N):
    filefec.encode_file(open(FNAME, "rb"), hashem, 25, 100)

def bench():
    # for f in [_encode_file_stringy_easyfec, _encode_file_stringy, _encode_file, _encode_file_not_really,]:
    # for f in [_encode_file,]:
    for f in [_encode_file_not_really, _encode_file_not_really_and_hash, _encode_file, _encode_file_and_hash,]:
        print f
        benchutil.bench(f, initfunc=_make_new_rand_file, TOPXP=23, MAXREPS=128, MAXTIME=64)

# bench()
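Note that the bench() call at the bottom of the new file is commented out, so importing the module does nothing by itself. A minimal sketch of running the benchmark by hand, assuming a Python 2 environment with zfec and pyutil installed and src/zfec/bench on the import path (the invocation itself is illustrative, not part of the commit):

    # Illustrative only: exercise the new benchmark by hand.
    # benchutil.bench() is expected to call _make_new_rand_file(size) to
    # regenerate benchrandom.data before timing each _encode_file* variant.
    import bench_zfec
    bench_zfec.bench()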
@@ -336,6 +336,72 @@ def encode_file(inf, cb, k, m, chunksize=4096):
        res = enc.encode(l)
        cb(res, indatasize)

import sha
def encode_file_not_really(inf, cb, k, m, chunksize=4096):
    enc = zfec.Encoder(k, m)
    l = tuple([ array.array('c') for i in range(k) ])
    indatasize = k*chunksize # will be reset to shorter upon EOF
    eof = False
    ZEROES=array.array('c', ['\x00'])*chunksize
    while not eof:
        # This loop body executes once per segment.
        i = 0
        while (i<len(l)):
            # This loop body executes once per chunk.
            a = l[i]
            del a[:]
            try:
                a.fromfile(inf, chunksize)
                i += 1
            except EOFError:
                eof = True
                indatasize = i*chunksize + len(a)

                # padding
                a.fromstring("\x00" * (chunksize-len(a)))
                i += 1
                while (i<len(l)):
                    a = l[i]
                    a[:] = ZEROES
                    i += 1

        # res = enc.encode(l)
        cb(None, None)

def encode_file_not_really_and_hash(inf, cb, k, m, chunksize=4096):
    hasher = sha.new()
    enc = zfec.Encoder(k, m)
    l = tuple([ array.array('c') for i in range(k) ])
    indatasize = k*chunksize # will be reset to shorter upon EOF
    eof = False
    ZEROES=array.array('c', ['\x00'])*chunksize
    while not eof:
        # This loop body executes once per segment.
        i = 0
        while (i<len(l)):
            # This loop body executes once per chunk.
            a = l[i]
            del a[:]
            try:
                a.fromfile(inf, chunksize)
                i += 1
            except EOFError:
                eof = True
                indatasize = i*chunksize + len(a)

                # padding
                a.fromstring("\x00" * (chunksize-len(a)))
                i += 1
                while (i<len(l)):
                    a = l[i]
                    a[:] = ZEROES
                    i += 1

        # res = enc.encode(l)
        for thing in l:
            hasher.update(thing)
        cb(None, None)

def encode_file_stringy(inf, cb, k, m, chunksize=4096):
    """
    Read in the contents of inf, encode, and call cb with the results.
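The two encode_file_not_really* functions added to the filefec module perform the same chunking, reading, and zero-padding as encode_file but leave the enc.encode(l) call commented out (the _and_hash variant feeds the padded chunks through SHA-1 instead), so their timings isolate the file-handling and hashing overhead from the cost of the FEC arithmetic itself. For comparison, a standalone sketch (not part of this commit) that times only the raw zfec encode step on a single in-memory segment, reusing the benchmark's 25-of-100 parameters and 4096-byte chunk size:

    # Illustrative micro-benchmark: time zfec.Encoder.encode alone on one
    # in-memory segment of k chunks, with the same k/m/chunksize as above.
    import os, time
    import zfec

    k, m, chunksize = 25, 100, 4096
    blocks = [os.urandom(chunksize) for _ in range(k)]  # one segment of input

    enc = zfec.Encoder(k, m)
    reps = 1000
    start = time.time()
    for _ in range(reps):
        enc.encode(blocks)
    elapsed = time.time() - start
    print("%d encodes of %d-byte segments: %.3f s" % (reps, k * chunksize, elapsed))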