configurable testcache with malloc (#581)

* cache item number to cache memory size

* reload testcase if trimming changed the size

* fix splicing selection

* slim splicing

* import sync fix

* write testcache stats to fuzzer_stats

* fix new seed selection algo

* malloc+read instead of mmap

* fix

* testcache is configurable now and no reference counts

* fixes compilation, test script

* fixes

* switch TEST_CC to afl-cc in makefile

* code format

* fix

* fix crash

* fix crash

* fix env help output

* remove unnecessary pointer resets

* fix endless loop bug

* actually use the cache if set

* one more fix

* increase default cache entries, add default cache size value to config.h

Co-authored-by: hexcoder- <heiko@hexco.de>
This commit is contained in:
van Hauser
2020-10-14 15:30:30 +02:00
committed by GitHub
parent c20ba2c2c5
commit 56ac3fcdc5
13 changed files with 276 additions and 163 deletions

View File

@ -31,11 +31,12 @@
inline u32 select_next_queue_entry(afl_state_t *afl) {
u32 s = rand_below(afl, afl->queued_paths);
u32 s = rand_below(afl, afl->queued_paths);
double p = rand_next_percent(afl);
/*
fprintf(stderr, "select: p=%f s=%u ... p < prob[s]=%f ? s=%u : alias[%u]=%u"
" ==> %u\n", p, s, afl->alias_probability[s], s, s, afl->alias_table[s], p < afl->alias_probability[s] ? s : afl->alias_table[s]);
" ==> %u\n", p, s, afl->alias_probability[s], s, s, afl->alias_table[s], p <
afl->alias_probability[s] ? s : afl->alias_table[s]);
*/
return (p < afl->alias_probability[s] ? s : afl->alias_table[s]);
@ -55,7 +56,7 @@ void create_alias_table(afl_state_t *afl) {
int * S = (u32 *)afl_realloc(AFL_BUF_PARAM(out_scratch), n * sizeof(u32));
int * L = (u32 *)afl_realloc(AFL_BUF_PARAM(in_scratch), n * sizeof(u32));
if (!P || !S || !L) FATAL("could not aquire memory for alias table");
if (!P || !S || !L) { FATAL("could not aquire memory for alias table"); }
memset((void *)afl->alias_table, 0, n * sizeof(u32));
memset((void *)afl->alias_probability, 0, n * sizeof(double));
@ -65,7 +66,7 @@ void create_alias_table(afl_state_t *afl) {
struct queue_entry *q = afl->queue_buf[i];
if (!q->disabled) q->perf_score = calculate_score(afl, q);
if (!q->disabled) { q->perf_score = calculate_score(afl, q); }
sum += q->perf_score;
@ -74,19 +75,23 @@ void create_alias_table(afl_state_t *afl) {
for (i = 0; i < n; i++) {
struct queue_entry *q = afl->queue_buf[i];
P[i] = q->perf_score * n / sum;
P[i] = (q->perf_score * n) / sum;
}
int nS = 0, nL = 0, s;
for (s = (s32)n - 1; s >= 0; --s) {
if (P[s] < 1)
if (P[s] < 1) {
S[nS++] = s;
else
} else {
L[nL++] = s;
}
}
while (nS && nL) {
@ -96,11 +101,16 @@ void create_alias_table(afl_state_t *afl) {
afl->alias_probability[a] = P[a];
afl->alias_table[a] = g;
P[g] = P[g] + P[a] - 1;
if (P[g] < 1)
if (P[g] < 1) {
S[nS++] = g;
else
} else {
L[nL++] = g;
}
}
while (nL)
@ -110,11 +120,10 @@ void create_alias_table(afl_state_t *afl) {
afl->alias_probability[S[--nS]] = 1;
/*
fprintf(stderr, " %-3s %-3s %-9s %-9s\n", "entry", "alias", "prob", "perf");
for (u32 i = 0; i < n; ++i)
fprintf(stderr, " %3i %3i %9.7f %9.7f\n", i, afl->alias_table[i],
afl->alias_probability[i], afl->queue_buf[i]->perf_score);
fprintf(stderr, " entry alias probability perf_score\n");
for (u32 i = 0; i < n; ++i)
fprintf(stderr, " %5u %5u %11u %0.9f\n", i, afl->alias_table[i],
afl->alias_probability[i], afl->queue_buf[i]->perf_score);
*/
}
@ -860,3 +869,131 @@ u32 calculate_score(afl_state_t *afl, struct queue_entry *q) {
}
/* Reload a queue entry's cached testcase buffer after the on-disk file
   changed (e.g. trimming altered its size). No-op if the entry is not
   currently cached. `old_len` is the length the entry had when cached;
   the cache byte accounting is adjusted by (q->len - old_len). */
void queue_testcase_retake(afl_state_t *afl, struct queue_entry *q,
                           u32 old_len) {

  if (likely(q->testcase_buf)) {

    free(q->testcase_buf);

    int fd = open(q->fname, O_RDONLY);
    if (unlikely(fd < 0)) { PFATAL("Unable to open '%s'", q->fname); }

    u32 len = q->len;
    q->testcase_buf = malloc(len);
    if (unlikely(!q->testcase_buf)) {

      /* message/format fixed: this is malloc (not mmap), len is u32 */
      PFATAL("Unable to malloc '%s' with len %u", q->fname, len);

    }

    /* BUG FIX: the buffer was allocated but never filled, so the cache
       held indeterminate data. Read the (possibly resized) file back in. */
    ck_read(fd, q->testcase_buf, len, q->fname);
    close(fd);

    /* Keep the cache byte count in sync with the new file length. */
    afl->q_testcase_cache_size = afl->q_testcase_cache_size + q->len - old_len;

  }

}
/* Returns the testcase buf from the file behind this queue entry.
   With a cache configured the buffer is kept in the cache (no reference
   counting in this scheme); without one, the data is read into a scratch
   buffer that is only valid until the next call. */
inline u8 *queue_testcase_get(afl_state_t *afl, struct queue_entry *q) {

  u32 len = q->len;

  /* first handle if no testcase cache is configured */

  if (unlikely(!afl->q_testcase_max_cache_size)) {

    u8 *buf;

    /* two scratch buffers: one for the current entry, one for e.g. a
       splice partner, so both can be live at the same time */
    if (q == afl->queue_cur) {

      buf = afl_realloc((void **)&afl->testcase_buf, len);

    } else {

      buf = afl_realloc((void **)&afl->splicecase_buf, len);

    }

    if (unlikely(!buf)) {

      PFATAL("Unable to malloc '%s' with len %u", q->fname, len);

    }

    int fd = open(q->fname, O_RDONLY);
    if (unlikely(fd < 0)) { PFATAL("Unable to open '%s'", q->fname); }
    ck_read(fd, buf, len, q->fname);
    close(fd);
    return buf;

  }

  /* now handle the testcase cache */

  if (unlikely(!q->testcase_buf)) {

    /* Buf not cached, let's load it */
    u32 tid = 0;

    /* Evict entries while either the byte budget or the entry count
       (TESTCASE_ENTRIES) would be exceeded by adding this testcase. */
    while (unlikely(afl->q_testcase_cache_size + len >=
                        afl->q_testcase_max_cache_size ||
                    afl->q_testcase_cache_count >= TESTCASE_ENTRIES - 1)) {

      /* Cache full. We need to evict one entry to make room.
         Pick a random occupied slot, never the current queue entry
         (its buffer is presumably still in use by the fuzzing loop). */

      do {

        tid = rand_below(afl, afl->q_testcase_max_cache_count);

      } while (afl->q_testcase_cache[tid] == NULL ||
               afl->q_testcase_cache[tid] == afl->queue_cur);

      struct queue_entry *old_cached = afl->q_testcase_cache[tid];
      free(old_cached->testcase_buf);
      old_cached->testcase_buf = NULL;
      afl->q_testcase_cache_size -= old_cached->len;
      afl->q_testcase_cache[tid] = NULL;
      --afl->q_testcase_cache_count;

    }

    /* Scan forward from tid to the first free slot (a slot freed by the
       eviction above, or an earlier hole in the table). */
    while (likely(afl->q_testcase_cache[tid] != NULL))
      ++tid;

    /* Load the test case into memory (malloc + read, not mmap). */
    int fd = open(q->fname, O_RDONLY);
    if (unlikely(fd < 0)) { PFATAL("Unable to open '%s'", q->fname); }

    q->testcase_buf = malloc(len);
    if (unlikely(!q->testcase_buf)) {

      PFATAL("Unable to malloc '%s' with len %u", q->fname, len);

    }

    ck_read(fd, q->testcase_buf, len, q->fname);
    close(fd);

    /* Register testcase as cached */
    afl->q_testcase_cache[tid] = q;
    afl->q_testcase_cache_size += q->len;
    ++afl->q_testcase_cache_count;
    /* track the high-water slot index so rand_below() above only draws
       from slots that have ever been used */
    if (tid >= afl->q_testcase_max_cache_count)
      afl->q_testcase_max_cache_count = tid + 1;

  }

  return q->testcase_buf;

}