Dominik Maier
2020-03-27 21:59:08 +01:00
parent 5a7d33330a
commit 14bd424d2a
4 changed files with 106 additions and 60 deletions

View File

@@ -583,6 +583,27 @@ typedef struct afl_state {
   u8 clean_trace_custom[MAP_SIZE];
   u8 first_trace[MAP_SIZE];
 
+  /* needed for afl_fuzz_one */
+  // TODO: see which we can reuse
+  u8 *out_buf;
+  size_t out_size;
+  u8 *out_scratch_buf;
+  size_t out_scratch_size;
+  u8 *eff_buf;
+  size_t eff_size;
+  u8 *in_buf;
+  size_t in_size;
+  u8 *in_scratch_buf;
+  size_t in_scratch_size;
+  u8 *ex_buf;
+  size_t ex_size;
+
 } afl_state_t;
 
 /* A global pointer to all instances is needed (for now) for signals to arrive
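Each new pointer is paired with a size field, and afl_state_init() zeroes the whole struct (see the memset hunk further down), so a NULL/0 pair is a valid starting state: the first ck_maybe_grow() call performs the initial allocation, and later calls only realloc once a request outgrows the stored size. A minimal sketch of the intended lifecycle, assuming the helpers added to alloc-inl.h below; the wrapper function here is hypothetical:

  /* Sketch only: reuse afl->out_buf across runs instead of a ck_alloc() /
     ck_free() pair per fuzz_one() invocation. */
  static u8 *prepare_out_buf(afl_state_t *afl, u8 *in_buf, size_t len) {

    u8 *out_buf = ck_maybe_grow((void **)&afl->out_buf, &afl->out_size, len);
    memcpy(out_buf, in_buf, len);  /* allocation stays owned by afl_state_t */
    return out_buf;                /* no ck_free() on the abandon paths     */

  }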

View File

@@ -35,6 +35,9 @@
 #include "types.h"
 #include "debug.h"
 
+/* Initial size used for ck_maybe_grow */
+#define INITIAL_GROWTH_SIZE (64)
+
 // Be careful! _WANT_ORIGINAL_AFL_ALLOC is not compatible with custom mutators
 
 #ifndef _WANT_ORIGINAL_AFL_ALLOC
 
@@ -764,5 +767,42 @@ static inline void TRK_ck_free(void *ptr, const char *file, const char *func,
 
 #endif /* _WANT_ORIGINAL_AFL_ALLOC */
+/* This function makes sure *size is >= size_needed after the call.
+   Otherwise, *buf is reallocated.
+   *size will grow exponentially as per:
+   https://blog.mozilla.org/nnethercote/2014/11/04/please-grow-your-buffers-exponentially/
+   Will FATAL if size_needed is <1.
+   @return For convenience, this function returns *buf.
+*/
+static inline void *ck_maybe_grow(void **buf, size_t *size, size_t size_needed) {
+
+  /* Oops. found a bug? */
+  if (unlikely(size_needed < 1)) FATAL("cannot grow to non-positive size");
+
+  /* No need to realloc */
+  if (likely(*size >= size_needed)) return *buf;
+
+  /* No initial size was set */
+  if (*size == 0) *size = INITIAL_GROWTH_SIZE;
+
+  while (*size < size_needed) {
+
+    *size *= 2;
+
+  }
+
+  *buf = ck_realloc(*buf, *size);
+  return *buf;
+
+}
+/* Swaps buf1 ptr and buf2 ptr, as well as their sizes */
+static inline void swap_bufs(void **buf1, size_t *size1, void **buf2, size_t *size2) {
+
+  void *scratch_buf = *buf1;
+  size_t scratch_size = *size1;
+  *buf1 = *buf2;
+  *size1 = *size2;
+  *buf2 = scratch_buf;
+  *size2 = scratch_size;
+
+}
+
+#undef INITIAL_GROWTH_SIZE
+
 #endif /* ! _HAVE_ALLOC_INL_H */
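To make the growth policy concrete: with INITIAL_GROWTH_SIZE at 64 and doubling, a first request of 200 bytes on a zeroed pair allocates 256 bytes, and a later request of 300 bytes reallocates to 512 rather than to exactly 300. A short usage sketch (variable names are illustrative):

  u8 *buf = NULL;                                  /* zeroed pair, as after afl_state_init() */
  size_t size = 0;

  buf = ck_maybe_grow((void **)&buf, &size, 200);  /* size: 0 -> 64 -> 128 -> 256, realloc   */
  buf = ck_maybe_grow((void **)&buf, &size, 100);  /* no-op: 256 >= 100                      */
  buf = ck_maybe_grow((void **)&buf, &size, 300);  /* size: 256 -> 512, realloc              */
  ck_free(buf);                                    /* freed once, at teardown                */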

View File

@@ -79,6 +79,8 @@ list_t afl_states = {.element_prealloc_count = 0};
 void afl_state_init(afl_state_t *afl) {
 
+  /* thanks to this memset, growing vars like out_buf
+     and out_size are NULL/0 by default. */
   memset(afl, 0, sizeof(afl_state_t));
 
   afl->w_init = 0.9;

View File

@@ -347,6 +347,9 @@ u8 fuzz_one_original(afl_state_t *afl) {
   u8 a_collect[MAX_AUTO_EXTRA];
   u32 a_len = 0;
 
+/* Not pretty, but saves a lot of writing */
+#define BUF_PARAMS(name) (void **)&afl->name##_buf, &afl->name##_size
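Thanks to the token pasting, each BUF_PARAMS(name) site passes the matching pointer/size pair from afl_state_t. The effector-map call further down, for instance,

  eff_map = ck_maybe_grow(BUF_PARAMS(eff), EFF_ALEN(len));

expands to:

  eff_map = ck_maybe_grow((void **)&afl->eff_buf, &afl->eff_size, EFF_ALEN(len));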
 #ifdef IGNORE_FINDS
 
   /* In IGNORE_FINDS mode, skip any entries that weren't in the
@@ -427,7 +430,7 @@ u8 fuzz_one_original(afl_state_t *afl) {
      single byte anyway, so it wouldn't give us any performance or memory usage
      benefits. */
 
-  out_buf = ck_alloc_nozero(len);
+  out_buf = ck_maybe_grow((void **)&afl->out_buf, &afl->out_size, len);
 
   afl->subseq_tmouts = 0;
@@ -719,7 +722,7 @@ u8 fuzz_one_original(afl_state_t *afl) {
   /* Initialize effector map for the next step (see comments below). Always
      flag first and last byte as doing something. */
 
-  eff_map = ck_alloc(EFF_ALEN(len));
+  eff_map = ck_maybe_grow(BUF_PARAMS(eff), EFF_ALEN(len));
   eff_map[0] = 1;
 
   if (EFF_APOS(len - 1) != 0) {
@@ -1443,7 +1446,7 @@ skip_interest:
   orig_hit_cnt = new_hit_cnt;
 
-  ex_tmp = ck_alloc(len + MAX_DICT_FILE);
+  ex_tmp = ck_maybe_grow(BUF_PARAMS(ex), len + MAX_DICT_FILE);
 
   for (i = 0; i <= len; ++i) {
@@ -1466,7 +1469,6 @@ skip_interest:
       if (common_fuzz_stuff(afl, ex_tmp, len + afl->extras[j].len)) {
 
-        ck_free(ex_tmp);
         goto abandon_entry;
 
       }
@@ -1480,8 +1482,6 @@ skip_interest:
   }
 
-  ck_free(ex_tmp);
-
   new_hit_cnt = afl->queued_paths + afl->unique_crashes;
 
   afl->stage_finds[STAGE_EXTRAS_UI] += new_hit_cnt - orig_hit_cnt;
@@ -1607,14 +1607,16 @@ custom_mutator_stage:
     /* Read the additional testcase into a new buffer. */
     fd = open(target->fname, O_RDONLY);
     if (unlikely(fd < 0)) PFATAL("Unable to open '%s'", target->fname);
-    new_buf = ck_alloc_nozero(target->len);
+    new_buf = ck_maybe_grow(BUF_PARAMS(out_scratch), target->len);
     ck_read(fd, new_buf, target->len, target->fname);
     close(fd);
 
+    // TODO: clean up this mess.
     size_t mutated_size = afl->mutator->afl_custom_fuzz(
         afl->mutator->data, &out_buf, len, new_buf, target->len, max_seed_size);
-    ck_free(new_buf);
+    if (mutated_size > len) afl->out_size = mutated_size;
 
     if (mutated_size > 0) {
 
@@ -1642,7 +1644,7 @@ custom_mutator_stage:
     }
 
-    if (mutated_size < len) out_buf = ck_realloc(out_buf, len);
+    out_buf = ck_maybe_grow(BUF_PARAMS(out), len);
     memcpy(out_buf, in_buf, len);
 
   }
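Because out_buf now aliases the long-lived afl->out_buf, the custom mutator is handed a pointer whose allocation afl-fuzz keeps reusing, and afl->out_size is bumped above whenever the mutator reports a larger result. A minimal mutator compatible with the call above; the parameter order is inferred from this call site only, and the body is deliberately trivial (illustrative, not the documented custom-mutator API):

  #include <stddef.h>
  #include <stdint.h>

  typedef uint8_t u8;

  /* Sketch: mutate in place and never grow the buffer, sidestepping the
     ownership question flagged by the TODO above. */
  size_t afl_custom_fuzz(void *data, u8 **buf, size_t buf_size, u8 *add_buf,
                         size_t add_buf_size, size_t max_size) {

    (void)data; (void)add_buf; (void)add_buf_size; (void)max_size;
    if (buf_size > 0) (*buf)[0] ^= 0xff;  /* flip a byte of the testcase   */
    return buf_size;                      /* mutated size == original size */

  }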
@@ -1955,7 +1957,7 @@ havoc_stage:
           clone_to = rand_below(afl, temp_len);
 
-          new_buf = ck_alloc_nozero(temp_len + clone_len);
+          new_buf = ck_maybe_grow((void **)&afl->out_scratch_buf, &afl->out_scratch_size, temp_len + clone_len);
 
           /* Head */
 
@@ -1975,7 +1977,8 @@ havoc_stage:
           memcpy(new_buf + clone_to + clone_len, out_buf + clone_to,
                  temp_len - clone_to);
 
-          ck_free(out_buf);
+          swap_bufs((void **)&afl->out_buf, &afl->out_size, (void **)&afl->out_scratch_buf, &afl->out_scratch_size);
           out_buf = new_buf;
           temp_len += clone_len;
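The pattern above repeats in the insert-bytes and MOpt hunks below: build the enlarged testcase in the out_scratch pair, then swap the (buf, size) pairs so the previous out_buf becomes the next scratch buffer and nothing is freed. Condensed, and assuming the stage locals of the havoc loop (the middle copy stands in for either the copy or the fixed-byte-fill variant of the real code):

  new_buf = ck_maybe_grow(BUF_PARAMS(out_scratch), temp_len + clone_len);

  memcpy(new_buf, out_buf, clone_to);                           /* head  */
  memcpy(new_buf + clone_to, out_buf + clone_from, clone_len);  /* clone */
  memcpy(new_buf + clone_to + clone_len, out_buf + clone_to,    /* tail  */
         temp_len - clone_to);

  swap_bufs(BUF_PARAMS(out), BUF_PARAMS(out_scratch));  /* old out_buf is now the scratch */
  out_buf = new_buf;
  temp_len += clone_len;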
@@ -2069,7 +2072,7 @@ havoc_stage:
           if (temp_len + extra_len >= MAX_FILE) break;
 
-          new_buf = ck_alloc_nozero(temp_len + extra_len);
+          new_buf = ck_maybe_grow(BUF_PARAMS(out_scratch), temp_len + extra_len);
 
           /* Head */
           memcpy(new_buf, out_buf, insert_at);
 
@@ -2085,7 +2088,7 @@ havoc_stage:
           if (temp_len + extra_len >= MAX_FILE) break;
 
-          new_buf = ck_alloc_nozero(temp_len + extra_len);
+          new_buf = ck_maybe_grow(BUF_PARAMS(out_scratch), temp_len + extra_len);
 
           /* Head */
           memcpy(new_buf, out_buf, insert_at);
 
@@ -2099,7 +2102,7 @@ havoc_stage:
           memcpy(new_buf + insert_at + extra_len, out_buf + insert_at,
                  temp_len - insert_at);
 
-          ck_free(out_buf);
+          swap_bufs(BUF_PARAMS(out), BUF_PARAMS(out_scratch));
           out_buf = new_buf;
           temp_len += extra_len;
@@ -2116,7 +2119,7 @@ havoc_stage:
     /* out_buf might have been mangled a bit, so let's restore it to its
        original size and shape. */
 
-    if (temp_len < len) out_buf = ck_realloc(out_buf, len);
+    out_buf = ck_maybe_grow(BUF_PARAMS(out), len);
     temp_len = len;
     memcpy(out_buf, in_buf, len);
@@ -2178,7 +2181,6 @@ retry_splicing:
     if (in_buf != orig_in) {
 
-      ck_free(in_buf);
       in_buf = orig_in;
       len = afl->queue_cur->len;
@@ -2222,7 +2224,7 @@ retry_splicing:
     if (unlikely(fd < 0)) PFATAL("Unable to open '%s'", target->fname);
 
-    new_buf = ck_alloc_nozero(target->len);
+    new_buf = ck_maybe_grow(BUF_PARAMS(in_scratch), target->len);
 
     ck_read(fd, new_buf, target->len, target->fname);
@@ -2236,7 +2238,6 @@ retry_splicing:
     if (f_diff < 0 || l_diff < 2 || f_diff == l_diff) {
 
-      ck_free(new_buf);
       goto retry_splicing;
 
     }
@@ -2249,10 +2250,10 @@ retry_splicing:
     len = target->len;
     memcpy(new_buf, in_buf, split_at);
+    swap_bufs(BUF_PARAMS(in), BUF_PARAMS(in_scratch));
     in_buf = new_buf;
 
-    ck_free(out_buf);
-    out_buf = ck_alloc_nozero(len);
+    out_buf = ck_maybe_grow(BUF_PARAMS(out), len);
     memcpy(out_buf, in_buf, len);
 
     goto custom_mutator_stage;
@@ -2280,12 +2281,14 @@ radamsa_stage:
   orig_hit_cnt = afl->queued_paths + afl->unique_crashes;
 
-  /* Read the additional testcase into a new buffer. */
-  u8 *save_buf = ck_alloc_nozero(len);
+  /* Read the additional testcase.
+     We'll reuse in_scratch, as it is free at this point. */
+  u8 *save_buf = ck_maybe_grow(BUF_PARAMS(in_scratch), len);
   memcpy(save_buf, out_buf, len);
 
   u32 max_len = len + choose_block_len(afl, HAVOC_BLK_XL);
-  u8 *new_buf = ck_alloc_nozero(max_len);
+  u8 *new_buf = ck_maybe_grow(BUF_PARAMS(out_scratch), max_len);
   u8 *tmp_buf;
 
   for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {
@@ -2307,17 +2310,12 @@ radamsa_stage:
     if (common_fuzz_stuff(afl, tmp_buf, temp_len)) {
 
-      ck_free(save_buf);
-      ck_free(new_buf);
       goto abandon_entry;
 
     }
 
   }
 
-  ck_free(save_buf);
-  ck_free(new_buf);
-
   new_hit_cnt = afl->queued_paths + afl->unique_crashes;
 
   afl->stage_finds[STAGE_RADAMSA] += new_hit_cnt - orig_hit_cnt;
@@ -2347,10 +2345,6 @@ abandon_entry:
   munmap(orig_in, afl->queue_cur->len);
 
-  if (in_buf != orig_in) ck_free(in_buf);
-  ck_free(out_buf);
-  ck_free(eff_map);
-
   return ret_val;
 
 #undef FLIP_BIT
@@ -2449,7 +2443,7 @@ u8 mopt_common_fuzzing(afl_state_t *afl, MOpt_globals_t MOpt_globals) {
      single byte anyway, so it wouldn't give us any performance or memory usage
      benefits. */
 
-  out_buf = ck_alloc_nozero(len);
+  out_buf = ck_maybe_grow(BUF_PARAMS(out), len);
 
   afl->subseq_tmouts = 0;
@@ -2728,7 +2722,7 @@ u8 mopt_common_fuzzing(afl_state_t *afl, MOpt_globals_t MOpt_globals) {
   /* Initialize effector map for the next step (see comments below). Always
      flag first and last byte as doing something. */
 
-  eff_map = ck_alloc(EFF_ALEN(len));
+  eff_map = ck_maybe_grow(BUF_PARAMS(eff), EFF_ALEN(len));
   eff_map[0] = 1;
 
   if (EFF_APOS(len - 1) != 0) {
@@ -3452,7 +3446,7 @@ skip_interest:
   orig_hit_cnt = new_hit_cnt;
 
-  ex_tmp = ck_alloc(len + MAX_DICT_FILE);
+  ex_tmp = ck_maybe_grow(BUF_PARAMS(ex), len + MAX_DICT_FILE);
 
   for (i = 0; i <= len; ++i) {
@@ -3475,7 +3469,6 @@ skip_interest:
       if (common_fuzz_stuff(afl, ex_tmp, len + afl->extras[j].len)) {
 
-        ck_free(ex_tmp);
         goto abandon_entry;
 
       }
@@ -3489,8 +3482,6 @@ skip_interest:
   }  /* for i = 0; i <= len */
 
-  ck_free(ex_tmp);
-
   new_hit_cnt = afl->queued_paths + afl->unique_crashes;
 
   afl->stage_finds[STAGE_EXTRAS_UI] += new_hit_cnt - orig_hit_cnt;
@@ -3894,7 +3885,7 @@ pacemaker_fuzzing:
           clone_to = rand_below(afl, temp_len);
 
-          new_buf = ck_alloc_nozero(temp_len + clone_len);
+          new_buf = ck_maybe_grow(BUF_PARAMS(out_scratch), temp_len + clone_len);
 
           /* Head */
@@ -3915,7 +3906,7 @@ pacemaker_fuzzing:
           memcpy(new_buf + clone_to + clone_len, out_buf + clone_to,
                  temp_len - clone_to);
 
-          ck_free(out_buf);
+          swap_bufs(BUF_PARAMS(out), BUF_PARAMS(out_scratch));
           out_buf = new_buf;
           temp_len += clone_len;
           MOpt_globals.cycles_v2[STAGE_Clone75] += 1;
@@ -3968,7 +3959,7 @@ pacemaker_fuzzing:
       /* out_buf might have been mangled a bit, so let's restore it to its
          original size and shape. */
 
-      if (temp_len < len) out_buf = ck_realloc(out_buf, len);
+      out_buf = ck_maybe_grow(BUF_PARAMS(out), len);
       temp_len = len;
       memcpy(out_buf, in_buf, len);
@@ -4046,7 +4037,6 @@ pacemaker_fuzzing:
       if (in_buf != orig_in) {
 
-        ck_free(in_buf);
        in_buf = orig_in;
        len = afl->queue_cur->len;
@@ -4091,7 +4081,7 @@ pacemaker_fuzzing:
       if (fd < 0) PFATAL("Unable to open '%s'", target->fname);
 
-      new_buf = ck_alloc_nozero(target->len);
+      new_buf = ck_maybe_grow(BUF_PARAMS(in_scratch), target->len);
 
       ck_read(fd, new_buf, target->len, target->fname);
@@ -4105,7 +4095,6 @@ pacemaker_fuzzing:
       if (f_diff < 0 || l_diff < 2 || f_diff == l_diff) {
 
-        ck_free(new_buf);
         goto retry_splicing_puppet;
 
       }
@@ -4118,9 +4107,9 @@ pacemaker_fuzzing:
       len = target->len;
       memcpy(new_buf, in_buf, split_at);
+      swap_bufs(BUF_PARAMS(in), BUF_PARAMS(in_scratch));
       in_buf = new_buf;
 
-      ck_free(out_buf);
-      out_buf = ck_alloc_nozero(len);
+      out_buf = ck_maybe_grow(BUF_PARAMS(out), len);
       memcpy(out_buf, in_buf, len);
 
       goto havoc_stage_puppet;
@@ -4155,10 +4144,6 @@ pacemaker_fuzzing:
       munmap(orig_in, afl->queue_cur->len);
 
-      if (in_buf != orig_in) ck_free(in_buf);
-      ck_free(out_buf);
-      ck_free(eff_map);
-
       if (afl->key_puppet == 1) {
 
         if (unlikely(
@@ -4380,18 +4365,13 @@ u8 fuzz_one(afl_state_t *afl) {
   int key_val_lv = 0;
 
 #ifdef _AFL_DOCUMENT_MUTATIONS
+  u8 path_buf[PATH_MAX];
   if (afl->do_document == 0) {
 
-    char *fn = alloc_printf("%s/mutations", afl->out_dir);
-    if (fn) {
-
-      afl->do_document = mkdir(fn, 0700);  // if it exists we do not care
+    snprintf(path_buf, PATH_MAX, "%s/mutations", afl->out_dir);
+    afl->do_document = mkdir(path_buf, 0700);  // if it exists we do not care
     afl->do_document = 1;
-    ck_free(fn);
-
-  } else
-
-    PFATAL("malloc()");
 
   } else {
 
@@ -4419,5 +4399,8 @@ u8 fuzz_one(afl_state_t *afl) {
 
   return key_val_lv;
 
+#undef BUF_PARAMS
+
 }
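A consequence of moving these buffers into afl_state_t: they now outlive fuzz_one() and have to be released once when the state is torn down instead of on every abandon_entry path. That cleanup is not part of this diff; a rough sketch of what it could look like, with a hypothetical helper name:

  /* Hypothetical teardown sketch, not part of this commit. Relies on
     ck_free() accepting the NULL pointers left by afl_state_init()'s memset
     for buffers that were never grown. */
  static void afl_free_fuzz_buffers(afl_state_t *afl) {

    ck_free(afl->out_buf);
    ck_free(afl->out_scratch_buf);
    ck_free(afl->eff_buf);
    ck_free(afl->in_buf);
    ck_free(afl->in_scratch_buf);
    ck_free(afl->ex_buf);

  }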