mirror of https://github.com/AFLplusplus/AFLplusplus.git (synced 2025-06-15 03:18:07 +00:00)
Merge branch 'debug' into dev
@@ -458,28 +458,171 @@ u8 fuzz_one_original(afl_state_t *afl) {

   }

-  /* Map the test case into memory. */
-
-  fd = open(afl->queue_cur->fname, O_RDONLY);
-
-  if (unlikely(fd < 0)) {
-
-    PFATAL("Unable to open '%s'", afl->queue_cur->fname);
-
-  }
-
-  len = afl->queue_cur->len;
-
-  orig_in = in_buf = mmap(0, len, PROT_READ | PROT_WRITE, MAP_PRIVATE, fd, 0);
-
-  if (unlikely(orig_in == MAP_FAILED)) {
-
-    PFATAL("Unable to mmap '%s' with len %d", afl->queue_cur->fname, len);
-
-  }
-
-  close(fd);
+  u32 tmp_val = 0;
+
+  if (unlikely(afl->taint_mode)) {
+
+    tmp_val = afl->queue_cycle % 2;  // starts with 1
+    ret_val = 0;
+    if (unlikely(afl->queue_cur->cal_failed && !tmp_val)) goto abandon_entry;
+    if (unlikely(!afl->skip_deterministic && !afl->queue_cur->passed_det &&
+                 !tmp_val))
+      goto abandon_entry;
+    if ((!afl->queue_cur->taint_bytes_new ||
+         afl->queue_cur->taint_bytes_new == afl->queue_cur->len) &&
+        !tmp_val)
+      goto abandon_entry;
+
+    ret_val = 1;
+
+    s32 dst = 0, i;
+    temp_len = len = afl->queue_cur->len;
+    s32 j = 0;  // tmp
+
+    fd = open(afl->queue_cur->fname, O_RDONLY);
+    afl->taint_src = mmap(0, len, PROT_READ | PROT_WRITE, MAP_PRIVATE, fd, 0);
+    if (fd < 0 || (ssize_t)afl->taint_src == -1)
+      FATAL("unable to open '%s'", afl->queue_cur->fname);
+    close(fd);
+    afl->taint_needs_splode = 1;
+
+    switch (tmp_val) {
+
+      case 1:  // fuzz only tainted bytes
+
+        // special case: all or nothing tainted. in this case we act like
+        // nothing is special. this is not the taint you are looking for ...
+        if (!afl->queue_cur->taint_bytes_all ||
+            afl->queue_cur->taint_bytes_all == (u32)len) {
+
+          orig_in = in_buf = afl->taint_src;
+          afl->taint_needs_splode = 0;
+          break;
+
+        }
+
+        fd = open(afl->taint_input_file, O_RDONLY);
+        temp_len = len = afl->taint_len = afl->queue_cur->taint_bytes_all;
+        orig_in = in_buf =
+            mmap(0, len >= MAX_FILE - 65536 ? MAX_FILE : len + 65536,
+                 PROT_READ | PROT_WRITE, MAP_PRIVATE, fd, 0);
+        if (fd < 0 || (ssize_t)in_buf == -1)
+          FATAL("unable to open '%s'", afl->taint_input_file);
+        close(fd);
+
+        fd = open(afl->queue_cur->fname_taint, O_RDONLY);
+        afl->taint_map = mmap(0, afl->queue_cur->len, PROT_READ | PROT_WRITE,
+                              MAP_PRIVATE, fd, 0);
+        if (fd < 0 || (ssize_t)in_buf == -1)
+          FATAL("unable to open '%s'", afl->queue_cur->fname_taint);
+        close(fd);
+
+        for (i = 0; i < (s32)afl->queue_cur->len && dst < len; i++)
+          if (afl->taint_map[i]) in_buf[dst++] = afl->taint_src[i];
+
+        // FIXME DEBUG TODO XXX
+        for (i = 0; i < (s32)afl->queue_cur->len; i++) {
+
+          switch (afl->taint_map[i]) {
+
+            case 0x0:
+              break;
+            case '!':
+              j++;
+              break;
+            default:
+              FATAL(
+                  "invalid taint map entry byte 0x%02x at position %d "
+                  "(passed_det:%d)\n",
+                  afl->taint_map[i], i, afl->queue_cur->passed_det);
+
+          }
+
+        }
+
+        if (j != len)
+          FATAL("different taint values in map vs in queue (%d != %d)", j, len);
+
+        break;
+
+      case 0:  // fuzz only newly tainted bytes
+
+        fd = open(afl->taint_input_file, O_RDONLY);
+        temp_len = len = afl->taint_len = afl->queue_cur->taint_bytes_new;
+        orig_in = in_buf =
+            mmap(0, len >= MAX_FILE - 65536 ? MAX_FILE : len + 65536,
+                 PROT_READ | PROT_WRITE, MAP_PRIVATE, fd, 0);
+        if (fd < 0 || (ssize_t)in_buf == -1)
+          FATAL("unable to open '%s'", afl->taint_input_file);
+        close(fd);
+
+        u8 *fn = alloc_printf("%s.new", afl->queue_cur->fname_taint);
+        if (!fn) FATAL("OOM");
+        fd = open(fn, O_RDWR);
+        afl->taint_map = mmap(0, afl->queue_cur->len, PROT_READ | PROT_WRITE,
+                              MAP_PRIVATE, fd, 0);
+        if (fd < 0 || (ssize_t)in_buf == -1)
+          FATAL("unable to open '%s' for %u bytes", fn, len);
+        close(fd);
+        ck_free(fn);
+
+        for (i = 0; i < (s32)afl->queue_cur->len && dst < len; i++)
+          if (afl->taint_map[i]) in_buf[dst++] = afl->taint_src[i];
+
+        // FIXME DEBUG TODO XXX
+        for (i = 0; i < (s32)afl->queue_cur->len; i++) {
+
+          switch (afl->taint_map[i]) {
+
+            case 0x0:
+              break;
+            case '!':
+              j++;
+              break;
+            default:
+              FATAL(
+                  "invalid taint map entry byte 0x%02x at position %d "
+                  "(passed_det:%d)\n",
+                  afl->taint_map[i], i, afl->queue_cur->passed_det);
+
+          }
+
+        }
+
+        if (j != len)
+          FATAL("different taint values in map vs in queue (%d != %d)", j, len);
+
+        break;
+
+    }
+
+  } else {
+
+    /* Map the test case into memory. */
+
+    fd = open(afl->queue_cur->fname, O_RDONLY);
+
+    if (unlikely(fd < 0)) {
+
+      PFATAL("Unable to open '%s'", afl->queue_cur->fname);
+
+    }
+
+    len = afl->queue_cur->len;
+
+    orig_in = in_buf = mmap(0, len, PROT_READ | PROT_WRITE, MAP_PRIVATE, fd, 0);
+
+    if (unlikely(orig_in == MAP_FAILED)) {
+
+      PFATAL("Unable to mmap '%s' with len %d", afl->queue_cur->fname, len);
+
+    }
+
+    close(fd);
+
+  }

   /* We could mmap() out_buf as MAP_PRIVATE, but we end up clobbering every
      single byte anyway, so it wouldn't give us any performance or memory usage
      benefits. */
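The taint branch above rebuilds a compact fuzzing buffer from the taint map: every byte whose map entry is '!' is copied from the original test case to the front of the working buffer, and the debug loop re-counts the '!' entries to confirm the map agrees with the recorded taint length. A self-contained sketch of that extraction step follows; the function name and the explicit allocation/fallback handling are illustrative and not taken from the AFL++ sources.

#include <stdlib.h>

typedef unsigned char u8;
typedef unsigned int u32;

/* Copy only the tainted bytes of `src` (map entry '!') into a freshly
   allocated, compact buffer and store its length in *out_len.
   Returns NULL if nothing is tainted or on allocation failure, in which
   case the caller falls back to fuzzing the full input. */
static u8 *extract_tainted(const u8 *src, const u8 *taint_map, u32 src_len,
                           u32 *out_len) {

  u32 i, dst = 0;
  u8 *out = malloc(src_len);            /* worst case: everything tainted */
  if (!out) return NULL;

  for (i = 0; i < src_len; i++)
    if (taint_map[i] == '!') out[dst++] = src[i];

  *out_len = dst;
  if (!dst) { free(out); return NULL; } /* all-or-nothing special case */
  return out;

}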
@@ -502,8 +645,12 @@ u8 fuzz_one_original(afl_state_t *afl) {

     afl->queue_cur->exec_cksum = 0;

-    res =
-        calibrate_case(afl, afl->queue_cur, in_buf, afl->queue_cycle - 1, 0);
+    if (unlikely(afl->taint_needs_splode))
+      res = calibrate_case(afl, afl->queue_cur, afl->taint_src,
+                           afl->queue_cycle - 1, 0);
+    else
+      res = calibrate_case(afl, afl->queue_cur, in_buf, afl->queue_cycle - 1,
+                           0);

     if (unlikely(res == FSRV_RUN_ERROR)) {
@@ -526,8 +673,8 @@ u8 fuzz_one_original(afl_state_t *afl) {
    * TRIMMING *
    ************/

-  if (!afl->non_instrumented_mode && !afl->queue_cur->trim_done &&
-      !afl->disable_trim) {
+  if (unlikely(!afl->non_instrumented_mode && !afl->queue_cur->trim_done &&
+               !afl->disable_trim && !afl->taint_needs_splode)) {

     u8 res = trim_case(afl, afl->queue_cur, in_buf);
@@ -564,13 +711,26 @@ u8 fuzz_one_original(afl_state_t *afl) {

   if (afl->shm.cmplog_mode && !afl->queue_cur->fully_colorized) {

-    if (input_to_state_stage(afl, in_buf, out_buf, len,
-                             afl->queue_cur->exec_cksum)) {
-
-      goto abandon_entry;
-
-    }
+    int res;
+    if (unlikely(afl->taint_needs_splode)) {
+
+      len = afl->queue_cur->len;
+      memcpy(out_buf, afl->taint_src, len);
+      res = input_to_state_stage(afl, afl->taint_src, out_buf, len,
+                                 afl->queue_cur->exec_cksum);
+      // just abandon as success
+      ret_val = 0;
+      res = 1;
+
+    } else {
+
+      res = input_to_state_stage(afl, in_buf, out_buf, len,
+                                 afl->queue_cur->exec_cksum);
+
+    }
+
+    if (unlikely(res)) { goto abandon_entry; }

   }

   /* Skip right away if -d is given, if it has not been chosen sufficiently
@@ -2133,8 +2293,18 @@ havoc_stage:

          if (actually_clone) {

-            clone_len = choose_block_len(afl, temp_len);
-            clone_from = rand_below(afl, temp_len - clone_len + 1);
+            if (unlikely(afl->taint_needs_splode)) {
+
+              clone_len = choose_block_len(afl, afl->queue_cur->len);
+              clone_from =
+                  rand_below(afl, afl->queue_cur->len - clone_len + 1);
+
+            } else {
+
+              clone_len = choose_block_len(afl, temp_len);
+              clone_from = rand_below(afl, temp_len - clone_len + 1);
+
+            }

          } else {
@@ -2156,7 +2326,11 @@ havoc_stage:

          if (actually_clone) {

-            memcpy(new_buf + clone_to, out_buf + clone_from, clone_len);
+            if (unlikely(afl->taint_needs_splode))
+              memcpy(new_buf + clone_to, afl->taint_src + clone_from,
+                     clone_len);
+            else
+              memcpy(new_buf + clone_to, out_buf + clone_from, clone_len);

          } else {
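Both clone hunks implement the usual insert-a-block havoc step; the only taint-mode twist is that the inserted bytes come from the un-exploded original (afl->taint_src) rather than from out_buf. Stripped of the fuzzer's buffer management, the operation is head + cloned block + tail, roughly as in the sketch below (plain malloc stands in for AFL++'s internal buffer helpers, and the name insert_block is illustrative).

#include <stdlib.h>
#include <string.h>

typedef unsigned char u8;
typedef unsigned int u32;

/* Insert `clone_len` bytes taken from `src + clone_from` into `buf` at
   offset `clone_to` (clone_to <= buf_len; the source window must be in
   range). Returns a new buffer of buf_len + clone_len bytes, or NULL. */
static u8 *insert_block(const u8 *buf, u32 buf_len, const u8 *src,
                        u32 clone_from, u32 clone_to, u32 clone_len) {

  u8 *new_buf = malloc(buf_len + clone_len);
  if (!new_buf) return NULL;

  memcpy(new_buf, buf, clone_to);                           /* head  */
  memcpy(new_buf + clone_to, src + clone_from, clone_len);  /* block */
  memcpy(new_buf + clone_to + clone_len, buf + clone_to,    /* tail  */
         buf_len - clone_to);

  return new_buf;

}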
@@ -2168,7 +2342,7 @@ havoc_stage:

          }

          /* Tail */
-          memcpy(new_buf + clone_to + clone_len, out_buf + clone_to,
+          memmove(new_buf + clone_to + clone_len, out_buf + clone_to,
                  temp_len - clone_to);

          swap_bufs(BUF_PARAMS(out), BUF_PARAMS(out_scratch));
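The memcpy-to-memmove change in the tail copy is a safety fix: memcpy has undefined behaviour when the source and destination ranges overlap, while memmove behaves as if the bytes pass through a temporary buffer, so it is the safer call whenever the buffers involved may alias or be reused. A minimal illustration, unrelated to the AFL++ buffer machinery:

#include <string.h>

/* Shift n bytes of buf starting at `from` right by `gap` positions.
   The two ranges overlap, so memmove is required; memcpy would be UB. */
static void shift_tail(unsigned char *buf, size_t from, size_t n, size_t gap) {

  memmove(buf + from + gap, buf + from, n);

}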
@@ -2189,16 +2363,49 @@ havoc_stage:

          if (temp_len < 2) { break; }

-          copy_len = choose_block_len(afl, temp_len - 1);
-          copy_from = rand_below(afl, temp_len - copy_len + 1);
-          copy_to = rand_below(afl, temp_len - copy_len + 1);
+          if (unlikely(afl->taint_needs_splode)) {
+
+            copy_len = choose_block_len(afl, afl->queue_cur->len - 1);
+            copy_from = rand_below(afl, afl->queue_cur->len - copy_len + 1);
+            copy_to = rand_below(afl, temp_len + 1);
+
+          } else {
+
+            copy_len = choose_block_len(afl, temp_len - 1);
+            copy_from = rand_below(afl, temp_len - copy_len + 1);
+            copy_to = rand_below(afl, temp_len - copy_len + 1);
+
+          }

          if (rand_below(afl, 4)) {

            if (copy_from != copy_to) {

-              memmove(out_buf + copy_to, out_buf + copy_from, copy_len);
+              if (unlikely(afl->taint_needs_splode)) {
+
+                if (temp_len >= (s32)(copy_to + copy_len)) {
+
+                  memcpy(out_buf + copy_to, afl->taint_src + copy_from,
+                         copy_len);
+
+                } else {
+
+                  u8 *new_buf = ck_maybe_grow(BUF_PARAMS(out_scratch),
+                                              copy_to + copy_len);
+                  memcpy(new_buf, in_buf, copy_to);
+                  memcpy(new_buf + copy_to, afl->taint_src + copy_from,
+                         copy_len);
+                  swap_bufs(BUF_PARAMS(out), BUF_PARAMS(out_scratch));
+                  out_buf = new_buf;
+                  temp_len = copy_to + copy_len;
+
+                }
+
+              } else {
+
+                memmove(out_buf + copy_to, out_buf + copy_from, copy_len);
+
+              }

            }

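When the destination window copy_to + copy_len runs past the current temp_len, the hunk above grows the output buffer, re-copies the head, places the tainted bytes at copy_to and sets the logical length to end right after the copied block. A free-standing sketch of that grow-and-copy step, using realloc in place of AFL++'s ck_maybe_grow/swap_bufs machinery (names and error handling are illustrative):

#include <stdlib.h>
#include <string.h>

typedef unsigned char u8;
typedef unsigned int u32;

/* Place `copy_len` bytes from `src + copy_from` at offset `copy_to` of
   `*buf` (assumes copy_to <= *buf_len). If the block does not fit, the
   buffer is grown and the logical length ends right after the copied
   block. Returns 0 on success, -1 on allocation failure. */
static int copy_or_grow(u8 **buf, u32 *buf_len, const u8 *src,
                        u32 copy_from, u32 copy_to, u32 copy_len) {

  if (copy_to + copy_len > *buf_len) {

    u8 *grown = realloc(*buf, copy_to + copy_len);
    if (!grown) return -1;
    *buf = grown;
    *buf_len = copy_to + copy_len;

  }

  memcpy(*buf + copy_to, src + copy_from, copy_len);
  return 0;

}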
@@ -2464,10 +2671,17 @@ havoc_stage:
     splices them together at some offset, then relies on the havoc
     code to mutate that blob. */

+  u32 saved_len;
+
+  if (unlikely(afl->taint_needs_splode))
+    saved_len = afl->taint_len;
+  else
+    saved_len = afl->queue_cur->len;
+
 retry_splicing:

   if (afl->use_splicing && splice_cycle++ < SPLICE_CYCLES &&
-      afl->queued_paths > 1 && afl->queue_cur->len > 1) {
+      afl->queued_paths > 1 && saved_len > 1) {

     struct queue_entry *target;
     u32 tid, split_at;
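The comment above describes the splice stage: pick another queue entry, cut both test cases at an offset and join the head of one to the tail of the other, then let havoc mutate the result. Reduced to the join itself, it might look like the sketch below (the split point is supplied by the caller, who must ensure it lies within both inputs; the function name is illustrative).

#include <stdlib.h>
#include <string.h>

typedef unsigned char u8;
typedef unsigned int u32;

/* Join the first `split_at` bytes of `a` with the tail of `b`; the
   result keeps b's length. Returns NULL on allocation failure or if
   the split point is out of range. */
static u8 *splice_buffers(const u8 *a, const u8 *b, u32 b_len, u32 split_at) {

  u8 *out = malloc(b_len);
  if (!out || split_at > b_len) { free(out); return NULL; }

  memcpy(out, a, split_at);                                /* head from a */
  memcpy(out + split_at, b + split_at, b_len - split_at);  /* tail from b */
  return out;

}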
@@ -2480,7 +2694,7 @@ retry_splicing:
     if (in_buf != orig_in) {

       in_buf = orig_in;
-      len = afl->queue_cur->len;
+      len = saved_len;

     }
@@ -2551,6 +2765,8 @@ retry_splicing:

   ret_val = 0;

+  goto abandon_entry;
+
 /* we are through with this queue entry - for this iteration */
 abandon_entry:
@@ -2570,7 +2786,17 @@ abandon_entry:

   ++afl->queue_cur->fuzz_level;

-  munmap(orig_in, afl->queue_cur->len);
+  if (unlikely(afl->taint_needs_splode)) {
+
+    munmap(afl->taint_src, afl->queue_cur->len);
+    munmap(orig_in, afl->taint_len);
+    munmap(afl->taint_map, afl->queue_cur->len);
+
+  } else {
+
+    munmap(orig_in, afl->queue_cur->len);
+
+  }

   return ret_val;