fix aflfast changes

van Hauser 2020-12-08 11:07:11 +01:00
parent 2f4166d5d6
commit 46156957bd
4 changed files with 63 additions and 29 deletions

View File

@@ -169,7 +169,7 @@ struct queue_entry {
  u32 tc_ref;           /* Trace bytes ref count */
  double perf_score,    /* performance score */
      weight;
  u8 *testcase_buf;     /* The testcase buffer, if loaded. */

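For context: the two members touched here serve different selection paths in this same commit. perf_score is the classic per-seed energy produced by calculate_score(), while weight only feeds the weighted random seed selection rebuilt in the queue code below. A minimal sketch of that split, using hypothetical names (queue_entry_view, selection_mass) rather than AFL++'s own code:

#include <stdio.h>

/* Hypothetical, simplified view of the two fields above. */
struct queue_entry_view {
  double perf_score; /* mutation energy, filled by calculate_score()        */
  double weight;     /* probability mass for weighted seed selection        */
};

/* Mirrors the branch visible in create_alias_table() further down:
   weight drives P[i] for the FAST..RARE schedules, perf_score otherwise. */
static double selection_mass(const struct queue_entry_view *q,
                             int fast_like_schedule) {
  return fast_like_schedule ? q->weight : q->perf_score;
}

int main(void) {
  struct queue_entry_view q = { .perf_score = 100.0, .weight = 2.5 };
  printf("%f %f\n", selection_mass(&q, 1), selection_mass(&q, 0));
  return 0;
}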
View File

@@ -445,10 +445,13 @@ u8 fuzz_one_original(afl_state_t *afl) {
  if (unlikely(afl->not_on_tty)) {
-    ACTF("Fuzzing test case #%u (%u total, %llu uniq crashes found, perf_score=%0.0f, exec_us=%llu, hits=%u, map=%u)...",
-         afl->current_entry, afl->queued_paths, afl->unique_crashes,
-         afl->queue_cur->perf_score, afl->queue_cur->exec_us,
-         afl->n_fuzz[afl->queue_cur->n_fuzz_entry], afl->queue_cur->bitmap_size);
+    ACTF(
+        "Fuzzing test case #%u (%u total, %llu uniq crashes found, "
+        "perf_score=%0.0f, exec_us=%llu, hits=%u, map=%u)...",
+        afl->current_entry, afl->queued_paths, afl->unique_crashes,
+        afl->queue_cur->perf_score, afl->queue_cur->exec_us,
+        likely(afl->n_fuzz) ? afl->n_fuzz[afl->queue_cur->n_fuzz_entry] : 0,
+        afl->queue_cur->bitmap_size);
    fflush(stdout);
  }

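The only functional change in this hunk is the hits argument: the new ACTF call reads afl->n_fuzz[...] only when the n_fuzz table is actually present and prints 0 otherwise, matching the schedule gating added in the queue code below. A minimal sketch of that guarded read, with hypothetical names (afl_like_t, report_hits) standing in for the real AFL++ state:

#include <stdint.h>
#include <stdio.h>

typedef struct {
  uint32_t *n_fuzz;       /* hit-count table; may be NULL depending on the schedule */
  uint32_t  n_fuzz_entry; /* index of the current queue entry in that table         */
} afl_like_t;

static void report_hits(const afl_like_t *afl) {
  /* Same idea as the ACTF change: fall back to 0 instead of dereferencing NULL. */
  uint32_t hits = afl->n_fuzz ? afl->n_fuzz[afl->n_fuzz_entry] : 0;
  printf("hits=%u\n", hits);
}

int main(void) {
  uint32_t   table[4] = { 7, 0, 3, 9 };
  afl_like_t with_table = { table, 2 }, without_table = { 0, 2 };
  report_hits(&with_table);    /* prints hits=3           */
  report_hits(&without_table); /* prints hits=0, no crash */
  return 0;
}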
View File

@@ -42,7 +42,8 @@ inline u32 select_next_queue_entry(afl_state_t *afl) {
}
-double compute_weight(afl_state_t *afl, struct queue_entry *q, double avg_exec_us, double avg_bitmap_size) {
+double compute_weight(afl_state_t *afl, struct queue_entry *q,
+                      double avg_exec_us, double avg_bitmap_size) {
  u32 hits = afl->n_fuzz[q->n_fuzz_entry];
  if (hits == 0) hits = 1;
@@ -55,13 +56,15 @@ double compute_weight(afl_state_t *afl, struct queue_entry *q, double avg_exec_u
  if (q->favored) weight *= 5;
  return weight;
}
/* create the alias table that allows weighted random selection - expensive */
void create_alias_table(afl_state_t *afl) {
-  u32 n = afl->queued_paths, i = 0, a, g;
+  u32    n = afl->queued_paths, i = 0, a, g;
+  double sum = 0;
  afl->alias_table =
      (u32 *)afl_realloc((void **)&afl->alias_table, n * sizeof(u32));
@@ -80,35 +83,63 @@ void create_alias_table(afl_state_t *afl) {
  memset((void *)afl->alias_table, 0, n * sizeof(u32));
  memset((void *)afl->alias_probability, 0, n * sizeof(double));
-  double avg_exec_us = 0.0;
-  double avg_bitmap_size = 0.0;
-  for (i = 0; i < n; i++) {
-    struct queue_entry *q = afl->queue_buf[i];
-    avg_exec_us += q->exec_us;
-    avg_bitmap_size += log(q->bitmap_size);
-  }
-  avg_exec_us /= afl->queued_paths;
-  avg_bitmap_size /= afl->queued_paths;
-  double sum = 0;
-  for (i = 0; i < n; i++) {
-    struct queue_entry *q = afl->queue_buf[i];
-    if (!q->disabled) {
-      q->weight = compute_weight(afl, q, avg_exec_us, avg_bitmap_size);
-      q->perf_score = calculate_score(afl, q);
-    }
-    sum += q->weight;
-  }
-  for (i = 0; i < n; i++)
-    P[i] = (afl->queue_buf[i]->weight * n) / sum;
+  if (likely(afl->schedule >= FAST && afl->schedule <= RARE)) {
+    double avg_exec_us = 0.0;
+    double avg_bitmap_size = 0.0;
+    for (i = 0; i < n; i++) {
+      struct queue_entry *q = afl->queue_buf[i];
+      avg_exec_us += q->exec_us;
+      avg_bitmap_size += log(q->bitmap_size);
+    }
+    avg_exec_us /= afl->queued_paths;
+    avg_bitmap_size /= afl->queued_paths;
+    for (i = 0; i < n; i++) {
+      struct queue_entry *q = afl->queue_buf[i];
+      if (!q->disabled) {
+        q->weight = compute_weight(afl, q, avg_exec_us, avg_bitmap_size);
+        q->perf_score = calculate_score(afl, q);
+      }
+      sum += q->weight;
+    }
+    for (i = 0; i < n; i++) {
+      P[i] = (afl->queue_buf[i]->weight * n) / sum;
+    }
+  } else {
+    for (i = 0; i < n; i++) {
+      struct queue_entry *q = afl->queue_buf[i];
+      if (!q->disabled) { q->perf_score = calculate_score(afl, q); }
+      sum += q->perf_score;
+    }
+    for (i = 0; i < n; i++) {
+      struct queue_entry *q = afl->queue_buf[i];
+      P[i] = (q->perf_score * n) / sum;
+    }
+  }
  int nS = 0, nL = 0, s;
  for (s = (s32)n - 1; s >= 0; --s) {

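Taken together, the reworked create_alias_table() computes a per-seed mass (weight from compute_weight() for the FAST..RARE schedules, perf_score otherwise), normalizes it as P[i] = mass * n / sum, and then builds the alias/probability tables that the nS/nL/s loop above begins, so select_next_queue_entry() can draw a weighted random seed in O(1). Below is a self-contained sketch of that Walker/Vose alias construction and sampling under the same scaling. The names (alias_t, build_alias, sample_alias) are illustrative, not AFL++'s, and libc rand() stands in for AFL++'s own RNG helpers:

#include <stdio.h>
#include <stdlib.h>

typedef struct {
  int     n;
  int    *alias; /* where a rejected draw is redirected (alias_table)       */
  double *prob;  /* acceptance probability per column (alias_probability)   */
} alias_t;

static void build_alias(alias_t *t, const double *weight, int n) {
  double  sum   = 0.0;
  double *P     = malloc(n * sizeof(double));
  int    *small = malloc(n * sizeof(int));
  int    *large = malloc(n * sizeof(int));
  int     nS = 0, nL = 0;

  t->n     = n;
  t->alias = calloc(n, sizeof(int));
  t->prob  = calloc(n, sizeof(double));

  for (int i = 0; i < n; i++) sum += weight[i];
  for (int i = 0; i < n; i++) P[i] = weight[i] * n / sum; /* mean is 1.0 */

  /* classify entries into below-average ("small") and above-average ("large") */
  for (int i = n - 1; i >= 0; i--)
    if (P[i] < 1.0) small[nS++] = i; else large[nL++] = i;

  while (nS && nL) {
    int s = small[--nS], l = large[--nL];
    t->prob[s]  = P[s];        /* keep s with probability P[s]...           */
    t->alias[s] = l;           /* ...otherwise redirect the draw to l       */
    P[l] = P[l] + P[s] - 1.0;  /* l has donated (1 - P[s]) of its mass      */
    if (P[l] < 1.0) small[nS++] = l; else large[nL++] = l;
  }
  while (nL) t->prob[large[--nL]] = 1.0; /* leftovers always accept         */
  while (nS) t->prob[small[--nS]] = 1.0; /* floating-point stragglers       */

  free(P); free(small); free(large);
}

static int sample_alias(const alias_t *t) {
  int    i = rand() % t->n;             /* pick a column uniformly          */
  double r = rand() / (RAND_MAX + 1.0); /* biased coin inside the column    */
  return r < t->prob[i] ? i : t->alias[i];
}

int main(void) {
  double  w[4] = { 1.0, 2.0, 3.0, 10.0 }; /* stand-in per-seed weights */
  alias_t t;
  int     hits[4] = { 0 };

  build_alias(&t, w, 4);
  for (int k = 0; k < 160000; k++) hits[sample_alias(&t)]++;
  for (int i = 0; i < 4; i++) printf("index %d picked %d times\n", i, hits[i]);

  free(t.alias); free(t.prob);
  return 0;
}

Above-average entries end up on the "large" worklist and donate their excess mass to below-average ones, which is why one uniform column pick plus one biased coin reproduces the full weighted distribution.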
View File

@@ -7,7 +7,7 @@ int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size);
__attribute__((weak)) int LLVMFuzzerInitialize(int *argc, char ***argv);
#define kMaxAflInputSize (1 * 1024 * 1024)
static uint8_t AflInputBuf[kMaxAflInputSize];
void __attribute__((noinline)) afl_qemu_driver_stdin_input(void) {
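The hunk ends at the afl_qemu_driver_stdin_input() signature; its body is not part of this diff. As a rough, self-contained sketch of what a stdin-fed driver like this typically does (an assumption, not the repository's code): read up to kMaxAflInputSize bytes from stdin into the static buffer and hand them to the libFuzzer-style entry point.

#include <stddef.h>
#include <stdint.h>
#include <unistd.h>

int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size);

#define kMaxAflInputSize (1 * 1024 * 1024)
static uint8_t AflInputBuf[kMaxAflInputSize];

/* Assumed behaviour: read one test case from stdin and run it. */
void afl_qemu_driver_stdin_input(void) {
  ssize_t l = read(0, AflInputBuf, kMaxAflInputSize);
  if (l > 0) LLVMFuzzerTestOneInput(AflInputBuf, (size_t)l);
}

/* Stand-in target and main so the sketch builds and runs on its own;
   the real driver links against the actual fuzz target instead. */
int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size) {
  return Size > 0 && Data[0] == '!';
}

int main(void) {
  afl_qemu_driver_stdin_input();
  return 0;
}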