diff --git a/include/coverage-32.h b/include/coverage-32.h index 0951531b..e26affb0 100644 --- a/include/coverage-32.h +++ b/include/coverage-32.h @@ -7,8 +7,8 @@ #define _AFL_INTSIZEVAR u32 -u32 skim(const u32 *virgin, const u32 *current, const u32 *current_end); -u32 classify_word(u32 word); +u32 skim(const u32 *virgin, const u32 *current, const u32 *current_end); +u32 classify_word(u32 word); inline u32 classify_word(u32 word) { @@ -52,8 +52,9 @@ void simplify_trace(afl_state_t *afl, u8 *bytes) { } inline void classify_counts(afl_forkserver_t *fsrv) { + u32 *mem = (u32 *)fsrv->trace_bits; - u32 i = (fsrv->map_size >> 2); + u32 i = (fsrv->map_size >> 2); while (i--) { @@ -64,6 +65,7 @@ inline void classify_counts(afl_forkserver_t *fsrv) { mem++; } + } /* Updates the virgin bits, then reflects whether a new count or a new tuple is diff --git a/include/coverage-64.h b/include/coverage-64.h index 8594c29d..7b854958 100644 --- a/include/coverage-64.h +++ b/include/coverage-64.h @@ -11,8 +11,8 @@ #include #endif -u32 skim(const u64 *virgin, const u64 *current, const u64 *current_end); -u64 classify_word(u64 word); +u32 skim(const u64 *virgin, const u64 *current, const u64 *current_end); +u64 classify_word(u64 word); inline u64 classify_word(u64 word) { @@ -62,8 +62,9 @@ void simplify_trace(afl_state_t *afl, u8 *bytes) { } inline void classify_counts(afl_forkserver_t *fsrv) { + u64 *mem = (u64 *)fsrv->trace_bits; - u32 i = (fsrv->map_size >> 3); + u32 i = (fsrv->map_size >> 3); while (i--) { @@ -74,6 +75,7 @@ inline void classify_counts(afl_forkserver_t *fsrv) { mem++; } + } /* Updates the virgin bits, then reflects whether a new count or a new tuple is diff --git a/src/afl-fuzz-bitmap.c b/src/afl-fuzz-bitmap.c index 1b43b381..6a754a40 100644 --- a/src/afl-fuzz-bitmap.c +++ b/src/afl-fuzz-bitmap.c @@ -468,6 +468,7 @@ void write_crash_readme(afl_state_t *afl) { u8 __attribute__((hot)) save_if_interesting(afl_state_t *afl, void *mem, u32 len, u8 fault) { + u8 classified 
= 0; if (unlikely(len == 0)) { return 0; } @@ -554,14 +555,17 @@ u8 __attribute__((hot)) save_if_interesting(afl_state_t *afl, void *mem, if (unlikely(afl->san_binary_length) && likely(afl->san_abstraction == UNIQUE_TRACE)) { - + // If schedule is not FAST..RARE, we need to classify counts here // Note: SAND was evaluated under FAST schedule but should also work // with other schedules. if (!classified) { + classify_counts(&afl->fsrv); classified = 1; + } + cksum_unique = hash32(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST); if (unlikely(!bitmap_read(afl->n_fuzz_dup, cksum) && @@ -625,11 +629,16 @@ u8 __attribute__((hot)) save_if_interesting(afl_state_t *afl, void *mem, /* If we are in coverage increasing abstraction and have fed input to sanitizers, we are sure it has new bits.*/ if (classified) { + /* We could have classified the bits in SAND with UNIQUE_TRACE */ new_bits = has_new_bits(afl, afl->virgin_bits); + } else { + new_bits = has_new_bits_unclassified(afl, afl->virgin_bits); + } + } if (likely(!new_bits)) {