fix save_if_interesting

The values of `classified`, `new_bits`, and `cksum` were not always
correctly maintained.
 1. In the past, `afl->queue_top->exec_cksum` was always assigned as part
    of `add_to_queue`; however, this became conditional as of cd57641705.
    That does not change correctness, because calibrate_case() will
    calculate the checksum anyway, but it means one calibration run is
    wasted (see the sketch after this list).

 2. Sometimes `classified` is set incorrectly.
    For example, this code snippet
    ```
    new_bits = has_new_bits_unclassified(afl, afl->virgin_bits);
    classified = 1;
    ```
    should be changed to
    ```
    new_bits = has_new_bits_unclassified(afl, afl->virgin_bits);
    if (new_bits) classified = 1;
    ```
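
To illustrate point 1, here is a minimal, self-contained model (hypothetical
code, not the real `add_to_queue`/`calibrate_case()` implementation;
`queue_entry_model` and `run_and_hash` are invented stand-ins): when the
caller leaves `exec_cksum` at 0, the first calibration execution is spent
merely establishing the baseline checksum instead of contributing a useful
comparison.
```
#include <stdint.h>
#include <stdio.h>

struct queue_entry_model {
  uint64_t exec_cksum; /* 0 means "never computed" */
};

static uint64_t run_and_hash(void) {
  return 0xabcdef; /* pretend this executes the target and hashes the trace */
}

static int calibrate(struct queue_entry_model *q, int runs) {
  int useful_runs = 0;
  for (int i = 0; i < runs; ++i) {
    uint64_t cksum = run_and_hash();
    if (q->exec_cksum) {
      ++useful_runs;          /* baseline known: a real stability check */
    } else {
      q->exec_cksum = cksum;  /* baseline missing: run only records it */
    }
  }
  return useful_runs;
}

int main(void) {
  struct queue_entry_model pre_set = {.exec_cksum = 0xabcdef};
  struct queue_entry_model unset = {.exec_cksum = 0};
  printf("pre-set exec_cksum: %d useful runs\n", calibrate(&pre_set, 8));
  printf("unset   exec_cksum: %d useful runs\n", calibrate(&unset, 8));
  return 0;
}
```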

This commit fixes the above issues and uses macros to make the code easier
to understand. This should prevent forgetting to set `classified` in the
future (like the bug fixed by 30c93d1321).

The macros also defer the calculations to the places where the values are
really needed. This can save CPU time when the code returns early. For
example, if a case times out on the first run but not on the second, the
current code performs classify_counts(), which is not always needed.
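
To make the deferral pattern concrete, below is a minimal, self-contained
sketch of the compute-once-on-demand idea behind the new macros. It is not
AFL++ code: `expensive_classify()`, `trace_checksum()`, and `process_case()`
are invented stand-ins; only the macro names and the flag-plus-
`do { ... } while (0)` shape mirror what the commit adds.
```
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static int classify_calls, cksum_calls; /* count how often the work runs */

static void expensive_classify(void) { ++classify_calls; }

static uint64_t trace_checksum(void) {
  ++cksum_calls;
  return 0x1234;
}

/* Each macro checks a flag, performs the work only once, and records that
   it has been done, so later code can "require" a value without paying for
   it on paths that never get there. */
#define classify_if_necessary() \
  do { \
    if (!classified) { \
      expensive_classify(); \
      classified = true; \
    } \
  } while (0)

#define calculate_cksum_if_necessary() \
  do { \
    if (!cksumed) { \
      classify_if_necessary(); \
      cksum = trace_checksum(); \
      cksumed = true; \
    } \
  } while (0)

static void process_case(bool bail_out_early) {
  bool classified = false, cksumed = false;
  uint64_t cksum = 0;
  if (bail_out_early) { return; } /* nothing is computed on this path */
  calculate_cksum_if_necessary(); /* classifies, then hashes */
  calculate_cksum_if_necessary(); /* second call is a no-op */
  printf("cksum=%llx\n", (unsigned long long)cksum);
}

int main(void) {
  process_case(true);  /* early return: no classification, no hashing */
  process_case(false); /* work happens exactly once */
  printf("classify_calls=%d, cksum_calls=%d\n", classify_calls, cksum_calls);
  return 0;
}
```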

Author: Kuang-che Wu
Date:   2025-05-22 23:10:51 +08:00
Parent: ff1e0580b0
Commit: cee764689c

2 changed files with 70 additions and 61 deletions

include/afl-fuzz.h

@@ -1216,7 +1216,6 @@ u8 *describe_op(afl_state_t *, u8, size_t);
 #endif
 u8 save_if_interesting(afl_state_t *, void *, u32, u8);
 u8 has_new_bits(afl_state_t *, u8 *);
-u8 has_new_bits_unclassified(afl_state_t *, u8 *);
 #ifndef AFL_SHOWMAP
 void classify_counts(afl_forkserver_t *);
 #endif

src/afl-fuzz-bitmap.c

@@ -255,7 +255,8 @@ inline u8 has_new_bits(afl_state_t *afl, u8 *virgin_map) {
  * on rare cases it fall backs to the slow path: classify_counts() first, then
  * return has_new_bits(). */
-inline u8 has_new_bits_unclassified(afl_state_t *afl, u8 *virgin_map) {
+static inline u8 has_new_bits_unclassified(afl_state_t *afl, u8 *virgin_map,
+                                           bool *classified) {
   /* Handle the hot path first: no new coverage */
   u8 *end = afl->fsrv.trace_bits + afl->fsrv.map_size;

@@ -272,6 +273,7 @@ inline u8 has_new_bits_unclassified(afl_state_t *afl, u8 *virgin_map) {
 #endif /* ^WORD_SIZE_64 */
   classify_counts(&afl->fsrv);
+  *classified = true;
   return has_new_bits(afl, virgin_map);
 }

@@ -498,13 +500,61 @@ u8 __attribute__((hot)) save_if_interesting(afl_state_t *afl, void *mem,
   u8  fn[PATH_MAX];
   u8 *queue_fn = "";
-  u8  new_bits = 0, keeping = 0, res, is_timeout = 0, need_hash = 1;
-  u8  classified = 0;
+  u8  keeping = 0, res, is_timeout = 0;
   u8  san_fault = 0, san_idx = 0, feed_san = 0;
   s32 fd;
-  u64 cksum = 0;
   u32 cksum_simplified = 0, cksum_unique = 0;
+  bool classified = false, bits_counted = false, cksumed = false;
+  u8   new_bits = 0; /* valid if bits_counted is true */
+  u64  cksum = 0;    /* valid if cksumed is true */
+
+#define classify_if_necessary() \
+  do { \
+    \
+    if (!classified) { \
+      \
+      classify_counts(&afl->fsrv); \
+      classified = true; \
+      \
+    } \
+    \
+  } while (0)
+
+#define calculate_cksum_if_necessary() \
+  do { \
+    \
+    if (!cksumed) { \
+      \
+      classify_if_necessary(); \
+      cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST); \
+      cksumed = true; \
+      \
+    } \
+    \
+  } while (0)
+
+#define calculate_new_bits_if_necessary() \
+  do { \
+    \
+    if (!bits_counted) { \
+      \
+      if (classified) { \
+        \
+        new_bits = has_new_bits(afl, afl->virgin_bits); \
+        \
+      } else { \
+        \
+        new_bits = \
+            has_new_bits_unclassified(afl, afl->virgin_bits, &classified); \
+        \
+      } \
+      bits_counted = true; \
+      \
+    } \
+    \
+  } while (0)
+
   afl->san_case_status = 0;
   /* Update path frequency. */

@@ -513,11 +563,7 @@ u8 __attribute__((hot)) save_if_interesting(afl_state_t *afl, void *mem,
      only be used for special schedules */
   if (unlikely(afl->schedule >= FAST && afl->schedule <= RARE)) {
-    classify_counts(&afl->fsrv);
-    classified = 1;
-    need_hash = 0;
-    cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);
+    calculate_cksum_if_necessary();
     /* Saturated increment */
     if (likely(afl->n_fuzz[cksum % N_FUZZ_SIZE] < 0xFFFFFFFF))

@@ -551,17 +597,8 @@ u8 __attribute__((hot)) save_if_interesting(afl_state_t *afl, void *mem,
   if (unlikely(afl->san_binary_length) &&
       unlikely(afl->san_abstraction == COVERAGE_INCREASE)) {
-    if (classified) {
-      /* We could have classified the bits in SAND with COVERAGE_INCREASE */
-      new_bits = has_new_bits(afl, afl->virgin_bits);
-    } else {
-      new_bits = has_new_bits_unclassified(afl, afl->virgin_bits);
-      classified = 1;
-    }
     /* Check if the input increase the coverage */
+    calculate_new_bits_if_necessary();
     if (unlikely(new_bits)) { feed_san = 1; }

@@ -570,15 +607,9 @@ u8 __attribute__((hot)) save_if_interesting(afl_state_t *afl, void *mem,
   if (unlikely(afl->san_binary_length) &&
       likely(afl->san_abstraction == UNIQUE_TRACE)) {
     // If schedule is not FAST..RARE, we need to classify counts here
     // Note: SAND was evaluated under FAST schedule but should also work
     // with other scedules.
-    if (!classified) {
-      classify_counts(&afl->fsrv);
-      classified = 1;
-    }
+    classify_if_necessary();
     cksum_unique =
         hash32(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);

@@ -638,23 +669,7 @@ u8 __attribute__((hot)) save_if_interesting(afl_state_t *afl, void *mem,
   /* Keep only if there are new bits in the map, add to queue for
      future fuzzing, etc. */
-  if (!unlikely(afl->san_abstraction == COVERAGE_INCREASE && feed_san)) {
-    /* If we are in coverage increasing abstraction and have fed input to
-       sanitizers, we are sure it has new bits.*/
-    if (classified) {
-      /* We could have classified the bits in SAND with UNIQUE_TRACE */
-      new_bits = has_new_bits(afl, afl->virgin_bits);
-    } else {
-      new_bits = has_new_bits_unclassified(afl, afl->virgin_bits);
-      classified = 1;
-    }
-  }
+  calculate_new_bits_if_necessary();
   if (likely(!new_bits)) {

@@ -677,6 +692,11 @@ u8 __attribute__((hot)) save_if_interesting(afl_state_t *afl, void *mem,
 save_to_queue:
+  /* these calculations are necessary because some code flow may jump here via
+     goto */
+  calculate_cksum_if_necessary();
+  calculate_new_bits_if_necessary();
 #ifndef SIMPLE_FILES
   if (!afl->afl_env.afl_sha1_filenames) {

@@ -758,6 +778,8 @@ u8 __attribute__((hot)) save_if_interesting(afl_state_t *afl, void *mem,
 #endif
+  afl->queue_top->exec_cksum = cksum;
   if (new_bits == 2) {
     afl->queue_top->has_new_cov = 1;

@@ -765,17 +787,8 @@ u8 __attribute__((hot)) save_if_interesting(afl_state_t *afl, void *mem,
   }
-  if (unlikely(need_hash && new_bits)) {
-    /* due to classify counts we have to recalculate the checksum */
-    afl->queue_top->exec_cksum =
-        hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);
-    need_hash = 0;
-  }
   /* For AFLFast schedules we update the new queue entry */
-  if (likely(cksum)) {
+  if (unlikely(afl->schedule >= FAST && afl->schedule <= RARE)) {
     afl->queue_top->n_fuzz_entry = cksum % N_FUZZ_SIZE;
     afl->n_fuzz[afl->queue_top->n_fuzz_entry] = 1;

@@ -874,12 +887,9 @@ may_save_fault:
   }
   new_fault = fuzz_run_target(afl, &afl->fsrv, afl->hang_tmout);
-  if (!classified) {
-    classify_counts(&afl->fsrv);
-    classified = 1;
-  }
+  classified = false;
+  bits_counted = false;
+  cksumed = false;
   /* A corner case that one user reported bumping into: increasing the
      timeout actually uncovers a crash. Make sure we don't discard it if