memory adjustments

This commit is contained in:
vanhauser-thc
2024-06-06 17:52:21 +02:00
parent e46c106b89
commit 477063e9ee
5 changed files with 78 additions and 79 deletions

View File

@ -2,6 +2,7 @@
## Must ## Must
- review: queue_testcase_store_mem and queue_testcase_get
- hardened_usercopy=0 page_alloc.shuffle=0 - hardened_usercopy=0 page_alloc.shuffle=0
- add value_profile but only enable after 15 minutes without finds - add value_profile but only enable after 15 minutes without finds
- cmplog max items env? - cmplog max items env?

View File

@ -23,6 +23,7 @@
- -V timing is now accurately the fuzz time (without syncing), before - -V timing is now accurately the fuzz time (without syncing), before
long calibration times and syncing could result in no fuzzing being long calibration times and syncing could result in no fuzzing being
made when the time was already run out until then, thanks to @eqv! made when the time was already run out until then, thanks to @eqv!
- make afl-fuzz use less memory with cmplog and fix a memleak
* afl-cc: * afl-cc:
- re-enable i386 support that was accidentally disabled - re-enable i386 support that was accidentally disabled
- fixes for LTO and outdated afl-gcc mode for i386 - fixes for LTO and outdated afl-gcc mode for i386

View File

@ -60,63 +60,6 @@ inline u32 select_next_queue_entry(afl_state_t *afl) {
} }
/* Compute a scheduling weight for queue entry q, used when building the
   alias table for weighted random seed selection.  Starts from a baseline
   of 1.0 and scales it by how the entry compares to the queue-wide
   averages for execution time, input length and coverage bitmap size.
   NOTE(review): afl_state_t, struct queue_entry, u32, likely()/unlikely()
   and the FAST/RARE schedule constants are declared elsewhere in the
   project (this is a diff fragment) — not visible here. */
inline double compute_weight(afl_state_t *afl, struct queue_entry *q,
double avg_exec_us, double avg_bitmap_size,
double avg_len) {
double weight = 1.0;
/* For the FAST..RARE power schedules, de-prioritize frequently hit
   paths: divide by log10(hits) + 1 so often-seen entries shrink. */
if (likely(afl->schedule >= FAST && afl->schedule <= RARE)) {
u32 hits = afl->n_fuzz[q->n_fuzz_entry];
if (likely(hits)) { weight /= (log10(hits) + 1); }
}
/* Execution-speed factor (skipped for RARE and above): entries that are
   moderately slower than average get a small boost, slightly-fast ones
   a small penalty; extremes are left alone.  t is the ratio of this
   entry's exec time to the queue average. */
if (likely(afl->schedule < RARE)) {
double t = q->exec_us / avg_exec_us;
if (likely(t < 0.1)) {
// nothing
} else if (likely(t <= 0.25))
weight *= 0.9;
else if (likely(t <= 0.5)) {
// nothing
} else if (likely(t < 1.0))
weight *= 1.15;
else if (unlikely(t > 2.5 && t < 5.0))
weight *= 1.1;
// else nothing
}
/* Length factor: very short inputs (<10% of avg) are penalized;
   short-ish (<25%) and very long (>=10x avg) inputs get a boost. */
double l = q->len / avg_len;
if (likely(l < 0.1))
weight *= 0.75;
else if (likely(l < 0.25))
weight *= 1.1;
else if (unlikely(l >= 10))
weight *= 1.1;
/* Coverage factor: below-average bitmap size scales the weight down
   linearly (bms < 0.5 gives a factor in (0.75, 1.0)); clearly
   above-average coverage (>1.33x) gets a small boost. */
double bms = q->bitmap_size / avg_bitmap_size;
if (likely(bms < 0.5))
weight *= (1.0 + ((bms - 0.5) / 2));
else if (unlikely(bms > 1.33))
weight *= 1.1;
/* Never-fuzzed entries are strongly preferred; entries marked
   redundant for favored-path purposes are de-prioritized. */
if (unlikely(!q->was_fuzzed)) { weight *= 2.5; }
if (unlikely(q->fs_redundant)) { weight *= 0.75; }
return weight;
}
/* create the alias table that allows weighted random selection - expensive */ /* create the alias table that allows weighted random selection - expensive */
void create_alias_table(afl_state_t *afl) { void create_alias_table(afl_state_t *afl) {
@ -177,8 +120,59 @@ void create_alias_table(afl_state_t *afl) {
if (likely(!q->disabled)) { if (likely(!q->disabled)) {
q->weight = double weight = 1.0;
compute_weight(afl, q, avg_exec_us, avg_bitmap_size, avg_len); { // inline does result in a compile error with LTO, weird
if (likely(afl->schedule >= FAST && afl->schedule <= RARE)) {
u32 hits = afl->n_fuzz[q->n_fuzz_entry];
if (likely(hits)) { weight /= (log10(hits) + 1); }
}
if (likely(afl->schedule < RARE)) {
double t = q->exec_us / avg_exec_us;
if (likely(t < 0.1)) {
// nothing
} else if (likely(t <= 0.25))
weight *= 0.9;
else if (likely(t <= 0.5)) {
// nothing
} else if (likely(t < 1.0))
weight *= 1.15;
else if (unlikely(t > 2.5 && t < 5.0))
weight *= 1.1;
// else nothing
}
double l = q->len / avg_len;
if (likely(l < 0.1))
weight *= 0.75;
else if (likely(l < 0.25))
weight *= 1.1;
else if (unlikely(l >= 10))
weight *= 1.1;
double bms = q->bitmap_size / avg_bitmap_size;
if (likely(bms < 0.5))
weight *= (1.0 + ((bms - 0.5) / 2));
else if (unlikely(bms > 1.33))
weight *= 1.1;
if (unlikely(!q->was_fuzzed)) { weight *= 2.5; }
if (unlikely(q->fs_redundant)) { weight *= 0.75; }
}
q->weight = weight;
q->perf_score = calculate_score(afl, q); q->perf_score = calculate_score(afl, q);
sum += q->weight; sum += q->weight;

View File

@ -322,7 +322,7 @@ static u8 colorization(afl_state_t *afl, u8 *buf, u32 len,
memcpy(backup, buf, len); memcpy(backup, buf, len);
memcpy(changed, buf, len); memcpy(changed, buf, len);
if (afl->cmplog_random_colorization) { if (likely(afl->cmplog_random_colorization)) {
random_replace(afl, changed, len); random_replace(afl, changed, len);
@ -402,6 +402,7 @@ static u8 colorization(afl_state_t *afl, u8 *buf, u32 len,
u32 i = 1; u32 i = 1;
u32 positions = 0; u32 positions = 0;
while (i) { while (i) {
restart: restart:
@ -2996,15 +2997,16 @@ u8 input_to_state_stage(afl_state_t *afl, u8 *orig_buf, u8 *buf, u32 len) {
struct tainted *t = taint; struct tainted *t = taint;
#ifdef _DEBUG
while (t) { while (t) {
#ifdef _DEBUG
fprintf(stderr, "T: idx=%u len=%u\n", t->pos, t->len); fprintf(stderr, "T: idx=%u len=%u\n", t->pos, t->len);
#endif
t = t->next; t = t->next;
} }
#endif
#if defined(_DEBUG) || defined(CMPLOG_INTROSPECTION) #if defined(_DEBUG) || defined(CMPLOG_INTROSPECTION)
u64 start_time = get_cur_time(); u64 start_time = get_cur_time();
u32 cmp_locations = 0; u32 cmp_locations = 0;
@ -3148,7 +3150,7 @@ u8 input_to_state_stage(afl_state_t *afl, u8 *orig_buf, u8 *buf, u32 len) {
exit_its: exit_its:
if (afl->cmplog_lvl == CMPLOG_LVL_MAX) { // if (afl->cmplog_lvl == CMPLOG_LVL_MAX) {
afl->queue_cur->colorized = CMPLOG_LVL_MAX; afl->queue_cur->colorized = CMPLOG_LVL_MAX;
@ -3168,7 +3170,7 @@ exit_its:
afl->queue_cur->taint = NULL; afl->queue_cur->taint = NULL;
} else { /*} else {
afl->queue_cur->colorized = LVL2; afl->queue_cur->colorized = LVL2;
@ -3182,7 +3184,7 @@ exit_its:
} }
} }*/
#ifdef CMPLOG_COMBINE #ifdef CMPLOG_COMBINE
if (afl->queued_items + afl->saved_crashes > orig_hit_cnt + 1) { if (afl->queued_items + afl->saved_crashes > orig_hit_cnt + 1) {

View File

@ -33,15 +33,15 @@ u8 is_det_timeout(u64 cur_ms, u8 is_flip) {
u8 should_det_fuzz(afl_state_t *afl, struct queue_entry *q) { u8 should_det_fuzz(afl_state_t *afl, struct queue_entry *q) {
if (!afl->skipdet_g->virgin_det_bits) { if (unlikely(!afl->skipdet_g->virgin_det_bits)) {
afl->skipdet_g->virgin_det_bits = afl->skipdet_g->virgin_det_bits =
(u8 *)ck_alloc(sizeof(u8) * afl->fsrv.map_size); (u8 *)ck_alloc(sizeof(u8) * afl->fsrv.map_size);
} }
if (!q->favored || q->passed_det) return 0; if (likely(!q->favored || q->passed_det)) return 0;
if (!q->trace_mini) return 0; if (unlikely(!q->trace_mini)) return 0;
if (!afl->skipdet_g->last_cov_undet) if (!afl->skipdet_g->last_cov_undet)
afl->skipdet_g->last_cov_undet = get_cur_time(); afl->skipdet_g->last_cov_undet = get_cur_time();
@ -122,7 +122,8 @@ u8 skip_deterministic_stage(afl_state_t *afl, u8 *orig_buf, u8 *out_buf,
afl->stage_cur = 0; afl->stage_cur = 0;
orig_hit_cnt = afl->queued_items + afl->saved_crashes; orig_hit_cnt = afl->queued_items + afl->saved_crashes;
u8 *inf_eff_map = (u8 *)ck_alloc(sizeof(u8) * len); static u8 *inf_eff_map;
inf_eff_map = (u8 *)ck_realloc(inf_eff_map, sizeof(u8) * len);
memset(inf_eff_map, 1, sizeof(u8) * len); memset(inf_eff_map, 1, sizeof(u8) * len);
if (common_fuzz_stuff(afl, orig_buf, len)) { return 0; } if (common_fuzz_stuff(afl, orig_buf, len)) { return 0; }