minor testcache optimizations

vanhauser-thc
2024-06-07 11:16:42 +02:00
parent 0618bfd4ae
commit fe36ceaa55
2 changed files with 106 additions and 100 deletions

@@ -2,7 +2,6 @@
 ## Must

-- review: queue_testcase_store_mem and queue_testcase_get
 - hardened_usercopy=0 page_alloc.shuffle=0
 - add value_profile but only enable after 15 minutes without finds
 - cmplog max items env?

@@ -621,6 +621,8 @@ void add_to_queue(afl_state_t *afl, u8 *fname, u32 len, u8 passed_det) {
   q->trace_mini = NULL;
   q->testcase_buf = NULL;
   q->mother = afl->queue_cur;
+  q->weight = 1.0;
+  q->perf_score = 100;

 #ifdef INTROSPECTION
   q->bitsmap_size = afl->bitsmap_size;
@@ -1226,9 +1228,11 @@ inline void queue_testcase_retake(afl_state_t *afl, struct queue_entry *q,
   u32 len = q->len;

-  if (len != old_len) {
+  // only realloc if necessary or useful
+  // (a custom trim can make the testcase larger)
+  if (unlikely(len > old_len || len < old_len + 1024)) {

-    afl->q_testcase_cache_size = afl->q_testcase_cache_size + len - old_len;
+    afl->q_testcase_cache_size += len - old_len;
     q->testcase_buf = (u8 *)realloc(q->testcase_buf, len);

     if (unlikely(!q->testcase_buf)) {
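
The new guard spells out the policy from the comment: always realloc when a custom trim grew the testcase, otherwise only realloc when shrinking frees a meaningful amount. A minimal sketch of that pattern, reading "useful" as a shrink that saves more than 1024 bytes (the constant the hunk uses); the function name and SHRINK_SLACK are hypothetical:

#include <stdlib.h>

#define SHRINK_SLACK 1024 /* hypothetical name; the hunk hardcodes 1024 */

/* Resize a cached testcase buffer from old_len to len only when it is
   necessary (the testcase grew) or useful (shrinking frees more than
   SHRINK_SLACK bytes). On realloc failure the old buffer is kept,
   which is always safe when shrinking. */
static unsigned char *resize_if_worthwhile(unsigned char *buf, size_t old_len,
                                           size_t len) {

  if (len > old_len || len + SHRINK_SLACK < old_len) {

    unsigned char *ptr = (unsigned char *)realloc(buf, len);
    if (ptr) { return ptr; }

  }

  return buf;

}
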
@@ -1257,22 +1261,25 @@ inline void queue_testcase_retake_mem(afl_state_t *afl, struct queue_entry *q,
   if (likely(q->testcase_buf)) {

-    u32 is_same = in == q->testcase_buf;
-
-    if (likely(len != old_len)) {
-
-      u8 *ptr = (u8 *)realloc(q->testcase_buf, len);
-
-      if (likely(ptr)) {
-
-        q->testcase_buf = ptr;
-        afl->q_testcase_cache_size = afl->q_testcase_cache_size + len - old_len;
-
-      }
-
-    }
-
-    if (unlikely(!is_same)) { memcpy(q->testcase_buf, in, len); }
+    if (likely(in != q->testcase_buf)) {
+
+      // only realloc if we save memory
+      if (unlikely(len < old_len + 1024)) {
+
+        u8 *ptr = (u8 *)realloc(q->testcase_buf, len);
+
+        if (likely(ptr)) {
+
+          q->testcase_buf = ptr;
+          afl->q_testcase_cache_size += len - old_len;
+
+        }
+
+      }
+
+      memcpy(q->testcase_buf, in, len);
+
+    }

   }
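
Folding the old is_same flag into a single `in != q->testcase_buf` guard also moves the realloc under it, so a retake with the very same pointer does no work at all. The guard matters for correctness too: memcpy with overlapping (here: identical) source and destination is undefined behavior. A minimal standalone sketch, names hypothetical:

#include <string.h>

/* Copy a testcase back into its cache slot only when the caller did not
   hand us the cache buffer itself; identical pointers fully overlap, and
   memcpy on overlapping regions is undefined behavior. */
static void copy_back_if_needed(unsigned char *cache_buf,
                                const unsigned char *in, size_t len) {

  if ((const unsigned char *)cache_buf != in) { memcpy(cache_buf, in, len); }

}
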
@@ -1283,15 +1290,19 @@ inline u8 *queue_testcase_get(afl_state_t *afl, struct queue_entry *q) {
 inline u8 *queue_testcase_get(afl_state_t *afl, struct queue_entry *q) {

+  if (likely(q->testcase_buf)) { return q->testcase_buf; }
+
   u32 len = q->len;
+  double weight = q->weight;

-  /* first handle if no testcase cache is configured */
-  if (unlikely(!afl->q_testcase_max_cache_size)) {
+  // first handle if no testcase cache is configured, or if the
+  // weighting of the testcase is below average.
+  if (unlikely(weight < 1.0 || !afl->q_testcase_max_cache_size)) {

     u8 *buf;

-    if (unlikely(q == afl->queue_cur)) {
+    if (likely(q == afl->queue_cur)) {

       buf = (u8 *)afl_realloc((void **)&afl->testcase_buf, len);
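
Two changes shape this hunk: an early return makes the already-cached case the cheapest path, and entries whose weight is below 1.0, the average (and the default that add_to_queue now assigns), bypass the persistent cache entirely and are loaded into a scratch buffer instead. A simplified sketch of that two-path lookup, with hypothetical types and names; plain file I/O stands in for the elided AFL++ internals:

#include <stdio.h>
#include <stdlib.h>

/* Hypothetical, stripped-down queue entry. */
typedef struct entry {
  unsigned char *testcase_buf; /* non-NULL once the entry is cached */
  double         weight;       /* 1.0 = average; set when queued    */
  size_t         len;
  const char    *fname;
} entry_t;

static unsigned char *load_file(const char *fname, size_t len) {

  unsigned char *buf = (unsigned char *)malloc(len);
  if (!buf) { return NULL; }
  FILE *f = fopen(fname, "rb");
  if (!f || fread(buf, 1, len, f) != len) {
    if (f) { fclose(f); }
    free(buf);
    return NULL;
  }
  fclose(f);
  return buf;

}

/* Cached entries return immediately; below-average entries go to a
   throwaway buffer the caller must free; the rest enter the cache. */
static unsigned char *testcase_get(entry_t *q, size_t max_cache_size) {

  if (q->testcase_buf) { return q->testcase_buf; } /* fast path */

  if (q->weight < 1.0 || !max_cache_size) {

    return load_file(q->fname, q->len); /* scratch load, not cached */

  }

  q->testcase_buf = load_file(q->fname, q->len); /* admit to the cache */
  return q->testcase_buf;

}

In AFL++ itself the scratch path reuses afl->testcase_buf via afl_realloc rather than a fresh malloc each time; the sketch simplifies the ownership.
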
@@ -1317,9 +1328,7 @@ inline u8 *queue_testcase_get(afl_state_t *afl, struct queue_entry *q) {
   }

-  /* now handle the testcase cache */
-
-  if (unlikely(!q->testcase_buf)) {
+  /* now handle the testcase cache and we know it is an interesting one */

   /* Buf not cached, let's load it */
   u32 tid = afl->q_testcase_max_cache_count;
@@ -1333,18 +1342,17 @@ inline u8 *queue_testcase_get(afl_state_t *afl, struct queue_entry *q) {
   /* We want a max number of entries to the cache that we learn.
      Very simple: once the cache is filled by size - that is the max. */

-  if (unlikely(afl->q_testcase_cache_size + len >=
-                   afl->q_testcase_max_cache_size &&
-               (afl->q_testcase_cache_count <
-                    afl->q_testcase_max_cache_entries &&
-                afl->q_testcase_max_cache_count <
-                    afl->q_testcase_max_cache_entries) &&
-               !do_once)) {
+  if (unlikely(
+          afl->q_testcase_cache_size + len >=
+              afl->q_testcase_max_cache_size &&
+          (afl->q_testcase_cache_count < afl->q_testcase_max_cache_entries &&
+           afl->q_testcase_max_cache_count <
+               afl->q_testcase_max_cache_entries) &&
+          !do_once)) {

     if (afl->q_testcase_max_cache_count > afl->q_testcase_cache_count) {

-      afl->q_testcase_max_cache_entries =
-          afl->q_testcase_max_cache_count + 1;
+      afl->q_testcase_max_cache_entries = afl->q_testcase_max_cache_count + 1;

     } else {
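
The reindented condition implements the policy in the comment above it: the cache has no fixed entry count up front; the first time the byte budget is exhausted, the slot count reached so far is frozen as the maximum, and later loads recycle those slots. A rough sketch of that one-shot learning step, with hypothetical parameter names; the else branch is not shown in this diff view, so its body here is an assumption:

#include <stddef.h>

/* Once the byte budget would be exceeded, freeze the entry cap at the
   highest slot index reached so far (+1), exactly once. */
static void learn_entry_cap(size_t cache_size, size_t len,
                            size_t max_cache_size, unsigned int cache_count,
                            unsigned int max_cache_count,
                            unsigned int *max_entries,
                            unsigned char *do_once) {

  if (cache_size + len >= max_cache_size &&
      cache_count < *max_entries && max_cache_count < *max_entries &&
      !*do_once) {

    if (max_cache_count > cache_count) {

      *max_entries = max_cache_count + 1;

    } else {

      *max_entries = cache_count + 1; /* assumed: diff elides this branch */

    }

    *do_once = 1;

  }

}
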
@@ -1430,8 +1438,6 @@ inline u8 *queue_testcase_get(afl_state_t *afl, struct queue_entry *q) {
     }

-  }
-
   return q->testcase_buf;

 }
@@ -1443,12 +1449,13 @@ inline void queue_testcase_store_mem(afl_state_t *afl, struct queue_entry *q,
   u32 len = q->len;

-  if (unlikely(afl->q_testcase_cache_size + len >=
+  if (unlikely(q->weight < 1.0 ||
+               afl->q_testcase_cache_size + len >=
                    afl->q_testcase_max_cache_size ||
                afl->q_testcase_cache_count >=
                    afl->q_testcase_max_cache_entries - 1)) {

-    // no space? will be loaded regularly later.
+    // no space or uninteresting? will be loaded regularly later.
     return;

   }
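
The store side now applies the same weight gate, so a below-average testcase is never proactively written into the cache; it is simply read from disk when it gets scheduled. Taken together the three checks form one admission predicate; a compact restatement with hypothetical names:

#include <stddef.h>

/* Admission test mirroring the store-side checks: entry weight,
   remaining byte budget, and remaining slot budget. */
static int cache_admit(double weight, size_t cache_size, size_t len,
                       size_t max_cache_size, unsigned int cache_count,
                       unsigned int max_entries) {

  if (weight < 1.0) { return 0; }                       /* uninteresting  */
  if (cache_size + len >= max_cache_size) { return 0; } /* no byte budget */
  if (cache_count >= max_entries - 1) { return 0; }     /* no free slot   */
  return 1;

}
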