diff --git a/repos/base/include/base/allocator_avl.h b/repos/base/include/base/allocator_avl.h index 376b90f18f..b871002de4 100644 --- a/repos/base/include/base/allocator_avl.h +++ b/repos/base/include/base/allocator_avl.h @@ -24,7 +24,14 @@ namespace Genode { class Allocator_avl_base; - template + /* + * The default slab block size is dimensioned such that slab-block + * allocations make effective use of entire memory pages. To account for + * the common pattern of using a 'Sliced_heap' as backing store for the + * 'Allocator_avl', we remove 8 words from the slab-block size to take the + * meta-data overhead of each sliced-heap block into account. + */ + template class Allocator_avl_tpl; /** @@ -338,6 +345,11 @@ class Genode::Allocator_avl_tpl : public Allocator_avl_base ~Allocator_avl_tpl() { _revert_allocations_and_ranges(); } + /** + * Return size of slab blocks used for meta data + */ + static constexpr size_t slab_block_size() { return SLAB_BLOCK_SIZE; } + /** * Assign custom meta data to block at specified address */ diff --git a/repos/base/include/base/heap.h b/repos/base/include/base/heap.h index ec97d00315..d462e01b09 100644 --- a/repos/base/include/base/heap.h +++ b/repos/base/include/base/heap.h @@ -38,23 +38,6 @@ class Genode::Heap : public Allocator { private: - enum { - MIN_CHUNK_SIZE = 4*1024, /* in machine words */ - MAX_CHUNK_SIZE = 256*1024, - /* - * Meta data includes the Dataspace structure and meta data of - * the AVL allocator. - */ - META_DATA_SIZE = 1024, /* in bytes */ - /* - * Allocation sizes >= this value are considered as big - * allocations, which get their own dataspace. In contrast - * to smaller allocations, this memory is released to - * the RAM session when 'free()' is called. 
- */ - BIG_ALLOCATION_THRESHOLD = 64*1024 /* in bytes */ - }; - class Dataspace : public List::Element { public: @@ -127,16 +110,7 @@ class Genode::Heap : public Allocator Region_map *region_map, size_t quota_limit = UNLIMITED, void *static_addr = 0, - size_t static_size = 0) - : - _alloc(nullptr), - _ds_pool(ram_session, region_map), - _quota_limit(quota_limit), _quota_used(0), - _chunk_size(MIN_CHUNK_SIZE) - { - if (static_addr) - _alloc->add_range((addr_t)static_addr, static_size); - } + size_t static_size = 0); ~Heap(); diff --git a/repos/base/src/base/heap/heap.cc b/repos/base/src/base/heap/heap.cc index 0288b4410a..1f4ce2e509 100644 --- a/repos/base/src/base/heap/heap.cc +++ b/repos/base/src/base/heap/heap.cc @@ -20,6 +20,22 @@ using namespace Genode; +namespace { + + enum { + MIN_CHUNK_SIZE = 4*1024, /* in machine words */ + MAX_CHUNK_SIZE = 256*1024, + /* + * Allocation sizes >= this value are considered as big + * allocations, which get their own dataspace. In contrast + * to smaller allocations, this memory is released to + * the RAM session when 'free()' is called. + */ + BIG_ALLOCATION_THRESHOLD = 64*1024 /* in bytes */ + }; +} + + Heap::Dataspace_pool::~Dataspace_pool() { /* free all ram_dataspaces */ @@ -154,10 +170,10 @@ bool Heap::_unsynchronized_alloc(size_t size, void **out_addr) /* * Calculate block size of needed backing store. The block must hold the * requested 'size' and we add some space for meta data - * ('Dataspace' structures, AVL nodes). + * ('Dataspace' structures, AVL-node slab blocks). * Finally, we align the size to a 4K page. 
*/ - dataspace_size = size + META_DATA_SIZE; + dataspace_size = size + Allocator_avl::slab_block_size() + sizeof(Heap::Dataspace); /* * '_chunk_size' is a multiple of 4K, so 'dataspace_size' becomes @@ -234,6 +250,22 @@ void Heap::free(void *addr, size_t size) } +Heap::Heap(Ram_session *ram_session, + Region_map *region_map, + size_t quota_limit, + void *static_addr, + size_t static_size) +: + _alloc(nullptr), + _ds_pool(ram_session, region_map), + _quota_limit(quota_limit), _quota_used(0), + _chunk_size(MIN_CHUNK_SIZE) +{ + if (static_addr) + _alloc->add_range((addr_t)static_addr, static_size); +} + + Heap::~Heap() { /* diff --git a/repos/base/src/core/include/cpu_thread_allocator.h b/repos/base/src/core/include/cpu_thread_allocator.h index 9b4d8a2657..e31f1904bf 100644 --- a/repos/base/src/core/include/cpu_thread_allocator.h +++ b/repos/base/src/core/include/cpu_thread_allocator.h @@ -16,6 +16,7 @@ /* Genode includes */ #include +#include /* base-internal includes */ #include @@ -26,8 +27,12 @@ namespace Genode /** * Allocator to manage CPU threads associated with a CPU session + * + * We take the knowledge about the used backing-store allocator (sliced + * heap) into account to make sure that slab blocks fill whole pages. 
*/ - typedef Tslab Cpu_thread_allocator; + typedef Tslab + Cpu_thread_allocator; } #endif /* _CORE__INCLUDE__CPU_THREAD_ALLOCATOR_H_ */ diff --git a/repos/base/src/core/include/ram_session_component.h b/repos/base/src/core/include/ram_session_component.h index 82eb3ad8a1..3203d9c3e6 100644 --- a/repos/base/src/core/include/ram_session_component.h +++ b/repos/base/src/core/include/ram_session_component.h @@ -16,6 +16,7 @@ /* Genode includes */ #include +#include #include #include #include @@ -38,7 +39,11 @@ namespace Genode { class Invalid_dataspace : public Exception { }; - static constexpr size_t SBS = get_page_size(); + /* + * Dimension 'Ds_slab' such that slab blocks (including the + * meta-data overhead of the sliced-heap blocks) are page sized. + */ + static constexpr size_t SBS = get_page_size() - Sliced_heap::meta_data_size(); using Ds_slab = Synced_allocator >; diff --git a/repos/base/src/core/include/region_map_component.h b/repos/base/src/core/include/region_map_component.h index 7a57b0598b..180aa98d29 100644 --- a/repos/base/src/core/include/region_map_component.h +++ b/repos/base/src/core/include/region_map_component.h @@ -24,6 +24,7 @@ #include #include #include +#include #include #include @@ -273,7 +274,14 @@ class Genode::Region_map_component : public Rpc_object, void sub_rm(Native_capability cap) { _rm_cap = cap; } }; - Tslab _ref_slab; /* backing store for + /* + * Dimension slab allocator for regions such that backing store is + * allocated at the granularity of pages. + */ + typedef Tslab + Ref_slab; + + Ref_slab _ref_slab; /* backing store for region list */ Allocator_avl_tpl _map; /* region map for attach, detach, pagefaults */