diff --git a/repos/base/include/base/allocator_avl.h b/repos/base/include/base/allocator_avl.h
index e31aeedf97..2b7fab37de 100644
--- a/repos/base/include/base/allocator_avl.h
+++ b/repos/base/include/base/allocator_avl.h
@@ -16,6 +16,7 @@
#include <base/allocator.h>
#include <base/tslab.h>
+#include <base/printf.h>
#include <util/avl_tree.h>
#include <util/misc_math.h>
@@ -181,6 +182,8 @@ class Genode::Allocator_avl_base : public Range_allocator
int _add_block(Block *block_metadata,
addr_t base, size_t size, bool used);
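+
+ /**
+ * Return a used block within the specified sub tree, or nullptr if none exists
+ */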
+ Block *_find_any_used_block(Block *sub_tree);
+
/**
* Destroy block
*/
@@ -197,6 +200,15 @@ class Genode::Allocator_avl_base : public Range_allocator
protected:
+ /**
+ * Clean up the allocator and detect dangling allocations
+ *
+ * This function is called when the allocator is destructed. It makes
+ * sure that the allocator instance releases all memory obtained from
+ * the meta-data allocator.
+ */
+ void _revert_allocations_and_ranges();
+
/**
* Find block by specified address
*/
@@ -219,6 +231,8 @@ class Genode::Allocator_avl_base : public Range_allocator
Allocator_avl_base(Allocator *md_alloc, size_t md_entry_size) :
_md_alloc(md_alloc), _md_entry_size(md_entry_size) { }
+ ~Allocator_avl_base() { _revert_allocations_and_ranges(); }
+
public:
/**
@@ -319,6 +333,8 @@ class Genode::Allocator_avl_tpl : public Allocator_avl_base
_metadata((metadata_chunk_alloc) ? metadata_chunk_alloc : this,
(Slab_block *)&_initial_md_block) { }
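+ /* revert allocations before the '_metadata' slab gets destructed */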
+ ~Allocator_avl_tpl() { _revert_allocations_and_ranges(); }
+
/**
* Assign custom meta data to block at specified address
*/
diff --git a/repos/base/src/base/allocator/allocator_avl.cc b/repos/base/src/base/allocator/allocator_avl.cc
index f7c61978c8..9c869e2846 100644
--- a/repos/base/src/base/allocator/allocator_avl.cc
+++ b/repos/base/src/base/allocator/allocator_avl.cc
@@ -173,6 +173,28 @@ void Allocator_avl_base::_cut_from_block(Block *b, addr_t addr, size_t size,
}
+void Allocator_avl_base::_revert_allocations_and_ranges()
+{
+ /* revert all allocations */
+ size_t dangling_allocations = 0;
+ for (;; dangling_allocations++) {
+ addr_t addr = 0;
+ if (!any_block_addr(&addr))
+ break;
+
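+ /* release the dangling allocation before looking up the next one */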
+ free((void *)addr);
+ }
+
+ if (dangling_allocations)
+ PWRN("%zd dangling allocation%s at allocator destruction time",
+ dangling_allocations, (dangling_allocations > 1) ? "s" : "");
+
+ /* remove ranges */
+ while (Block *block = _addr_tree.first())
+ remove_range(block->addr(), block->size());
+}
+
+
int Allocator_avl_base::add_range(addr_t new_addr, size_t new_size)
{
Block *b;
@@ -349,12 +371,29 @@ size_t Allocator_avl_base::size_at(void const *addr) const
}
+Allocator_avl_base::Block *Allocator_avl_base::_find_any_used_block(Block *sub_tree)
+{
+ if (!sub_tree)
+ return nullptr;
+
+ if (sub_tree->used())
+ return sub_tree;
+
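+ /* recursively search both sub trees for a used block */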
+ for (unsigned i = 0; i < 2; i++)
+ if (Block *block = _find_any_used_block(sub_tree->child(i)))
+ return block;
+
+ return nullptr;
+}
+
+
bool Allocator_avl_base::any_block_addr(addr_t *out_addr)
{
- Block *b = _addr_tree.first();
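+ /* pick a block marked as used, skipping blocks that denote free ranges */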
+ Block * const b = _find_any_used_block(_addr_tree.first());
*out_addr = b ? b->addr() : 0;
- return (b != 0);
+
+ return b != nullptr;
}