mirror of
https://github.com/genodelabs/genode.git
synced 2025-04-08 11:55:24 +00:00
allocator_avl: hand back meta-data when destructed
This patch ensures that the 'Allocator_avl' releases all memory obtained from the meta-data allocator at destruction time. If allocations are still dangling, it produces a warning, hinting at possible memory leaks. Finally, it properly reverts all 'add_range' operations.
This commit is contained in:
parent
ed37c2ecff
commit
7cf40a0971
@ -16,6 +16,7 @@
|
||||
|
||||
#include <base/allocator.h>
|
||||
#include <base/tslab.h>
|
||||
#include <base/printf.h>
|
||||
#include <util/avl_tree.h>
|
||||
#include <util/misc_math.h>
|
||||
|
||||
@ -181,6 +182,8 @@ class Genode::Allocator_avl_base : public Range_allocator
|
||||
int _add_block(Block *block_metadata,
|
||||
addr_t base, size_t size, bool used);
|
||||
|
||||
Block *_find_any_used_block(Block *sub_tree);
|
||||
|
||||
/**
|
||||
* Destroy block
|
||||
*/
|
||||
@ -197,6 +200,15 @@ class Genode::Allocator_avl_base : public Range_allocator
|
||||
|
||||
protected:
|
||||
|
||||
/**
|
||||
* Clean up the allocator and detect dangling allocations
|
||||
*
|
||||
* This function is called at the destruction time of the allocator. It
|
||||
* makes sure that the allocator instance releases all memory obtained
|
||||
* from the meta-data allocator.
|
||||
*/
|
||||
void _revert_allocations_and_ranges();
|
||||
|
||||
/**
|
||||
* Find block by specified address
|
||||
*/
|
||||
@ -219,6 +231,8 @@ class Genode::Allocator_avl_base : public Range_allocator
|
||||
Allocator_avl_base(Allocator *md_alloc, size_t md_entry_size) :
|
||||
_md_alloc(md_alloc), _md_entry_size(md_entry_size) { }
|
||||
|
||||
~Allocator_avl_base() { _revert_allocations_and_ranges(); }
|
||||
|
||||
public:
|
||||
|
||||
/**
|
||||
@ -319,6 +333,8 @@ class Genode::Allocator_avl_tpl : public Allocator_avl_base
|
||||
_metadata((metadata_chunk_alloc) ? metadata_chunk_alloc : this,
|
||||
(Slab_block *)&_initial_md_block) { }
|
||||
|
||||
~Allocator_avl_tpl() { _revert_allocations_and_ranges(); }
|
||||
|
||||
/**
|
||||
* Assign custom meta data to block at specified address
|
||||
*/
|
||||
|
@ -173,6 +173,28 @@ void Allocator_avl_base::_cut_from_block(Block *b, addr_t addr, size_t size,
|
||||
}
|
||||
|
||||
|
||||
void Allocator_avl_base::_revert_allocations_and_ranges()
|
||||
{
|
||||
/* revert all allocations */
|
||||
size_t dangling_allocations = 0;
|
||||
for (;; dangling_allocations++) {
|
||||
addr_t addr = 0;
|
||||
if (!any_block_addr(&addr))
|
||||
break;
|
||||
|
||||
free((void *)addr);
|
||||
}
|
||||
|
||||
if (dangling_allocations)
|
||||
PWRN("%zd dangling allocation%s at allocator destruction time",
|
||||
dangling_allocations, (dangling_allocations > 1) ? "s" : "");
|
||||
|
||||
/* remove ranges */
|
||||
while (Block *block = _addr_tree.first())
|
||||
remove_range(block->addr(), block->size());
|
||||
}
|
||||
|
||||
|
||||
int Allocator_avl_base::add_range(addr_t new_addr, size_t new_size)
|
||||
{
|
||||
Block *b;
|
||||
@ -349,12 +371,29 @@ size_t Allocator_avl_base::size_at(void const *addr) const
|
||||
}
|
||||
|
||||
|
||||
Allocator_avl_base::Block *Allocator_avl_base::_find_any_used_block(Block *node)
{
	/*
	 * Depth-first search for any block marked as used within the given
	 * sub tree, returning nullptr if the sub tree contains none.
	 */
	if (node == nullptr)
		return nullptr;

	if (node->used())
		return node;

	/* descend into the left (0) and then the right (1) child */
	Block *found = _find_any_used_block(node->child(0));
	if (!found)
		found = _find_any_used_block(node->child(1));

	return found;
}
|
||||
|
||||
|
||||
/**
 * Return the address of any block currently marked as used
 *
 * \param out_addr  result argument, set to the address of a used block,
 *                  or 0 if no used block exists
 * \return          true if a used block was found
 *
 * Note: the scraped diff had merged the pre-patch lines
 * ('Block *b = _addr_tree.first()' and 'return (b != 0)') with the
 * post-patch ones, producing a duplicate definition of 'b'; this is
 * the clean post-patch version, which searches the whole tree for a
 * used block instead of only inspecting the root.
 */
bool Allocator_avl_base::any_block_addr(addr_t *out_addr)
{
	Block * const b = _find_any_used_block(_addr_tree.first());

	*out_addr = b ? b->addr() : 0;
	return b != nullptr;
}
|
||||
|
||||
|
||||
|
Loading…
x
Reference in New Issue
Block a user