/* Logging configuration: tag messages from this module as "HeapMem";
   emit warnings and above by default. */
40#define LOG_MODULE "HeapMem"
41#define LOG_LEVEL LOG_LEVEL_WARN
/* Compile-time debug switch — presumably gates the per-allocation
   file/line bookkeeping used by the *_debug functions; confirm against
   the full source. */
44#define HEAPMEM_DEBUG 0
51#include "lib/assert.h"
/* Size in bytes of the static heap arena, overridable from the project
   configuration. NOTE(review): this excerpt elides the #else/#endif
   lines of each conditional below; the second #define in each pair is
   the default branch. */
56#ifdef HEAPMEM_CONF_ARENA_SIZE
57#define HEAPMEM_ARENA_SIZE HEAPMEM_CONF_ARENA_SIZE
61#define HEAPMEM_ARENA_SIZE 1
/* Upper bound on free-list entries examined per search (default 16). */
71#ifdef HEAPMEM_CONF_SEARCH_MAX
72#define CHUNK_SEARCH_MAX HEAPMEM_CONF_SEARCH_MAX
74#define CHUNK_SEARCH_MAX 16
/* Whether heapmem_realloc() support is compiled in (default: yes). */
81#ifdef HEAPMEM_CONF_REALLOC
82#define HEAPMEM_REALLOC HEAPMEM_CONF_REALLOC
84#define HEAPMEM_REALLOC 1
/* Alignment applied to chunk payloads; defaults to sizeof(int). */
91#ifdef HEAPMEM_CONF_ALIGNMENT
92#define HEAPMEM_ALIGNMENT HEAPMEM_CONF_ALIGNMENT
94#define HEAPMEM_ALIGNMENT sizeof(int)
/* Continuation of the ALIGN(size) macro (its #define line is elided
   from this excerpt): round SIZE up to the next multiple of
   HEAPMEM_ALIGNMENT. Correct only when the alignment is a power of
   two — TODO confirm that assumption holds for all configurations. */
98 (((size) + (HEAPMEM_ALIGNMENT - 1)) & ~(HEAPMEM_ALIGNMENT - 1))
/* Address of the chunk physically following CHUNK in the arena:
   step over the header and the payload. */
101#define NEXT_CHUNK(chunk) \
102 ((chunk_t *)((char *)(chunk) + sizeof(chunk_t) + (chunk)->size))
/* True iff CHUNK ends exactly at the arena's current high-water mark. */
103#define IS_LAST_CHUNK(chunk) \
104 ((char *)NEXT_CHUNK(chunk) == &heap_base[heap_usage])
/* Map a payload pointer back to its chunk header... */
108#define GET_CHUNK(ptr) \
109 ((chunk_t *)((char *)(ptr) - sizeof(chunk_t)))
/* ...and a chunk header to its payload (first byte after the header). */
110#define GET_PTR(chunk) \
111 (char *)((chunk) + 1)
/* Flag bit recording whether a chunk is currently allocated. */
114#define CHUNK_FLAG_ALLOCATED 0x1
116#define CHUNK_ALLOCATED(chunk) \
117 ((chunk)->flags & CHUNK_FLAG_ALLOCATED)
/* Nonzero iff the allocated bit is clear (bitwise-complement test). */
118#define CHUNK_FREE(chunk) \
119 (~(chunk)->flags & CHUNK_FLAG_ALLOCATED)
/* Header preceding every chunk payload. NOTE(review): the struct body
   (size, flags, next/prev links, and the debug file/line fields
   referenced elsewhere) and the closing "} chunk_t;" are elided from
   this excerpt. */
126typedef struct chunk {
/* The static heap arena itself, aligned as configured. */
139static char heap_base[HEAPMEM_ARENA_SIZE] CC_ALIGN(HEAPMEM_ALIGNMENT);
/* Number of arena bytes currently in use (high-water mark). */
140static size_t heap_usage;
/* The arena always begins with the first chunk header. */
142static chunk_t *first_chunk = (chunk_t *)heap_base;
/* Head of the doubly-linked list of free chunks. */
143static chunk_t *free_list;
/*
 * Check whether PTR points into the currently used part of the heap
 * arena [heap_base, heap_base + heap_usage).
 *
 * BUG FIX (CERT PRE02-C): the original expansion was not parenthesized
 * as a whole, so `!IN_HEAP(ptr)` expanded to
 *   !(ptr >= base) && (ptr < end)
 * which negated only the first comparison — pointers *above* the end of
 * the heap were not rejected by callers' validity guards. Wrapping the
 * entire expansion in parentheses makes negation (and any other
 * surrounding operator) apply to the whole condition.
 */
#define IN_HEAP(ptr) (((char *)(ptr) >= (char *)heap_base) && \
                      ((char *)(ptr) < (char *)heap_base + heap_usage))
/*
 * Carve SIZE fresh bytes off the unused tail of the heap arena.
 * Fails when the request exceeds the remaining arena capacity (the
 * failure return path is elided from this excerpt); otherwise it
 * presumably returns the old end of the used region as the new space —
 * confirm against the full source, since the function's return type and
 * return statements are not visible here.
 */
151extend_space(
size_t size)
/* Not enough untouched arena left to satisfy the request. */
153 if(size > HEAPMEM_ARENA_SIZE - heap_usage) {
/* Remember the old high-water mark before it is advanced (advance
   elided). */
157 char *old_usage = &heap_base[heap_usage];
/*
 * Mark CHUNK as no longer allocated and make its space reusable.
 * If the chunk is the last one in the used region, the arena high-water
 * mark is lowered instead of listing the chunk; otherwise the chunk is
 * pushed onto the head of the doubly-linked free list.
 * NOTE(review): several lines of this function (including its closing
 * braces and the prev-pointer reset) are elided from this excerpt.
 */
165free_chunk(chunk_t *
const chunk)
167 chunk->flags &= ~CHUNK_FLAG_ALLOCATED;
169 if(IS_LAST_CHUNK(chunk)) {
/* Last chunk: simply shrink the used region by header + payload. */
171 heap_usage -=
sizeof(chunk_t) + chunk->size;
/* Otherwise, link the chunk in at the head of the free list. */
175 chunk->next = free_list;
176 if(free_list != NULL) {
177 free_list->prev = chunk;
/*
 * Unlink CHUNK from the doubly-linked free list.
 * Handles the head-of-list case first, then patches the forward and
 * backward neighbour links. NOTE(review): the #else-style branch
 * separator between the two cases is elided from this excerpt.
 */
186remove_chunk_from_free_list(chunk_t *
const chunk)
188 if(chunk == free_list) {
/* CHUNK is the head: advance the head and clear the new head's prev. */
189 free_list = chunk->next;
190 if(free_list != NULL) {
191 free_list->prev = NULL;
/* Not the head: bypass CHUNK in the forward direction... */
194 chunk->prev->next = chunk->next;
/* ...and in the backward direction, if a successor exists. */
197 if(chunk->next != NULL) {
198 chunk->next->prev = chunk->prev;
/*
 * Split CHUNK at OFFSET bytes into its payload, provided the remainder
 * can hold a chunk header plus at least one byte. The tail becomes a
 * new free chunk; CHUNK keeps the (aligned) OFFSET bytes.
 */
208split_chunk(chunk_t *
const chunk,
size_t offset)
/* Round the split point up to the configured alignment. */
210 offset = ALIGN(offset);
/* Only split when the tail can hold a header and a nonempty payload. */
212 if(offset +
sizeof(chunk_t) < chunk->size) {
213 chunk_t *new_chunk = (chunk_t *)(GET_PTR(chunk) + offset);
214 new_chunk->size = chunk->size -
sizeof(chunk_t) - offset;
215 new_chunk->flags = 0;
/* Hand the tail to the free list (or shrink the arena if it is last). */
216 free_chunk(new_chunk);
218 chunk->size = offset;
219 chunk->next = chunk->prev = NULL;
/*
 * Merge CHUNK with any physically adjacent free chunks that follow it,
 * absorbing each neighbour's header and payload into CHUNK's size and
 * removing the neighbour from the free list. Stops at the first
 * allocated chunk or at the end of the used region.
 */
226coalesce_chunks(chunk_t *chunk)
228 for(chunk_t *next = NEXT_CHUNK(chunk);
229 (
char *)next < &heap_base[heap_usage] && CHUNK_FREE(next);
230 next = NEXT_CHUNK(next)) {
/* Absorb the neighbour (header + payload) into CHUNK. */
231 chunk->size +=
sizeof(chunk_t) + next->size;
232 LOG_DBG(
"Coalesce chunk of %zu bytes\n", next->size);
233 remove_chunk_from_free_list(next);
/*
 * Fragment of an elided function (its signature is not in this
 * excerpt): walk the free list, coalescing each chunk with its free
 * physical neighbours — presumably a bounded defragmentation pass.
 * NOTE(review): the counter `i` is declared with CHUNK_SEARCH_MAX but
 * its decrement/termination check is on elided lines; confirm against
 * the full source.
 */
243 int i = CHUNK_SEARCH_MAX;
244 for(chunk_t *chunk = free_list; chunk != NULL; chunk = chunk->next) {
248 coalesce_chunks(chunk);
/*
 * Best-fit search of the free list for a chunk of at least SIZE bytes.
 * At most CHUNK_SEARCH_MAX candidates are considered (the bound's
 * decrement and loop-exit lines are elided from this excerpt). An exact
 * size match ends the search early. The winning chunk is unlinked from
 * the free list and split so only SIZE bytes remain in it.
 */
255get_free_chunk(
const size_t size)
260 chunk_t *best = NULL;
262 int i = CHUNK_SEARCH_MAX;
263 for(chunk_t *chunk = free_list; chunk != NULL; chunk = chunk->next) {
/* Candidate is large enough... */
272 if(size <= chunk->size) {
/* ...and a better (smaller) fit than anything seen so far.
   (The assignment to `best` is on an elided line.) */
273 if(best == NULL || chunk->size < best->size) {
/* Exact fit: no better candidate can exist; stop searching. */
276 if(best->size == size) {
/* Claim the winner: unlink it and trim it down to SIZE bytes. */
285 remove_chunk_from_free_list(best);
286 split_chunk(best, size);
/*
 * Allocate SIZE bytes from the heap arena (debug variant: FILE/LINE
 * identify the caller; the lines recording them are elided from this
 * excerpt). Tries to recycle a free chunk first, then falls back to
 * extending the used region of the arena. Returns a pointer to the
 * payload; the failure paths are elided — presumably NULL on failure,
 * confirm against the full source.
 */
308heapmem_alloc_debug(
size_t size,
const char *file,
const unsigned line)
/* Reject requests that can never fit in the arena at all. */
314 if(size > HEAPMEM_ARENA_SIZE) {
/* First choice: recycle a free chunk of sufficient size. */
320 chunk_t *chunk = get_free_chunk(size);
/* Fall back to carving fresh space (header + payload) off the arena. */
322 chunk = extend_space(
sizeof(chunk_t) + size);
329 chunk->flags = CHUNK_FLAG_ALLOCATED;
336 LOG_DBG(
"%s ptr %p size %zu\n", __func__, GET_PTR(chunk), size);
/* Hand back the payload, which starts right after the chunk header. */
338 return GET_PTR(chunk);
/*
 * Release the allocation at PTR (debug variant; the guard conditions
 * and early returns around the warnings below are on elided lines).
 * Validates that PTR lies inside the heap and that its chunk is
 * currently marked allocated before freeing, warning on both kinds of
 * misuse.
 */
355heapmem_free_debug(
void *ptr,
const char *file,
const unsigned line)
/* PTR outside the arena: warn and refuse (guard condition elided). */
361 LOG_WARN(
"%s: ptr %p is not in the heap\n", __func__, ptr);
/* Recover the chunk header sitting just before the payload. */
365 chunk_t *chunk = GET_CHUNK(ptr);
/* Double-free / stray-pointer detection. */
366 if(!CHUNK_ALLOCATED(chunk)) {
367 LOG_WARN(
"%s: ptr %p has already been deallocated\n", __func__, ptr);
/* chunk->file / chunk->line are the debug fields stored at allocation
   time (field definitions elided from this excerpt). */
372 LOG_DBG(
"%s: ptr %p, allocated at %s:%u\n", __func__, ptr,
373 chunk->file, chunk->line);
/*
 * Resize the allocation at PTR to SIZE bytes (debug variant).
 * Visible strategy, in order:
 *   - validate PTR and its chunk (warnings below; guards elided),
 *   - shrink in place by splitting when SIZE is not larger,
 *   - grow in place by extending the arena when the chunk is last,
 *   - otherwise coalesce with following free chunks and re-split,
 *   - as a last resort (allocation of `newptr` elided) move the data.
 * NOTE(review): many lines, including all return paths, are elided from
 * this excerpt; comments describe only what is shown.
 */
399heapmem_realloc_debug(
void *ptr,
size_t size,
400 const char *file,
const unsigned line)
/* PTR outside the arena: warn and refuse (guard condition elided). */
406 LOG_WARN(
"%s: ptr %p is not in the heap\n", __func__, ptr);
411 LOG_DBG(
"%s: ptr %p size %zu at %s:%u\n",
412 __func__, ptr, size, file, line);
/* A request larger than the whole arena can never succeed. */
416 if(size > HEAPMEM_ARENA_SIZE) {
423 }
else if(size == 0) {
428 chunk_t *chunk = GET_CHUNK(ptr);
429 if(!CHUNK_ALLOCATED(chunk)) {
430 LOG_WARN(
"%s: ptr %p is not allocated\n", __func__, ptr);
/* Signed difference between requested and current payload size.
   NOTE(review): size_t operands are narrowed into an int here — this
   can overflow for very large sizes; confirm against the full source. */
440 int size_adj = size - chunk->size;
/* Shrinking (or equal): give the surplus back via a split. */
445 split_chunk(chunk, size);
/* Growing: if this is the last chunk, try extending the arena in place. */
450 if(IS_LAST_CHUNK(chunk)) {
456 if(extend_space(size_adj) != NULL) {
/* Not last: try to absorb adjacent free chunks instead. */
466 coalesce_chunks(chunk);
467 if(chunk->size >= size) {
/* Coalescing produced enough room; trim the excess off again. */
470 split_chunk(chunk, size);
/* Fallback path (allocation of `newptr` elided): move the payload.
   NOTE(review): this copies chunk->size bytes — presumably the old
   payload size at this point; verify the ordering in the full source. */
486 memcpy(newptr, ptr, chunk->size);
/*
 * Fragment of heapmem_stats() (signature and closing braces elided):
 * zero STATS, then walk every chunk in the used region of the arena,
 * accounting allocated payload bytes and header overhead for allocated
 * chunks, and — after coalescing — available bytes for free ones. The
 * untouched tail of the arena also counts as available.
 */
497 memset(stats, 0,
sizeof(*stats));
499 for(chunk_t *chunk = first_chunk;
500 (
char *)chunk < &heap_base[heap_usage];
501 chunk = NEXT_CHUNK(chunk)) {
502 if(CHUNK_ALLOCATED(chunk)) {
503 stats->allocated += chunk->size;
504 stats->overhead +=
sizeof(chunk_t);
/* Free chunk: merge forward first so availability is not fragmented. */
506 coalesce_chunks(chunk);
507 stats->available += chunk->size;
/* Arena space never yet carved into chunks is also available. */
510 stats->available += HEAPMEM_ARENA_SIZE - heap_usage;
511 stats->footprint = heap_usage;
/* One header per chunk, so overhead / header-size = chunk count. */
512 stats->chunks = stats->overhead /
sizeof(chunk_t);
Default definitions of C compiler quirk work-arounds.
void * heapmem_realloc(void *ptr, size_t size)
Reallocate a chunk of memory in the heap.
void * heapmem_alloc(size_t size)
Allocate a chunk of memory in the heap.
void heapmem_stats(heapmem_stats_t *stats)
Obtain internal heapmem statistics regarding the allocated chunks.
bool heapmem_free(void *ptr)
Deallocate a chunk of memory.
Header file for the dynamic heap memory allocator.
Header file for the logging system.