44 #define PRINTF(...) printf(__VA_ARGS__) 46 #define HEAPMEM_DEBUG 1 51 #ifdef PROJECT_CONF_PATH 53 #include PROJECT_CONF_PATH 63 #ifdef HEAPMEM_CONF_ARENA_SIZE 64 #define HEAPMEM_ARENA_SIZE HEAPMEM_CONF_ARENA_SIZE 68 #define HEAPMEM_ARENA_SIZE 1 78 #ifdef HEAPMEM_CONF_SEARCH_MAX 79 #define CHUNK_SEARCH_MAX HEAPMEM_CONF_SEARCH_MAX 81 #define CHUNK_SEARCH_MAX 16 88 #ifdef HEAPMEM_CONF_REALLOC 89 #define HEAPMEM_REALLOC HEAPMEM_CONF_REALLOC 91 #define HEAPMEM_REALLOC 1 98 #ifdef HEAPMEM_CONF_ALIGNMENT 99 #define HEAPMEM_ALIGNMENT HEAPMEM_CONF_ALIGNMENT 101 #define HEAPMEM_ALIGNMENT sizeof(int) 104 #define ALIGN(size) \ 105 (((size) + (HEAPMEM_ALIGNMENT - 1)) & ~(HEAPMEM_ALIGNMENT - 1)) 108 #define NEXT_CHUNK(chunk) \ 109 ((chunk_t *)((char *)(chunk) + sizeof(chunk_t) + (chunk)->size)) 110 #define IS_LAST_CHUNK(chunk) \ 111 ((char *)NEXT_CHUNK(chunk) == &heap_base[heap_usage]) 115 #define GET_CHUNK(ptr) \ 116 ((chunk_t *)((char *)(ptr) - sizeof(chunk_t))) 117 #define GET_PTR(chunk) \ 118 (char *)((chunk) + 1) 121 #define CHUNK_FLAG_ALLOCATED 0x1 123 #define CHUNK_ALLOCATED(chunk) \ 124 ((chunk)->flags & CHUNK_FLAG_ALLOCATED) 125 #define CHUNK_FREE(chunk) \ 126 (~(chunk)->flags & CHUNK_FLAG_ALLOCATED) 133 typedef struct chunk {
/* The heap arena: a statically reserved byte array from which all chunks are carved. */
static char heap_base[HEAPMEM_ARENA_SIZE];
/* Number of arena bytes currently in use (chunk headers + payloads). */
static size_t heap_usage;

/* The first chunk always begins at the start of the arena. */
static chunk_t *first_chunk = (chunk_t *)heap_base;
/* Head of the doubly-linked list of free chunks; NULL when empty. */
static chunk_t *free_list;
155 extend_space(
size_t size)
159 if(heap_usage + size > HEAPMEM_ARENA_SIZE) {
163 old_usage = &heap_base[heap_usage];
171 free_chunk(chunk_t *
const chunk)
173 chunk->flags &= ~CHUNK_FLAG_ALLOCATED;
175 if(IS_LAST_CHUNK(chunk)) {
177 heap_usage -=
sizeof(chunk_t) + chunk->size;
181 chunk->next = free_list;
182 if(free_list != NULL) {
183 free_list->prev = chunk;
192 allocate_chunk(chunk_t *
const chunk)
194 chunk->flags |= CHUNK_FLAG_ALLOCATED;
196 if(chunk == free_list) {
197 free_list = chunk->next;
198 if(free_list != NULL) {
199 free_list->prev = NULL;
202 chunk->prev->next = chunk->next;
205 if(chunk->next != NULL) {
206 chunk->next->prev = chunk->prev;
216 split_chunk(chunk_t *
const chunk,
size_t offset)
220 offset = ALIGN(offset);
222 if(offset +
sizeof(chunk_t) < chunk->size) {
223 new_chunk = (chunk_t *)(GET_PTR(chunk) + offset);
224 new_chunk->size = chunk->size -
sizeof(chunk_t) - offset;
225 new_chunk->flags = 0;
226 free_chunk(new_chunk);
228 chunk->size = offset;
229 chunk->next = chunk->prev = NULL;
236 coalesce_chunks(chunk_t *chunk)
240 for(next = NEXT_CHUNK(chunk);
241 (
char *)next < &heap_base[heap_usage] && CHUNK_FREE(next);
242 next = NEXT_CHUNK(next)) {
243 chunk->size +=
sizeof(chunk_t) + next->size;
244 allocate_chunk(next);
257 i = CHUNK_SEARCH_MAX;
258 for(chunk = free_list; chunk != NULL; chunk = chunk->next) {
262 coalesce_chunks(chunk);
269 get_free_chunk(
const size_t size)
272 chunk_t *chunk, *best;
279 i = CHUNK_SEARCH_MAX;
280 for(chunk = free_list; chunk != NULL; chunk = chunk->next) {
289 if(size <= chunk->size) {
290 if(best == NULL || chunk->size < best->size) {
293 if(best->size == size) {
302 allocate_chunk(best);
303 split_chunk(best, size);
325 heapmem_alloc_debug(
size_t size,
const char *file,
const unsigned line)
334 chunk = get_free_chunk(size);
336 chunk = extend_space(
sizeof(chunk_t) + size);
343 chunk->flags = CHUNK_FLAG_ALLOCATED;
350 PRINTF(
"%s ptr %p size %lu\n", __func__, GET_PTR(chunk), (
unsigned long)size);
352 return GET_PTR(chunk);
369 heapmem_free_debug(
void *ptr,
const char *file,
const unsigned line)
377 chunk = GET_CHUNK(ptr);
379 PRINTF(
"%s ptr %p, allocated at %s:%u\n", __func__, ptr,
380 chunk->file, chunk->line);
405 heapmem_realloc_debug(
void *ptr,
size_t size,
406 const char *file,
const unsigned line)
415 PRINTF(
"%s ptr %p size %u at %s:%u\n",
416 __func__, ptr, (
unsigned)size, file, line);
421 }
else if(size == 0) {
426 chunk = GET_CHUNK(ptr);
433 size_adj = size - chunk->size;
438 split_chunk(chunk, size);
443 if(IS_LAST_CHUNK(chunk)) {
449 if(extend_space(size_adj) != NULL) {
459 coalesce_chunks(chunk);
460 if(chunk->size >= size) {
463 split_chunk(chunk, size);
479 memcpy(newptr, ptr, chunk->size);
492 memset(stats, 0,
sizeof(*stats));
494 for(chunk = first_chunk;
495 (
char *)chunk < &heap_base[heap_usage];
496 chunk = NEXT_CHUNK(chunk)) {
497 if(CHUNK_ALLOCATED(chunk)) {
498 stats->allocated += chunk->size;
500 coalesce_chunks(chunk);
501 stats->available += chunk->size;
503 stats->overhead +=
sizeof(chunk_t);
505 stats->available += HEAPMEM_ARENA_SIZE - heap_usage;
506 stats->footprint = heap_usage;
507 stats->chunks = stats->overhead /
sizeof(chunk_t);
void heapmem_free(void *ptr)
    Deallocate a chunk of memory previously allocated from the heap.
void *heapmem_alloc(size_t size)
    Allocate a chunk of memory in the heap.
(From heapmem.h — the header file for the dynamic heap memory allocator.)
void heapmem_stats(heapmem_stats_t *stats)
    Obtain internal heapmem statistics regarding the allocated chunks.
void *heapmem_realloc(void *ptr, size_t size)
    Reallocate a chunk of memory in the heap.