/*
 * Copyright (C) Thornwave Labs Inc - All Rights Reserved
 * Unauthorized copying of this file, via any medium is strictly prohibited
 * Proprietary and confidential
 * Written by Razvan Turiac <razvan.turiac@thornwave.com>
 */

#include <malloc.h>

#define ALLOCATED_MASK		((size_t)1 << ((sizeof(size_t) << 3) - 1))


static void rtmalloc_insert_in_free_list(rtmalloc_context_t* context, rtmalloc_block_link_t* block)
{
	if (context->free_list_head)
	{
		rtmalloc_block_link_t* prev = NULL;
		for(rtmalloc_block_link_t* i = context->free_list_head; i && (i < block); prev = i, i = i->next);

		if (prev)
		{
			//insert the block after prev
			block->next = prev->next;
			prev->next = block;

			//join with the next block if possible
			if (((uint8_t*)block + context->link_size + block->size) == (uint8_t*)block->next)
			{
				block->size += context->link_size + block->next->size;
				block->next = block->next->next;
			}

			//join prev with block if possible
			if (((uint8_t*)prev + context->link_size + prev->size) == (uint8_t*)block)
			{
				prev->size += context->link_size + block->size;
				prev->next = block->next;
			}
		}
		else
		{
			//insert the block at the beginning of the free list
			block->next = context->free_list_head;
			context->free_list_head = block;

			if (((uint8_t*)block + context->link_size + block->size) == (uint8_t*)block->next)
			{
				block->size += context->link_size + block->next->size;
				block->next = block->next->next;
			}
		}
	}
	else
	{
		block->next = NULL;
		context->free_list_head = block;
	}
}


bool rtmalloc_init(rtmalloc_context_t* context, void* heap, size_t size, size_t alignment)
{
	//check if alignment is a power of 2
	if (alignment & (alignment - 1))
		return false;

	context->alignment_mask = alignment - 1;
	context->link_size = (sizeof(rtmalloc_block_link_t) + context->alignment_mask) & ~context->alignment_mask;

	//align the available memory
	void *aligned_heap = (void*)(((size_t)heap + context->alignment_mask) & ~context->alignment_mask);
	size -= ((uint8_t*)aligned_heap - (uint8_t*)heap);

	context->free_list_head = (rtmalloc_block_link_t*)aligned_heap;
	context->free_list_head->size = size - context->link_size;
	context->free_list_head->next = NULL;

	return true;
}


void* rtmalloc(rtmalloc_context_t *context, size_t size)
{
	void *p = NULL;

	if (((size & ALLOCATED_MASK) == 0) && (size > 0))
	{
		//calculate the size of data required
		size = (size + context->alignment_mask) & ~context->alignment_mask;

		rtmalloc_block_link_t *prev_block = NULL;
		rtmalloc_block_link_t *block = context->free_list_head;
		while(block && (block->size < size))
		{
			prev_block = block;
			block = block->next;
		}

		if (block)
		{
			//get the memory pointer to return
			p = (uint8_t*)block + context->link_size;

			//remove from list
			if (prev_block)
				prev_block->next = block->next;
			else
				context->free_list_head = block->next;

			//split if we have enough size for another allocation
			if ((block->size - size) >= (2 * context->link_size))
			{
				//split a larger block
				rtmalloc_block_link_t *new_block_link = (rtmalloc_block_link_t*)((uint8_t*)block + context->link_size + size);
				new_block_link->size = block->size - context->link_size - size;
				block->size = size;

				rtmalloc_insert_in_free_list(context, new_block_link);
			}

			block->size |= ALLOCATED_MASK;
			block->next = NULL;
		}
	}

	return p;
}


void rtfree(rtmalloc_context_t* context, void* p)
{
	if (p)
	{
		rtmalloc_block_link_t* block = (rtmalloc_block_link_t*)((uint8_t*)p - context->link_size);
		if ((block->size & ALLOCATED_MASK) && (block->next == NULL))
		{
			block->size &= ~ALLOCATED_MASK;
			rtmalloc_insert_in_free_list(context, block);
		}
	}
}


size_t rtmsize(rtmalloc_context_t* context, void* p)
{
	size_t size = 0;

	if (p)
	{
		rtmalloc_block_link_t* block = (rtmalloc_block_link_t*)((uint8_t*)p - context->link_size);
		if ((block->size & ALLOCATED_MASK) && (block->next == NULL))
			size = block->size & ~ALLOCATED_MASK;
	}

	return size;
}


/*
void rtmalloc_dump(rtmalloc_context_t* context)
{
	debug_printf("\nFree block list");

	rtmalloc_block_link_t* block = context->free_list_head;
	while(block)
	{
		debug_printf("\n\t%p (%lu)", block, block->size);
		block = block->next;
	}

	debug_printf("\n\n");
}
*/
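

/*
Usage sketch: a minimal example of driving this allocator, assuming rtmalloc_context_t
and rtmalloc_block_link_t come from the included malloc.h header. The buffer name
example_heap, its size, the alignment value, and the example() function are
illustrative choices, not part of the allocator's API.

	static uint8_t example_heap[4096];
	static rtmalloc_context_t example_ctx;

	void example(void)
	{
		//hand the allocator a raw buffer; alignment must be a power of 2
		if (!rtmalloc_init(&example_ctx, example_heap, sizeof(example_heap), 8))
			return;

		//first-fit allocation; the requested size is rounded up to the alignment
		void* p = rtmalloc(&example_ctx, 100);
		if (p)
		{
			//usable size of the block (may be larger than the 100 bytes requested)
			size_t usable = rtmsize(&example_ctx, p);
			(void)usable;

			//return the block; adjacent free blocks are coalesced in the free list
			rtfree(&example_ctx, p);
		}
	}
*/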