1 change: 1 addition & 0 deletions include/private/error.h
@@ -27,6 +27,7 @@ enum {

/* Memory Protection Errors */
ERR_STACK_CHECK, /* Stack overflow or corruption detected */
ERR_HEAP_CORRUPT, /* Heap corruption or invalid free detected */

/* IPC and Synchronization Errors */
ERR_PIPE_ALLOC, /* Pipe allocation failed */
1 change: 1 addition & 0 deletions kernel/error.c
@@ -22,6 +22,7 @@ static const struct error_code error_desc[] = {

/* stack guard */
{ERR_STACK_CHECK, "stack corruption"},
{ERR_HEAP_CORRUPT, "heap corruption or invalid free"},

/* IPC / sync */
{ERR_PIPE_ALLOC, "pipe allocation"},
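For context, kernel/error.c maps each error code to a short description string. A minimal sketch of how such a table could be consumed, assuming hypothetical field names code and desc and an ARRAY_SIZE helper, none of which are shown in this PR:

#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))

/* Hypothetical lookup over error_desc; the kernel may expose a different API. */
static const char *error_to_str(int code)
{
    for (size_t i = 0; i < ARRAY_SIZE(error_desc); i++) {
        if (error_desc[i].code == code)
            return error_desc[i].desc;
    }
    return "unknown error";
}

Under that assumption, the new ERR_HEAP_CORRUPT entry would resolve to "heap corruption or invalid free".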
118 changes: 87 additions & 31 deletions lib/malloc.c
@@ -4,6 +4,7 @@
#include <sys/task.h>
#include <types.h>

#include "private/error.h"
#include "private/utils.h"

/* Memory allocator using first-fit strategy with selective coalescing.
@@ -44,15 +45,21 @@ static uint32_t free_blocks_count; /* track fragmentation */
/* Validate block integrity */
static inline bool validate_block(memblock_t *block)
{
if (!IS_VALID_BLOCK(block))
if (unlikely(!IS_VALID_BLOCK(block)))
return false;

size_t size = GET_SIZE(block);
if (!size || size > MALLOC_MAX_SIZE)
if (unlikely(!size || size > MALLOC_MAX_SIZE))
return false;

/* Check if block extends beyond heap */
if ((uint8_t *) block + sizeof(memblock_t) + size > (uint8_t *) heap_end)
if (unlikely((uint8_t *) block + sizeof(memblock_t) + size >
(uint8_t *) heap_end))
return false;

if (unlikely(block->next &&
(uint8_t *) block + sizeof(memblock_t) + GET_SIZE(block) !=
(uint8_t *) block->next))
return false;

return true;
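The newly added check makes validate_block() enforce a stronger invariant: a block's next pointer, when non-NULL, must point at the physically adjacent block. A sketch of that adjacency predicate as a standalone helper (hypothetical, not part of the PR; it reuses the file's memblock_t and GET_SIZE):

static inline bool block_adjacent_to_next(memblock_t *b)
{
    return !b->next ||
           (uint8_t *) b + sizeof(memblock_t) + GET_SIZE(b) ==
               (uint8_t *) b->next;
}

Because every block that passes validation now satisfies this, the merge paths in free() below can drop their explicit adjacency comparisons.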
@@ -69,18 +76,17 @@ void free(void *ptr)
memblock_t *p = ((memblock_t *) ptr) - 1;

/* Validate the block being freed */
if (!validate_block(p) || !IS_USED(p)) {
if (unlikely(!validate_block(p) || !IS_USED(p))) {
CRITICAL_LEAVE();
panic(ERR_HEAP_CORRUPT);
return; /* Invalid or double-free */
}
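/*
 * Illustration (not part of this change): with the check above, passing an
 * invalid pointer or freeing the same pointer twice is reported instead of
 * silently corrupting the free list. Hypothetical sequence:
 *
 *     void *q = malloc(32);
 *     free(q);
 *     free(q);   second call: the block is already marked free, so
 *                IS_USED(p) fails and free() panics with ERR_HEAP_CORRUPT
 */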

MARK_FREE(p);
free_blocks_count++;

/* Forward merge if the next block is free and physically adjacent */
if (p->next && !IS_USED(p->next) &&
(uint8_t *) p + sizeof(memblock_t) + GET_SIZE(p) ==
(uint8_t *) p->next) {
if (p->next && !IS_USED(p->next)) {
p->size = GET_SIZE(p) + sizeof(memblock_t) + GET_SIZE(p->next);
p->next = p->next->next;
free_blocks_count--;
@@ -94,9 +100,12 @@ void free(void *ptr)
current = current->next;
}

if (prev && !IS_USED(prev) &&
(uint8_t *) prev + sizeof(memblock_t) + GET_SIZE(prev) ==
(uint8_t *) p) {
if (prev && !IS_USED(prev)) {
if (unlikely(!validate_block(prev))) {
CRITICAL_LEAVE();
panic(ERR_HEAP_CORRUPT);
return;
}
prev->size = GET_SIZE(prev) + sizeof(memblock_t) + GET_SIZE(p);
prev->next = p->next;
free_blocks_count--;
@@ -109,22 +118,45 @@
static void selective_coalesce(void)
{
memblock_t *p = first_free;
uint32_t coalesced = 0;

while (p && p->next) {
/* Merge only when blocks are FREE *and* adjacent in memory */
uint8_t *pend = (uint8_t *) p + sizeof(memblock_t) + GET_SIZE(p);
if (!IS_USED(p) && !IS_USED(p->next) && pend == (uint8_t *) p->next) {
if (unlikely(!validate_block(p))) {
panic(ERR_HEAP_CORRUPT);
return;
}
if (!IS_USED(p) && !IS_USED(p->next)) {
p->size = GET_SIZE(p) + sizeof(memblock_t) + GET_SIZE(p->next);
p->next = p->next->next;
coalesced++;
free_blocks_count--;
} else {
p = p->next;
}
}
}

static inline void split_block(memblock_t *block, size_t size)
{
size_t remaining;
memblock_t *new_block;

if (unlikely(size > GET_SIZE(block))) { /* exact fit (==) is legal */
panic(ERR_HEAP_CORRUPT);
return;
}
remaining = GET_SIZE(block) - size;
/* Split only when remaining memory is large enough */
if (remaining < sizeof(memblock_t) + MALLOC_MIN_SIZE)
return;
new_block = (memblock_t *) ((size_t) block + sizeof(memblock_t) + size);
new_block->next = block->next;
new_block->size = remaining - sizeof(memblock_t);
MARK_FREE(new_block);
block->next = new_block;
block->size = size | IS_USED(block);
free_blocks_count++; /* New free block created */
}
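/*
 * Worked example (illustrative, not from this change): assume
 * sizeof(memblock_t) == 8 and MALLOC_MIN_SIZE == 4. Splitting a free block
 * with a 64-byte payload for a 24-byte request gives remaining = 40; since
 * 40 >= 8 + 4, a new free block with a 40 - 8 = 32 byte payload is carved
 * out and linked after the allocation. Had the request been 56 bytes,
 * remaining = 8 < 12 and the whole block would be handed out unsplit.
 */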

/* O(n) first-fit allocation with selective coalescing */
void *malloc(uint32_t size)
{
@@ -146,29 +178,22 @@ void *malloc(uint32_t size)

memblock_t *p = first_free;
while (p) {
if (!validate_block(p)) {
if (unlikely(!validate_block(p))) {
CRITICAL_LEAVE();
panic(ERR_HEAP_CORRUPT);
return NULL; /* Heap corruption detected */
}

if (!IS_USED(p) && GET_SIZE(p) >= size) {
size_t remaining = GET_SIZE(p) - size;

/* Split block only if remainder is large enough to be useful */
if (remaining >= sizeof(memblock_t) + MALLOC_MIN_SIZE) {
memblock_t *new_block =
(memblock_t *) ((size_t) p + sizeof(memblock_t) + size);
new_block->next = p->next;
new_block->size = remaining - sizeof(memblock_t);
MARK_FREE(new_block);
p->next = new_block;
p->size = size;
free_blocks_count++; /* New free block created */
}
split_block(p, size);

MARK_USED(p);
if (free_blocks_count > 0)
free_blocks_count--;
if (unlikely(free_blocks_count == 0)) {
CRITICAL_LEAVE();
panic(ERR_HEAP_CORRUPT);
return NULL;
}
free_blocks_count--;

CRITICAL_LEAVE();
return (void *) (p + 1);
@@ -213,7 +238,7 @@ void *calloc(uint32_t nmemb, uint32_t size)
if (unlikely(nmemb && size > MALLOC_MAX_SIZE / nmemb))
return NULL;
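/*
 * Illustration (not part of this change): the division-based guard above
 * rejects nmemb * size overflow without performing the multiplication.
 * With 32-bit uint32_t, nmemb = 0x10000 and size = 0x10000 would wrap to 0
 * in nmemb * size, but size > MALLOC_MAX_SIZE / nmemb already fails the
 * request, assuming MALLOC_MAX_SIZE is well below 4 GiB.
 */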

uint32_t total_size = nmemb * size;
uint32_t total_size = ALIGN4(nmemb * size);
void *buf = malloc(total_size);

if (buf)
@@ -236,11 +261,15 @@ void *realloc(void *ptr, uint32_t size)
return NULL;
}

size = ALIGN4(size);

memblock_t *old_block = ((memblock_t *) ptr) - 1;

/* Validate the existing block */
if (!validate_block(old_block) || !IS_USED(old_block))
if (unlikely(!validate_block(old_block) || !IS_USED(old_block))) {
panic(ERR_HEAP_CORRUPT);
return NULL;
}

size_t old_size = GET_SIZE(old_block);

@@ -249,6 +278,33 @@
old_size - size < sizeof(memblock_t) + MALLOC_MIN_SIZE)
return ptr;

/* fast path for shrinking */
if (size <= old_size) {
split_block(old_block, size);
/* Trigger coalescing only when fragmentation is high */
if (free_blocks_count > COALESCE_THRESHOLD)
selective_coalesce();
CRITICAL_LEAVE();
return (void *) (old_block + 1);
}

/* fast path for growing */
if (old_block->next && !IS_USED(old_block->next) &&
GET_SIZE(old_block) + sizeof(memblock_t) + GET_SIZE(old_block->next) >=
size) {
old_block->size = GET_SIZE(old_block) + sizeof(memblock_t) +
GET_SIZE(old_block->next);
old_block->next = old_block->next->next;
free_blocks_count--;
split_block(old_block, size);
/* Trigger coalescing only when fragmentation is high */
if (free_blocks_count > COALESCE_THRESHOLD)
selective_coalesce();
CRITICAL_LEAVE();
return (void *) (old_block + 1);
}


void *new_buf = malloc(size);
if (new_buf) {
memcpy(new_buf, ptr, min(old_size, size));
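Taken together, the realloc() changes add two in-place fast paths (shrink via split_block(), grow by absorbing a free adjacent successor) before falling back to the original malloc-copy-free sequence. A minimal usage sketch, assuming the kernel's allocator declarations are in scope; the function below is hypothetical and only illustrates the intended behavior:

void heap_realloc_demo(void)
{
    char *p = malloc(64);
    if (!p)
        return;

    /* Shrink in place: the unused tail is split off as a new free block;
     * no allocation or copy happens. */
    char *q = realloc(p, 16);
    if (q)
        p = q;

    /* Grow in place: if the physically adjacent successor (likely the block
     * just split off above) is free and large enough, it is absorbed and
     * re-split to size; again no copy. */
    q = realloc(p, 48);
    if (q)
        p = q;

    free(p);
}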