about summary refs log tree commit diff stats
path: root/src/malloc.c
diff options
context:
space:
mode:
author: Thomas Vanbesien <tvanbesi@proton.me> 2026-02-27 11:13:19 +0100
committer: Thomas Vanbesien <tvanbesi@proton.me> 2026-02-27 11:13:19 +0100
commit: fb19b1c35f6ec52c075b214d2f0416900a7c1bbe (patch)
tree: dd323940d5e60253e60626c5065393ad4ec3a24d /src/malloc.c
parent: 8849d801b9d3767390e3e1ed6b562db738ac1bcb (diff)
download: malloc-fb19b1c35f6ec52c075b214d2f0416900a7c1bbe.tar.gz
download: malloc-fb19b1c35f6ec52c075b214d2f0416900a7c1bbe.zip
Add chunk splitting and free-chunk search, use sysconf for page size
malloc now searches zones for a free chunk, splits it to the requested aligned size, and allocates a new zone when none have space. Replace getpagesize() with sysconf(_SC_PAGESIZE).
Diffstat (limited to 'src/malloc.c')
-rw-r--r--  src/malloc.c | 56
1 file changed, 50 insertions(+), 6 deletions(-)
diff --git a/src/malloc.c b/src/malloc.c
index 268e0d1..8c70b08 100644
--- a/src/malloc.c
+++ b/src/malloc.c
@@ -8,6 +8,41 @@
t_heap g_heap = { 0 };
+static t_chunk *
+_s_find_free_chunk (t_zone *zone, size_t aligned_size)
+{
+ t_chunk *chunk;
+
+ chunk = (t_chunk *)((char *)zone + ALIGN (sizeof (t_zone)));
+ while (chunk)
+ {
+ if (chunk->is_free && chunk->size >= aligned_size)
+ return (chunk);
+ chunk = chunk->next;
+ }
+ return (NULL);
+}
+
+static void
+_s_split_chunk (t_chunk *chunk, size_t aligned_size)
+{
+ t_chunk *remainder;
+ size_t min_split;
+
+ min_split = ALIGN (sizeof (t_chunk)) + ALIGNMENT;
+ if (chunk->size >= aligned_size + min_split)
+ {
+ remainder = (t_chunk *)((char *)chunk + ALIGN (sizeof (t_chunk))
+ + aligned_size);
+ remainder->size = chunk->size - aligned_size - ALIGN (sizeof (t_chunk));
+ remainder->next = chunk->next;
+ remainder->is_free = 1;
+ chunk->size = aligned_size;
+ chunk->next = remainder;
+ }
+ chunk->is_free = 0;
+}
+
void *
malloc (size_t size)
{
@@ -15,6 +50,7 @@ malloc (size_t size)
t_zone *zone;
t_chunk *chunk;
size_t alloc_max;
+ size_t aligned_size;
if (size == 0)
size = 1;
@@ -33,16 +69,24 @@ malloc (size_t size)
zone_list = &g_heap.large;
alloc_max = size;
}
- if (*zone_list == NULL)
+ aligned_size = ALIGN (size);
+ chunk = NULL;
+ zone = *zone_list;
+ while (zone && !chunk)
+ {
+ chunk = _s_find_free_chunk (zone, aligned_size);
+ if (!chunk)
+ zone = zone->next;
+ }
+ if (!chunk)
{
zone = zone_new (alloc_max);
- if (zone == NULL)
+ if (!zone)
return (NULL);
+ zone->next = *zone_list;
*zone_list = zone;
+ chunk = _s_find_free_chunk (zone, aligned_size);
}
- else
- zone = *zone_list;
- chunk = (t_chunk *)((char *)zone + ALIGN (sizeof (t_zone)));
- chunk->is_free = 0;
+ _s_split_chunk (chunk, aligned_size);
return ((char *)chunk + ALIGN (sizeof (t_chunk)));
}