Allocator uses protocols. Fix bug where it was not possible to pass a ref variable as a ref variable. Correct codegen for !anyptr.
lerno committed Oct 14, 2023
1 parent 54f32ed commit c9b0e2c
Showing 31 changed files with 1,461 additions and 1,453 deletions.
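
In outline, the change the title describes: before this commit an allocator registered a single dispatch function (see the removed `self.function = &arena_allocator_function;` and its `switch (kind)` body in the arena diff below), while after it the type declares `(Allocator)` and supplies `@dynamic` methods that the protocol dispatches to. A hedged sketch of the new shape follows; `NullAllocator` and its refuse-everything behavior are invented for illustration, only the method signatures mirror the arena_allocator.c3 diff below, and whether these three methods are the protocol's complete required surface is an assumption.

module example_allocator;
import std::core::mem::allocator;

// Illustrative-only type: an allocator that refuses every request.
struct NullAllocator (Allocator)
{
	usz denied;  // counts refused requests (illustrative state)
}

fn void*! NullAllocator.acquire(&self, usz size, bool clear, usz alignment, usz offset) @dynamic
{
	// Always fail; CHUNK_TOO_LARGE is reused from the arena code as the error value.
	self.denied++;
	return AllocationFailure.CHUNK_TOO_LARGE?;
}

fn void*! NullAllocator.resize(&self, void* old_pointer, usz size, usz alignment, usz offset) @dynamic
{
	self.denied++;
	return AllocationFailure.CHUNK_TOO_LARGE?;
}

fn void NullAllocator.release(&self, void* ptr, bool aligned) @dynamic
{
	// Nothing is ever handed out, so there is nothing to reclaim.
}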
lib/std/collections/linkedlist.c3 (2 changes: 1 addition & 1 deletion)
@@ -40,7 +40,7 @@ fn void LinkedList.tinit(&self) => self.init(mem::temp()) @inline;
  **/
 macro void LinkedList.free_node(&self, Node* node) @private
 {
-	self.allocator.free(node)!!;
+	self.allocator.free(node);
 }
 macro Node* LinkedList.alloc_node(&self) @private
 {
lib/std/collections/list.c3 (2 changes: 1 addition & 1 deletion)
@@ -19,7 +19,7 @@ struct List (Printable)
 }
 
 /**
- * @require using != null "A valid allocator must be provided"
+ * @require using "A valid allocator must be provided"
  **/
 fn void List.init(&self, usz initial_capacity = 16, Allocator* using = mem::heap())
 {
lib/std/collections/map.c3 (6 changes: 3 additions & 3 deletions)
@@ -24,7 +24,7 @@ struct HashMap
  * @require load_factor > 0.0 "The load factor must be higher than 0"
  * @require !map.allocator "Map was already initialized"
  * @require capacity < MAXIMUM_CAPACITY "Capacity cannot exceed maximum"
- * @require using != null "The allocator must be non-null"
+ * @require (bool)using "The allocator must be non-null"
  **/
 fn void HashMap.init(&map, uint capacity = DEFAULT_INITIAL_CAPACITY, float load_factor = DEFAULT_LOAD_FACTOR, Allocator* using = mem::heap())
 {
@@ -54,7 +54,7 @@ fn void HashMap.tinit(&map, uint capacity = DEFAULT_INITIAL_CAPACITY, float load
  **/
 fn bool HashMap.is_initialized(&map)
 {
-	return map.allocator != null;
+	return (bool)map.allocator;
 }
 
 fn void HashMap.init_from_map(&map, HashMap* other_map, Allocator* using = mem::heap())
@@ -354,7 +354,7 @@ fn void HashMap.put_for_create(&map, Key key, Value value) @private
 
 fn void HashMap.free_internal(&map, void* ptr) @inline @private
 {
-	map.allocator.free(ptr)!!;
+	map.allocator.free(ptr);
 }
 
 fn bool HashMap.remove_entry_for_key(&map, Key key) @private
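
The three collection diffs above share one pattern: with allocators held as protocol references, the null test becomes a truthiness test (`(bool)using`, `!map.allocator`) and `free` no longer returns a failable, so the `!!` unwrap disappears. A minimal sketch of the same pattern on a hypothetical container; `Stack` and its fields are invented for illustration and are not part of this commit.

struct Stack
{
	Allocator* allocator;
	void* storage;
}

/**
 * @require (bool)using "A valid allocator must be provided"
 * @require !self.allocator "Stack was already initialized"
 **/
fn void Stack.init(&self, Allocator* using = mem::heap())
{
	self.allocator = using;  // stored for later free() calls
}

fn bool Stack.is_initialized(&self) => (bool)self.allocator;

fn void Stack.free_storage(&self)
{
	// free() no longer returns a failable, so no !! is needed.
	self.allocator.free(self.storage);
}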
lib/std/core/allocators/arena_allocator.c3 (89 changes: 30 additions & 59 deletions)
@@ -3,9 +3,8 @@
 // a copy of which can be found in the LICENSE_STDLIB file.
 module std::core::mem::allocator;
 
-struct ArenaAllocator
+struct ArenaAllocator (Allocator)
 {
-	inline Allocator allocator;
 	char[] data;
 	usz used;
 }
@@ -15,83 +14,46 @@ struct ArenaAllocator
  **/
 fn void ArenaAllocator.init(&self, char[] data)
 {
-	self.function = &arena_allocator_function;
 	self.data = data;
 	self.used = 0;
 }
 
-fn void ArenaAllocator.reset(&self)
+fn void ArenaAllocator.clear(&self)
 {
 	self.used = 0;
 }
 
 
 module std::core::mem::allocator @private;
 
-struct ArenaAllocatorHeader
+struct ArenaAllocatorHeader @local
 {
 	usz size;
 	char[*] data;
 }
 
-/**
- * @require !alignment || math::is_power_of_2(alignment)
- * @require data `unexpectedly missing the allocator`
- */
-fn void*! arena_allocator_function(Allocator* data, usz size, usz alignment, usz offset, void* old_pointer, AllocationKind kind)
+fn void ArenaAllocator.release(&self, void* ptr, bool) @dynamic
 {
-	ArenaAllocator* arena = (ArenaAllocator*)data;
-	bool clear = false;
-	switch (kind)
+	if (!ptr) return;
+	assert((uptr)ptr >= (uptr)self.data.ptr, "Pointer originates from a different allocator.");
+	ArenaAllocatorHeader* header = ptr - ArenaAllocatorHeader.sizeof;
+	// Reclaim memory if it's the last element.
+	if (ptr + header.size == &self.data[self.used])
 	{
-		case CALLOC:
-		case ALIGNED_CALLOC:
-			clear = true;
-			nextcase;
-		case ALLOC:
-		case ALIGNED_ALLOC:
-			assert(!old_pointer, "Unexpected old pointer for alloc.");
-			if (!size) return null;
-			alignment = alignment_for_allocation(alignment);
-			void* mem = arena._alloc(size, alignment, offset)!;
-			if (clear) mem::clear(mem, size, mem::DEFAULT_MEM_ALIGNMENT);
-			return mem;
-		case ALIGNED_REALLOC:
-		case REALLOC:
-			if (!size) nextcase FREE;
-			if (!old_pointer) nextcase ALLOC;
-			alignment = alignment_for_allocation(alignment);
-			return arena._realloc(old_pointer, size, alignment, offset)!;
-		case ALIGNED_FREE:
-		case FREE:
-			if (!old_pointer) return null;
-			assert((uptr)old_pointer >= (uptr)arena.data.ptr, "Pointer originates from a different allocator.");
-			ArenaAllocatorHeader* header = old_pointer - ArenaAllocatorHeader.sizeof;
-			// Reclaim memory if it's the last element.
-			if (old_pointer + header.size == &arena.data[arena.used])
-			{
-				arena.used -= header.size + ArenaAllocatorHeader.sizeof;
-			}
-			return null;
-		case MARK:
-			return (void*)(uptr)arena.used;
-		case RESET:
-			arena.used = size;
-			return null;
+		self.used -= header.size + ArenaAllocatorHeader.sizeof;
 	}
 }
+fn usz ArenaAllocator.mark(&self) @dynamic => self.used;
+fn void ArenaAllocator.reset(&self, usz mark) @dynamic => self.used = mark;
 
 /**
- * @require alignment > 0 `alignment must be non zero`
- * @require math::is_power_of_2(alignment)
- * @require size > 0
+ * @require !alignment || math::is_power_of_2(alignment)
  * @require alignment <= mem::MAX_MEMORY_ALIGNMENT `alignment too big`
  * @require offset <= mem::MAX_MEMORY_ALIGNMENT `offset too big`
  * @require offset <= size && offset >= 0
  * @require mem::aligned_offset(offset, ArenaAllocatorHeader.alignof) == offset
  **/
-fn void*! ArenaAllocator._alloc(&self, usz size, usz alignment, usz offset)
+fn void*! ArenaAllocator.acquire(&self, usz size, bool clear, usz alignment, usz offset) @dynamic
 {
+	if (!size) return null;
+	alignment = alignment_for_allocation(alignment);
 	usz total_len = self.data.len;
 	if (size > total_len) return AllocationFailure.CHUNK_TOO_LARGE?;
 	void* start_mem = self.data.ptr;
@@ -103,20 +65,29 @@ fn void*! ArenaAllocator._alloc(&self, usz size, usz alignment, usz offset)
 	void* mem = aligned_pointer_to_offset - offset;
 	ArenaAllocatorHeader* header = mem - ArenaAllocatorHeader.sizeof;
 	header.size = size;
+	if (clear) mem::clear(mem, size, mem::DEFAULT_MEM_ALIGNMENT);
 	return mem;
 }
 
 /**
- * @require alignment > 0 `alignment must be non zero`
- * @require math::is_power_of_2(alignment)
- * @require size > 0
+ * @require !alignment || math::is_power_of_2(alignment)
  * @require alignment <= mem::MAX_MEMORY_ALIGNMENT `alignment too big`
  * @require offset <= mem::MAX_MEMORY_ALIGNMENT `offset too big`
  * @require offset <= size && offset >= 0
  * @require mem::aligned_offset(offset, ArenaAllocatorHeader.alignof) == offset
  **/
-fn void*! ArenaAllocator._realloc(&self, void *old_pointer, usz size, usz alignment, usz offset)
+fn void*! ArenaAllocator.resize(&self, void *old_pointer, usz size, usz alignment, usz offset) @dynamic
 {
+	if (!size)
+	{
+		self.release(old_pointer, alignment > 0);
+		return null;
+	}
+	if (!old_pointer)
+	{
+		return self.acquire(size, true, alignment, offset);
+	}
+	alignment = alignment_for_allocation(alignment);
 	assert(old_pointer >= self.data.ptr, "Pointer originates from a different allocator.");
 	usz total_len = self.data.len;
 	if (size > total_len) return AllocationFailure.CHUNK_TOO_LARGE?;
@@ -139,7 +110,7 @@ fn void*! ArenaAllocator._realloc(&self, void *old_pointer, usz size, usz alignm
 		return old_pointer;
 	}
 	// Otherwise just allocate new memory.
-	void* mem = self._alloc(size, alignment, offset)!;
+	void* mem = self.acquire(size, false, alignment, offset)!;
 	mem::copy(mem, old_pointer, old_size, mem::DEFAULT_MEM_ALIGNMENT, mem::DEFAULT_MEM_ALIGNMENT);
 	return mem;
 }
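
Taken together, the reworked arena exposes `init` over a byte slice, the protocol methods `acquire`/`resize`/`release`, `mark`/`reset` in place of the old MARK and RESET allocation kinds, and `clear` in place of the old zero-argument `reset`. A hedged usage sketch follows; direct method calls are shown for brevity even though this part of the module is declared `@private` (real callers would normally go through the Allocator protocol), and `buf[..]` slicing plus `!!` unwrapping of the failable return are assumed.

fn void arena_demo()
{
	char[256] buf;
	ArenaAllocator arena;
	arena.init(buf[..]);                             // arena backed by a stack buffer

	usz checkpoint = arena.mark();                   // remember the high-water mark
	void* block = arena.acquire(64, true, 0, 0)!!;   // zeroed 64-byte block
	// ... use block ...
	arena.reset(checkpoint);                         // roll back past `block`

	arena.clear();                                   // was `reset()`: drop everything
}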