From f3248db01cebd393ff7734383eb08eecd0d5cfbc Mon Sep 17 00:00:00 2001 From: Michael Sippel Date: Fri, 8 Dec 2023 15:21:29 +0100 Subject: [PATCH] ChunkedList: fix removal of unused chunks created through race in `push()` by decrementing its item count after each insertion of a new chunk --- redGrapes/util/atomic_list.hpp | 11 ++++++++--- redGrapes/util/chunked_list.hpp | 26 ++++++++++++++------------ 2 files changed, 22 insertions(+), 15 deletions(-) diff --git a/redGrapes/util/atomic_list.hpp b/redGrapes/util/atomic_list.hpp index c0fb1f3f..105690ff 100644 --- a/redGrapes/util/atomic_list.hpp +++ b/redGrapes/util/atomic_list.hpp @@ -159,7 +159,7 @@ struct AtomicList /* initializes a new chunk */ - void allocate_item() + auto allocate_item() { TRACE_EVENT("Allocator", "AtomicList::allocate_item()"); @@ -174,7 +174,7 @@ struct AtomicList */ StaticAlloc chunk_alloc( this->alloc, chunk_size ); uintptr_t base = (uintptr_t)chunk_alloc.ptr; - append_item( + return append_item( std::allocate_shared< ItemPtr >( chunk_alloc, @@ -189,8 +189,10 @@ struct AtomicList } /* atomically appends a floating chunk to this list + * and returns the previous head to which the new_head + * is now linked. 
*/ - void append_item( std::shared_ptr< ItemPtr > new_head ) + MutBackwardIterator append_item( std::shared_ptr< ItemPtr > new_head ) { TRACE_EVENT("Allocator", "AtomicList::append_item()"); bool append_successful = false; @@ -199,6 +201,9 @@ struct AtomicList std::shared_ptr< ItemPtr > old_head = std::atomic_load( &head ); std::atomic_store( &new_head->prev, old_head ); append_successful = std::atomic_compare_exchange_strong( &head, &old_head, new_head ); + + if( append_successful ) + return old_head; } } diff --git a/redGrapes/util/chunked_list.hpp b/redGrapes/util/chunked_list.hpp index be8870b3..6b997e7c 100644 --- a/redGrapes/util/chunked_list.hpp +++ b/redGrapes/util/chunked_list.hpp @@ -170,7 +170,7 @@ struct ChunkedList * compensated by not increasing the count when the last * element is inserted. */ - std::atomic< chunk_offset_t > item_count{ 1 }; + std::atomic< chunk_offset_t > item_count{ 0 }; /* lowest index with free slot that can * be used to add a new element @@ -435,6 +435,15 @@ struct ChunkedList spdlog::error("copy construct ChunkedList!!"); } + /* decrement item_count and in case all items of this chunk are deleted, + * and this chunk is not `head`, delete the chunk too + */ + void release_chunk( typename memory::AtomicList< Chunk, Allocator >::MutBackwardIterator chunk ) + { + if( chunk->item_count.fetch_sub(1) == 0 ) + chunks.erase( chunk ); + } + MutBackwardIterator push( T const& item ) { TRACE_EVENT("ChunkedList", "push"); @@ -448,8 +457,7 @@ struct ChunkedList if( chunk_off < chunk_size ) { - if( chunk_off+1 < chunk_size ) - chunk->item_count ++; + chunk->item_count ++; chunk->items()[ chunk_off ] = item; chunk->last_idx ++; @@ -457,7 +465,8 @@ struct ChunkedList } } - chunks.allocate_item(); + auto prev_chunk = chunks.allocate_item(); + release_chunk( prev_chunk ); } } @@ -484,14 +493,7 @@ struct ChunkedList */ pos.item().remove(); - /* in case all items of this chunk are deleted, * delete the chunk too */ - if( 
pos.chunk->item_count.fetch_sub(1) == 1 ) - { - // spdlog::info("last item!!"); - chunks.erase( pos.chunk ); - } + release_chunk( pos.chunk ); } else throw std::runtime_error("remove invalid position");