Skip to content

Commit

Permalink
ChunkedList: fix removal of unused chunks created through race in `pu…
Browse files Browse the repository at this point in the history
…sh()` by decrementing the previous chunk's item count after each insertion of a new chunk
  • Loading branch information
michaelsippel committed Dec 8, 2023
1 parent b2cd061 commit 61a90d3
Show file tree
Hide file tree
Showing 2 changed files with 37 additions and 28 deletions.
39 changes: 23 additions & 16 deletions redGrapes/util/atomic_list.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -159,7 +159,7 @@ struct AtomicList

/* initializes a new chunk
*/
void allocate_item()
auto allocate_item()
{
TRACE_EVENT("Allocator", "AtomicList::allocate_item()");

Expand All @@ -174,7 +174,7 @@ struct AtomicList
*/
StaticAlloc<void> chunk_alloc( this->alloc, chunk_size );
uintptr_t base = (uintptr_t)chunk_alloc.ptr;
append_item(
return append_item(
std::allocate_shared< ItemPtr >(
chunk_alloc,

Expand All @@ -188,20 +188,6 @@ struct AtomicList
);
}

/* atomically appends a floating chunk to this list
*/
void append_item( std::shared_ptr< ItemPtr > new_head )
{
TRACE_EVENT("Allocator", "AtomicList::append_item()");
bool append_successful = false;
while( ! append_successful )
{
std::shared_ptr< ItemPtr > old_head = std::atomic_load( &head );
std::atomic_store( &new_head->prev, old_head );
append_successful = std::atomic_compare_exchange_strong<ItemPtr>( &head, &old_head, new_head );
}
}

template < bool is_const = false >
struct BackwardIterator
{
Expand Down Expand Up @@ -296,6 +282,27 @@ struct AtomicList
{
pos.erase();
}

/* Atomically appends a floating chunk to this list and returns an
 * iterator to the previous head, to which `new_head` is now linked.
 *
 * @param new_head  freshly allocated, not-yet-published list node;
 *                  its `prev` pointer is overwritten here.
 * @return MutBackwardIterator positioned at the old head (may wrap a
 *         null pointer if the list was empty).
 */
auto append_item( std::shared_ptr< ItemPtr > new_head )
{
    TRACE_EVENT("Allocator", "AtomicList::append_item()");

    /* Lock-free CAS publish loop. Returning directly from the success
     * branch of an infinite loop (instead of the old while+flag form)
     * guarantees every exit path of this deduced-return-type function
     * yields a value — the old shape could fall off the end of a
     * value-returning function if the loop condition ever broke.
     */
    for(;;)
    {
        std::shared_ptr< ItemPtr > old_head = std::atomic_load( &head );

        // link the new chunk to the current head before publishing it
        std::atomic_store( &new_head->prev, old_head );

        /* Publish `new_head`: succeeds only if no other thread replaced
         * `head` since we loaded it; on failure, retry with a fresh head.
         */
        if( std::atomic_compare_exchange_strong<ItemPtr>( &head, &old_head, new_head ) )
            return MutBackwardIterator{ old_head };
    }
}


};

} // namespace memory
Expand Down
26 changes: 14 additions & 12 deletions redGrapes/util/chunked_list.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -170,7 +170,7 @@ struct ChunkedList
 * compensated by decrementing the previous chunk's count once a
 * new chunk has been appended (see release_chunk()).
*/
std::atomic< chunk_offset_t > item_count{ 1 };
std::atomic< chunk_offset_t > item_count{ 0 };

/* lowest index with free slot that can
* be used to add a new element
Expand Down Expand Up @@ -435,6 +435,15 @@ struct ChunkedList
spdlog::error("copy construct ChunkedList!!");
}

/* Decrement `chunk`'s item_count by one and, if the value *before* the
 * decrement was already zero, unlink the chunk from the list.
 *
 * NOTE(review): fetch_sub() returns the previous value, so the erase
 * fires on the decrement that takes the count below zero (for an
 * unsigned chunk_offset_t this underflows, which looks intentional:
 * the chunk is erased immediately, so the wrapped value is never read).
 * Each chunk apparently needs one more decrement than it has live items
 * before it dies; that extra decrement is issued from push() once a
 * successor chunk has been appended — presumably so a chunk is never
 * erased while it is still the head being filled. Confirm against the
 * item_count scheme documented on ChunkedList::Chunk.
 *
 * NOTE(review): despite the wording "this chunk is not `head`" in the
 * commit's description, no head check happens here — verify that
 * chunks.erase() tolerates erasing the current head.
 */
void release_chunk( typename memory::AtomicList< Chunk, Allocator >::MutBackwardIterator chunk )
{
    if( chunk->item_count.fetch_sub(1) == 0 )
        chunks.erase( chunk );
}

MutBackwardIterator push( T const& item )
{
TRACE_EVENT("ChunkedList", "push");
Expand All @@ -448,16 +457,16 @@ struct ChunkedList

if( chunk_off < chunk_size )
{
if( chunk_off+1 < chunk_size )
chunk->item_count ++;
chunk->item_count ++;

chunk->items()[ chunk_off ] = item;
chunk->last_idx ++;
return MutBackwardIterator( chunk_size, chunk, chunk_off );
}
}

chunks.allocate_item();
auto prev_chunk = chunks.allocate_item();
release_chunk( prev_chunk );
}
}

Expand All @@ -484,14 +493,7 @@ struct ChunkedList
*/
pos.item().remove();

/* in case all items of this chunk are deleted,
* delete the chunk too
*/
if( pos.chunk->item_count.fetch_sub(1) == 1 )
{
// spdlog::info("last item!!");
chunks.erase( pos.chunk );
}
release_chunk( pos.chunk );
}
else
throw std::runtime_error("remove invalid position");
Expand Down

0 comments on commit 61a90d3

Please sign in to comment.