Commit fbb1f31: fix docs
jdonszelmann committed Sep 16, 2023
1 parent cf0437d commit fbb1f31
Showing 2 changed files with 28 additions and 16 deletions.
9 changes: 3 additions & 6 deletions src/ringbuffer_trait.rs
@@ -95,12 +95,9 @@ pub unsafe trait RingBuffer<T>:
     /// Iterates over the slice `other`, clones each element, and then appends
     /// it to this `RingBuffer`. The `other` slice is traversed in-order.
     ///
-    /// Depending on the `RingBuffer` implementation, may be faster than inserting items in a loop
-    ///
-    /// Note that this function is same as [`extend`] except that it is
-    /// specialized to work with slices instead. If and when Rust gets
-    /// specialization this function will likely be deprecated (but still
-    /// available).
+    /// Depending on the `RingBuffer` implementation, may be faster than inserting items in a loop.
+    /// `ConstGenericRingBuffer` is especially optimised in this regard.
+    /// See also: [`ConstGenericRingBuffer::custom_extend_batched`](crate::with_const_generics::ConstGenericRingBuffer::custom_extend_batched)
     ///
     /// # Examples
     ///
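For context, a minimal usage sketch of the slice-based extend documented above. The method name `extend_from_slice` is an assumption inferred from the doc text; it is not visible in this hunk:

    use ringbuffer::{ConstGenericRingBuffer, RingBuffer};

    fn main() {
        let mut rb: ConstGenericRingBuffer<i32, 4> = ConstGenericRingBuffer::new();

        // Clone each element of the slice into the buffer, in order.
        // NOTE: `extend_from_slice` is assumed from the doc text above.
        rb.extend_from_slice(&[1, 2, 3]);

        assert!(rb.iter().eq([1, 2, 3].iter()));
    }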
35 changes: 25 additions & 10 deletions src/with_const_generics.rs
@@ -394,7 +394,10 @@ impl<T, const CAP: usize> ConstGenericRingBuffer<T, CAP> {

     #[inline]
     #[cfg(feature = "batched_extend")]
-    fn extend_batched<const BATCH_SIZE: usize>(&mut self, mut other: impl Iterator<Item = T>) {
+    fn extend_batched_internal<const BATCH_SIZE: usize>(
+        &mut self,
+        mut other: impl Iterator<Item = T>,
+    ) {
         // SAFETY: if CAP < Self::BATCH_SIZE we can't run extend_from_arr_batch so we catch that here
         if CAP < BATCH_SIZE {
             for i in other {
@@ -454,15 +457,18 @@ impl<T, const CAP: usize> ConstGenericRingBuffer<T, CAP> {
             self.writeptr = index;
         } else {
             self.writeptr = CAP;
-            self.extend_batched::<BATCH_SIZE>(iter);
+            self.extend_batched_internal::<BATCH_SIZE>(iter);
         }
     }
-}
 
-impl<T, const CAP: usize> Extend<T> for ConstGenericRingBuffer<T, CAP> {
-    /// NOTE: correctness (but not soundness) of extend depends on `size_hint` on iter being correct.
-    #[inline]
-    fn extend<A: IntoIterator<Item = T>>(&mut self, iter: A) {
+    /// Alias of [`Extend::extend`](ConstGenericRingBuffer::extend) but can take a custom batch size.
+    ///
+    /// We found that `30` works well for us, which is the batch size we use in `extend`,
+    /// but on different architectures this may not be true.
+    pub fn custom_extend_batched<const BATCH_SIZE: usize>(
+        &mut self,
+        iter: impl IntoIterator<Item = T>,
+    ) {
         #[cfg(not(feature = "batched_extend"))]
         {
             for i in iter {
@@ -472,8 +478,6 @@ impl<T, const CAP: usize> Extend<T> for ConstGenericRingBuffer<T, CAP> {

         #[cfg(feature = "batched_extend")]
         {
-            const BATCH_SIZE: usize = 30;
-
             let iter = iter.into_iter();
 
             let (lower, _) = iter.size_hint();
@@ -497,12 +501,23 @@
             // Safety: clear above
             unsafe { self.finish_iter::<BATCH_SIZE>(iter) };
         } else {
-            self.extend_batched::<BATCH_SIZE>(iter);
+            self.extend_batched_internal::<BATCH_SIZE>(iter);
         }
     }
 }
 }
 
+impl<T, const CAP: usize> Extend<T> for ConstGenericRingBuffer<T, CAP> {
+    /// NOTE: correctness (but not soundness) of extend depends on `size_hint` on iter being correct.
+    #[inline]
+    fn extend<A: IntoIterator<Item = T>>(&mut self, iter: A) {
+        /// good number, found through benchmarking.
+        /// gives ~30% performance boost over not batching
+        const BATCH_SIZE: usize = 30;
+        self.custom_extend_batched::<BATCH_SIZE>(iter);
+    }
+}
+
 unsafe impl<T, const CAP: usize> RingBuffer<T> for ConstGenericRingBuffer<T, CAP> {
     #[inline]
     unsafe fn ptr_capacity(_: *const Self) -> usize {
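Taken together, a short sketch of how the two extend paths from this commit can be called; the buffer type and sizes here are illustrative:

    use ringbuffer::{ConstGenericRingBuffer, RingBuffer};

    fn main() {
        let mut rb: ConstGenericRingBuffer<u64, 1024> = ConstGenericRingBuffer::new();

        // `extend` uses the built-in batch size of 30, found through benchmarking.
        rb.extend(0..100);

        // With the `batched_extend` feature enabled, the batch size can be
        // tuned per workload/architecture via the new public method.
        rb.custom_extend_batched::<64>(100..200);

        assert_eq!(rb.len(), 200);
    }

As the NOTE on `extend` says, correctness (but not soundness) depends on the iterator's `size_hint` being accurate; ranges like the ones above report exact hints.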
