diff --git a/src/ringbuffer_trait.rs b/src/ringbuffer_trait.rs
index 9f869c4..5940b97 100644
--- a/src/ringbuffer_trait.rs
+++ b/src/ringbuffer_trait.rs
@@ -1,4 +1,4 @@
-use core::ops::{Index, IndexMut};
+use core::ops::{Index, IndexMut, Range};
 
 #[cfg(feature = "alloc")]
 extern crate alloc;
@@ -170,6 +170,11 @@ pub unsafe trait RingBuffer<T>:
         unsafe { Self::ptr_get_mut(self, index).map(|i| &mut *i) }
     }
 
+    /// Gets values relative to the current index. 0 is the next index to be written to with push.
+    fn get_range<'a>(&'a self, range: Range<usize>) -> impl Iterator<Item = &'a T>
+    where
+        T: 'a;
+
     /// same as [`get_mut`](RingBuffer::get_mut) but on raw pointers.
     ///
     /// # Safety
diff --git a/src/with_alloc/alloc_ringbuffer.rs b/src/with_alloc/alloc_ringbuffer.rs
index f21769e..d1ad12d 100644
--- a/src/with_alloc/alloc_ringbuffer.rs
+++ b/src/with_alloc/alloc_ringbuffer.rs
@@ -1,4 +1,5 @@
-use core::ops::{Index, IndexMut};
+use alloc::slice;
+use core::ops::{Index, IndexMut, Range};
 
 use crate::ringbuffer_trait::{
     RingBuffer, RingBufferIntoIterator, RingBufferIterator, RingBufferMutIterator,
@@ -292,6 +293,29 @@ unsafe impl<T> RingBuffer<T> for AllocRingBuffer<T> {
             unsafe { ptr::write(get_unchecked_mut(self, i), f()) };
         }
     }
+
+    #[inline]
+    fn get_range<'a>(&'a self, range: Range<usize>) -> impl Iterator<Item = &'a T>
+    where
+        T: 'a,
+    {
+        let offset = range.start.rem_euclid(self.len());
+        let normalized_index = self.readptr + offset;
+        let index = crate::mask_modulo(self.buffer_size(), normalized_index);
+        let buf = unsafe { slice::from_raw_parts(self.buf, self.buffer_size()) };
+        buf[index..]
+            .iter()
+            .chain(buf[..index].iter())
+            .take(self.len() - offset)
+            .chain(
+                buf[self.readptr..]
+                    .iter()
+                    .chain(buf[..self.readptr].iter())
+                    .take(self.len())
+                    .cycle(),
+            )
+            .take(range.len())
+    }
 }
 
 impl<T> AllocRingBuffer<T> {
diff --git a/src/with_alloc/vecdeque.rs b/src/with_alloc/vecdeque.rs
index 978bbe5..2dee5af 100644
--- a/src/with_alloc/vecdeque.rs
+++ b/src/with_alloc/vecdeque.rs
@@ -1,7 +1,7 @@
 use crate::ringbuffer_trait::{RingBufferIntoIterator, RingBufferIterator, RingBufferMutIterator};
 use crate::{AllocRingBuffer, RingBuffer};
 use alloc::collections::VecDeque;
-use core::ops::{Deref, DerefMut, Index, IndexMut};
+use core::ops::{Deref, DerefMut, Index, IndexMut, Range};
 
 /// A growable ringbuffer. Once capacity is reached, the size is doubled.
 /// Wrapper of the built-in [`VecDeque`] struct.
@@ -255,6 +255,18 @@ unsafe impl<T> RingBuffer<T> for GrowableAllocRingBuffer<T> {
         }
         .map(|i| i as *mut T)
     }
+
+    /// Gets values relative to the current index. 0 is the next index to be written to with push.
+    ///
+    /// # Panics
+    /// Panics if the starting point is greater than the end point
+    /// or if the end point is greater than the length of the deque.
+    fn get_range<'a>(&'a self, range: Range<usize>) -> impl Iterator<Item = &'a T>
+    where
+        T: 'a,
+    {
+        self.0.range(range)
+    }
 }
 
 impl<T> Extend<T> for GrowableAllocRingBuffer<T> {
diff --git a/src/with_const_generics.rs b/src/with_const_generics.rs
index 4f76e23..385aab5 100644
--- a/src/with_const_generics.rs
+++ b/src/with_const_generics.rs
@@ -3,7 +3,7 @@ use crate::RingBuffer;
 use core::iter::FromIterator;
 use core::mem;
 use core::mem::MaybeUninit;
-use core::ops::{Index, IndexMut};
+use core::ops::{Index, IndexMut, Range};
 
 /// The `ConstGenericRingBuffer` struct is a `RingBuffer` implementation which does not require `alloc` but
 /// uses const generics instead.
@@ -321,6 +321,30 @@ unsafe impl<T, const CAP: usize> RingBuffer<T> for ConstGenericRingBuffer<T, CAP> {
             unsafe { ptr::write(get_unchecked_mut(self, i), f()) };
         }
     }
+
+    /// Gets values relative to the current index. 0 is the next index to be written to with push.
+    fn get_range<'a>(&'a self, range: Range<usize>) -> impl Iterator<Item = &'a T>
+    where
+        T: 'a,
+    {
+        let offset = range.start.rem_euclid(self.len());
+        let normalized_index = self.readptr + offset;
+        let index = crate::mask_modulo(self.buffer_size(), normalized_index);
+        self.buf[index..]
+            .iter()
+            .chain(self.buf[..index].iter())
+            .take(self.len() - offset)
+            .map(|entry| unsafe { entry.assume_init_ref() })
+            .chain(
+                self.buf[self.readptr..]
+                    .iter()
+                    .chain(self.buf[..self.readptr].iter())
+                    .take(self.len())
+                    .map(|entry| unsafe { entry.assume_init_ref() })
+                    .cycle(),
+            )
+            .take(range.len())
+    }
 }
 
 impl<T, const CAP: usize> Default for ConstGenericRingBuffer<T, CAP> {