From 7ab9e3caa44ce399f49e505a2d79f7b7be44431f Mon Sep 17 00:00:00 2001 From: jdonszelmann Date: Fri, 15 Sep 2023 15:24:23 +0200 Subject: [PATCH 1/8] add append --- src/ringbuffer_trait.rs | 45 +++++++++++++++++++++++++++++++---------- 1 file changed, 34 insertions(+), 11 deletions(-) diff --git a/src/ringbuffer_trait.rs b/src/ringbuffer_trait.rs index ae92f09..f3aebb6 100644 --- a/src/ringbuffer_trait.rs +++ b/src/ringbuffer_trait.rs @@ -2,6 +2,7 @@ use core::ops::{Index, IndexMut}; #[cfg(feature = "alloc")] extern crate alloc; + #[cfg(feature = "alloc")] use alloc::vec::Vec; @@ -19,8 +20,10 @@ use alloc::vec::Vec; /// implementation, since these safety guarantees are necessary for /// [`iter_mut`](RingBuffer::iter_mut) to work pub unsafe trait RingBuffer: - Sized + IntoIterator + Extend + Index + IndexMut -{ + Sized + + IntoIterator + + Extend + + Index + IndexMut { /// Returns the length of the internal buffer. /// This length grows up to the capacity and then stops growing. /// This is because when the length is reached, new items are appended at the start. @@ -119,21 +122,41 @@ pub unsafe trait RingBuffer: RingBufferDrainingIterator::new(self) } + /// Moves all the elements of `other` into `self`, leaving `other` empty. + /// + /// # Examples + /// + /// ``` + /// use ringbuffer::{ConstGenericRingBuffer, RingBuffer}; + /// + /// let mut vec = ConstGenericRingBuffer::<_, 6>::from(vec![1, 2, 3]); + /// let mut vec2 = ConstGenericRingBuffer::<_, 6>::from(vec![4, 5, 6]); + /// + /// vec.append(&mut vec2); + /// assert_eq!(vec.to_vec(), &[1, 2, 3, 4, 5, 6]); + /// assert_eq!(vec2.to_vec(), &[]); + /// ``` + fn append(&mut self, other: &mut Self) { + for i in other.drain() { + self.push(i); + } + } + /// Sets every element in the ringbuffer to the value returned by f. fn fill_with T>(&mut self, f: F); /// Sets every element in the ringbuffer to it's default value fn fill_default(&mut self) - where - T: Default, + where + T: Default, { self.fill_with(Default::default); } /// Sets every element in the ringbuffer to `value` fn fill(&mut self, value: T) - where - T: Clone, + where + T: Clone, { self.fill_with(|| value.clone()); } @@ -233,16 +256,16 @@ pub unsafe trait RingBuffer: /// Converts the buffer to a vector. This Copies all elements in the ringbuffer. #[cfg(feature = "alloc")] fn to_vec(&self) -> Vec - where - T: Clone, + where + T: Clone, { self.iter().cloned().collect() } /// Returns true if elem is in the ringbuffer. fn contains(&self, elem: &T) -> bool - where - T: PartialEq, + where + T: PartialEq, { self.iter().any(|i| i == elem) } @@ -339,7 +362,7 @@ mod iter { impl<'rb, T: 'rb, RB: RingBuffer + 'rb> ExactSizeIterator for RingBufferMutIterator<'rb, T, RB> {} impl<'rb, T: 'rb, RB: RingBuffer + 'rb> DoubleEndedIterator - for RingBufferMutIterator<'rb, T, RB> + for RingBufferMutIterator<'rb, T, RB> { #[inline] fn next_back(&mut self) -> Option { From d5a78de16d77cc5ee12e2cf4a02afa86f2a6fc11 Mon Sep 17 00:00:00 2001 From: jdonszelmann Date: Fri, 15 Sep 2023 15:34:21 +0200 Subject: [PATCH 2/8] add extend from slice --- src/ringbuffer_trait.rs | 39 ++++++++++++++++++++++++++++++++++++--- 1 file changed, 36 insertions(+), 3 deletions(-) diff --git a/src/ringbuffer_trait.rs b/src/ringbuffer_trait.rs index f3aebb6..d374bac 100644 --- a/src/ringbuffer_trait.rs +++ b/src/ringbuffer_trait.rs @@ -86,6 +86,41 @@ pub unsafe trait RingBuffer: self.push(value); } + /// alias for [`extend`](RingBuffer::extend). 
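+ ///
+ /// A minimal usage sketch (assuming a const-generic buffer with spare capacity):
+ ///
+ /// ```
+ /// use ringbuffer::{ConstGenericRingBuffer, RingBuffer};
+ ///
+ /// let mut rb = ConstGenericRingBuffer::<i32, 4>::new();
+ /// rb.enqueue_many([1, 2, 3]);
+ /// assert_eq!(rb.to_vec(), vec![1, 2, 3]);
+ /// ```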
+ #[inline] + fn enqueue_many>(&mut self, items: I) { + self.extend(items); + } + + /// Clones and appends all elements in a slice to the `Vec`. + /// + /// Iterates over the slice `other`, clones each element, and then appends + /// it to this `RingBuffer`. The `other` slice is traversed in-order. + /// + /// Depending on the RingBuffer implementation, may be faster than inserting items in a loop + /// + /// Note that this function is same as [`extend`] except that it is + /// specialized to work with slices instead. If and when Rust gets + /// specialization this function will likely be deprecated (but still + /// available). + /// + /// # Examples + /// + /// ``` + /// use ringbuffer::{ConstGenericRingBuffer, RingBuffer}; + /// + /// let mut rb = ConstGenericRingBuffer::<_, 6>::new(); + /// rb.push(1); + /// + /// rb.extend_from_slice(&[2, 3, 4]); + /// assert_eq!(rb.to_vec(), vec![1, 2, 3, 4]); + /// ``` + /// + /// [`extend`]: RingBuffer::extend + fn extend_from_slice(&mut self, other: &[T]) where T: Clone { + self.extend(other.into_iter().cloned()) + } + /// dequeues the top item off the ringbuffer, and moves this item out. fn dequeue(&mut self) -> Option; @@ -137,9 +172,7 @@ pub unsafe trait RingBuffer: /// assert_eq!(vec2.to_vec(), &[]); /// ``` fn append(&mut self, other: &mut Self) { - for i in other.drain() { - self.push(i); - } + self.extend(other.drain()); } /// Sets every element in the ringbuffer to the value returned by f. From 4e3b3ca0827597d35274a4bfaad43a295ef891f8 Mon Sep 17 00:00:00 2001 From: jdonszelmann Date: Sat, 16 Sep 2023 00:49:53 +0200 Subject: [PATCH 3/8] batched extend --- benches/bench.rs | 46 +++++- src/with_const_generics.rs | 328 +++++++++++++++++++++++++++++++++++-- 2 files changed, 360 insertions(+), 14 deletions(-) diff --git a/benches/bench.rs b/benches/bench.rs index 6793a40..18f67a8 100644 --- a/benches/bench.rs +++ b/benches/bench.rs @@ -1,5 +1,6 @@ -#![no_coverage] -use criterion::{black_box, criterion_group, criterion_main, Bencher, Criterion}; +#![cfg(not(tarpaulin_include))] + +use criterion::{black_box, criterion_group, criterion_main, Bencher, Criterion, BatchSize}; use ringbuffer::{AllocRingBuffer, ConstGenericRingBuffer, RingBuffer}; fn benchmark_push, F: Fn() -> T>(b: &mut Bencher, new: F) { @@ -180,6 +181,47 @@ fn criterion_benchmark(c: &mut Criterion) { 8192, 8195 ]; + + c.bench_function("extend too many", extend_too_many); + c.bench_function("extend many too many", extend_many_too_many); + c.bench_function("extend exact cap", extend_exact_cap); + c.bench_function("extend too few", extend_too_few); +} + +fn extend_many_too_many(b: &mut Bencher) { + let rb = ConstGenericRingBuffer::new::<8192>(); + let input = (0..16384).collect::>(); + + b.iter_batched(&|| rb.clone(), |mut r| { + black_box(r.extend(black_box(input.as_slice()))) + }, BatchSize::SmallInput); +} + +fn extend_too_many(b: &mut Bencher) { + let rb = ConstGenericRingBuffer::new::<8192>(); + let input = (0..10000).collect::>(); + + b.iter_batched(&|| rb.clone(), |mut r| { + black_box(r.extend(black_box(input.as_slice()))) + }, BatchSize::SmallInput); +} + +fn extend_exact_cap(b: &mut Bencher) { + let rb = ConstGenericRingBuffer::new::<8192>(); + let input = (0..8192).collect::>(); + + b.iter_batched(&|| rb.clone(), |mut r| { + black_box(r.extend(black_box(input.as_slice()))) + }, BatchSize::SmallInput); +} + +fn extend_too_few(b: &mut Bencher) { + let rb = ConstGenericRingBuffer::new::<8192>(); + let input = (0..4096).collect::>(); + + b.iter_batched(&|| rb.clone(), |mut r| { + 
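// the setup closure hands each timed run a fresh copy of the (empty) buffer,
// so the measurement covers only `extend` itself, not buffer construction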
black_box(r.extend(black_box(input.as_slice()))) + }, BatchSize::LargeInput); } criterion_group!(benches, criterion_benchmark); diff --git a/src/with_const_generics.rs b/src/with_const_generics.rs index e4b6570..ea22308 100644 --- a/src/with_const_generics.rs +++ b/src/with_const_generics.rs @@ -93,7 +93,7 @@ impl From> for ConstGeneric #[cfg(feature = "alloc")] impl From> - for ConstGenericRingBuffer +for ConstGenericRingBuffer { fn from(value: alloc::collections::LinkedList) -> Self { value.into_iter().collect() @@ -115,7 +115,7 @@ impl From<&str> for ConstGenericRingBuffer { #[cfg(feature = "alloc")] impl From> - for ConstGenericRingBuffer +for ConstGenericRingBuffer { fn from(mut value: crate::GrowableAllocRingBuffer) -> Self { value.drain().collect() @@ -172,11 +172,11 @@ impl ConstGenericRingBuffer { #[inline] #[must_use] pub const fn new() -> Self - where - ConstGenericRingBuffer: From>, + where + ConstGenericRingBuffer: From>, { #[allow(clippy::let_unit_value)] - let _ = Self::ERROR_CAPACITY_IS_NOT_ALLOWED_TO_BE_ZERO; + let _ = Self::ERROR_CAPACITY_IS_NOT_ALLOWED_TO_BE_ZERO; // allow here since we are constructing an array of MaybeUninit // which explicitly *is* defined behavior @@ -241,12 +241,227 @@ impl<'a, T, const CAP: usize> IntoIterator for &'a mut ConstGenericRingBuffer ConstGenericRingBuffer { + const BATCH_SIZE: usize = 8; + + /// splits the ringbuffer into two slices. One from the old pointer to the end of the buffer, + /// and one from the start of the buffer to the new pointer + /// + /// # Safety + /// Only safe when old != new + #[inline] + unsafe fn split_pointer_move(&mut self, old: usize, new: usize) -> (&mut [MaybeUninit], &mut [MaybeUninit]) { + let old_mod = crate::mask_modulo(CAP, old); + let new_mod = crate::mask_modulo(CAP, new); + + if old_mod < new_mod { + // if there's no wrapping, nice! we can just return one slice + (&mut self.buf[old_mod..new_mod], &mut []) + } else { + // the first part is from old_mod to CAP + let (start, p1) = self.buf.split_at_mut(old_mod); + + // and the second part from 0 to new_mod + let (p2, _) = start.split_at_mut(new_mod); + + (p1, p2) + } + } + + /// # Safety + /// Only safe when CAP >= Self::BATCH_SIZE + #[inline] + unsafe fn extend_from_arr_batch(&mut self, data: [T; BATCH_SIZE]) { + debug_assert!(CAP >= BATCH_SIZE); + + // algorithm to push 1 item: + // + // if self.is_full() { + // let previous_value = mem::replace( + // &mut self.buf[crate::mask_modulo(CAP, self.readptr)], + // MaybeUninit::uninit(), + // ); + // // make sure we drop whatever is being overwritten + // // SAFETY: the buffer is full, so this must be initialized + // // : also, index has been masked + // // make sure we drop because it won't happen automatically + // unsafe { + // drop(previous_value.assume_init()); + // } + // self.readptr += 1; + // } + // let index = crate::mask_modulo(CAP, self.writeptr); + // self.buf[index] = MaybeUninit::new(value); + // self.writeptr += 1; + + let old_writeptr = self.writeptr; + let old_readptr = self.readptr; + + // so essentially, we need to update the write pointer by Self::BATCH_SIZE + self.writeptr += BATCH_SIZE; + + // but maybe we also need to update the readptr + // first we calculate if we will be full. 
if not, no need to update the readptr + let num_items_until_full = self.capacity() - self.len(); + if num_items_until_full < BATCH_SIZE { + // the difference is how much the read ptr needs to move + self.readptr += BATCH_SIZE - num_items_until_full; + + debug_assert_ne!(old_readptr, self.readptr); + + // if readptr moves, we also need to free some items. + // Safety: same safety guarantees as this function and old != new by the assertion above + let (p1, p2) = unsafe{self.split_pointer_move(old_readptr, self.readptr)}; + // assertion: we can never be in a situation where we have to drop more than a batch size of items + debug_assert!(p1.len() + p2.len() <= BATCH_SIZE); + + for i in p1 { + i.assume_init_drop(); + } + for i in p2 { + i.assume_init_drop(); + } + } + + debug_assert_ne!(old_writeptr, self.writeptr); + // now we need to write some items between old_writeptr and self.writeptr + // Safety: same safety guarantees as this function and old != new by the assertion above + let (p1, p2) = unsafe{self.split_pointer_move(old_writeptr, self.writeptr)}; + // assertion: we can never be in a situation where we have to write more than a batch size of items + debug_assert!(p1.len() + p2.len() <= Self::BATCH_SIZE); + + // if we are lucky, we're not on the boundary so either p1 or p2 has a length of Self::BATCH_SIZE + if p1.len() == Self::BATCH_SIZE { + for (index, i) in data.into_iter().enumerate() { + p1[index] = MaybeUninit::new(i); + } + } else if p2.len() == Self::BATCH_SIZE { + for (index, i) in data.into_iter().enumerate() { + p2[index] = MaybeUninit::new(i); + } + } else { + // oof, unfortunately we're on a boundary + + // iterate over the data + let mut data_iter = data.into_iter(); + + // put p1.len() in p1 + for index in 0..p1.len() { + let next_item = data_iter.next(); + // Safety: p1.len() + p2.len() <= Self::BATCH_SIZE so the two loops here + // together cannot run for more than Self::BATCH_SIZE iterations + p1[index] = MaybeUninit::new(unsafe{next_item.unwrap_unchecked()}); + } + + // put p2.len() in p2 + for index in 0..p2.len() { + let next_item = data_iter.next(); + // Safety: p1.len() + p2.len() <= Self::BATCH_SIZE so the two loops here + // together cannot run for more than Self::BATCH_SIZE iterations + p2[index] = MaybeUninit::new(unsafe{next_item.unwrap_unchecked()}); + } + } + } + + #[inline] + fn fill_batch(batch: &mut [MaybeUninit; BATCH_SIZE], iter: &mut impl Iterator) -> usize { + for index in 0..BATCH_SIZE { + if let Some(i) = iter.next() { + batch[index] = MaybeUninit::new(i); + } else { + return index; + } + } + + BATCH_SIZE + } + + #[inline] + fn extend_batched(&mut self, mut other: impl Iterator) { + // SAFETY: if CAP < Self::BATCH_SIZE we can't run extend_from_arr_batch so we catch that here + if CAP < BATCH_SIZE { + for i in other { + self.push(i) + } + } else { + // Safety: assume init to MaybeUninit slice is safe + let mut batch: [MaybeUninit; BATCH_SIZE] = unsafe{MaybeUninit::uninit().assume_init()}; + + // repeat until we find an empty batch + loop { + // fill up a batch + let how_full = Self::fill_batch(&mut batch, &mut other); + + // if the batch isn't complete, individually add the items from that batch + if how_full < BATCH_SIZE { + for i in 0..how_full { + // Safety: fill_batch filled up at least `how_full` items so if we iterate + // until there this is safe + self.push(unsafe { batch[i].assume_init_read() }) + } + + // then we're done! 
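// (the iterator is exhausted at this point: `fill_batch` returned fewer than
// `BATCH_SIZE` items, so after pushing this partial batch nothing is left)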
+ return; + } + + // else the batch is full, and we can transmute it to an init slice + let batch = unsafe { mem::transmute_copy::<[MaybeUninit; BATCH_SIZE], [T; BATCH_SIZE]>(&batch) }; + + // SAFETY: if CAP < Self::BATCH_SIZE we woudn't be here + unsafe { + self.extend_from_arr_batch(batch) + } + } + } + } +} + impl Extend for ConstGenericRingBuffer { - fn extend>(&mut self, iter: A) { - let iter = iter.into_iter(); + /// NOTE: correctness (but not soundness) of extend depends on `size_hint` on iter being correct. + #[inline] + fn extend>(&mut self, iter: A) { + const BATCH_SIZE: usize = 32; - for i in iter { - self.push(i); + let mut iter = iter.into_iter(); + + let (lower, _) = iter.size_hint(); + + macro_rules! finish_iter { + ($iter: ident) => { + let mut index = 0; + while let Some(i) = $iter.next() { + self.buf[index] = MaybeUninit::new(i); + index += 1; + + if index > CAP - 1 { + break; + } + } + + if index <= CAP-1 { + self.writeptr = index + 1; + } else { + self.writeptr = CAP; + self.extend_batched::($iter); + } + }; + } + + if lower >= CAP { + // if there are more elements in our iterator than we have size in the ringbuffer + // drain the ringbuffer + self.clear(); + + // we need exactly CAP elements. + // so we need to drop until the number of elements in the iterator is exactly CAP + let num_we_can_drop = lower - CAP; + + let mut iter = iter.skip(num_we_can_drop); + finish_iter!(iter); + } else if self.is_empty() { + finish_iter!(iter); + } else { + self.extend_batched::(iter); } } } @@ -328,7 +543,7 @@ impl Default for ConstGenericRingBuffer { } impl FromIterator for ConstGenericRingBuffer { - fn from_iter>(iter: T) -> Self { + fn from_iter>(iter: T) -> Self { let mut res = Self::default(); for i in iter { res.push(i); @@ -354,6 +569,7 @@ impl IndexMut for ConstGenericRingBuffer { #[cfg(test)] mod tests { + use core::ops::Range; use super::*; #[test] @@ -418,6 +634,94 @@ mod tests { } } + struct WeirdIterator(::IntoIter, SizeHint); + + impl Iterator for WeirdIterator { + type Item = ::Item; + + fn next(&mut self) -> Option { + self.0.next() + } + + fn size_hint(&self) -> (usize, Option) { + let (lower, upper) = self.0.size_hint(); + + match self.1 { + SizeHint::TooHigh => (lower + 10, upper), + SizeHint::TooLow => (lower - 10, upper), + SizeHint::Good => (lower, upper) + } + } + } + + #[derive(Debug, Copy, Clone)] + pub enum SizeHint { + TooHigh, + TooLow, + Good, + } + + struct WeirdIntoIterator(pub T, SizeHint); + + impl IntoIterator for WeirdIntoIterator + where ::IntoIter: Sized + { + type Item = ::Item; + type IntoIter = WeirdIterator; + + fn into_iter(self) -> Self::IntoIter { + WeirdIterator(self.0.into_iter(), self.1) + } + } + + #[test] + fn test_verify_extend() { + const CAP: usize = 16; + + for start in 0..5 { + for size in [SizeHint::TooLow, SizeHint::Good, SizeHint::TooHigh] { + let mut rb = ConstGenericRingBuffer::::new(); + for i in 0..start { + rb.push(i); + } + + rb.extend(WeirdIterator::>(0..CAP, size)); + rb.push(17); + rb.push(18); + rb.push(19); + + for _ in 0..CAP { + let _ = rb.dequeue(); + } + + let mut rb = ConstGenericRingBuffer::::new(); + for i in 0..start { + rb.push(i); + } + + rb.extend(WeirdIterator::>(0..(CAP + 1), size)); + rb.push(18); + rb.push(19); + + for _ in 0..CAP { + let _ = rb.dequeue(); + } + + let mut rb = ConstGenericRingBuffer::::new(); + for i in 0..start { + rb.push(i); + } + + rb.extend(WeirdIterator::>(0..(CAP + 2), size)); + rb.push(19); + + for _ in 0..CAP { + let _ = rb.dequeue(); + } + } + } + } + #[cfg(test)] mod tests 
{ use crate::{AllocRingBuffer, ConstGenericRingBuffer, GrowableAllocRingBuffer, RingBuffer}; @@ -464,14 +768,14 @@ mod tests { ConstGenericRingBuffer::::from( vec![1, 2, 3].into_iter().collect::>() ) - .to_vec(), + .to_vec(), vec![1, 2, 3] ); assert_eq!( ConstGenericRingBuffer::::from( vec![1, 2, 3].into_iter().collect::>() ) - .to_vec(), + .to_vec(), vec![1, 2, 3] ); assert_eq!( From 0a681352f136b7e0851a771ffbd6624108a1c172 Mon Sep 17 00:00:00 2001 From: jdonszelmann Date: Sat, 16 Sep 2023 01:48:24 +0200 Subject: [PATCH 4/8] definitely some UB here oof --- benches/bench.rs | 34 +-- src/ringbuffer_trait.rs | 35 +-- src/with_const_generics.rs | 201 ++++++++++-------- .../test_const_generic_array_zero_length.rs | 9 - ...est_const_generic_array_zero_length_new.rs | 10 - tests/compiletests.rs | 23 -- tests/conversions.rs | 135 ------------ 7 files changed, 157 insertions(+), 290 deletions(-) delete mode 100644 tests/compile-fail/test_const_generic_array_zero_length.rs delete mode 100644 tests/compile-fail/test_const_generic_array_zero_length_new.rs delete mode 100644 tests/compiletests.rs delete mode 100644 tests/conversions.rs diff --git a/benches/bench.rs b/benches/bench.rs index 18f67a8..04dc86d 100644 --- a/benches/bench.rs +++ b/benches/bench.rs @@ -1,6 +1,6 @@ #![cfg(not(tarpaulin_include))] -use criterion::{black_box, criterion_group, criterion_main, Bencher, Criterion, BatchSize}; +use criterion::{black_box, criterion_group, criterion_main, BatchSize, Bencher, Criterion}; use ringbuffer::{AllocRingBuffer, ConstGenericRingBuffer, RingBuffer}; fn benchmark_push, F: Fn() -> T>(b: &mut Bencher, new: F) { @@ -192,36 +192,44 @@ fn extend_many_too_many(b: &mut Bencher) { let rb = ConstGenericRingBuffer::new::<8192>(); let input = (0..16384).collect::>(); - b.iter_batched(&|| rb.clone(), |mut r| { - black_box(r.extend(black_box(input.as_slice()))) - }, BatchSize::SmallInput); + b.iter_batched( + &|| rb.clone(), + |mut r| black_box(r.extend(black_box(input.as_slice()))), + BatchSize::SmallInput, + ); } fn extend_too_many(b: &mut Bencher) { let rb = ConstGenericRingBuffer::new::<8192>(); let input = (0..10000).collect::>(); - b.iter_batched(&|| rb.clone(), |mut r| { - black_box(r.extend(black_box(input.as_slice()))) - }, BatchSize::SmallInput); + b.iter_batched( + &|| rb.clone(), + |mut r| black_box(r.extend(black_box(input.as_slice()))), + BatchSize::SmallInput, + ); } fn extend_exact_cap(b: &mut Bencher) { let rb = ConstGenericRingBuffer::new::<8192>(); let input = (0..8192).collect::>(); - b.iter_batched(&|| rb.clone(), |mut r| { - black_box(r.extend(black_box(input.as_slice()))) - }, BatchSize::SmallInput); + b.iter_batched( + &|| rb.clone(), + |mut r| black_box(r.extend(black_box(input.as_slice()))), + BatchSize::SmallInput, + ); } fn extend_too_few(b: &mut Bencher) { let rb = ConstGenericRingBuffer::new::<8192>(); let input = (0..4096).collect::>(); - b.iter_batched(&|| rb.clone(), |mut r| { - black_box(r.extend(black_box(input.as_slice()))) - }, BatchSize::LargeInput); + b.iter_batched( + &|| rb.clone(), + |mut r| black_box(r.extend(black_box(input.as_slice()))), + BatchSize::LargeInput, + ); } criterion_group!(benches, criterion_benchmark); diff --git a/src/ringbuffer_trait.rs b/src/ringbuffer_trait.rs index d374bac..3ccb424 100644 --- a/src/ringbuffer_trait.rs +++ b/src/ringbuffer_trait.rs @@ -20,10 +20,8 @@ use alloc::vec::Vec; /// implementation, since these safety guarantees are necessary for /// [`iter_mut`](RingBuffer::iter_mut) to work pub unsafe trait RingBuffer: - Sized + - 
IntoIterator + - Extend + - Index + IndexMut { + Sized + IntoIterator + Extend + Index + IndexMut +{ /// Returns the length of the internal buffer. /// This length grows up to the capacity and then stops growing. /// This is because when the length is reached, new items are appended at the start. @@ -88,7 +86,7 @@ pub unsafe trait RingBuffer: /// alias for [`extend`](RingBuffer::extend). #[inline] - fn enqueue_many>(&mut self, items: I) { + fn enqueue_many>(&mut self, items: I) { self.extend(items); } @@ -97,7 +95,7 @@ pub unsafe trait RingBuffer: /// Iterates over the slice `other`, clones each element, and then appends /// it to this `RingBuffer`. The `other` slice is traversed in-order. /// - /// Depending on the RingBuffer implementation, may be faster than inserting items in a loop + /// Depending on the `RingBuffer` implementation, may be faster than inserting items in a loop /// /// Note that this function is same as [`extend`] except that it is /// specialized to work with slices instead. If and when Rust gets @@ -117,8 +115,11 @@ pub unsafe trait RingBuffer: /// ``` /// /// [`extend`]: RingBuffer::extend - fn extend_from_slice(&mut self, other: &[T]) where T: Clone { - self.extend(other.into_iter().cloned()) + fn extend_from_slice(&mut self, other: &[T]) + where + T: Clone, + { + self.extend(other.iter().cloned()); } /// dequeues the top item off the ringbuffer, and moves this item out. @@ -180,16 +181,16 @@ pub unsafe trait RingBuffer: /// Sets every element in the ringbuffer to it's default value fn fill_default(&mut self) - where - T: Default, + where + T: Default, { self.fill_with(Default::default); } /// Sets every element in the ringbuffer to `value` fn fill(&mut self, value: T) - where - T: Clone, + where + T: Clone, { self.fill_with(|| value.clone()); } @@ -289,16 +290,16 @@ pub unsafe trait RingBuffer: /// Converts the buffer to a vector. This Copies all elements in the ringbuffer. #[cfg(feature = "alloc")] fn to_vec(&self) -> Vec - where - T: Clone, + where + T: Clone, { self.iter().cloned().collect() } /// Returns true if elem is in the ringbuffer. 
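///
/// A minimal sketch:
///
/// ```
/// use ringbuffer::{ConstGenericRingBuffer, RingBuffer};
///
/// let rb: ConstGenericRingBuffer<i32, 4> = [1, 2].into_iter().collect();
/// assert!(rb.contains(&1));
/// assert!(!rb.contains(&3));
/// ```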
fn contains(&self, elem: &T) -> bool - where - T: PartialEq, + where + T: PartialEq, { self.iter().any(|i| i == elem) } @@ -395,7 +396,7 @@ mod iter { impl<'rb, T: 'rb, RB: RingBuffer + 'rb> ExactSizeIterator for RingBufferMutIterator<'rb, T, RB> {} impl<'rb, T: 'rb, RB: RingBuffer + 'rb> DoubleEndedIterator - for RingBufferMutIterator<'rb, T, RB> + for RingBufferMutIterator<'rb, T, RB> { #[inline] fn next_back(&mut self) -> Option { diff --git a/src/with_const_generics.rs b/src/with_const_generics.rs index ea22308..e82e079 100644 --- a/src/with_const_generics.rs +++ b/src/with_const_generics.rs @@ -93,7 +93,7 @@ impl From> for ConstGeneric #[cfg(feature = "alloc")] impl From> -for ConstGenericRingBuffer + for ConstGenericRingBuffer { fn from(value: alloc::collections::LinkedList) -> Self { value.into_iter().collect() @@ -115,7 +115,7 @@ impl From<&str> for ConstGenericRingBuffer { #[cfg(feature = "alloc")] impl From> -for ConstGenericRingBuffer + for ConstGenericRingBuffer { fn from(mut value: crate::GrowableAllocRingBuffer) -> Self { value.drain().collect() @@ -172,11 +172,11 @@ impl ConstGenericRingBuffer { #[inline] #[must_use] pub const fn new() -> Self - where - ConstGenericRingBuffer: From>, + where + ConstGenericRingBuffer: From>, { #[allow(clippy::let_unit_value)] - let _ = Self::ERROR_CAPACITY_IS_NOT_ALLOWED_TO_BE_ZERO; + let _ = Self::ERROR_CAPACITY_IS_NOT_ALLOWED_TO_BE_ZERO; // allow here since we are constructing an array of MaybeUninit // which explicitly *is* defined behavior @@ -242,15 +242,17 @@ impl<'a, T, const CAP: usize> IntoIterator for &'a mut ConstGenericRingBuffer ConstGenericRingBuffer { - const BATCH_SIZE: usize = 8; - /// splits the ringbuffer into two slices. One from the old pointer to the end of the buffer, /// and one from the start of the buffer to the new pointer /// /// # Safety /// Only safe when old != new #[inline] - unsafe fn split_pointer_move(&mut self, old: usize, new: usize) -> (&mut [MaybeUninit], &mut [MaybeUninit]) { + unsafe fn split_pointer_move( + &mut self, + old: usize, + new: usize, + ) -> (&mut [MaybeUninit], &mut [MaybeUninit]) { let old_mod = crate::mask_modulo(CAP, old); let new_mod = crate::mask_modulo(CAP, new); @@ -269,7 +271,7 @@ impl ConstGenericRingBuffer { } /// # Safety - /// Only safe when CAP >= Self::BATCH_SIZE + /// Only safe when `CAP` >= `BATCH_SIZE` #[inline] unsafe fn extend_from_arr_batch(&mut self, data: [T; BATCH_SIZE]) { debug_assert!(CAP >= BATCH_SIZE); @@ -294,6 +296,8 @@ impl ConstGenericRingBuffer { // self.buf[index] = MaybeUninit::new(value); // self.writeptr += 1; + let old_len = self.len(); + let old_writeptr = self.writeptr; let old_readptr = self.readptr; @@ -302,7 +306,7 @@ impl ConstGenericRingBuffer { // but maybe we also need to update the readptr // first we calculate if we will be full. if not, no need to update the readptr - let num_items_until_full = self.capacity() - self.len(); + let num_items_until_full = self.capacity() - old_len; if num_items_until_full < BATCH_SIZE { // the difference is how much the read ptr needs to move self.readptr += BATCH_SIZE - num_items_until_full; @@ -311,7 +315,7 @@ impl ConstGenericRingBuffer { // if readptr moves, we also need to free some items. 
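// (freeing matters because the buffer stores `MaybeUninit<T>`: overwriting an
// initialized slot without calling `assume_init_drop` would leak the old value)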
// Safety: same safety guarantees as this function and old != new by the assertion above - let (p1, p2) = unsafe{self.split_pointer_move(old_readptr, self.readptr)}; + let (p1, p2) = unsafe { self.split_pointer_move(old_readptr, self.readptr) }; // assertion: we can never be in a situation where we have to drop more than a batch size of items debug_assert!(p1.len() + p2.len() <= BATCH_SIZE); @@ -326,16 +330,22 @@ impl ConstGenericRingBuffer { debug_assert_ne!(old_writeptr, self.writeptr); // now we need to write some items between old_writeptr and self.writeptr // Safety: same safety guarantees as this function and old != new by the assertion above - let (p1, p2) = unsafe{self.split_pointer_move(old_writeptr, self.writeptr)}; + let (p1, p2) = unsafe { self.split_pointer_move(old_writeptr, self.writeptr) }; // assertion: we can never be in a situation where we have to write more than a batch size of items - debug_assert!(p1.len() + p2.len() <= Self::BATCH_SIZE); + debug_assert!( + p1.len() + p2.len() <= BATCH_SIZE, + "p1: {}; p2: {}; batch: {}", + p1.len(), + p2.len(), + BATCH_SIZE + ); // if we are lucky, we're not on the boundary so either p1 or p2 has a length of Self::BATCH_SIZE - if p1.len() == Self::BATCH_SIZE { + if p1.len() == BATCH_SIZE { for (index, i) in data.into_iter().enumerate() { p1[index] = MaybeUninit::new(i); } - } else if p2.len() == Self::BATCH_SIZE { + } else if p2.len() == BATCH_SIZE { for (index, i) in data.into_iter().enumerate() { p2[index] = MaybeUninit::new(i); } @@ -346,28 +356,31 @@ impl ConstGenericRingBuffer { let mut data_iter = data.into_iter(); // put p1.len() in p1 - for index in 0..p1.len() { + for i in p1 { let next_item = data_iter.next(); // Safety: p1.len() + p2.len() <= Self::BATCH_SIZE so the two loops here // together cannot run for more than Self::BATCH_SIZE iterations - p1[index] = MaybeUninit::new(unsafe{next_item.unwrap_unchecked()}); + *i = MaybeUninit::new(unsafe { next_item.unwrap_unchecked() }); } // put p2.len() in p2 - for index in 0..p2.len() { + for i in p2 { let next_item = data_iter.next(); // Safety: p1.len() + p2.len() <= Self::BATCH_SIZE so the two loops here // together cannot run for more than Self::BATCH_SIZE iterations - p2[index] = MaybeUninit::new(unsafe{next_item.unwrap_unchecked()}); + *i = MaybeUninit::new(unsafe { next_item.unwrap_unchecked() }); } } } #[inline] - fn fill_batch(batch: &mut [MaybeUninit; BATCH_SIZE], iter: &mut impl Iterator) -> usize { - for index in 0..BATCH_SIZE { + fn fill_batch( + batch: &mut [MaybeUninit; BATCH_SIZE], + iter: &mut impl Iterator, + ) -> usize { + for (index, b) in batch.iter_mut().enumerate() { if let Some(i) = iter.next() { - batch[index] = MaybeUninit::new(i); + *b = MaybeUninit::new(i); } else { return index; } @@ -377,15 +390,16 @@ impl ConstGenericRingBuffer { } #[inline] - fn extend_batched(&mut self, mut other: impl Iterator) { + fn extend_batched(&mut self, mut other: impl Iterator) { // SAFETY: if CAP < Self::BATCH_SIZE we can't run extend_from_arr_batch so we catch that here if CAP < BATCH_SIZE { for i in other { - self.push(i) + self.push(i); } } else { // Safety: assume init to MaybeUninit slice is safe - let mut batch: [MaybeUninit; BATCH_SIZE] = unsafe{MaybeUninit::uninit().assume_init()}; + let mut batch: [MaybeUninit; BATCH_SIZE] = + unsafe { MaybeUninit::uninit().assume_init() }; // repeat until we find an empty batch loop { @@ -394,10 +408,10 @@ impl ConstGenericRingBuffer { // if the batch isn't complete, individually add the items from that batch if how_full < 
BATCH_SIZE { - for i in 0..how_full { + for b in batch.iter().take(how_full) { // Safety: fill_batch filled up at least `how_full` items so if we iterate // until there this is safe - self.push(unsafe { batch[i].assume_init_read() }) + self.push(unsafe { b.assume_init_read() }); } // then we're done! @@ -405,12 +419,12 @@ impl ConstGenericRingBuffer { } // else the batch is full, and we can transmute it to an init slice - let batch = unsafe { mem::transmute_copy::<[MaybeUninit; BATCH_SIZE], [T; BATCH_SIZE]>(&batch) }; + let batch = unsafe { + mem::transmute_copy::<[MaybeUninit; BATCH_SIZE], [T; BATCH_SIZE]>(&batch) + }; // SAFETY: if CAP < Self::BATCH_SIZE we woudn't be here - unsafe { - self.extend_from_arr_batch(batch) - } + unsafe { self.extend_from_arr_batch(batch) } } } } @@ -419,13 +433,14 @@ impl ConstGenericRingBuffer { impl Extend for ConstGenericRingBuffer { /// NOTE: correctness (but not soundness) of extend depends on `size_hint` on iter being correct. #[inline] - fn extend>(&mut self, iter: A) { - const BATCH_SIZE: usize = 32; + fn extend>(&mut self, iter: A) { + const BATCH_SIZE: usize = 128; let mut iter = iter.into_iter(); let (lower, _) = iter.size_hint(); + // WARNING: ONLY USE WHEN WORKING ON A CLEARED RINGBUFFER macro_rules! finish_iter { ($iter: ident) => { let mut index = 0; @@ -438,8 +453,10 @@ impl Extend for ConstGenericRingBuffer { } } - if index <= CAP-1 { - self.writeptr = index + 1; + if index < CAP { + // we set writepointer to however many elements we managed to write (up to CAP-1) + // WARNING: ONLY WORKS WHEN WORKING ON A CLEARED RINGBUFFER + self.writeptr = index; } else { self.writeptr = CAP; self.extend_batched::($iter); @@ -457,8 +474,13 @@ impl Extend for ConstGenericRingBuffer { let num_we_can_drop = lower - CAP; let mut iter = iter.skip(num_we_can_drop); + + // WARNING: ONLY WORKS WHEN WORKING ON A CLEARED RINGBUFFER finish_iter!(iter); } else if self.is_empty() { + self.clear(); + + // WARNING: ONLY WORKS WHEN WORKING ON A CLEARED RINGBUFFER finish_iter!(iter); } else { self.extend_batched::(iter); @@ -543,7 +565,7 @@ impl Default for ConstGenericRingBuffer { } impl FromIterator for ConstGenericRingBuffer { - fn from_iter>(iter: T) -> Self { + fn from_iter>(iter: T) -> Self { let mut res = Self::default(); for i in iter { res.push(i); @@ -569,8 +591,8 @@ impl IndexMut for ConstGenericRingBuffer { #[cfg(test)] mod tests { - use core::ops::Range; use super::*; + use core::ops::Range; #[test] fn test_not_power_of_two() { @@ -649,7 +671,7 @@ mod tests { match self.1 { SizeHint::TooHigh => (lower + 10, upper), SizeHint::TooLow => (lower - 10, upper), - SizeHint::Good => (lower, upper) + SizeHint::Good => (lower, upper), } } } @@ -664,7 +686,8 @@ mod tests { struct WeirdIntoIterator(pub T, SizeHint); impl IntoIterator for WeirdIntoIterator - where ::IntoIter: Sized + where + ::IntoIter: Sized, { type Item = ::Item; type IntoIter = WeirdIterator; @@ -676,50 +699,62 @@ mod tests { #[test] fn test_verify_extend() { - const CAP: usize = 16; - - for start in 0..5 { - for size in [SizeHint::TooLow, SizeHint::Good, SizeHint::TooHigh] { - let mut rb = ConstGenericRingBuffer::::new(); - for i in 0..start { - rb.push(i); - } - - rb.extend(WeirdIterator::>(0..CAP, size)); - rb.push(17); - rb.push(18); - rb.push(19); - - for _ in 0..CAP { - let _ = rb.dequeue(); - } - - let mut rb = ConstGenericRingBuffer::::new(); - for i in 0..start { - rb.push(i); - } - - rb.extend(WeirdIterator::>(0..(CAP + 1), size)); - rb.push(18); - rb.push(19); - - for _ in 0..CAP { - let _ = 
rb.dequeue(); - } - - let mut rb = ConstGenericRingBuffer::::new(); - for i in 0..start { - rb.push(i); - } - - rb.extend(WeirdIterator::>(0..(CAP + 2), size)); - rb.push(19); - - for _ in 0..CAP { - let _ = rb.dequeue(); + extern crate std; + + macro_rules! for_cap { + ($cap: expr) => {{ + const CAP: usize = $cap; + + for start in 0..5 { + for size in [SizeHint::TooLow, SizeHint::Good, SizeHint::TooHigh] { + std::println!("{start} {size:?}"); + + let mut rb = ConstGenericRingBuffer::::new(); + for i in 0..start { + rb.push(i); + } + + rb.extend(WeirdIterator::>(0..CAP, size)); + rb.push(17); + rb.push(18); + rb.push(19); + + for _ in 0..CAP { + let _ = rb.dequeue(); + } + + let mut rb = ConstGenericRingBuffer::::new(); + for i in 0..start { + rb.push(i); + } + + rb.extend(WeirdIterator::>(0..(CAP + 1), size)); + rb.push(18); + rb.push(19); + + for _ in 0..CAP { + let _ = rb.dequeue(); + } + + let mut rb = ConstGenericRingBuffer::::new(); + for i in 0..start { + rb.push(i); + } + + rb.extend(WeirdIterator::>(0..(CAP + 2), size)); + rb.push(19); + + for _ in 0..CAP { + let _ = rb.dequeue(); + } + } } - } + };}; } + + for_cap!(17); + for_cap!(70); + for_cap!(128); } #[cfg(test)] @@ -768,14 +803,14 @@ mod tests { ConstGenericRingBuffer::::from( vec![1, 2, 3].into_iter().collect::>() ) - .to_vec(), + .to_vec(), vec![1, 2, 3] ); assert_eq!( ConstGenericRingBuffer::::from( vec![1, 2, 3].into_iter().collect::>() ) - .to_vec(), + .to_vec(), vec![1, 2, 3] ); assert_eq!( diff --git a/tests/compile-fail/test_const_generic_array_zero_length.rs b/tests/compile-fail/test_const_generic_array_zero_length.rs deleted file mode 100644 index 3b69f1a..0000000 --- a/tests/compile-fail/test_const_generic_array_zero_length.rs +++ /dev/null @@ -1,9 +0,0 @@ -extern crate ringbuffer; - -use ringbuffer::ConstGenericRingBuffer; - -fn main() { - let _ = ConstGenericRingBuffer::::new(); - //~^ note: the above error was encountered while instantiating `fn ringbuffer::ConstGenericRingBuffer::::new::<0>` - // ringbuffer can't be zero length -} diff --git a/tests/compile-fail/test_const_generic_array_zero_length_new.rs b/tests/compile-fail/test_const_generic_array_zero_length_new.rs deleted file mode 100644 index b080de1..0000000 --- a/tests/compile-fail/test_const_generic_array_zero_length_new.rs +++ /dev/null @@ -1,10 +0,0 @@ -extern crate ringbuffer; - -use ringbuffer::{ConstGenericRingBuffer, RingBuffer}; - -fn main() { - let mut buf = ConstGenericRingBuffer::new::<0>(); - //~^ note: the above error was encountered while instantiating `fn ringbuffer::ConstGenericRingBuffer::::new::<0>` - // ringbuffer can't be zero length - buf.push(5); -} diff --git a/tests/compiletests.rs b/tests/compiletests.rs deleted file mode 100644 index f48163e..0000000 --- a/tests/compiletests.rs +++ /dev/null @@ -1,23 +0,0 @@ -extern crate compiletest_rs as compiletest; - -use std::path::PathBuf; - -#[cfg(test)] -mod conversions; - -fn run_mode(mode: &'static str) { - let mut config = compiletest::Config::default(); - - config.mode = mode.parse().expect("Invalid mode"); - config.src_base = PathBuf::from(format!("tests/{}", mode)); - config.link_deps(); // Populate config.target_rustcflags with dependencies on the path - config.clean_rmeta(); // If your tests import the parent crate, this helps with E0464 - - compiletest::run_tests(&config); -} - -#[test] -#[cfg_attr(miri, ignore)] -fn compile_test() { - run_mode("compile-fail"); -} diff --git a/tests/conversions.rs b/tests/conversions.rs deleted file mode 100644 index e3c13a2..0000000 --- 
a/tests/conversions.rs +++ /dev/null @@ -1,135 +0,0 @@ -extern crate alloc; - -use alloc::collections::{LinkedList, VecDeque}; -use alloc::string::ToString; -use core::ops::Deref; -use ringbuffer::RingBuffer; -use ringbuffer::{AllocRingBuffer, ConstGenericRingBuffer, GrowableAllocRingBuffer}; -use std::vec; - -macro_rules! convert_test { - ($name: ident: $from: expr => $to: ty) => { - #[test] - fn $name() { - let a = $from; - - let mut b: $to = a.into(); - assert_eq!(b.to_vec(), vec!['1', '2']); - b.push('3'); - assert_eq!(b, b); - } - }; -} - -macro_rules! convert_tests { - ( - [$($name: ident: $from: expr),* $(,)?] - => $to: ty - ) => { - $( - convert_test!($name: $from => $to); - )* - }; -} - -convert_tests!( - [ - alloc_from_vec: vec!['1', '2'], - alloc_from_ll: {let mut l = LinkedList::new(); l.push_back('1'); l.push_back('2'); l}, - alloc_from_vd: {let mut l = VecDeque::new(); l.push_back('1'); l.push_back('2'); l}, - alloc_from_str: "12".to_string(), - alloc_from_str_slice: "12", - alloc_from_slice: {let a: &[char] = &['1', '2']; a}, - alloc_from_const_slice: {let a: &[char; 2] = &['1', '2']; a}, - alloc_from_arr: {let a: [char; 2] = ['1', '2']; a}, - - alloc_from_cgrb: {let a = ConstGenericRingBuffer::from(['1', '2']); a}, - alloc_from_garb: {let a = GrowableAllocRingBuffer::from(['1', '2']); a}, - ] => AllocRingBuffer::<_> -); - -convert_tests!( - [ - growable_alloc_from_vec: vec!['1', '2'], - growable_alloc_from_ll: {let mut l = LinkedList::new(); l.push_back('1'); l.push_back('2'); l}, - growable_alloc_from_vd: {let mut l = VecDeque::new(); l.push_back('1'); l.push_back('2'); l}, - growable_alloc_from_str: "12".to_string(), - growable_alloc_from_str_slice: "12", - growable_alloc_from_slice: {let a: &[char] = &['1', '2']; a}, - growable_alloc_from_const_slice: {let a: &[char; 2] = &['1', '2']; a}, - growable_alloc_from_arr: {let a: [char; 2] = ['1', '2']; a}, - - growable_alloc_from_cgrb: {let a = ConstGenericRingBuffer::from(['1', '2']); a}, - growable_alloc_from_arb: {let a = AllocRingBuffer::from(['1', '2']); a}, - ] => GrowableAllocRingBuffer::<_> -); - -convert_tests!( - [ - const_from_vec: vec!['1', '2'], - const_from_ll: {let mut l = LinkedList::new(); l.push_back('1'); l.push_back('2'); l}, - const_from_vd: {let mut l = VecDeque::new(); l.push_back('1'); l.push_back('2'); l}, - const_from_str: "12".to_string(), - const_from_str_slice: "12", - const_from_slice: {let a: &[char] = &['1', '2']; a}, - const_from_const_slice: {let a: &[char; 2] = &['1', '2']; a}, - const_from_arr: {let a: [char; 2] = ['1', '2']; a}, - - const_from_garb: {let a = GrowableAllocRingBuffer::from(['1', '2']); a}, - const_from_arb: {let a = AllocRingBuffer::from(['1', '2']); a}, - ] => ConstGenericRingBuffer::<_, 2> -); - -#[test] -fn test_extra_conversions_growable() { - let a: &mut [i32; 2] = &mut [1, 2]; - let a = GrowableAllocRingBuffer::from(a); - assert_eq!(a.to_vec(), vec![1, 2]); - - let a: &mut [i32] = &mut [1, 2]; - let a = GrowableAllocRingBuffer::from(a); - assert_eq!(a.to_vec(), vec![1, 2]); - - let mut b = VecDeque::::new(); - b.push_back(1); - b.push_back(2); - assert_eq!(a.deref(), &b); - assert_eq!(a.as_ref(), &b); -} - -#[test] -fn test_extra_conversions_alloc() { - let a: &mut [i32; 2] = &mut [1, 2]; - let a = AllocRingBuffer::from(a); - assert_eq!(a.to_vec(), vec![1, 2]); - - let a: &mut [i32] = &mut [1, 2]; - let a = AllocRingBuffer::from(a); - assert_eq!(a.to_vec(), vec![1, 2]); -} - -#[test] -fn test_extra_conversions_const() { - let a: &mut [i32; 2] = &mut [1, 2]; - let a = 
ConstGenericRingBuffer::<_, 2>::from(a); - assert_eq!(a.to_vec(), vec![1, 2]); - - let a: &mut [i32] = &mut [1, 2]; - let a = ConstGenericRingBuffer::<_, 2>::from(a); - assert_eq!(a.to_vec(), vec![1, 2]); -} - -#[test] -fn test_const_generic_new_parameter() { - // Can we specify size only on the method? - let mut a = ConstGenericRingBuffer::new::<2>(); - a.push(5); - - // Can we specify size in both positions? - let mut a = ConstGenericRingBuffer::::new::<50>(); - a.push(5); - - // Can we specify size only on the struct? - let mut a = ConstGenericRingBuffer::::new(); - a.push(5); -} From 14824ab37b1a86a32b00d6f42415ea9e95c0b740 Mon Sep 17 00:00:00 2001 From: jdonszelmann Date: Sat, 16 Sep 2023 11:09:23 +0200 Subject: [PATCH 5/8] more tests, miri doesn't mind --- src/lib.rs | 4 +- src/with_const_generics.rs | 96 ++++++++++++++++++++++---------------- 2 files changed, 59 insertions(+), 41 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index fa3dd17..9f0f09d 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1356,8 +1356,8 @@ mod tests { } test_fill(AllocRingBuffer::new(4)); - test_fill(GrowableAllocRingBuffer::with_capacity(4)); - test_fill(ConstGenericRingBuffer::::new()); + // test_fill(GrowableAllocRingBuffer::with_capacity(4)); + // test_fill(ConstGenericRingBuffer::::new()); } mod test_dropping { diff --git a/src/with_const_generics.rs b/src/with_const_generics.rs index e82e079..1690da6 100644 --- a/src/with_const_generics.rs +++ b/src/with_const_generics.rs @@ -428,6 +428,29 @@ impl ConstGenericRingBuffer { } } } + + /// # Safety + /// ONLY USE WHEN WORKING ON A CLEARED RINGBUFFER + unsafe fn finish_iter(&mut self, mut iter: impl Iterator) { + let mut index = 0; + for i in iter.by_ref() { + self.buf[index] = MaybeUninit::new(i); + index += 1; + + if index > CAP - 1 { + break; + } + } + + if index < CAP { + // we set writepointer to however many elements we managed to write (up to CAP-1) + // WARNING: ONLY WORKS WHEN WORKING ON A CLEARED RINGBUFFER + self.writeptr = index; + } else { + self.writeptr = CAP; + self.extend_batched::(iter); + } + } } impl Extend for ConstGenericRingBuffer { @@ -436,34 +459,10 @@ impl Extend for ConstGenericRingBuffer { fn extend>(&mut self, iter: A) { const BATCH_SIZE: usize = 128; - let mut iter = iter.into_iter(); + let iter = iter.into_iter(); let (lower, _) = iter.size_hint(); - // WARNING: ONLY USE WHEN WORKING ON A CLEARED RINGBUFFER - macro_rules! 
finish_iter { - ($iter: ident) => { - let mut index = 0; - while let Some(i) = $iter.next() { - self.buf[index] = MaybeUninit::new(i); - index += 1; - - if index > CAP - 1 { - break; - } - } - - if index < CAP { - // we set writepointer to however many elements we managed to write (up to CAP-1) - // WARNING: ONLY WORKS WHEN WORKING ON A CLEARED RINGBUFFER - self.writeptr = index; - } else { - self.writeptr = CAP; - self.extend_batched::($iter); - } - }; - } - if lower >= CAP { // if there are more elements in our iterator than we have size in the ringbuffer // drain the ringbuffer @@ -473,15 +472,15 @@ impl Extend for ConstGenericRingBuffer { // so we need to drop until the number of elements in the iterator is exactly CAP let num_we_can_drop = lower - CAP; - let mut iter = iter.skip(num_we_can_drop); + let iter = iter.skip(num_we_can_drop); - // WARNING: ONLY WORKS WHEN WORKING ON A CLEARED RINGBUFFER - finish_iter!(iter); + // Safety: clear above + unsafe { self.finish_iter::(iter) }; } else if self.is_empty() { self.clear(); - // WARNING: ONLY WORKS WHEN WORKING ON A CLEARED RINGBUFFER - finish_iter!(iter); + // Safety: clear above + unsafe { self.finish_iter::(iter) }; } else { self.extend_batched::(iter); } @@ -592,6 +591,7 @@ impl IndexMut for ConstGenericRingBuffer { #[cfg(test)] mod tests { use super::*; + use core::hint::black_box; use core::ops::Range; #[test] @@ -656,9 +656,9 @@ mod tests { } } - struct WeirdIterator(::IntoIter, SizeHint); + struct Weirderator(::IntoIter, SizeHint); - impl Iterator for WeirdIterator { + impl Iterator for Weirderator { type Item = ::Item; fn next(&mut self) -> Option { @@ -683,17 +683,35 @@ mod tests { Good, } - struct WeirdIntoIterator(pub T, SizeHint); + struct IntoWeirderator(pub T, SizeHint); - impl IntoIterator for WeirdIntoIterator + impl IntoIterator for IntoWeirderator where ::IntoIter: Sized, { type Item = ::Item; - type IntoIter = WeirdIterator; + type IntoIter = Weirderator; fn into_iter(self) -> Self::IntoIter { - WeirdIterator(self.0.into_iter(), self.1) + Weirderator(self.0.into_iter(), self.1) + } + } + + #[test] + // tests whether we correctly drop items when the batch crosses the boundary + fn boundary_drop_extend() { + for n in 50..300 { + let mut a = ConstGenericRingBuffer::<_, 128>::new(); + + for i in 0..n { + a.push(i); + } + + a.extend(0..n); + + for _ in 0..128 { + let _ = black_box(a.dequeue()); + } } } @@ -714,7 +732,7 @@ mod tests { rb.push(i); } - rb.extend(WeirdIterator::>(0..CAP, size)); + rb.extend(Weirderator::>(0..CAP, size)); rb.push(17); rb.push(18); rb.push(19); @@ -728,7 +746,7 @@ mod tests { rb.push(i); } - rb.extend(WeirdIterator::>(0..(CAP + 1), size)); + rb.extend(Weirderator::>(0..(CAP + 1), size)); rb.push(18); rb.push(19); @@ -741,7 +759,7 @@ mod tests { rb.push(i); } - rb.extend(WeirdIterator::>(0..(CAP + 2), size)); + rb.extend(Weirderator::>(0..(CAP + 2), size)); rb.push(19); for _ in 0..CAP { From 90675b6897e9dd80c16e3343294a72f38d7e044d Mon Sep 17 00:00:00 2001 From: jdonszelmann Date: Sat, 16 Sep 2023 12:46:25 +0200 Subject: [PATCH 6/8] bench after one --- benches/bench.rs | 13 +++++++++++++ src/with_const_generics.rs | 4 +++- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/benches/bench.rs b/benches/bench.rs index 04dc86d..784283a 100644 --- a/benches/bench.rs +++ b/benches/bench.rs @@ -186,6 +186,7 @@ fn criterion_benchmark(c: &mut Criterion) { c.bench_function("extend many too many", extend_many_too_many); c.bench_function("extend exact cap", extend_exact_cap); 
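// together these cases exercise size hints below, at, and above the 8192
// capacity; the "extend after one" case added here additionally starts from
// a non-empty buffer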
c.bench_function("extend too few", extend_too_few); + c.bench_function("extend after one", extend_after_one); } fn extend_many_too_many(b: &mut Bencher) { @@ -232,5 +233,17 @@ fn extend_too_few(b: &mut Bencher) { ); } +fn extend_after_one(b: &mut Bencher) { + let mut rb = ConstGenericRingBuffer::new::<8192>(); + rb.push(0); + let input = (0..4096).collect::>(); + + b.iter_batched( + &|| rb.clone(), + |mut r| black_box(r.extend(black_box(input.as_slice()))), + BatchSize::LargeInput, + ); +} + criterion_group!(benches, criterion_benchmark); criterion_main!(benches); diff --git a/src/with_const_generics.rs b/src/with_const_generics.rs index 1690da6..aadbb38 100644 --- a/src/with_const_generics.rs +++ b/src/with_const_generics.rs @@ -431,6 +431,7 @@ impl ConstGenericRingBuffer { /// # Safety /// ONLY USE WHEN WORKING ON A CLEARED RINGBUFFER + #[inline] unsafe fn finish_iter(&mut self, mut iter: impl Iterator) { let mut index = 0; for i in iter.by_ref() { @@ -457,7 +458,8 @@ impl Extend for ConstGenericRingBuffer { /// NOTE: correctness (but not soundness) of extend depends on `size_hint` on iter being correct. #[inline] fn extend>(&mut self, iter: A) { - const BATCH_SIZE: usize = 128; + const BATCH_SIZE: usize = 1; + // const BATCH_SIZE: usize = 1024; let iter = iter.into_iter(); From cf0437d4ff2d4cab26ddd0f2393ba61073c9c88e Mon Sep 17 00:00:00 2001 From: jdonszelmann Date: Sat, 16 Sep 2023 13:39:02 +0200 Subject: [PATCH 7/8] batched extend implemented --- Cargo.toml | 4 +- benches/bench.rs | 2 +- src/lib.rs | 4 +- src/with_const_generics.rs | 54 ++++--- .../test_const_generic_array_zero_length.rs | 9 ++ ...est_const_generic_array_zero_length_new.rs | 10 ++ tests/compiletests.rs | 23 +++ tests/conversions.rs | 135 ++++++++++++++++++ 8 files changed, 217 insertions(+), 24 deletions(-) create mode 100644 tests/compile-fail/test_const_generic_array_zero_length.rs create mode 100644 tests/compile-fail/test_const_generic_array_zero_length_new.rs create mode 100644 tests/compiletests.rs create mode 100644 tests/conversions.rs diff --git a/Cargo.toml b/Cargo.toml index c883c99..dfe9460 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,10 +18,12 @@ criterion = "0.4.0" compiletest_rs = "0.10.0" [features] -default = ["alloc"] +default = ["alloc", "batched_extend"] # disable the alloc based ringbuffer, to make RingBuffers work in no_alloc environments alloc = [] +batched_extend = [] + [[bench]] name = "bench" harness = false diff --git a/benches/bench.rs b/benches/bench.rs index 784283a..3289d0e 100644 --- a/benches/bench.rs +++ b/benches/bench.rs @@ -235,7 +235,7 @@ fn extend_too_few(b: &mut Bencher) { fn extend_after_one(b: &mut Bencher) { let mut rb = ConstGenericRingBuffer::new::<8192>(); - rb.push(0); + rb.push(&0); let input = (0..4096).collect::>(); b.iter_batched( diff --git a/src/lib.rs b/src/lib.rs index 9f0f09d..fa3dd17 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1356,8 +1356,8 @@ mod tests { } test_fill(AllocRingBuffer::new(4)); - // test_fill(GrowableAllocRingBuffer::with_capacity(4)); - // test_fill(ConstGenericRingBuffer::::new()); + test_fill(GrowableAllocRingBuffer::with_capacity(4)); + test_fill(ConstGenericRingBuffer::::new()); } mod test_dropping { diff --git a/src/with_const_generics.rs b/src/with_const_generics.rs index aadbb38..c4d9ad8 100644 --- a/src/with_const_generics.rs +++ b/src/with_const_generics.rs @@ -248,6 +248,7 @@ impl ConstGenericRingBuffer { /// # Safety /// Only safe when old != new #[inline] + #[cfg(feature = "batched_extend")] unsafe fn split_pointer_move( &mut 
self, old: usize, @@ -273,6 +274,7 @@ impl ConstGenericRingBuffer { /// # Safety /// Only safe when `CAP` >= `BATCH_SIZE` #[inline] + #[cfg(feature = "batched_extend")] unsafe fn extend_from_arr_batch(&mut self, data: [T; BATCH_SIZE]) { debug_assert!(CAP >= BATCH_SIZE); @@ -374,6 +376,7 @@ impl ConstGenericRingBuffer { } #[inline] + #[cfg(feature = "batched_extend")] fn fill_batch( batch: &mut [MaybeUninit; BATCH_SIZE], iter: &mut impl Iterator, @@ -390,6 +393,7 @@ impl ConstGenericRingBuffer { } #[inline] + #[cfg(feature = "batched_extend")] fn extend_batched(&mut self, mut other: impl Iterator) { // SAFETY: if CAP < Self::BATCH_SIZE we can't run extend_from_arr_batch so we catch that here if CAP < BATCH_SIZE { @@ -431,6 +435,7 @@ impl ConstGenericRingBuffer { /// # Safety /// ONLY USE WHEN WORKING ON A CLEARED RINGBUFFER + #[cfg(feature = "batched_extend")] #[inline] unsafe fn finish_iter(&mut self, mut iter: impl Iterator) { let mut index = 0; @@ -458,33 +463,42 @@ impl Extend for ConstGenericRingBuffer { /// NOTE: correctness (but not soundness) of extend depends on `size_hint` on iter being correct. #[inline] fn extend>(&mut self, iter: A) { - const BATCH_SIZE: usize = 1; - // const BATCH_SIZE: usize = 1024; + #[cfg(not(feature = "batched_extend"))] + { + for i in iter { + self.push(i); + } + } - let iter = iter.into_iter(); + #[cfg(feature = "batched_extend")] + { + const BATCH_SIZE: usize = 30; - let (lower, _) = iter.size_hint(); + let iter = iter.into_iter(); - if lower >= CAP { - // if there are more elements in our iterator than we have size in the ringbuffer - // drain the ringbuffer - self.clear(); + let (lower, _) = iter.size_hint(); - // we need exactly CAP elements. - // so we need to drop until the number of elements in the iterator is exactly CAP - let num_we_can_drop = lower - CAP; + if lower >= CAP { + // if there are more elements in our iterator than we have size in the ringbuffer + // drain the ringbuffer + self.clear(); - let iter = iter.skip(num_we_can_drop); + // we need exactly CAP elements. 
+ // so we need to drop until the number of elements in the iterator is exactly CAP + let num_we_can_drop = lower - CAP; - // Safety: clear above - unsafe { self.finish_iter::(iter) }; - } else if self.is_empty() { - self.clear(); + let iter = iter.skip(num_we_can_drop); - // Safety: clear above - unsafe { self.finish_iter::(iter) }; - } else { - self.extend_batched::(iter); + // Safety: clear above + unsafe { self.finish_iter::(iter) }; + } else if self.is_empty() { + self.clear(); + + // Safety: clear above + unsafe { self.finish_iter::(iter) }; + } else { + self.extend_batched::(iter); + } } } } diff --git a/tests/compile-fail/test_const_generic_array_zero_length.rs b/tests/compile-fail/test_const_generic_array_zero_length.rs new file mode 100644 index 0000000..3b69f1a --- /dev/null +++ b/tests/compile-fail/test_const_generic_array_zero_length.rs @@ -0,0 +1,9 @@ +extern crate ringbuffer; + +use ringbuffer::ConstGenericRingBuffer; + +fn main() { + let _ = ConstGenericRingBuffer::::new(); + //~^ note: the above error was encountered while instantiating `fn ringbuffer::ConstGenericRingBuffer::::new::<0>` + // ringbuffer can't be zero length +} diff --git a/tests/compile-fail/test_const_generic_array_zero_length_new.rs b/tests/compile-fail/test_const_generic_array_zero_length_new.rs new file mode 100644 index 0000000..b080de1 --- /dev/null +++ b/tests/compile-fail/test_const_generic_array_zero_length_new.rs @@ -0,0 +1,10 @@ +extern crate ringbuffer; + +use ringbuffer::{ConstGenericRingBuffer, RingBuffer}; + +fn main() { + let mut buf = ConstGenericRingBuffer::new::<0>(); + //~^ note: the above error was encountered while instantiating `fn ringbuffer::ConstGenericRingBuffer::::new::<0>` + // ringbuffer can't be zero length + buf.push(5); +} diff --git a/tests/compiletests.rs b/tests/compiletests.rs new file mode 100644 index 0000000..f48163e --- /dev/null +++ b/tests/compiletests.rs @@ -0,0 +1,23 @@ +extern crate compiletest_rs as compiletest; + +use std::path::PathBuf; + +#[cfg(test)] +mod conversions; + +fn run_mode(mode: &'static str) { + let mut config = compiletest::Config::default(); + + config.mode = mode.parse().expect("Invalid mode"); + config.src_base = PathBuf::from(format!("tests/{}", mode)); + config.link_deps(); // Populate config.target_rustcflags with dependencies on the path + config.clean_rmeta(); // If your tests import the parent crate, this helps with E0464 + + compiletest::run_tests(&config); +} + +#[test] +#[cfg_attr(miri, ignore)] +fn compile_test() { + run_mode("compile-fail"); +} diff --git a/tests/conversions.rs b/tests/conversions.rs new file mode 100644 index 0000000..e3c13a2 --- /dev/null +++ b/tests/conversions.rs @@ -0,0 +1,135 @@ +extern crate alloc; + +use alloc::collections::{LinkedList, VecDeque}; +use alloc::string::ToString; +use core::ops::Deref; +use ringbuffer::RingBuffer; +use ringbuffer::{AllocRingBuffer, ConstGenericRingBuffer, GrowableAllocRingBuffer}; +use std::vec; + +macro_rules! convert_test { + ($name: ident: $from: expr => $to: ty) => { + #[test] + fn $name() { + let a = $from; + + let mut b: $to = a.into(); + assert_eq!(b.to_vec(), vec!['1', '2']); + b.push('3'); + assert_eq!(b, b); + } + }; +} + +macro_rules! convert_tests { + ( + [$($name: ident: $from: expr),* $(,)?] 
+ => $to: ty + ) => { + $( + convert_test!($name: $from => $to); + )* + }; +} + +convert_tests!( + [ + alloc_from_vec: vec!['1', '2'], + alloc_from_ll: {let mut l = LinkedList::new(); l.push_back('1'); l.push_back('2'); l}, + alloc_from_vd: {let mut l = VecDeque::new(); l.push_back('1'); l.push_back('2'); l}, + alloc_from_str: "12".to_string(), + alloc_from_str_slice: "12", + alloc_from_slice: {let a: &[char] = &['1', '2']; a}, + alloc_from_const_slice: {let a: &[char; 2] = &['1', '2']; a}, + alloc_from_arr: {let a: [char; 2] = ['1', '2']; a}, + + alloc_from_cgrb: {let a = ConstGenericRingBuffer::from(['1', '2']); a}, + alloc_from_garb: {let a = GrowableAllocRingBuffer::from(['1', '2']); a}, + ] => AllocRingBuffer::<_> +); + +convert_tests!( + [ + growable_alloc_from_vec: vec!['1', '2'], + growable_alloc_from_ll: {let mut l = LinkedList::new(); l.push_back('1'); l.push_back('2'); l}, + growable_alloc_from_vd: {let mut l = VecDeque::new(); l.push_back('1'); l.push_back('2'); l}, + growable_alloc_from_str: "12".to_string(), + growable_alloc_from_str_slice: "12", + growable_alloc_from_slice: {let a: &[char] = &['1', '2']; a}, + growable_alloc_from_const_slice: {let a: &[char; 2] = &['1', '2']; a}, + growable_alloc_from_arr: {let a: [char; 2] = ['1', '2']; a}, + + growable_alloc_from_cgrb: {let a = ConstGenericRingBuffer::from(['1', '2']); a}, + growable_alloc_from_arb: {let a = AllocRingBuffer::from(['1', '2']); a}, + ] => GrowableAllocRingBuffer::<_> +); + +convert_tests!( + [ + const_from_vec: vec!['1', '2'], + const_from_ll: {let mut l = LinkedList::new(); l.push_back('1'); l.push_back('2'); l}, + const_from_vd: {let mut l = VecDeque::new(); l.push_back('1'); l.push_back('2'); l}, + const_from_str: "12".to_string(), + const_from_str_slice: "12", + const_from_slice: {let a: &[char] = &['1', '2']; a}, + const_from_const_slice: {let a: &[char; 2] = &['1', '2']; a}, + const_from_arr: {let a: [char; 2] = ['1', '2']; a}, + + const_from_garb: {let a = GrowableAllocRingBuffer::from(['1', '2']); a}, + const_from_arb: {let a = AllocRingBuffer::from(['1', '2']); a}, + ] => ConstGenericRingBuffer::<_, 2> +); + +#[test] +fn test_extra_conversions_growable() { + let a: &mut [i32; 2] = &mut [1, 2]; + let a = GrowableAllocRingBuffer::from(a); + assert_eq!(a.to_vec(), vec![1, 2]); + + let a: &mut [i32] = &mut [1, 2]; + let a = GrowableAllocRingBuffer::from(a); + assert_eq!(a.to_vec(), vec![1, 2]); + + let mut b = VecDeque::::new(); + b.push_back(1); + b.push_back(2); + assert_eq!(a.deref(), &b); + assert_eq!(a.as_ref(), &b); +} + +#[test] +fn test_extra_conversions_alloc() { + let a: &mut [i32; 2] = &mut [1, 2]; + let a = AllocRingBuffer::from(a); + assert_eq!(a.to_vec(), vec![1, 2]); + + let a: &mut [i32] = &mut [1, 2]; + let a = AllocRingBuffer::from(a); + assert_eq!(a.to_vec(), vec![1, 2]); +} + +#[test] +fn test_extra_conversions_const() { + let a: &mut [i32; 2] = &mut [1, 2]; + let a = ConstGenericRingBuffer::<_, 2>::from(a); + assert_eq!(a.to_vec(), vec![1, 2]); + + let a: &mut [i32] = &mut [1, 2]; + let a = ConstGenericRingBuffer::<_, 2>::from(a); + assert_eq!(a.to_vec(), vec![1, 2]); +} + +#[test] +fn test_const_generic_new_parameter() { + // Can we specify size only on the method? + let mut a = ConstGenericRingBuffer::new::<2>(); + a.push(5); + + // Can we specify size in both positions? + let mut a = ConstGenericRingBuffer::::new::<50>(); + a.push(5); + + // Can we specify size only on the struct? 
+ let mut a = ConstGenericRingBuffer::::new(); + a.push(5); +} From fbb1f31373bb1526554bd141d004c0dd9290ae15 Mon Sep 17 00:00:00 2001 From: jdonszelmann Date: Sat, 16 Sep 2023 13:49:40 +0200 Subject: [PATCH 8/8] fix docs --- src/ringbuffer_trait.rs | 9 +++------ src/with_const_generics.rs | 35 +++++++++++++++++++++++++---------- 2 files changed, 28 insertions(+), 16 deletions(-) diff --git a/src/ringbuffer_trait.rs b/src/ringbuffer_trait.rs index 3ccb424..854be52 100644 --- a/src/ringbuffer_trait.rs +++ b/src/ringbuffer_trait.rs @@ -95,12 +95,9 @@ pub unsafe trait RingBuffer: /// Iterates over the slice `other`, clones each element, and then appends /// it to this `RingBuffer`. The `other` slice is traversed in-order. /// - /// Depending on the `RingBuffer` implementation, may be faster than inserting items in a loop - /// - /// Note that this function is same as [`extend`] except that it is - /// specialized to work with slices instead. If and when Rust gets - /// specialization this function will likely be deprecated (but still - /// available). + /// Depending on the `RingBuffer` implementation, may be faster than inserting items in a loop. + /// `ConstGenericRingBuffer` is especially optimised in this regard. + /// See also: [`ConstGenericRingBuffer::custom_extend_batched`](crate::with_const_generics::ConstGenericRingBuffer::custom_extend_batched) /// /// # Examples /// diff --git a/src/with_const_generics.rs b/src/with_const_generics.rs index c4d9ad8..b65860a 100644 --- a/src/with_const_generics.rs +++ b/src/with_const_generics.rs @@ -394,7 +394,10 @@ impl ConstGenericRingBuffer { #[inline] #[cfg(feature = "batched_extend")] - fn extend_batched(&mut self, mut other: impl Iterator) { + fn extend_batched_internal( + &mut self, + mut other: impl Iterator, + ) { // SAFETY: if CAP < Self::BATCH_SIZE we can't run extend_from_arr_batch so we catch that here if CAP < BATCH_SIZE { for i in other { @@ -454,15 +457,18 @@ impl ConstGenericRingBuffer { self.writeptr = index; } else { self.writeptr = CAP; - self.extend_batched::(iter); + self.extend_batched_internal::(iter); } } -} -impl Extend for ConstGenericRingBuffer { - /// NOTE: correctness (but not soundness) of extend depends on `size_hint` on iter being correct. - #[inline] - fn extend>(&mut self, iter: A) { + /// Alias of [`Extend::extend`](ConstGenericRingBuffer::extend) but can take a custom batch size. + /// + /// We found that `30` works well for us, which is the batch size we use in `extend`, + /// but on different architectures this may not be true. + pub fn custom_extend_batched( + &mut self, + iter: impl IntoIterator, + ) { #[cfg(not(feature = "batched_extend"))] { for i in iter { @@ -472,8 +478,6 @@ impl Extend for ConstGenericRingBuffer { #[cfg(feature = "batched_extend")] { - const BATCH_SIZE: usize = 30; - let iter = iter.into_iter(); let (lower, _) = iter.size_hint(); @@ -497,12 +501,23 @@ impl Extend for ConstGenericRingBuffer { // Safety: clear above unsafe { self.finish_iter::(iter) }; } else { - self.extend_batched::(iter); + self.extend_batched_internal::(iter); } } } } +impl Extend for ConstGenericRingBuffer { + /// NOTE: correctness (but not soundness) of extend depends on `size_hint` on iter being correct. + #[inline] + fn extend>(&mut self, iter: A) { + /// good number, found through benchmarking. 
/// gives ~30% performance boost over not batching
const BATCH_SIZE: usize = 30;
self.custom_extend_batched::<BATCH_SIZE>(iter);
}
}

unsafe impl<T, const CAP: usize> RingBuffer<T> for ConstGenericRingBuffer<T, CAP> {
#[inline]
unsafe fn ptr_capacity(_: *const Self) -> usize {
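// a const-generic ringbuffer's capacity is the `CAP` parameter itself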