From 9c4bddfa2c9ac7a2c76018b11401132d783b70bb Mon Sep 17 00:00:00 2001 From: onestacked Date: Sat, 25 Feb 2023 17:33:58 +0100 Subject: [PATCH] make `slice::Iter` ~const Iterator. --- library/core/src/iter/traits/double_ended.rs | 22 ++ library/core/src/lib.rs | 2 + library/core/src/slice/iter.rs | 33 ++- library/core/src/slice/iter/macros.rs | 283 ++++++++++++++++++- library/core/src/slice/mod.rs | 3 +- library/core/tests/iter/mod.rs | 47 +++ library/core/tests/lib.rs | 1 + 7 files changed, 369 insertions(+), 22 deletions(-) diff --git a/library/core/src/iter/traits/double_ended.rs b/library/core/src/iter/traits/double_ended.rs index 7a10dea500a96..d5962325c0f3b 100644 --- a/library/core/src/iter/traits/double_ended.rs +++ b/library/core/src/iter/traits/double_ended.rs @@ -370,6 +370,28 @@ pub trait DoubleEndedIterator: Iterator { } #[stable(feature = "rust1", since = "1.0.0")] +#[rustc_const_unstable(feature = "const_iter", issue = "92476")] +#[cfg(not(bootstrap))] +impl<'a, I: ~const DoubleEndedIterator + ?Sized> const DoubleEndedIterator for &'a mut I { + fn next_back(&mut self) -> Option { + (**self).next_back() + } + fn advance_back_by(&mut self, n: usize) -> Result<(), usize> + where + Self::Item: ~const Destruct, + { + (**self).advance_back_by(n) + } + fn nth_back(&mut self, n: usize) -> Option + where + Self::Item: ~const Destruct, + { + (**self).nth_back(n) + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +#[cfg(bootstrap)] impl<'a, I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for &'a mut I { fn next_back(&mut self) -> Option { (**self).next_back() diff --git a/library/core/src/lib.rs b/library/core/src/lib.rs index 1076d357070ef..045a0c635df70 100644 --- a/library/core/src/lib.rs +++ b/library/core/src/lib.rs @@ -98,6 +98,7 @@ #![warn(multiple_supertrait_upcastable)] // // Library features: +#![feature(const_assume)] #![feature(const_align_offset)] #![feature(const_align_of_val)] #![feature(const_align_of_val_raw)] @@ -143,6 +144,7 @@ 
#![feature(const_ptr_sub_ptr)] #![feature(const_replace)] #![feature(const_result_drop)] +#![feature(const_pointer_byte_offsets)] #![feature(const_ptr_as_ref)] #![feature(const_ptr_is_null)] #![feature(const_ptr_read)] diff --git a/library/core/src/slice/iter.rs b/library/core/src/slice/iter.rs index c4317799bcc68..bd10f795db667 100644 --- a/library/core/src/slice/iter.rs +++ b/library/core/src/slice/iter.rs @@ -4,12 +4,13 @@ mod macros; use crate::cmp; -use crate::cmp::Ordering; use crate::fmt; use crate::intrinsics::assume; use crate::iter::{ FusedIterator, TrustedLen, TrustedRandomAccess, TrustedRandomAccessNoCoerce, UncheckedIterator, }; +#[cfg(not(bootstrap))] +use crate::marker::Destruct; use crate::marker::{PhantomData, Send, Sized, Sync}; use crate::mem::{self, SizedTypeProperties}; use crate::num::NonZeroUsize; @@ -18,7 +19,8 @@ use crate::ptr::NonNull; use super::{from_raw_parts, from_raw_parts_mut}; #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T> IntoIterator for &'a [T] { +#[rustc_const_unstable(feature = "const_iter", issue = "92476")] +impl<'a, T> const IntoIterator for &'a [T] { type Item = &'a T; type IntoIter = Iter<'a, T>; @@ -80,8 +82,9 @@ unsafe impl Sync for Iter<'_, T> {} unsafe impl Send for Iter<'_, T> {} impl<'a, T> Iter<'a, T> { + #[rustc_const_unstable(feature = "const_iter", issue = "92476")] #[inline] - pub(super) fn new(slice: &'a [T]) -> Self { + pub(super) const fn new(slice: &'a [T]) -> Self { let ptr = slice.as_ptr(); // SAFETY: Similar to `IterMut::new`. unsafe { @@ -121,21 +124,23 @@ impl<'a, T> Iter<'a, T> { #[must_use] #[stable(feature = "iter_to_slice", since = "1.4.0")] #[inline] - pub fn as_slice(&self) -> &'a [T] { + #[rustc_const_unstable(feature = "const_iter", issue = "92476")] + pub const fn as_slice(&self) -> &'a [T] { self.make_slice() } } iterator! 
{struct Iter -> *const T, &'a T, const, {/* no mut */}, { - fn is_sorted_by(self, mut compare: F) -> bool - where - Self: Sized, - F: FnMut(&Self::Item, &Self::Item) -> Option, - { - self.as_slice().windows(2).all(|w| { - compare(&&w[0], &&w[1]).map(|o| o != Ordering::Greater).unwrap_or(false) - }) - } + // FIXME(const_trait_impl) + // fn is_sorted_by(self, mut compare: F) -> bool + // where + // Self: Sized, + // F: FnMut(&Self::Item, &Self::Item) -> Option, + // { + // self.as_slice().windows(2).all(|w| { + // compare(&&w[0], &&w[1]).map(|o| o != Ordering::Greater).unwrap_or(false) + // }) + // } }} #[stable(feature = "rust1", since = "1.0.0")] @@ -202,7 +207,7 @@ unsafe impl Send for IterMut<'_, T> {} impl<'a, T> IterMut<'a, T> { #[inline] - pub(super) fn new(slice: &'a mut [T]) -> Self { + pub(super) const fn new(slice: &'a mut [T]) -> Self { let ptr = slice.as_mut_ptr(); // SAFETY: There are several things here: // diff --git a/library/core/src/slice/iter/macros.rs b/library/core/src/slice/iter/macros.rs index 89b92a7d5975f..cf61fe834d24f 100644 --- a/library/core/src/slice/iter/macros.rs +++ b/library/core/src/slice/iter/macros.rs @@ -5,7 +5,7 @@ macro_rules! is_empty { // The way we encode the length of a ZST iterator, this works both for ZST // and non-ZST. ($self: ident) => { - $self.ptr.as_ptr() as *const T == $self.end + todo!("blocked on #92512") }; } @@ -15,9 +15,7 @@ macro_rules! len { let start = $self.ptr; if T::IS_ZST { - // This _cannot_ use `ptr_sub` because we depend on wrapping - // to represent the length of long ZST slice iterators. - $self.end.addr().wrapping_sub(start.as_ptr().addr()) + todo!("blocked on #92512") } else { // To get rid of some bounds checks (see `position`), we use ptr_sub instead of // offset_from (Tested by `codegen/slice-position-bounds-check`.) @@ -61,7 +59,7 @@ macro_rules! iterator { impl<'a, T> $name<'a, T> { // Helper function for creating a slice from the iterator. 
#[inline(always)] - fn make_slice(&self) -> &'a [T] { + const fn make_slice(&self) -> &'a [T] { // SAFETY: the iterator was created from a slice with pointer // `self.ptr` and length `len!(self)`. This guarantees that all // the prerequisites for `from_raw_parts` are fulfilled. @@ -72,7 +70,7 @@ macro_rules! iterator { // returning the old start. // Unsafe because the offset must not exceed `self.len()`. #[inline(always)] - unsafe fn post_inc_start(&mut self, offset: usize) -> * $raw_mut T { + const unsafe fn post_inc_start(&mut self, offset: usize) -> * $raw_mut T { if mem::size_of::() == 0 { zst_shrink!(self, offset); self.ptr.as_ptr() @@ -89,7 +87,7 @@ macro_rules! iterator { // returning the new end. // Unsafe because the offset must not exceed `self.len()`. #[inline(always)] - unsafe fn pre_dec_end(&mut self, offset: usize) -> * $raw_mut T { + const unsafe fn pre_dec_end(&mut self, offset: usize) -> * $raw_mut T { if T::IS_ZST { zst_shrink!(self, offset); self.ptr.as_ptr() @@ -117,6 +115,276 @@ macro_rules! iterator { } #[stable(feature = "rust1", since = "1.0.0")] + #[rustc_const_unstable(feature = "const_iter", issue = "92476")] + #[cfg(not(bootstrap))] + impl<'a, T> const Iterator for $name<'a, T> { + type Item = $elem; + + #[inline] + fn next(&mut self) -> Option<$elem> { + // could be implemented with slices, but this avoids bounds checks + + // SAFETY: `assume` calls are safe since a slice's start pointer + // must be non-null, and slices over non-ZSTs must also have a + // non-null end pointer. The call to `next_unchecked!` is safe + // since we check if the iterator is empty first. 
+ unsafe { + assume(!self.ptr.as_ptr().is_null()); + if !::IS_ZST { + assume(!self.end.is_null()); + } + if is_empty!(self) { + None + } else { + Some(next_unchecked!(self)) + } + } + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + let exact = len!(self); + (exact, Some(exact)) + } + + #[inline] + fn count(self) -> usize { + len!(self) + } + + #[inline] + fn nth(&mut self, n: usize) -> Option<$elem> { + if n >= len!(self) { + // This iterator is now empty. + if T::IS_ZST { + // We have to do it this way as `ptr` may never be 0, but `end` + // could be (due to wrapping). + self.end = self.ptr.as_ptr(); + } else { + // SAFETY: end can't be 0 if T isn't ZST because ptr isn't 0 and end >= ptr + unsafe { + self.ptr = NonNull::new_unchecked(self.end as *mut T); + } + } + return None; + } + // SAFETY: We are in bounds. `post_inc_start` does the right thing even for ZSTs. + unsafe { + self.post_inc_start(n); + Some(next_unchecked!(self)) + } + } + + #[inline] + fn advance_by(&mut self, n: usize) -> Result<(), usize> { + let advance = cmp::min(len!(self), n); + // SAFETY: By construction, `advance` does not exceed `self.len()`. + unsafe { self.post_inc_start(advance) }; + if advance == n { Ok(()) } else { Err(advance) } + } + + #[inline] + fn last(mut self) -> Option<$elem> { + self.next_back() + } + + // We override the default implementation, which uses `try_fold`, + // because this simple implementation generates less LLVM IR and is + // faster to compile. + #[inline] + fn for_each(mut self, mut f: F) + where + Self: Sized, + F: ~const FnMut(Self::Item) + ~const Destruct, + { + while let Some(x) = self.next() { + f(x); + } + } + + // We override the default implementation, which uses `try_fold`, + // because this simple implementation generates less LLVM IR and is + // faster to compile. 
+ #[inline] + fn all(&mut self, mut f: F) -> bool + where + Self: Sized, + F: ~const FnMut(Self::Item) -> bool+ ~const Destruct, + { + while let Some(x) = self.next() { + if !f(x) { + return false; + } + } + true + } + + // We override the default implementation, which uses `try_fold`, + // because this simple implementation generates less LLVM IR and is + // faster to compile. + #[inline] + fn any(&mut self, mut f: F) -> bool + where + Self: Sized, + F: ~const FnMut(Self::Item) -> bool + ~const Destruct, + { + while let Some(x) = self.next() { + if f(x) { + return true; + } + } + false + } + + // We override the default implementation, which uses `try_fold`, + // because this simple implementation generates less LLVM IR and is + // faster to compile. + #[inline] + fn find
<P>
(&mut self, mut predicate: P) -> Option + where + Self: Sized, + P: ~const FnMut(&Self::Item) -> bool + ~const Destruct, + { + while let Some(x) = self.next() { + if predicate(&x) { + return Some(x); + } + } + None + } + + // We override the default implementation, which uses `try_fold`, + // because this simple implementation generates less LLVM IR and is + // faster to compile. + #[inline] + fn find_map(&mut self, mut f: F) -> Option + where + Self: Sized, + F: ~const FnMut(Self::Item) -> Option + ~const Destruct, + B: ~const Destruct + { + while let Some(x) = self.next() { + if let Some(y) = f(x) { + return Some(y); + } + } + None + } + + // We override the default implementation, which uses `try_fold`, + // because this simple implementation generates less LLVM IR and is + // faster to compile. Also, the `assume` avoids a bounds check. + #[inline] + #[rustc_inherit_overflow_checks] + fn position
<P>
(&mut self, mut predicate: P) -> Option where + Self: Sized, + P: ~const FnMut(Self::Item) -> bool + ~const Destruct, + { + let n = len!(self); + let mut i = 0; + while let Some(x) = self.next() { + if predicate(x) { + // SAFETY: we are guaranteed to be in bounds by the loop invariant: + // when `i >= n`, `self.next()` returns `None` and the loop breaks. + unsafe { assume(i < n) }; + return Some(i); + } + i += 1; + } + None + } + + // We override the default implementation, which uses `try_fold`, + // because this simple implementation generates less LLVM IR and is + // faster to compile. Also, the `assume` avoids a bounds check. + #[inline] + fn rposition
<P>
(&mut self, mut predicate: P) -> Option where + P: ~const FnMut(Self::Item) -> bool+ ~const Destruct + ~const Destruct, + Self: Sized + ExactSizeIterator + ~const DoubleEndedIterator, + Self::Item: ~const Destruct + { + let n = len!(self); + let mut i = n; + while let Some(x) = self.next_back() { + i -= 1; + if predicate(x) { + // SAFETY: `i` must be lower than `n` since it starts at `n` + // and is only decreasing. + unsafe { assume(i < n) }; + return Some(i); + } + } + None + } + + #[inline] + unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item { + // SAFETY: the caller must guarantee that `i` is in bounds of + // the underlying slice, so `i` cannot overflow an `isize`, and + // the returned references is guaranteed to refer to an element + // of the slice and thus guaranteed to be valid. + // + // Also note that the caller also guarantees that we're never + // called with the same index again, and that no other methods + // that will access this subslice are called, so it is valid + // for the returned reference to be mutable in the case of + // `IterMut` + unsafe { & $( $mut_ )? * self.ptr.as_ptr().add(idx) } + } + + $($extra)* + } + + #[stable(feature = "rust1", since = "1.0.0")] + #[rustc_const_unstable(feature = "const_iter", issue = "92476")] + #[cfg(not(bootstrap))] + impl<'a, T> const DoubleEndedIterator for $name<'a, T> { + #[inline] + fn next_back(&mut self) -> Option<$elem> { + // could be implemented with slices, but this avoids bounds checks + + // SAFETY: `assume` calls are safe since a slice's start pointer must be non-null, + // and slices over non-ZSTs must also have a non-null end pointer. + // The call to `next_back_unchecked!` is safe since we check if the iterator is + // empty first. 
+ unsafe { + assume(!self.ptr.as_ptr().is_null()); + if !::IS_ZST { + assume(!self.end.is_null()); + } + if is_empty!(self) { + None + } else { + Some(next_back_unchecked!(self)) + } + } + } + + #[inline] + fn nth_back(&mut self, n: usize) -> Option<$elem> { + if n >= len!(self) { + // This iterator is now empty. + self.end = self.ptr.as_ptr(); + return None; + } + // SAFETY: We are in bounds. `pre_dec_end` does the right thing even for ZSTs. + unsafe { + self.pre_dec_end(n); + Some(next_back_unchecked!(self)) + } + } + + #[inline] + fn advance_back_by(&mut self, n: usize) -> Result<(), usize> { + let advance = cmp::min(len!(self), n); + // SAFETY: By construction, `advance` does not exceed `self.len()`. + unsafe { self.pre_dec_end(advance) }; + if advance == n { Ok(()) } else { Err(advance) } + } + } + + #[stable(feature = "rust1", since = "1.0.0")] + #[cfg(bootstrap)] impl<'a, T> Iterator for $name<'a, T> { type Item = $elem; @@ -334,6 +602,7 @@ macro_rules! iterator { } #[stable(feature = "rust1", since = "1.0.0")] + #[cfg(bootstrap)] impl<'a, T> DoubleEndedIterator for $name<'a, T> { #[inline] fn next_back(&mut self) -> Option<$elem> { diff --git a/library/core/src/slice/mod.rs b/library/core/src/slice/mod.rs index d319b2bc37fdd..fd8a6a5c3a07d 100644 --- a/library/core/src/slice/mod.rs +++ b/library/core/src/slice/mod.rs @@ -737,8 +737,9 @@ impl [T] { /// assert_eq!(iterator.next(), None); /// ``` #[stable(feature = "rust1", since = "1.0.0")] + #[rustc_const_unstable(feature = "const_iter", issue = "92476")] #[inline] - pub fn iter(&self) -> Iter<'_, T> { + pub const fn iter(&self) -> Iter<'_, T> { Iter::new(self) } diff --git a/library/core/tests/iter/mod.rs b/library/core/tests/iter/mod.rs index cbb18e79e2d43..5d63cecc97cbb 100644 --- a/library/core/tests/iter/mod.rs +++ b/library/core/tests/iter/mod.rs @@ -102,3 +102,50 @@ pub fn extend_for_unit() { } assert_eq!(x, 5); } + +#[test] +#[cfg(not(bootstrap))] +fn test_const_iter_slice() { + const ARRAY: [usize; 
5] = [1, 2, 3, 4, 5]; + const X: usize = { + let mut sum = 0; + for val in ARRAY.iter() { + sum += val; + } + sum + }; + //FIXME(const_trait_impl): Change to .sum once it is const + const fn sum(a: usize, b: &usize) -> usize { + a + b + } + const Y: usize = ARRAY.iter().fold(0, sum); + + const _: () = assert!(X == 15); + assert_eq!(15, X); + const _: () = assert!(Y == 15); + assert_eq!(15, Y); +} + +#[test] +#[cfg(not(bootstrap))] +fn test_const_iter_zst_slice() { + const X: usize = { + let arr = [(); usize::MAX]; + let mut sum = 0; + for _ in arr.iter() { + sum += 1; + } + sum + }; + assert_eq!(usize::MAX, X); + + const Y: usize = { + let arr = [(); usize::MAX - 1]; + let mut sum = 0; + for _ in arr.iter() { + sum += 1; + } + sum + }; + assert_eq!(usize::MAX - 1, Y); +} diff --git a/library/core/tests/lib.rs b/library/core/tests/lib.rs index 637cc6e9f629b..5b191bd30d623 100644 --- a/library/core/tests/lib.rs +++ b/library/core/tests/lib.rs @@ -21,6 +21,7 @@ #![feature(const_maybe_uninit_assume_init_read)] #![feature(const_nonnull_new)] #![feature(const_num_from_num)] +#![feature(const_ops)] #![feature(const_pointer_byte_offsets)] #![feature(const_pointer_is_aligned)] #![feature(const_ptr_as_ref)]