|
| 1 | +use crate::array; |
| 2 | +use crate::fmt; |
| 3 | +use crate::iter::{ |
| 4 | + FusedIterator, TrustedFused, TrustedLen, TrustedRandomAccessNoCoerce, UncheckedIterator, |
| 5 | +}; |
| 6 | +use crate::marker::PhantomData; |
| 7 | +use crate::mem::MaybeUninit; |
| 8 | +use crate::num::NonZero; |
| 9 | +use crate::ptr::NonNull; |
| 10 | +use crate::slice::NonNullIter; |
| 11 | + |
| 12 | +/// An iterator which takes ownership of items out of a slice, dropping any |
| 13 | +/// remaining items when the iterator drops. |
| 14 | +/// |
| 15 | +/// Note that, like a raw pointer, it's **up to you** to get the lifetime right. |
| 16 | +/// In some ways it's actually harder to get right, as the iterator interface |
| 17 | +/// appears safe, but as you promise when creating one of these, you still must |
| 18 | +/// ensure that the mentioned memory is usable the whole time this lives. |
| 19 | +/// |
| 20 | +/// Ideally you won't be using this directly, but rather a version encapsulated |
| 21 | +/// in a safer interface, like `vec::IntoIter`. |
| 22 | +/// |
| 23 | +/// This raw version may be removed in favour of a future language feature, |
| 24 | +/// such as using `unsafe<'a> Drain<'a, T>` instead of `DrainRaw<T>`. |
#[unstable(feature = "slice_drain_raw_iter", issue = "none")]
// The `PhantomData<T>` records that this type owns values of type `T`, so
// that dropck knows dropping a `DrainRaw<T>` may run `T`'s destructor
// (see the `Drop` impl below, which drops the remaining items).
pub struct DrainRaw<T>(NonNullIter<T>, PhantomData<T>);
| 27 | + |
| 28 | +#[unstable(feature = "slice_drain_raw_iter", issue = "none")] |
| 29 | +impl<T> Drop for DrainRaw<T> { |
| 30 | + fn drop(&mut self) { |
| 31 | + let slice = self.as_nonnull_slice(); |
| 32 | + // SAFETY: By type invariant, we're allowed to drop the rest of the items. |
| 33 | + unsafe { slice.drop_in_place() }; |
| 34 | + } |
| 35 | +} |
| 36 | + |
| 37 | +#[unstable(feature = "slice_drain_raw_iter", issue = "none")] |
| 38 | +impl<T: fmt::Debug> fmt::Debug for DrainRaw<T> { |
| 39 | + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { |
| 40 | + f.debug_tuple("DrainRaw").field(&self.0.make_shortlived_slice()).finish() |
| 41 | + } |
| 42 | +} |
| 43 | + |
impl<T> DrainRaw<T> {
    /// Creates a new iterator which moves the `len` items starting at `ptr`
    /// while it's iterated, or drops them when the iterator is dropped.
    ///
    /// # Safety
    ///
    /// - `ptr` through `ptr.add(len)` must be a single allocated object
    ///   such that it's sound to `offset` through it.
    /// - All those elements must be readable, including being sufficiently aligned.
    /// - All those elements are valid for dropping.
    #[unstable(feature = "slice_drain_raw_iter", issue = "none")]
    #[inline]
    pub unsafe fn from_parts(ptr: NonNull<T>, len: usize) -> Self {
        // SAFETY: this function's safety conditions are stricter than NonNullIter,
        // and include allowing the type to drop the items in `Drop`.
        Self(unsafe { NonNullIter::from_parts(ptr, len) }, PhantomData)
    }

    /// Returns a pointer to the remaining elements of the iterator
    /// (those not yet yielded from either end).
    #[unstable(feature = "slice_drain_raw_iter", issue = "none")]
    #[inline]
    pub fn as_nonnull_slice(&self) -> NonNull<[T]> {
        self.0.make_nonnull_slice()
    }

    /// Equivalent to exhausting the iterator normally, but faster.
    #[unstable(feature = "slice_drain_raw_iter", issue = "none")]
    #[inline]
    pub fn drop_remaining(&mut self) {
        let all = self.forget_remaining();
        // SAFETY: We "forgot" these elements so our `Drop` won't drop them,
        // so it's ok to drop them here without risking double-frees.
        unsafe { all.drop_in_place() }
    }

    /// Exhaust the iterator without actually dropping the rest of the items.
    ///
    /// Returns the forgotten items, whose ownership passes to the caller.
    #[unstable(feature = "slice_drain_raw_iter", issue = "none")]
    #[inline]
    pub fn forget_remaining(&mut self) -> NonNull<[T]> {
        // Snapshot the remaining range, then empty the inner iterator so
        // neither iteration nor `Drop` will touch those elements again.
        let all = self.as_nonnull_slice();
        self.0.exhaust();
        all
    }
}
| 90 | + |
impl<T> UncheckedIterator for DrainRaw<T> {
    /// Reads and returns the next item without checking that one exists.
    ///
    /// # Safety
    ///
    /// The caller must ensure the iterator is non-empty.
    #[inline]
    unsafe fn next_unchecked(&mut self) -> T {
        // SAFETY: we're a 1:1 mapping of the inner iterator, so if the caller
        // proved we have another item, the inner iterator has another one too.
        // Also, the `next_unchecked` means the returned item is no longer part
        // of the inner iterator, and thus `read`ing it here -- and giving it
        // to the caller who will (probably) drop it -- is ok.
        unsafe { self.0.next_unchecked().read() }
    }
}
| 102 | + |
#[unstable(feature = "slice_drain_raw_iter", issue = "none")]
impl<T> Iterator for DrainRaw<T> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        match self.0.next() {
            // SAFETY: The `next` means the returned item is no longer part of
            // the inner iterator, and thus `read`ing it here -- and giving it
            // to the caller who will (probably) drop it -- is ok.
            Some(ptr) => Some(unsafe { ptr.read() }),
            None => None,
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.0.size_hint()
    }

    #[inline]
    fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
        // Unlike a plain skip, advancing must *drop* the passed-over items,
        // since this iterator owns them. Clamp so we never go out of bounds.
        let clamped = self.len().min(n);
        // SAFETY: By construction, `clamped` is always in-bounds.
        // The skipped elements are removed from the inner iterator so won't be
        // dropped in `Drop`, so dropping them here is fine.
        unsafe {
            let to_drop = self.0.skip_forward_unchecked(clamped);
            to_drop.drop_in_place();
        }
        // Per the `advance_by` contract, report how many steps were missing.
        NonZero::new(n - clamped).map_or(Ok(()), Err)
    }

    #[inline]
    fn count(self) -> usize {
        // The remaining length is the count; dropping `self` afterwards
        // disposes of the elements themselves via our `Drop` impl.
        self.len()
    }

    #[inline]
    fn next_chunk<const N: usize>(&mut self) -> Result<[T; N], core::array::IntoIter<T, N>> {
        let len = self.len();
        let clamped = len.min(N);

        // Detach up to `N` elements from the iterator; after this, we (not
        // the inner iterator) are responsible for them.
        // SAFETY: By construction, `clamped` is always in-bounds.
        let to_copy = unsafe { self.0.skip_forward_unchecked(clamped) };
        if len >= N {
            // SAFETY: If we have more elements than were requested, they can be
            // read directly because arrays need no extra alignment.
            Ok(unsafe { to_copy.cast::<[T; N]>().read() })
        } else {
            let mut raw_ary = MaybeUninit::uninit_array();
            // SAFETY: If we don't have enough elements left, then copy all the
            // ones we do have into the local array, which cannot overlap because
            // new locals are always distinct storage.
            Err(unsafe {
                MaybeUninit::<T>::slice_as_mut_ptr(&mut raw_ary)
                    .copy_from_nonoverlapping(to_copy.as_mut_ptr(), len);
                array::IntoIter::new_unchecked(raw_ary, 0..len)
            })
        }
    }

    unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
    where
        Self: TrustedRandomAccessNoCoerce,
    {
        // SAFETY: the caller must guarantee that `i` is in bounds of the slice,
        // so the `get_unchecked_mut(i)` is guaranteed to point to an element
        // and thus guaranteed to be valid to dereference.
        //
        // Also note the implementation of `Self: TrustedRandomAccess` requires
        // that `T: Copy` so reading elements from the buffer doesn't invalidate
        // them for `Drop`.
        unsafe { self.as_nonnull_slice().get_unchecked_mut(i).read() }
    }
}
| 179 | + |
#[unstable(feature = "slice_drain_raw_iter", issue = "none")]
impl<T> DoubleEndedIterator for DrainRaw<T> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        match self.0.next_back() {
            // SAFETY: The `next_back` means the returned item is no longer part of
            // the inner iterator, and thus `read`ing it here -- and giving it
            // to the caller who will (probably) drop it -- is ok.
            Some(ptr) => Some(unsafe { ptr.read() }),
            None => None,
        }
    }

    #[inline]
    fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
        // Mirror image of `advance_by`: drop up to `n` owned elements,
        // taken from the back end instead of the front.
        let clamped = self.len().min(n);
        // SAFETY: By construction, `clamped` is always in-bounds.
        // The skipped elements are removed from the inner iterator so won't be
        // dropped in `Drop`, so dropping them here is fine.
        unsafe {
            let to_drop = self.0.skip_backward_unchecked(clamped);
            to_drop.drop_in_place();
        }
        // Per the `advance_back_by` contract, report any shortfall.
        NonZero::new(n - clamped).map_or(Ok(()), Err)
    }
}
| 206 | + |
| 207 | +#[unstable(feature = "slice_drain_raw_iter", issue = "none")] |
| 208 | +impl<T> ExactSizeIterator for DrainRaw<T> { |
| 209 | + fn is_empty(&self) -> bool { |
| 210 | + self.0.is_empty() |
| 211 | + } |
| 212 | + |
| 213 | + fn len(&self) -> usize { |
| 214 | + self.0.len() |
| 215 | + } |
| 216 | +} |
| 217 | + |
#[unstable(feature = "slice_drain_raw_iter", issue = "none")]
// Once empty we keep returning `None`: `next` is a 1:1 delegation to the
// inner `NonNullIter`, so fusedness follows from the inner iterator's
// behaviour -- NOTE(review): presumably `NonNullIter` is itself fused;
// confirm at its definition.
impl<T> FusedIterator for DrainRaw<T> {}
| 220 | + |
#[unstable(feature = "slice_drain_raw_iter", issue = "none")]
#[doc(hidden)]
// SAFETY: This strengthens the `FusedIterator` impl above: exhaustion is
// permanent even for the unsafe/unchecked entry points, since all of them
// delegate to the same inner `NonNullIter`.
unsafe impl<T> TrustedFused for DrainRaw<T> {}
| 224 | + |
#[unstable(feature = "slice_drain_raw_iter", issue = "none")]
// SAFETY: `size_hint` delegates directly to the inner `NonNullIter`, which
// is built from a known `len` and shrinks by exactly one per item yielded,
// so the reported bounds are exact.
unsafe impl<T> TrustedLen for DrainRaw<T> {}
| 227 | + |
#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
#[rustc_unsafe_specialization_marker]
// Marker trait for types with no drop glue, used below to gate the
// `TrustedRandomAccessNoCoerce` specialization to element types where
// skipping drop handling is harmless.
pub trait NonDrop {}
| 232 | + |
// `T: Copy` as an approximation for `!Drop`, since `get_unchecked` does not
// advance `self.ptr` and thus we can't implement drop-handling for
// random-order access.
#[unstable(issue = "none", feature = "std_internals")]
impl<T: Copy> NonDrop for T {}
| 237 | + |
// SAFETY: If they're accessing things in random order we don't know when to drop
// things, so only allow this for `Copy` things where that doesn't matter.
#[unstable(feature = "slice_drain_raw_iter", issue = "none")]
unsafe impl<T: NonDrop> TrustedRandomAccessNoCoerce for DrainRaw<T> {
    // Reading a `Copy` element is a pure memory read with no observable
    // side effects, so callers may skip or reorder accesses freely.
    const MAY_HAVE_SIDE_EFFECT: bool = false;
}
0 commit comments