Commit cb27039

Use min_specialization in liballoc

- Remove a type parameter from `[A]RcFromIter`.
- Remove an implementation of `[A]RcFromIter` that didn't actually specialize anything.
- Remove unused implementation of `IsZero` for `Option<&mut T>`.
- Change specializations of `[A]RcEqIdent` to use a marker trait version of `Eq`.
- Remove `BTreeClone`. I couldn't find a way to make this work with `min_specialization`.
- Add `rustc_unsafe_specialization_marker` to `Copy` and `TrustedLen`.
1 parent ec1f28f commit cb27039
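The key idea behind the `[A]RcEqIdent` bullet is that `min_specialization` only lets a specializing impl add bounds on traits explicitly tagged as specialization markers, so `Eq` (which has a method) is mirrored by an empty marker trait. Below is a minimal, nightly-only sketch of that pattern, not code from this commit: the `CheapEq` trait is invented for illustration, and the internal attribute needs `#![feature(rustc_attrs)]`.

```rust
// Sketch only: mirrors the `MarkerEq` hack added in rc.rs below.
#![feature(min_specialization, rustc_attrs)]
#![allow(internal_features)] // newer nightlies deny internal features by default

// Specialization trait with a blanket base impl.
trait CheapEq {
    fn cheap_eq(&self, other: &Self) -> bool;
}

impl<T: PartialEq> CheapEq for T {
    default fn cheap_eq(&self, other: &Self) -> bool {
        self == other
    }
}

// `min_specialization` rejects `impl<T: Eq> CheapEq for T` because `Eq` has a
// method, so an empty marker trait blanket-implemented for `T: Eq` stands in.
#[rustc_unsafe_specialization_marker]
trait MarkerEq: PartialEq<Self> {}
impl<T: Eq> MarkerEq for T {}

// The specializing impl may add a bound on the marker trait.
impl<T: MarkerEq> CheapEq for T {
    fn cheap_eq(&self, other: &Self) -> bool {
        // With `T: Eq`, `==` is reflexive, so an identity shortcut (as in
        // `Rc::ptr_eq`) would be sound here before comparing contents.
        self == other
    }
}

fn main() {
    assert!(1u32.cheap_eq(&1));
    assert!(!f64::NAN.cheap_eq(&f64::NAN)); // `f64` only gets the base impl.
}
```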

7 files changed (+51, -120 lines)

src/liballoc/collections/btree/map.rs

-53
@@ -215,59 +215,6 @@ impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
             clone_subtree(self.root.as_ref().unwrap().as_ref())
         }
     }
-
-    fn clone_from(&mut self, other: &Self) {
-        BTreeClone::clone_from(self, other);
-    }
-}
-
-trait BTreeClone {
-    fn clone_from(&mut self, other: &Self);
-}
-
-impl<K: Clone, V: Clone> BTreeClone for BTreeMap<K, V> {
-    default fn clone_from(&mut self, other: &Self) {
-        *self = other.clone();
-    }
-}
-
-impl<K: Clone + Ord, V: Clone> BTreeClone for BTreeMap<K, V> {
-    fn clone_from(&mut self, other: &Self) {
-        // This truncates `self` to `other.len()` by calling `split_off` on
-        // the first key after `other.len()` elements if it exists.
-        let split_off_key = if self.len() > other.len() {
-            let diff = self.len() - other.len();
-            if diff <= other.len() {
-                self.iter().nth_back(diff - 1).map(|pair| (*pair.0).clone())
-            } else {
-                self.iter().nth(other.len()).map(|pair| (*pair.0).clone())
-            }
-        } else {
-            None
-        };
-        if let Some(key) = split_off_key {
-            self.split_off(&key);
-        }
-
-        let mut siter = self.range_mut(..);
-        let mut oiter = other.iter();
-        // After truncation, `self` is at most as long as `other` so this loop
-        // replaces every key-value pair in `self`. Since `oiter` is in sorted
-        // order and the structure of the `BTreeMap` stays the same,
-        // the BTree invariants are maintained at the end of the loop.
-        while !siter.is_empty() {
-            if let Some((ok, ov)) = oiter.next() {
-                // SAFETY: This is safe because `siter` is nonempty.
-                let (sk, sv) = unsafe { siter.next_unchecked() };
-                sk.clone_from(ok);
-                sv.clone_from(ov);
-            } else {
-                break;
-            }
-        }
-        // If `other` is longer than `self`, the remaining elements are inserted.
-        self.extend(oiter.map(|(k, v)| ((*k).clone(), (*v).clone())));
-    }
 }
 
 impl<K, Q: ?Sized> super::Recover<Q> for BTreeMap<K, ()>
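With `BTreeClone` gone, `BTreeMap::clone_from` falls back to `Clone`'s provided default (`*self = source.clone()`): the node-reusing optimization is lost, but observable behavior is unchanged. A small usage sketch on stable Rust, not part of the commit:

```rust
use std::collections::BTreeMap;

fn main() {
    let src: BTreeMap<i32, &str> = vec![(1, "one"), (2, "two")].into_iter().collect();

    let mut dst = BTreeMap::new();
    dst.insert(9, "nine");

    // After this commit this is equivalent to `dst = src.clone()`; before it,
    // the removed `BTreeClone` impl tried to reuse `dst`'s existing nodes.
    dst.clone_from(&src);
    assert_eq!(dst, src);
}
```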

src/liballoc/lib.rs

+1 -1
@@ -108,7 +108,7 @@
 #![feature(ptr_offset_from)]
 #![feature(rustc_attrs)]
 #![feature(receiver_trait)]
-#![feature(specialization)]
+#![feature(min_specialization)]
 #![feature(staged_api)]
 #![feature(std_internals)]
 #![feature(str_internals)]

src/liballoc/rc.rs

+19 -27
@@ -249,7 +249,7 @@ use core::mem::{self, align_of, align_of_val, forget, size_of_val};
 use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver};
 use core::pin::Pin;
 use core::ptr::{self, NonNull};
-use core::slice::{self, from_raw_parts_mut};
+use core::slice::from_raw_parts_mut;
 
 use crate::alloc::{box_free, handle_alloc_error, AllocInit, AllocRef, Global, Layout};
 use crate::string::String;
@@ -1221,6 +1221,12 @@ impl<T: ?Sized + PartialEq> RcEqIdent<T> for Rc<T> {
     }
 }
 
+// Hack to allow specializing on `Eq` even though `Eq` has a method.
+#[rustc_unsafe_specialization_marker]
+pub(crate) trait MarkerEq: PartialEq<Self> {}
+
+impl<T: Eq> MarkerEq for T {}
+
 /// We're doing this specialization here, and not as a more general optimization on `&T`, because it
 /// would otherwise add a cost to all equality checks on refs. We assume that `Rc`s are used to
 /// store large values, that are slow to clone, but also heavy to check for equality, causing this
@@ -1229,7 +1235,7 @@ impl<T: ?Sized + PartialEq> RcEqIdent<T> for Rc<T> {
 ///
 /// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + Eq> RcEqIdent<T> for Rc<T> {
+impl<T: ?Sized + MarkerEq> RcEqIdent<T> for Rc<T> {
     #[inline]
     fn eq(&self, other: &Rc<T>) -> bool {
         Rc::ptr_eq(self, other) || **self == **other
@@ -1548,25 +1554,25 @@ impl<T> iter::FromIterator<T> for Rc<[T]> {
     /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
     /// ```
     fn from_iter<I: iter::IntoIterator<Item = T>>(iter: I) -> Self {
-        RcFromIter::from_iter(iter.into_iter())
+        ToRcSlice::to_rc_slice(iter.into_iter())
     }
 }
 
 /// Specialization trait used for collecting into `Rc<[T]>`.
-trait RcFromIter<T, I> {
-    fn from_iter(iter: I) -> Self;
+trait ToRcSlice<T>: Iterator<Item = T> + Sized {
+    fn to_rc_slice(self) -> Rc<[T]>;
 }
 
-impl<T, I: Iterator<Item = T>> RcFromIter<T, I> for Rc<[T]> {
-    default fn from_iter(iter: I) -> Self {
-        iter.collect::<Vec<T>>().into()
+impl<T, I: Iterator<Item = T>> ToRcSlice<T> for I {
+    default fn to_rc_slice(self) -> Rc<[T]> {
+        self.collect::<Vec<T>>().into()
     }
 }
 
-impl<T, I: iter::TrustedLen<Item = T>> RcFromIter<T, I> for Rc<[T]> {
-    default fn from_iter(iter: I) -> Self {
+impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I {
+    fn to_rc_slice(self) -> Rc<[T]> {
         // This is the case for a `TrustedLen` iterator.
-        let (low, high) = iter.size_hint();
+        let (low, high) = self.size_hint();
         if let Some(high) = high {
             debug_assert_eq!(
                 low,
@@ -1577,29 +1583,15 @@ impl<T, I: iter::TrustedLen<Item = T>> RcFromIter<T, I> for Rc<[T]> {
 
             unsafe {
                 // SAFETY: We need to ensure that the iterator has an exact length and we have.
-                Rc::from_iter_exact(iter, low)
+                Rc::from_iter_exact(self, low)
             }
         } else {
             // Fall back to normal implementation.
-            iter.collect::<Vec<T>>().into()
+            self.collect::<Vec<T>>().into()
         }
     }
 }
-
-impl<'a, T: 'a + Clone> RcFromIter<&'a T, slice::Iter<'a, T>> for Rc<[T]> {
-    fn from_iter(iter: slice::Iter<'a, T>) -> Self {
-        // Delegate to `impl<T: Clone> From<&[T]> for Rc<[T]>`.
-        //
-        // In the case that `T: Copy`, we get to use `ptr::copy_nonoverlapping`
-        // which is even more performant.
-        //
-        // In the fall-back case we have `T: Clone`. This is still better
-        // than the `TrustedLen` implementation as slices have a known length
-        // and so we get to avoid calling `size_hint` and avoid the branching.
-        iter.as_slice().into()
-    }
-}
-
 /// `Weak` is a version of [`Rc`] that holds a non-owning reference to the
 /// managed allocation. The allocation is accessed by calling [`upgrade`] on the `Weak`
 /// pointer, which returns an [`Option`]`<`[`Rc`]`<T>>`.
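For context on the renamed specialization trait above, this is how the two remaining paths are reached from stable code; which path runs is an internal detail of the impls in the diff (a sketch, not code from the commit):

```rust
use std::rc::Rc;

fn main() {
    // `Range<i32>` implements `TrustedLen`, so this takes the specialized
    // `to_rc_slice` and is written directly into the new allocation.
    let exact: Rc<[i32]> = (0..5).collect();
    assert_eq!(&*exact, &[0, 1, 2, 3, 4]);

    // A filtered iterator has no exact size hint, so the `default` impl
    // collects into a `Vec<i32>` first and then converts.
    let fallback: Rc<[i32]> = (0..10).filter(|n| n % 2 == 0).collect();
    assert_eq!(&*fallback, &[0, 2, 4, 6, 8]);
}
```

Dropping the dedicated `slice::Iter` impl doesn't change results: `slice::Iter` is `TrustedLen`, so it now takes the exact-length path, at the cost of the `size_hint` call and branch that the removed comment mentioned.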

src/liballoc/sync.rs

+13 -27
@@ -20,7 +20,7 @@ use core::mem::{self, align_of, align_of_val, size_of_val};
 use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver};
 use core::pin::Pin;
 use core::ptr::{self, NonNull};
-use core::slice::{self, from_raw_parts_mut};
+use core::slice::from_raw_parts_mut;
 use core::sync::atomic;
 use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
 
@@ -1779,7 +1779,7 @@ impl<T: ?Sized + PartialEq> ArcEqIdent<T> for Arc<T> {
 ///
 /// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + Eq> ArcEqIdent<T> for Arc<T> {
+impl<T: ?Sized + crate::rc::MarkerEq> ArcEqIdent<T> for Arc<T> {
     #[inline]
     fn eq(&self, other: &Arc<T>) -> bool {
         Arc::ptr_eq(self, other) || **self == **other
@@ -2105,25 +2105,25 @@ impl<T> iter::FromIterator<T> for Arc<[T]> {
     /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
     /// ```
     fn from_iter<I: iter::IntoIterator<Item = T>>(iter: I) -> Self {
-        ArcFromIter::from_iter(iter.into_iter())
+        ToArcSlice::to_arc_slice(iter.into_iter())
     }
 }
 
 /// Specialization trait used for collecting into `Arc<[T]>`.
-trait ArcFromIter<T, I> {
-    fn from_iter(iter: I) -> Self;
+trait ToArcSlice<T>: Iterator<Item = T> + Sized {
+    fn to_arc_slice(self) -> Arc<[T]>;
 }
 
-impl<T, I: Iterator<Item = T>> ArcFromIter<T, I> for Arc<[T]> {
-    default fn from_iter(iter: I) -> Self {
-        iter.collect::<Vec<T>>().into()
+impl<T, I: Iterator<Item = T>> ToArcSlice<T> for I {
+    default fn to_arc_slice(self) -> Arc<[T]> {
+        self.collect::<Vec<T>>().into()
     }
 }
 
-impl<T, I: iter::TrustedLen<Item = T>> ArcFromIter<T, I> for Arc<[T]> {
-    default fn from_iter(iter: I) -> Self {
+impl<T, I: iter::TrustedLen<Item = T>> ToArcSlice<T> for I {
+    fn to_arc_slice(self) -> Arc<[T]> {
         // This is the case for a `TrustedLen` iterator.
-        let (low, high) = iter.size_hint();
+        let (low, high) = self.size_hint();
         if let Some(high) = high {
             debug_assert_eq!(
                 low,
@@ -2134,29 +2134,15 @@ impl<T, I: iter::TrustedLen<Item = T>> ArcFromIter<T, I> for Arc<[T]> {
 
             unsafe {
                 // SAFETY: We need to ensure that the iterator has an exact length and we have.
-                Arc::from_iter_exact(iter, low)
+                Arc::from_iter_exact(self, low)
             }
         } else {
             // Fall back to normal implementation.
-            iter.collect::<Vec<T>>().into()
+            self.collect::<Vec<T>>().into()
         }
     }
 }
-
-impl<'a, T: 'a + Clone> ArcFromIter<&'a T, slice::Iter<'a, T>> for Arc<[T]> {
-    fn from_iter(iter: slice::Iter<'a, T>) -> Self {
-        // Delegate to `impl<T: Clone> From<&[T]> for Arc<[T]>`.
-        //
-        // In the case that `T: Copy`, we get to use `ptr::copy_nonoverlapping`
-        // which is even more performant.
-        //
-        // In the fall-back case we have `T: Clone`. This is still better
-        // than the `TrustedLen` implementation as slices have a known length
-        // and so we get to avoid calling `size_hint` and avoid the branching.
-        iter.as_slice().into()
-    }
-}
-
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> borrow::Borrow<T> for Arc<T> {
     fn borrow(&self) -> &T {
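The `MarkerEq` bound keeps the pointer-identity fast path restricted to payloads whose equality is reflexive, exactly as the doc comment in the hunk above argues. A stable-Rust illustration (a sketch, not code from the commit):

```rust
use std::sync::Arc;

fn main() {
    // `str: Eq`, so `Arc`'s `eq` may short-circuit via `Arc::ptr_eq` before
    // ever looking at the bytes.
    let a: Arc<str> = Arc::from("a long-ish string we would rather not compare byte by byte");
    let b = Arc::clone(&a);
    assert!(a == b);

    // `f64` is only `PartialEq` (NaN != NaN), so the identity shortcut would
    // be wrong; such types get the plain `**self == **other` impl.
    let x = Arc::new(f64::NAN);
    assert!(x != x.clone());
}
```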

src/liballoc/vec.rs

+9 -12
@@ -1619,8 +1619,8 @@ impl<T: Default> Vec<T> {
     #[unstable(feature = "vec_resize_default", issue = "41758")]
     #[rustc_deprecated(
         reason = "This is moving towards being removed in favor \
-                  of `.resize_with(Default::default)`. If you disagree, please comment \
-                  in the tracking issue.",
+        of `.resize_with(Default::default)`. If you disagree, please comment \
+        in the tracking issue.",
         since = "1.33.0"
     )]
     pub fn resize_default(&mut self, new_len: usize) {
@@ -1825,6 +1825,7 @@ impl<T: Clone + IsZero> SpecFromElem for T {
     }
 }
 
+#[rustc_specialization_trait]
 unsafe trait IsZero {
     /// Whether this value is zero
     fn is_zero(&self) -> bool;
@@ -1874,9 +1875,12 @@ unsafe impl<T> IsZero for *mut T {
     }
 }
 
-// `Option<&T>`, `Option<&mut T>` and `Option<Box<T>>` are guaranteed to represent `None` as null.
-// For fat pointers, the bytes that would be the pointer metadata in the `Some` variant
-// are padding in the `None` variant, so ignoring them and zero-initializing instead is ok.
+// `Option<&T>` and `Option<Box<T>>` are guaranteed to represent `None` as null.
+// For fat pointers, the bytes that would be the pointer metadata in the `Some`
+// variant are padding in the `None` variant, so ignoring them and
+// zero-initializing instead is ok.
+// `Option<&mut T>` never implements `Clone`, so there's no need for an impl of
+// `SpecFromElem`.
 
 unsafe impl<T: ?Sized> IsZero for Option<&T> {
     #[inline]
@@ -1885,13 +1889,6 @@ unsafe impl<T: ?Sized> IsZero for Option<&T> {
     }
 }
 
-unsafe impl<T: ?Sized> IsZero for Option<&mut T> {
-    #[inline]
-    fn is_zero(&self) -> bool {
-        self.is_none()
-    }
-}
-
 unsafe impl<T: ?Sized> IsZero for Option<Box<T>> {
     #[inline]
     fn is_zero(&self) -> bool {
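`#[rustc_specialization_trait]` keeps the `IsZero`-based fast path legal under `min_specialization`, and the rewritten comment records why `Option<&mut T>` no longer needs an impl. From the caller's side nothing changes; a sketch on stable Rust, not part of the commit:

```rust
fn main() {
    // `u32` and `Option<&T>` are `IsZero` types, so these requests can be
    // served with a zeroed allocation instead of cloning the element n times.
    let zeros = vec![0u32; 4];
    assert_eq!(zeros, [0, 0, 0, 0]);

    let nones: Vec<Option<&u8>> = vec![None; 3];
    assert!(nones.iter().all(Option::is_none));

    // `Option<&mut T>` is not `Clone`, so `vec![None::<&mut u8>; 3]` would not
    // compile in the first place -- which is why its `IsZero` impl was unused.
}
```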

src/libcore/iter/traits/marker.rs

+2
@@ -13,6 +13,7 @@
 /// [`Iterator::fuse`]: ../../std/iter/trait.Iterator.html#method.fuse
 /// [`Fuse`]: ../../std/iter/struct.Fuse.html
 #[stable(feature = "fused", since = "1.26.0")]
+#[rustc_unsafe_specialization_marker]
 pub trait FusedIterator: Iterator {}
 
 #[stable(feature = "fused", since = "1.26.0")]
@@ -38,6 +39,7 @@ impl<I: FusedIterator + ?Sized> FusedIterator for &mut I {}
 /// [`usize::MAX`]: ../../std/usize/constant.MAX.html
 /// [`.size_hint`]: ../../std/iter/trait.Iterator.html#method.size_hint
 #[unstable(feature = "trusted_len", issue = "37572")]
+#[rustc_unsafe_specialization_marker]
 pub unsafe trait TrustedLen: Iterator {}
 
 #[unstable(feature = "trusted_len", issue = "37572")]
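`TrustedLen` is an `unsafe` trait precisely because downstream code, such as `from_iter_exact` in the `Rc`/`Arc` hunks above, trusts the size hint when allocating; that is why the marker used here is the `rustc_unsafe_specialization_marker` variant. A sketch of what an implementor promises, under the nightly `trusted_len` feature; the `Repeated` iterator is invented for illustration:

```rust
#![feature(trusted_len)]

use std::iter::TrustedLen;
use std::rc::Rc;

/// Yields `remaining` copies of `byte` and reports an exact size hint.
struct Repeated {
    byte: u8,
    remaining: usize,
}

impl Iterator for Repeated {
    type Item = u8;

    fn next(&mut self) -> Option<u8> {
        if self.remaining == 0 {
            None
        } else {
            self.remaining -= 1;
            Some(self.byte)
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.remaining, Some(self.remaining))
    }
}

// SAFETY: `size_hint` above is exact, which is the entire `TrustedLen`
// contract. A lying impl could make consumers like `from_iter_exact` write
// out of bounds, hence the "unsafe" in the marker attribute's name.
unsafe impl TrustedLen for Repeated {}

fn main() {
    let bytes: Rc<[u8]> = Repeated { byte: 7, remaining: 3 }.collect();
    assert_eq!(&*bytes, &[7, 7, 7]);
}
```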

src/libcore/marker.rs

+7
@@ -363,6 +363,13 @@ pub trait StructuralEq {
 /// [impls]: #implementors
 #[stable(feature = "rust1", since = "1.0.0")]
 #[lang = "copy"]
+// FIXME(matthewjasper) This allows copying a type that doesn't implement
+// `Copy` because of unsatisfied lifetime bounds (copying `A<'_>` when only
+// `A<'static>: Copy` and `A<'_>: Clone`).
+// We have this attribute here for now only because there are quite a few
+// existing specializations on `Copy` that already exist in the standard
+// library, and there's no way to safely have this behavior right now.
+#[rustc_unsafe_specialization_marker]
 pub trait Copy: Clone {
     // Empty.
 }
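The FIXME describes types that are `Clone` for every lifetime but `Copy` only for some of them; the unsafe marker makes specializations keyed on `Copy` ignore that distinction. A stable-Rust sketch of the kind of type it means (the name `A` follows the comment; exploiting the hole itself needs the internal specialization machinery and isn't shown):

```rust
// `Clone` for any lifetime, `Copy` only when the borrow is `'static`.
#[derive(Clone)]
struct A<'a>(&'a str);

impl Copy for A<'static> {}

fn main() {
    let a = A("hello"); // `A<'static>`: copying is fine here.
    let b = a;          // a bitwise copy, not a move
    println!("{} {}", a.0, b.0);

    // For a non-`'static` borrow, only `.clone()` is available; a
    // specialization keyed on `Copy` must not treat this `A<'_>` as `Copy`,
    // which is exactly what the FIXME is warning about.
    let s = String::from("short-lived");
    let c = A(&s);
    let _d = c.clone();
}
```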
