
Commit 7e7bc06

Auto merge of #54554 - RalfJung:maybe-uninit, r=nagisa
Revert most of MaybeUninit, except for the new API itself

This reverts most of #53508 for perf reasons (first commit reverts that entire PR), except for the new API itself (added back in 2nd commit).
2 parents d95fd25 + 546e45a commit 7e7bc06
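
Most of the churn below has the same shape: call sites that #53508 had ported to the unstable `MaybeUninit` wrapper go back to plain `mem::uninitialized()` temporaries. A rough sketch of the two patterns, modeled on the `ptr::read` hunk further down (not the exact library code):

```rust
use std::mem;
use std::ptr;

// Pattern introduced by #53508 and reverted here (kept as a comment because it
// uses the unstable names from that PR: `uninitialized`, `as_mut_ptr`, `into_inner`):
//
//     let mut tmp = MaybeUninit::<T>::uninitialized();
//     ptr::copy_nonoverlapping(src, tmp.as_mut_ptr(), 1);
//     tmp.into_inner()

// Pattern this commit goes back to: a plain, uninitialized local of type `T`.
unsafe fn read_sketch<T>(src: *const T) -> T {
    let mut tmp: T = mem::uninitialized();
    ptr::copy_nonoverlapping(src, &mut tmp, 1);
    tmp
}
```

The new `MaybeUninit` API itself stays in the tree; only its uses inside liballoc and libcore are rolled back.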

File tree: 19 files changed, +85 -254 lines


src/etc/gdb_rust_pretty_printing.py (+4 -12)
@@ -322,11 +322,8 @@ def to_string(self):
     def children(self):
         (length, data_ptr) = \
             rustpp.extract_length_and_ptr_from_std_btreeset(self.__val)
-        leaf_node = GdbValue(data_ptr.get_wrapped_value().dereference())
-        maybe_uninit_keys = leaf_node.get_child_at_index(3)
-        manually_drop_keys = maybe_uninit_keys.get_child_at_index(1)
-        keys = manually_drop_keys.get_child_at_index(0)
-        gdb_ptr = keys.get_wrapped_value()
+        val = GdbValue(data_ptr.get_wrapped_value().dereference()).get_child_at_index(3)
+        gdb_ptr = val.get_wrapped_value()
         for index in xrange(length):
             yield (str(index), gdb_ptr[index])

@@ -348,14 +345,9 @@ def to_string(self):
     def children(self):
         (length, data_ptr) = \
             rustpp.extract_length_and_ptr_from_std_btreemap(self.__val)
-        leaf_node = GdbValue(data_ptr.get_wrapped_value().dereference())
-        maybe_uninit_keys = leaf_node.get_child_at_index(3)
-        manually_drop_keys = maybe_uninit_keys.get_child_at_index(1)
-        keys = manually_drop_keys.get_child_at_index(0)
+        keys = GdbValue(data_ptr.get_wrapped_value().dereference()).get_child_at_index(3)
         keys_ptr = keys.get_wrapped_value()
-        maybe_uninit_vals = leaf_node.get_child_at_index(4)
-        manually_drop_vals = maybe_uninit_vals.get_child_at_index(1)
-        vals = manually_drop_vals.get_child_at_index(0)
+        vals = GdbValue(data_ptr.get_wrapped_value().dereference()).get_child_at_index(4)
         vals_ptr = vals.get_wrapped_value()
         for index in xrange(length):
             yield (str(index), keys_ptr[index])
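
The printer indexes fields by position, and the positions follow `LeafNode` in `src/liballoc/collections/btree/node.rs` (next file below): once the `MaybeUninit` wrapper is reverted, child 3 and child 4 of the dereferenced node are the `keys` and `vals` arrays themselves, so the extra hops through the wrapper's inner fields (`get_child_at_index(1)` / `get_child_at_index(0)`) go away. Roughly (field order taken from the diff below; the parent type and `CAPACITY` value are stand-ins):

```rust
// Field order the gdb script relies on after this revert (sketch only).
const CAPACITY: usize = 11;          // assumed value, for illustration
struct LeafNode<K, V> {
    parent: *const (),               // child 0 (stand-in for the real parent type)
    parent_idx: u16,                 // child 1
    len: u16,                        // child 2
    keys: [K; CAPACITY],             // child 3  -> get_child_at_index(3)
    vals: [V; CAPACITY],             // child 4  -> get_child_at_index(4)
}
```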

src/liballoc/collections/btree/node.rs (+20 -20)
@@ -42,7 +42,7 @@
 // This implies that even an empty internal node has at least one edge.
 
 use core::marker::PhantomData;
-use core::mem::{self, MaybeUninit};
+use core::mem;
 use core::ptr::{self, Unique, NonNull};
 use core::slice;

@@ -73,7 +73,7 @@ struct LeafNode<K, V> {
     /// This node's index into the parent node's `edges` array.
     /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
     /// This is only guaranteed to be initialized when `parent` is nonnull.
-    parent_idx: MaybeUninit<u16>,
+    parent_idx: u16,
 
     /// The number of keys and values this node stores.
     ///

@@ -83,8 +83,8 @@ struct LeafNode<K, V> {
 
     /// The arrays storing the actual data of the node. Only the first `len` elements of each
     /// array are initialized and valid.
-    keys: MaybeUninit<[K; CAPACITY]>,
-    vals: MaybeUninit<[V; CAPACITY]>,
+    keys: [K; CAPACITY],
+    vals: [V; CAPACITY],
 }
 
 impl<K, V> LeafNode<K, V> {

@@ -94,10 +94,10 @@ impl<K, V> LeafNode<K, V> {
         LeafNode {
             // As a general policy, we leave fields uninitialized if they can be, as this should
             // be both slightly faster and easier to track in Valgrind.
-            keys: MaybeUninit::uninitialized(),
-            vals: MaybeUninit::uninitialized(),
+            keys: mem::uninitialized(),
+            vals: mem::uninitialized(),
             parent: ptr::null(),
-            parent_idx: MaybeUninit::uninitialized(),
+            parent_idx: mem::uninitialized(),
             len: 0
         }
     }

@@ -115,10 +115,10 @@ unsafe impl Sync for LeafNode<(), ()> {}
 // ever take a pointer past the first key.
 static EMPTY_ROOT_NODE: LeafNode<(), ()> = LeafNode {
     parent: ptr::null(),
-    parent_idx: MaybeUninit::uninitialized(),
+    parent_idx: 0,
     len: 0,
-    keys: MaybeUninit::uninitialized(),
-    vals: MaybeUninit::uninitialized(),
+    keys: [(); CAPACITY],
+    vals: [(); CAPACITY],
 };
 
 /// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden

@@ -430,7 +430,7 @@ impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
                 root: self.root,
                 _marker: PhantomData
             },
-            idx: unsafe { usize::from(*self.as_leaf().parent_idx.get_ref()) },
+            idx: self.as_leaf().parent_idx as usize,
             _marker: PhantomData
         })
     } else {

@@ -567,7 +567,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
         // the node, which is allowed by LLVM.
         unsafe {
             slice::from_raw_parts(
-                self.as_leaf().keys.as_ptr() as *const K,
+                self.as_leaf().keys.as_ptr(),
                 self.len()
             )
         }

@@ -578,7 +578,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
         debug_assert!(!self.is_shared_root());
         unsafe {
             slice::from_raw_parts(
-                self.as_leaf().vals.as_ptr() as *const V,
+                self.as_leaf().vals.as_ptr(),
                 self.len()
             )
         }

@@ -605,7 +605,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
         } else {
             unsafe {
                 slice::from_raw_parts_mut(
-                    self.as_leaf_mut().keys.get_mut() as *mut [K] as *mut K,
+                    &mut self.as_leaf_mut().keys as *mut [K] as *mut K,
                     self.len()
                 )
             }

@@ -616,7 +616,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
         debug_assert!(!self.is_shared_root());
         unsafe {
             slice::from_raw_parts_mut(
-                self.as_leaf_mut().vals.get_mut() as *mut [V] as *mut V,
+                &mut self.as_leaf_mut().vals as *mut [V] as *mut V,
                 self.len()
             )
         }

@@ -1013,7 +1013,7 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
         let ptr = self.node.as_internal_mut() as *mut _;
         let mut child = self.descend();
         child.as_leaf_mut().parent = ptr;
-        child.as_leaf_mut().parent_idx.set(idx);
+        child.as_leaf_mut().parent_idx = idx;
     }
 
     /// Unsafely asserts to the compiler some static information about whether the underlying

@@ -1152,12 +1152,12 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV>
 
             ptr::copy_nonoverlapping(
                 self.node.keys().as_ptr().add(self.idx + 1),
-                new_node.keys.as_mut_ptr() as *mut K,
+                new_node.keys.as_mut_ptr(),
                 new_len
             );
             ptr::copy_nonoverlapping(
                 self.node.vals().as_ptr().add(self.idx + 1),
-                new_node.vals.as_mut_ptr() as *mut V,
+                new_node.vals.as_mut_ptr(),
                 new_len
             );

@@ -1210,12 +1210,12 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
 
             ptr::copy_nonoverlapping(
                 self.node.keys().as_ptr().add(self.idx + 1),
-                new_node.data.keys.as_mut_ptr() as *mut K,
+                new_node.data.keys.as_mut_ptr(),
                 new_len
            );
             ptr::copy_nonoverlapping(
                 self.node.vals().as_ptr().add(self.idx + 1),
-                new_node.data.vals.as_mut_ptr() as *mut V,
+                new_node.data.vals.as_mut_ptr(),
                 new_len
             );
             ptr::copy_nonoverlapping(
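
The churn in the slice accessors above is purely about pointer types: on a `MaybeUninit<[K; CAPACITY]>` field, `as_ptr()` points at the whole wrapped array, so the old code needed an extra cast (or `get_ref()`/`get_mut()`) to reach a `*const K`, whereas a plain `[K; CAPACITY]` field yields an element pointer directly. A minimal standalone sketch of that difference, written against today's stable `MaybeUninit` names rather than the `uninitialized()`/`get_mut()` names in this commit:

```rust
use std::mem::MaybeUninit;
use std::slice;

const CAPACITY: usize = 11; // assumed value, for the sketch only

// Wrapped field (the state being reverted): as_ptr() gives *const [K; CAPACITY],
// so a cast down to *const K is required before building the slice.
fn first_n_wrapped<K>(keys: &MaybeUninit<[K; CAPACITY]>, len: usize) -> &[K] {
    unsafe { slice::from_raw_parts(keys.as_ptr() as *const K, len) }
}

// Plain field (the state after the revert): as_ptr() already gives *const K.
fn first_n_plain<K>(keys: &[K; CAPACITY], len: usize) -> &[K] {
    unsafe { slice::from_raw_parts(keys.as_ptr(), len) }
}
```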

src/liballoc/lib.rs (-1)
@@ -120,7 +120,6 @@
 #![feature(rustc_const_unstable)]
 #![feature(const_vec_new)]
 #![feature(slice_partition_dedup)]
-#![feature(maybe_uninit)]
 
 // Allow testing this library

src/libcore/fmt/float.rs (+13 -14)
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 use fmt::{Formatter, Result, LowerExp, UpperExp, Display, Debug};
-use mem::MaybeUninit;
+use mem;
 use num::flt2dec;
 
 // Don't inline this so callers don't use the stack space this function

@@ -20,11 +20,11 @@ fn float_to_decimal_common_exact<T>(fmt: &mut Formatter, num: &T,
     where T: flt2dec::DecodableFloat
 {
     unsafe {
-        let mut buf = MaybeUninit::<[u8; 1024]>::uninitialized(); // enough for f32 and f64
-        let mut parts = MaybeUninit::<[flt2dec::Part; 4]>::uninitialized();
+        let mut buf: [u8; 1024] = mem::uninitialized(); // enough for f32 and f64
+        let mut parts: [flt2dec::Part; 4] = mem::uninitialized();
         let formatted = flt2dec::to_exact_fixed_str(flt2dec::strategy::grisu::format_exact,
                                                     *num, sign, precision,
-                                                    false, buf.get_mut(), parts.get_mut());
+                                                    false, &mut buf, &mut parts);
         fmt.pad_formatted_parts(&formatted)
     }
 }

@@ -38,11 +38,10 @@ fn float_to_decimal_common_shortest<T>(fmt: &mut Formatter, num: &T,
 {
     unsafe {
         // enough for f32 and f64
-        let mut buf = MaybeUninit::<[u8; flt2dec::MAX_SIG_DIGITS]>::uninitialized();
-        let mut parts = MaybeUninit::<[flt2dec::Part; 4]>::uninitialized();
+        let mut buf: [u8; flt2dec::MAX_SIG_DIGITS] = mem::uninitialized();
+        let mut parts: [flt2dec::Part; 4] = mem::uninitialized();
         let formatted = flt2dec::to_shortest_str(flt2dec::strategy::grisu::format_shortest, *num,
-                                                 sign, precision, false, buf.get_mut(),
-                                                 parts.get_mut());
+                                                 sign, precision, false, &mut buf, &mut parts);
         fmt.pad_formatted_parts(&formatted)
     }
 }

@@ -76,11 +75,11 @@ fn float_to_exponential_common_exact<T>(fmt: &mut Formatter, num: &T,
     where T: flt2dec::DecodableFloat
 {
     unsafe {
-        let mut buf = MaybeUninit::<[u8; 1024]>::uninitialized(); // enough for f32 and f64
-        let mut parts = MaybeUninit::<[flt2dec::Part; 6]>::uninitialized();
+        let mut buf: [u8; 1024] = mem::uninitialized(); // enough for f32 and f64
+        let mut parts: [flt2dec::Part; 6] = mem::uninitialized();
         let formatted = flt2dec::to_exact_exp_str(flt2dec::strategy::grisu::format_exact,
                                                   *num, sign, precision,
-                                                  upper, buf.get_mut(), parts.get_mut());
+                                                  upper, &mut buf, &mut parts);
         fmt.pad_formatted_parts(&formatted)
     }
 }

@@ -95,11 +94,11 @@ fn float_to_exponential_common_shortest<T>(fmt: &mut Formatter,
 {
     unsafe {
         // enough for f32 and f64
-        let mut buf = MaybeUninit::<[u8; flt2dec::MAX_SIG_DIGITS]>::uninitialized();
-        let mut parts = MaybeUninit::<[flt2dec::Part; 6]>::uninitialized();
+        let mut buf: [u8; flt2dec::MAX_SIG_DIGITS] = mem::uninitialized();
+        let mut parts: [flt2dec::Part; 6] = mem::uninitialized();
         let formatted = flt2dec::to_shortest_exp_str(flt2dec::strategy::grisu::format_shortest,
                                                      *num, sign, (0, 0), upper,
-                                                     buf.get_mut(), parts.get_mut());
+                                                     &mut buf, &mut parts);
         fmt.pad_formatted_parts(&formatted)
     }
 }

src/libcore/lib.rs (-2)
@@ -246,8 +246,6 @@ macro_rules! test_v512 { ($item:item) => {}; }
 #[allow(unused_macros)]
 macro_rules! vector_impl { ($([$f:ident, $($args:tt)*]),*) => { $($f!($($args)*);)* } }
 #[path = "../stdsimd/coresimd/mod.rs"]
-// replacing uses of mem::{uninitialized,zeroed} with MaybeUninit needs to be in the stdsimd repo
-#[allow(deprecated)]
 #[allow(missing_docs, missing_debug_implementations, dead_code, unused_imports)]
 #[unstable(feature = "stdsimd", issue = "48556")]
 #[cfg(not(stage0))] // allow changes to how stdsimd works in stage0

src/libcore/ptr.rs (+18 -14)
@@ -79,7 +79,7 @@ use ops::CoerceUnsized;
 use fmt;
 use hash;
 use marker::{PhantomData, Unsize};
-use mem::{self, MaybeUninit};
+use mem;
 use nonzero::NonZero;
 
 use cmp::Ordering::{self, Less, Equal, Greater};

@@ -294,12 +294,16 @@ pub const fn null_mut<T>() -> *mut T { 0 as *mut T }
 #[stable(feature = "rust1", since = "1.0.0")]
 pub unsafe fn swap<T>(x: *mut T, y: *mut T) {
     // Give ourselves some scratch space to work with
-    let mut tmp = MaybeUninit::<T>::uninitialized();
+    let mut tmp: T = mem::uninitialized();
 
     // Perform the swap
-    copy_nonoverlapping(x, tmp.as_mut_ptr(), 1);
+    copy_nonoverlapping(x, &mut tmp, 1);
     copy(y, x, 1); // `x` and `y` may overlap
-    copy_nonoverlapping(tmp.get_ref(), y, 1);
+    copy_nonoverlapping(&tmp, y, 1);
+
+    // y and t now point to the same thing, but we need to completely forget `tmp`
+    // because it's no longer relevant.
+    mem::forget(tmp);
 }
 
 /// Swaps `count * size_of::<T>()` bytes between the two regions of memory

@@ -386,8 +390,8 @@ unsafe fn swap_nonoverlapping_bytes(x: *mut u8, y: *mut u8, len: usize) {
     while i + block_size <= len {
         // Create some uninitialized memory as scratch space
         // Declaring `t` here avoids aligning the stack when this loop is unused
-        let mut t = mem::MaybeUninit::<Block>::uninitialized();
-        let t = t.as_mut_ptr() as *mut u8;
+        let mut t: Block = mem::uninitialized();
+        let t = &mut t as *mut _ as *mut u8;
         let x = x.add(i);
         let y = y.add(i);
 

@@ -401,10 +405,10 @@ unsafe fn swap_nonoverlapping_bytes(x: *mut u8, y: *mut u8, len: usize) {
 
     if i < len {
         // Swap any remaining bytes
-        let mut t = mem::MaybeUninit::<UnalignedBlock>::uninitialized();
+        let mut t: UnalignedBlock = mem::uninitialized();
         let rem = len - i;
 
-        let t = t.as_mut_ptr() as *mut u8;
+        let t = &mut t as *mut _ as *mut u8;
         let x = x.add(i);
         let y = y.add(i);
 

@@ -569,9 +573,9 @@ pub unsafe fn replace<T>(dst: *mut T, mut src: T) -> T {
 #[inline]
 #[stable(feature = "rust1", since = "1.0.0")]
 pub unsafe fn read<T>(src: *const T) -> T {
-    let mut tmp = MaybeUninit::<T>::uninitialized();
-    copy_nonoverlapping(src, tmp.as_mut_ptr(), 1);
-    tmp.into_inner()
+    let mut tmp: T = mem::uninitialized();
+    copy_nonoverlapping(src, &mut tmp, 1);
+    tmp
 }
 
 /// Reads the value from `src` without moving it. This leaves the

@@ -636,11 +640,11 @@ pub unsafe fn read<T>(src: *const T) -> T {
 #[inline]
 #[stable(feature = "ptr_unaligned", since = "1.17.0")]
 pub unsafe fn read_unaligned<T>(src: *const T) -> T {
-    let mut tmp = MaybeUninit::<T>::uninitialized();
+    let mut tmp: T = mem::uninitialized();
     copy_nonoverlapping(src as *const u8,
-                        tmp.as_mut_ptr() as *mut u8,
+                        &mut tmp as *mut T as *mut u8,
                         mem::size_of::<T>());
-    tmp.into_inner()
+    tmp
 }
 
 /// Overwrites a memory location with the given value without reading or
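
The comment and `mem::forget(tmp)` added to `swap` are the cost of going back to a plain `T` temporary: after `tmp`'s bytes are copied into `y`, two owners of the same value exist, and letting `tmp` drop normally would run `T`'s destructor a second time. An illustrative sketch of that hazard using `ptr::read` (not library code):

```rust
use std::mem;
use std::ptr;

fn main() {
    let s = String::from("owned once, reachable twice");
    unsafe {
        // Bitwise copy: `dup` and `s` now share one heap allocation.
        let dup: String = ptr::read(&s);
        println!("{}", dup);
        // Exactly one of the two copies may be dropped; forget the other,
        // otherwise the buffer would be freed twice. This is the same reason
        // the reverted `swap` above must forget `tmp`.
        mem::forget(dup);
    }
    // `s` is dropped normally here, freeing the allocation exactly once.
}
```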

src/libcore/slice/rotate.rs (+9 -3)
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 use cmp;
-use mem::{self, MaybeUninit};
+use mem;
 use ptr;
 
 /// Rotation is much faster if it has access to a little bit of memory. This

@@ -26,6 +26,12 @@ union RawArray<T> {
 }
 
 impl<T> RawArray<T> {
+    fn new() -> Self {
+        unsafe { mem::uninitialized() }
+    }
+    fn ptr(&self) -> *mut T {
+        unsafe { &self.typed as *const T as *mut T }
+    }
     fn cap() -> usize {
         if mem::size_of::<T>() == 0 {
             usize::max_value()

@@ -82,8 +88,8 @@ pub unsafe fn ptr_rotate<T>(mut left: usize, mid: *mut T, mut right: usize) {
         }
     }
 
-    let mut rawarray = MaybeUninit::<RawArray<T>>::uninitialized();
-    let buf = &mut (*rawarray.as_mut_ptr()).typed as *mut [T; 2] as *mut T;
+    let rawarray = RawArray::new();
+    let buf = rawarray.ptr();
 
     let dim = mid.sub(left).add(right);
     if left <= right {
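Here the rotation's scratch buffer comes back from `RawArray::new()`: the union is created uninitialized and only ever touched through the raw pointer returned by `ptr()`, so no `T` values are read or dropped. A simplified, self-contained sketch of that idea (non-generic, with an assumed single-field union; the real `RawArray<T>` has more to it, and its `ptr()` takes `&self` as shown above, while the sketch takes `&mut self` so the writes below are clearly allowed):

```rust
use std::mem;

// Assumed stand-in for rotate.rs's RawArray<T>, specialized to u32 for the sketch.
union Scratch {
    typed: [u32; 2],
}

impl Scratch {
    fn new() -> Self {
        // Deliberately uninitialized scratch space, mirroring RawArray::new().
        unsafe { mem::uninitialized() }
    }
    fn ptr(&mut self) -> *mut u32 {
        unsafe { &mut self.typed as *mut [u32; 2] as *mut u32 }
    }
}

fn main() {
    let mut scratch = Scratch::new();
    let buf = scratch.ptr();
    unsafe {
        // Write through the raw pointer first, then read back.
        buf.write(7);
        buf.add(1).write(9);
        assert_eq!(buf.read() + buf.add(1).read(), 16);
    }
}
```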
