// This implies that even an empty internal node has at least one edge.

use core::marker::PhantomData;
-use core::mem::{self, MaybeUninit};
+use core::mem;
use core::ptr::{self, Unique, NonNull};
use core::slice;

@@ -73,7 +73,7 @@ struct LeafNode<K, V> {
    /// This node's index into the parent node's `edges` array.
    /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
    /// This is only guaranteed to be initialized when `parent` is nonnull.
-    parent_idx: MaybeUninit<u16>,
+    parent_idx: u16,

    /// The number of keys and values this node stores.
    ///
@@ -83,8 +83,8 @@ struct LeafNode<K, V> {

    /// The arrays storing the actual data of the node. Only the first `len` elements of each
    /// array are initialized and valid.
-    keys: MaybeUninit<[K; CAPACITY]>,
-    vals: MaybeUninit<[V; CAPACITY]>,
+    keys: [K; CAPACITY],
+    vals: [V; CAPACITY],
}

impl<K, V> LeafNode<K, V> {
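
The two hunks above change the field types themselves: `parent_idx`, `keys` and `vals` lose their `MaybeUninit` wrappers and become plain values. A minimal sketch of the two layouts, using hypothetical names (`CAP`, `WrappedNode`, `RawNode`) rather than the real node types:

#![allow(dead_code)]
use std::mem::MaybeUninit;

const CAP: usize = 11; // illustrative capacity, not the real CAPACITY constant

// Pre-revert layout: the type system records that these fields may be
// uninitialized, so they can only be read through explicit unsafe calls.
struct WrappedNode<K, V> {
    len: u16,
    parent_idx: MaybeUninit<u16>,
    keys: MaybeUninit<[K; CAP]>,
    vals: MaybeUninit<[V; CAP]>,
}

// Post-revert layout: ordinary arrays and integers; the "only the first `len`
// elements are initialized" rule lives purely in the surrounding unsafe code.
struct RawNode<K, V> {
    len: u16,
    parent_idx: u16,
    keys: [K; CAP],
    vals: [V; CAP],
}

fn main() {
    // MaybeUninit<T> is guaranteed to have the same size and alignment as T,
    // so the wrapper changes what code may do with a field, not its layout.
    assert_eq!(
        std::mem::size_of::<MaybeUninit<[u64; CAP]>>(),
        std::mem::size_of::<[u64; CAP]>(),
    );
}
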
@@ -94,10 +94,10 @@ impl<K, V> LeafNode<K, V> {
        LeafNode {
            // As a general policy, we leave fields uninitialized if they can be, as this should
            // be both slightly faster and easier to track in Valgrind.
-            keys: MaybeUninit::uninitialized(),
-            vals: MaybeUninit::uninitialized(),
+            keys: mem::uninitialized(),
+            vals: mem::uninitialized(),
            parent: ptr::null(),
-            parent_idx: MaybeUninit::uninitialized(),
+            parent_idx: mem::uninitialized(),
            len: 0
        }
    }
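
This hunk swaps `MaybeUninit::uninitialized()` (later renamed `uninit()` on the way to stabilization) back to `mem::uninitialized()`. A minimal sketch of the wrapper-based constructor being removed, written against the current stable API and using a hypothetical `Buf` type in place of the real `LeafNode`:

#![allow(dead_code)]
use std::mem::MaybeUninit;

const CAP: usize = 11; // illustrative capacity

struct Buf {
    len: u16,
    elems: MaybeUninit<[u32; CAP]>,
}

impl Buf {
    fn new() -> Buf {
        Buf {
            // Reserves space for CAP elements without initializing any of them.
            // Unlike mem::uninitialized(), this is a safe call, because the type
            // still records that the contents may be garbage.
            elems: MaybeUninit::uninit(),
            len: 0,
        }
    }
}

fn main() {
    let b = Buf::new();
    assert_eq!(b.len, 0);
}
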
@@ -115,10 +115,10 @@ unsafe impl Sync for LeafNode<(), ()> {}
// ever take a pointer past the first key.
static EMPTY_ROOT_NODE: LeafNode<(), ()> = LeafNode {
    parent: ptr::null(),
-    parent_idx: MaybeUninit::uninitialized(),
+    parent_idx: 0,
    len: 0,
-    keys: MaybeUninit::uninitialized(),
-    vals: MaybeUninit::uninitialized(),
+    keys: [(); CAPACITY],
+    vals: [(); CAPACITY],
};

/// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
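
With plain array fields, the shared empty root becomes an ordinary constant initializer again: `[(); CAPACITY]` is a const expression for a zero-sized array. A minimal sketch of the shared-sentinel idea, with hypothetical names (`EmptyNode`, `SENTINEL`):

#![allow(dead_code)]
const CAP: usize = 11; // illustrative capacity

// Stand-in for LeafNode<(), ()>: with zero-sized keys and values the arrays
// occupy no space, so one static node can be shared by every empty tree.
struct EmptyNode {
    len: u16,
    keys: [(); CAP],
    vals: [(); CAP],
}

static SENTINEL: EmptyNode = EmptyNode {
    len: 0,
    keys: [(); CAP],
    vals: [(); CAP],
};

fn main() {
    // An empty map can point at SENTINEL instead of allocating a real node;
    // the first insertion would replace this pointer with a heap allocation.
    let root: &'static EmptyNode = &SENTINEL;
    assert_eq!(root.len, 0);
}
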
@@ -430,7 +430,7 @@ impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
                    root: self.root,
                    _marker: PhantomData
                },
-                idx: unsafe { usize::from(*self.as_leaf().parent_idx.get_ref()) },
+                idx: self.as_leaf().parent_idx as usize,
                _marker: PhantomData
            })
        } else {
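
The read changes from an unsafe `get_ref()` on a `MaybeUninit<u16>` (the old name of what is now `assume_init_ref()`) to a plain integer cast. A small sketch of both forms, assuming the caller has already checked that the parent pointer is nonnull, which is what makes the field initialized:

use std::mem::MaybeUninit;

// Wrapper form: the caller promises the value was written earlier.
fn idx_wrapped(parent_idx: &MaybeUninit<u16>) -> usize {
    unsafe { usize::from(*parent_idx.assume_init_ref()) }
}

// Plain form used after the revert: no unsafe at the read site, but nothing
// stops code from reading a never-written field either.
fn idx_plain(parent_idx: u16) -> usize {
    parent_idx as usize
}

fn main() {
    let wrapped = MaybeUninit::new(3u16);
    assert_eq!(idx_wrapped(&wrapped), idx_plain(3));
}
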
@@ -567,7 +567,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
        // the node, which is allowed by LLVM.
        unsafe {
            slice::from_raw_parts(
-                self.as_leaf().keys.as_ptr() as *const K,
+                self.as_leaf().keys.as_ptr(),
                self.len()
            )
        }
@@ -578,7 +578,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
        debug_assert!(!self.is_shared_root());
        unsafe {
            slice::from_raw_parts(
-                self.as_leaf().vals.as_ptr() as *const V,
+                self.as_leaf().vals.as_ptr(),
                self.len()
            )
        }
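
Both `keys()` and `vals()` build a slice over just the initialized prefix of the backing array. With the wrapper, `as_ptr()` yields `*const [K; CAPACITY]` and needs a cast down to `*const K`; a plain array's `as_ptr()` already has the right type. A minimal sketch of the pattern with a hypothetical `PrefixBuf`:

use std::mem::MaybeUninit;
use std::slice;

const CAP: usize = 11; // illustrative capacity

struct PrefixBuf {
    len: usize,
    elems: MaybeUninit<[u32; CAP]>,
}

impl PrefixBuf {
    fn new() -> PrefixBuf {
        PrefixBuf { len: 0, elems: MaybeUninit::uninit() }
    }

    fn push(&mut self, x: u32) {
        assert!(self.len < CAP);
        unsafe {
            // Write through a raw pointer; the array is never read as a whole.
            (self.elems.as_mut_ptr() as *mut u32).add(self.len).write(x);
        }
        self.len += 1;
    }

    fn elems(&self) -> &[u32] {
        // Sound because the first `len` elements are always initialized.
        unsafe { slice::from_raw_parts(self.elems.as_ptr() as *const u32, self.len) }
    }
}

fn main() {
    let mut b = PrefixBuf::new();
    b.push(1);
    b.push(2);
    assert_eq!(b.elems(), &[1, 2]);
}
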
@@ -605,7 +605,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
        } else {
            unsafe {
                slice::from_raw_parts_mut(
-                    self.as_leaf_mut().keys.get_mut() as *mut [K] as *mut K,
+                    &mut self.as_leaf_mut().keys as *mut [K] as *mut K,
                    self.len()
                )
            }
@@ -616,7 +616,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
        debug_assert!(!self.is_shared_root());
        unsafe {
            slice::from_raw_parts_mut(
-                self.as_leaf_mut().vals.get_mut() as *mut [V] as *mut V,
+                &mut self.as_leaf_mut().vals as *mut [V] as *mut V,
                self.len()
            )
        }
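
The mutable accessors change the same way: with a plain array field, the pointer to the first element can be formed with an `as` cast chain (the spelling the revert uses) or simply with `as_mut_ptr()`. A small sketch of the cast through an illustrative `prefix_mut` helper:

use std::slice;

const CAP: usize = 4; // illustrative capacity

fn prefix_mut(arr: &mut [u32; CAP], len: usize) -> &mut [u32] {
    assert!(len <= CAP);
    // The revert writes this as `&mut ... as *mut [K] as *mut K`; going through
    // the sized array pointer (or calling arr.as_mut_ptr()) yields the same address.
    let first = arr as *mut [u32; CAP] as *mut u32;
    unsafe { slice::from_raw_parts_mut(first, len) }
}

fn main() {
    let mut a = [10, 20, 30, 40];
    prefix_mut(&mut a, 2)[1] = 99;
    assert_eq!(a, [10, 99, 30, 40]);
}
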
@@ -1013,7 +1013,7 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
        let ptr = self.node.as_internal_mut() as *mut _;
        let mut child = self.descend();
        child.as_leaf_mut().parent = ptr;
-        child.as_leaf_mut().parent_idx.set(idx);
+        child.as_leaf_mut().parent_idx = idx;
    }

    /// Unsafely asserts to the compiler some static information about whether the underlying
@@ -1152,12 +1152,12 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV>

            ptr::copy_nonoverlapping(
                self.node.keys().as_ptr().add(self.idx + 1),
-                new_node.keys.as_mut_ptr() as *mut K,
+                new_node.keys.as_mut_ptr(),
                new_len
            );
            ptr::copy_nonoverlapping(
                self.node.vals().as_ptr().add(self.idx + 1),
-                new_node.vals.as_mut_ptr() as *mut V,
+                new_node.vals.as_mut_ptr(),
                new_len
            );

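
During a leaf split, everything to the right of the split index is moved into the start of a freshly allocated node with `ptr::copy_nonoverlapping`; with plain arrays the destination pointer no longer needs a `*mut K` cast. A minimal sketch of that tail copy on bare arrays (illustrative `CAP`, not the real node layout):

use std::ptr;

const CAP: usize = 11; // illustrative capacity

fn main() {
    let mut old_keys = [0u32; CAP];
    let old_len = 7;
    for i in 0..old_len {
        old_keys[i] = i as u32;
    }

    let split_idx = 3; // the key at split_idx would move up into the parent
    let new_len = old_len - split_idx - 1;

    // Destination array of the new right-hand node; after the revert,
    // `new_node.keys.as_mut_ptr()` is already *mut K, no MaybeUninit cast needed.
    let mut new_keys = [0u32; CAP];
    unsafe {
        ptr::copy_nonoverlapping(
            old_keys.as_ptr().add(split_idx + 1),
            new_keys.as_mut_ptr(),
            new_len,
        );
    }

    assert_eq!(&new_keys[..new_len], &[4, 5, 6]);
}
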
@@ -1210,12 +1210,12 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::

            ptr::copy_nonoverlapping(
                self.node.keys().as_ptr().add(self.idx + 1),
-                new_node.data.keys.as_mut_ptr() as *mut K,
+                new_node.data.keys.as_mut_ptr(),
                new_len
            );
            ptr::copy_nonoverlapping(
                self.node.vals().as_ptr().add(self.idx + 1),
-                new_node.data.vals.as_mut_ptr() as *mut V,
+                new_node.data.vals.as_mut_ptr(),
                new_len
            );
            ptr::copy_nonoverlapping(