@@ -1,7 +1,6 @@
 use std::fmt;
-use std::ptr;
 use std::slice;
-use std::ptr::NonNull;
+use std::ptr::{self, NonNull};
 use std::mem::{self, MaybeUninit};
 use std::marker::PhantomData;
 
@@ -81,6 +80,17 @@ unsafe impl AllocStrategy for ExponentialAlloc {
     }
 }
 
+/// A type-erased pointer to an element in the arena
+///
+/// Ensures that elements can still be accessed performantly while also assigning the correct
+/// lifetime to references created from this pointer.
+///
+/// Needed because converting an index to a chunk index and item index can be expensive. Uses
+/// `NonNull` so that `Option<Ptr>` gets the layout optimizations that `Option<NonNull>` gets.
+#[repr(transparent)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Ptr(NonNull<()>);
+
 /// An arena allocator that guarantees that the addresses produced remain usable regardless of how
 /// many items are added.
 ///
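The doc comment above claims that `Option<Ptr>` benefits from the same niche optimization as `Option<NonNull>`. A minimal, standalone sketch of that claim, using a stand-in `Ptr` with the same shape as the one added in this hunk (the real type lives in this crate):

```rust
use std::mem::size_of;
use std::ptr::NonNull;

// Stand-in mirroring the `Ptr` added above: a transparent wrapper over `NonNull<()>`.
#[repr(transparent)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Ptr(NonNull<()>);

fn main() {
    // Because `NonNull` is never null, `Option<Ptr>` can use the null value as `None`,
    // so the `Option` costs no extra space over the bare pointer.
    assert_eq!(size_of::<Option<Ptr>>(), size_of::<Ptr>());
}
```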
@@ -188,11 +198,35 @@ impl<T> StableArena<T> {
         self.capacity
     }
 
+    /// Returns a reference to a value in the arena
+    ///
+    /// # Safety
+    ///
+    /// Calling this method with a pointer that is no longer valid is undefined behavior even if the
+    /// resulting reference is not used.
+    pub unsafe fn get_unchecked(&self, ptr: Ptr) -> &T {
+        // Safety: This pointer originated from a `NonNull<T>` so it is safe to cast it back
+        // (Technically it was a `NonNull<MaybeUninit<T>>` but that doesn't make a difference.)
+        &*ptr.0.cast().as_ptr()
+    }
+
+    /// Returns a mutable reference to a value in the arena
+    ///
+    /// # Safety
+    ///
+    /// Calling this method with a pointer that is no longer valid is undefined behavior even if the
+    /// resulting reference is not used.
+    pub unsafe fn get_unchecked_mut(&mut self, ptr: Ptr) -> &mut T {
+        // Safety: This pointer originated from a `NonNull<T>` so it is safe to cast it back
+        // (Technically it was a `NonNull<MaybeUninit<T>>` but that doesn't make a difference.)
+        &mut *ptr.0.cast().as_ptr()
+    }
+
     /// Allocates the given value in the arena and returns a stable address to the value
     ///
     /// The returned pointer is guaranteed to be valid as long as no method is called that would
     /// invalidate the pointer (e.g. the `clear` method).
-    pub fn alloc(&mut self, value: T) -> NonNull<T> {
+    pub fn alloc(&mut self, value: T) -> Ptr {
         debug_assert!(self.len <= self.capacity);
         // The length can never exceed the capacity
         if self.len == self.capacity {
@@ -215,7 +249,7 @@ impl<T> StableArena<T> {
         self.len += 1;
 
         // Safety: `MaybeUninit<T>` is guaranteed to have the same size, alignment, and ABI as T
-        unsafe { mem::transmute(item_ptr) }
+        Ptr(unsafe { mem::transmute(item_ptr) })
     }
 
     /// Returns an iterator over the arena
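Taken together, the two hunks above replace the raw `NonNull<T>` returned by `alloc` with the opaque `Ptr`, which is then redeemed through `get_unchecked`/`get_unchecked_mut`. A rough sketch of the intended call pattern, assuming the crate's items are in scope (the `stable_arena` module path below is hypothetical):

```rust
use stable_arena::{Ptr, StableArena}; // hypothetical path; the types come from this crate

fn demo() {
    let mut arena: StableArena<String> = StableArena::new();

    // `alloc` now hands back an opaque `Ptr` instead of `NonNull<T>`.
    let ptr: Ptr = arena.alloc("hello".to_string());

    // Growing the arena must not invalidate `ptr`; that is the "stable address" guarantee.
    for i in 0..1_000 {
        arena.alloc(i.to_string());
    }

    // Safety: `ptr` came from this arena and nothing invalidating (e.g. `clear`) was called,
    // so it still refers to a live `String`.
    unsafe {
        arena.get_unchecked_mut(ptr).push_str(", world");
        assert_eq!(arena.get_unchecked(ptr), "hello, world");
    }
}
```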
@@ -367,16 +401,6 @@ impl<T> StableArena<T> {
     }
 }
 
-impl<T> IntoIterator for StableArena<T> {
-    type Item = T;
-
-    type IntoIter = IntoIter<T>;
-
-    fn into_iter(self) -> Self::IntoIter {
-        todo!()
-    }
-}
-
 //TODO: This needs a `#[may_dangle]` attribute on `T`
 // See: https://forge.rust-lang.org/libs/maintaining-std.html#is-there-a-manual-drop-implementation
 impl<T> Drop for StableArena<T> {
@@ -396,6 +420,54 @@ impl<T> Drop for StableArena<T> {
     }
 }
 
+impl<T> IntoIterator for StableArena<T> {
+    type Item = T;
+
+    type IntoIter = IntoIter<T>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        todo!()
+    }
+}
+
+impl<'a, T> IntoIterator for &'a StableArena<T> {
+    type Item = &'a T;
+
+    type IntoIter = Iter<'a, T>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.iter()
+    }
+}
+
+impl<'a, T> IntoIterator for &'a mut StableArena<T> {
+    type Item = &'a mut T;
+
+    type IntoIter = IterMut<'a, T>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.iter_mut()
+    }
+}
+
+pub struct IntoIter<T> {
+    _marker: PhantomData<T>, //TODO
+}
+
+impl<T> Iterator for IntoIter<T> {
+    type Item = T;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        self.next_ptr().map(|(_, item)| item)
+    }
+}
+
+impl<T> ArenaIterator for IntoIter<T> {
+    fn next_ptr(&mut self) -> Option<(Ptr, Self::Item)> {
+        todo!()
+    }
+}
+
 pub struct Iter<'a, T> {
     _marker: PhantomData<&'a T>, //TODO
 }
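The three `IntoIterator` impls in this hunk let the arena drive `for` loops directly: the borrowing forms delegate to `iter`/`iter_mut`, and each `Iterator::next` is a thin wrapper over `ArenaIterator::next_ptr`. A sketch of the resulting surface once the `todo!()` bodies are filled in (`stable_arena` is again a hypothetical path):

```rust
use stable_arena::StableArena; // hypothetical path

fn demo(mut arena: StableArena<i32>) {
    // `&StableArena<T>` iterates as `&T` via `iter()`.
    for value in &arena {
        println!("{value}");
    }

    // `&mut StableArena<T>` iterates as `&mut T` via `iter_mut()`.
    for value in &mut arena {
        *value += 1;
    }

    // Consuming the arena yields each `T` by value.
    for value in arena {
        println!("{value}");
    }
}
```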
@@ -404,6 +476,12 @@ impl<'a, T> Iterator for Iter<'a, T> {
     type Item = &'a T;
 
     fn next(&mut self) -> Option<Self::Item> {
+        self.next_ptr().map(|(_, item)| item)
+    }
+}
+
+impl<'a, T> ArenaIterator for Iter<'a, T> {
+    fn next_ptr(&mut self) -> Option<(Ptr, Self::Item)> {
         todo!()
     }
 }
@@ -416,19 +494,35 @@ impl<'a, T> Iterator for IterMut<'a, T> {
     type Item = &'a mut T;
 
     fn next(&mut self) -> Option<Self::Item> {
+        self.next_ptr().map(|(_, item)| item)
+    }
+}
+
+impl<'a, T> ArenaIterator for IterMut<'a, T> {
+    fn next_ptr(&mut self) -> Option<(Ptr, Self::Item)> {
         todo!()
     }
 }
 
-pub struct IntoIter<T> {
-    _marker: PhantomData<T>, //TODO
+pub trait ArenaIterator: Sized + Iterator {
+    /// Returns the next element of this iterator and its corresponding pointer
+    fn next_ptr(&mut self) -> Option<(Ptr, Self::Item)>;
+
+    /// Returns an iterator that also yields the corresponding pointer for each element
+    fn pointers(self) -> Pointers<Self> {
+        Pointers { iter: self }
+    }
 }
 
-impl<T> Iterator for IntoIter<T> {
-    type Item = T;
+pub struct Pointers<I: ArenaIterator> {
+    iter: I,
+}
+
+impl<I: ArenaIterator> Iterator for Pointers<I> {
+    type Item = (Ptr, I::Item);
 
     fn next(&mut self) -> Option<Self::Item> {
-        todo!()
+        self.iter.next_ptr()
     }
 }
 
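`ArenaIterator` factors the pointer bookkeeping out of the three iterators: `next_ptr` is the only required method, and the provided `pointers` adapter re-exposes each element's `Ptr` alongside the item. A sketch of how that adapter might be used, again assuming the `todo!()` iterator bodies are implemented and the hypothetical `stable_arena` path:

```rust
use stable_arena::{ArenaIterator, Ptr, StableArena}; // hypothetical path

fn demo(arena: &StableArena<String>) {
    // `pointers()` wraps any `ArenaIterator` so it also yields each element's `Ptr`,
    // which can be stored and later redeemed with `get_unchecked`.
    let mut saved: Vec<Ptr> = Vec::new();
    for (ptr, value) in arena.iter().pointers() {
        saved.push(ptr);
        println!("{value}");
    }
    assert_eq!(saved.len(), arena.len());
}
```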
@@ -483,19 +577,17 @@ mod tests {
         #[cfg(miri)]
         const ALLOCS: usize = 32;
 
-        let mut addrs = Vec::with_capacity(ALLOCS);
+        let mut ptrs = Vec::with_capacity(ALLOCS);
 
         // The arena allocator will resize multiple times during this test
         let mut arena = StableArena::new();
         for i in 0..ALLOCS {
             // Pushing a type that implements drop
-            addrs.push(arena.alloc(Rc::new(i)));
+            ptrs.push(arena.alloc(Rc::new(i)));
 
             // Check that all addresses are still valid
-            for (j, addr) in addrs.iter().enumerate() {
-                unsafe {
-                    assert_eq!(**addr.as_ref(), j);
-                }
+            for (j, ptr) in ptrs.iter().enumerate() {
+                assert_eq!(unsafe { **arena.get_unchecked(*ptr) }, j);
             }
         }
     }
@@ -523,9 +615,9 @@ mod tests {
         // push should not change capacity if capacity is greater than length
         assert!(arena.capacity() > arena.len());
 
-        let mut addrs = Vec::new();
+        let mut ptrs = Vec::new();
         for i in 0.. {
-            addrs.push(arena.alloc(i.to_string()));
+            ptrs.push(arena.alloc(i.to_string()));
 
             if arena.capacity() <= arena.len() {
                 break;
@@ -538,8 +630,8 @@ mod tests {
         // shrink to fit should not affect items
         arena.shrink_to_fit();
         let capacity = arena.capacity();
-        for (i, addr) in addrs.iter().copied().enumerate() {
-            assert_eq!(unsafe { addr.as_ref() }, &i.to_string());
+        for (i, ptr) in ptrs.iter().copied().enumerate() {
+            assert_eq!(unsafe { arena.get_unchecked(ptr) }, &i.to_string());
             assert_eq!(arena.capacity(), capacity);
         }
 
@@ -551,7 +643,7 @@ mod tests {
 
         //TODO: Add back once we have `pop()`
         // pop should not change capacity
-        // for index in indexes {
+        // for _ in ptrs {
         // arena.pop();
         // assert_eq!(slab.capacity(), capacity);
         // }