Skip to content

Commit 3d91766

Browse files
weclaw1 authored and phil-opp committed
Use new nightly GlobalAlloc API (#11)
* Use new GlobalAlloc API
* Use Alloc for Heap and GlobalAlloc for LockedHeap
* Removed LOCKED_ALLOCATOR static
1 parent afec3a8 commit 3d91766

File tree

3 files changed

+39
-37
lines changed

3 files changed

+39
-37
lines changed

src/hole.rs

+7-6
Original file line number | Diff line number | Diff line change
@@ -1,5 +1,6 @@
1+
use core::ptr::NonNull;
12
use core::mem::size_of;
2-
use alloc::allocator::{Layout, AllocErr};
3+
use core::alloc::{Layout, Opaque, AllocErr};
34

45
use super::align_up;
56

@@ -42,7 +43,7 @@ impl HoleList {
4243
/// block is returned.
4344
/// This function uses the “first fit” strategy, so it uses the first hole that is big
4445
/// enough. Thus the runtime is in O(n) but it should be reasonably fast for small allocations.
45-
pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
46+
pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
4647
assert!(layout.size() >= Self::min_size());
4748

4849
allocate_first_fit(&mut self.first, layout).map(|allocation| {
@@ -52,7 +53,7 @@ impl HoleList {
5253
if let Some(padding) = allocation.back_padding {
5354
deallocate(&mut self.first, padding.addr, padding.size);
5455
}
55-
allocation.info.addr as *mut u8
56+
NonNull::new(allocation.info.addr as *mut Opaque).unwrap()
5657
})
5758
}
5859

@@ -62,8 +63,8 @@ impl HoleList {
6263
/// This function walks the list and inserts the given block at the correct place. If the freed
6364
/// block is adjacent to another free block, the blocks are merged again.
6465
/// This operation is in `O(n)` since the list needs to be sorted by address.
65-
pub unsafe fn deallocate(&mut self, ptr: *mut u8, layout: Layout) {
66-
deallocate(&mut self.first, ptr as usize, layout.size())
66+
pub unsafe fn deallocate(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
67+
deallocate(&mut self.first, ptr.as_ptr() as usize, layout.size())
6768
}
6869

6970
/// Returns the minimal allocation size. Smaller allocations or deallocations are not allowed.
@@ -199,7 +200,7 @@ fn allocate_first_fit(mut previous: &mut Hole, layout: Layout) -> Result<Allocat
199200
}
200201
None => {
201202
// this was the last hole, so no hole is big enough -> allocation not possible
202-
return Err(AllocErr::Exhausted { request: layout });
203+
return Err(AllocErr);
203204
}
204205
}
205206
}

src/lib.rs

+16-15
Original file line number | Diff line number | Diff line change
@@ -3,8 +3,6 @@
33
#![feature(pointer_methods)]
44
#![no_std]
55

6-
extern crate alloc;
7-
86
#[cfg(test)]
97
#[macro_use]
108
extern crate std;
@@ -14,9 +12,10 @@ extern crate spin;
1412

1513
use hole::{Hole, HoleList};
1614
use core::mem;
15+
use core::ptr::NonNull;
1716
#[cfg(feature = "use_spin")]
1817
use core::ops::Deref;
19-
use alloc::allocator::{Alloc, Layout, AllocErr};
18+
use core::alloc::{Alloc, GlobalAlloc, AllocErr, Layout, Opaque};
2019
#[cfg(feature = "use_spin")]
2120
use spin::Mutex;
2221

@@ -70,7 +69,7 @@ impl Heap {
7069
/// This function scans the list of free memory blocks and uses the first block that is big
7170
/// enough. The runtime is in O(n) where n is the number of free blocks, but it should be
7271
/// reasonably fast for small allocations.
73-
pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
72+
pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
7473
let mut size = layout.size();
7574
if size < HoleList::min_size() {
7675
size = HoleList::min_size();
@@ -88,7 +87,7 @@ impl Heap {
8887
/// This function walks the list of free memory blocks and inserts the freed block at the
8988
/// correct place. If the freed block is adjacent to another free block, the blocks are merged
9089
/// again. This operation is in `O(n)` since the list needs to be sorted by address.
91-
pub unsafe fn deallocate(&mut self, ptr: *mut u8, layout: Layout) {
90+
pub unsafe fn deallocate(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
9291
let mut size = layout.size();
9392
if size < HoleList::min_size() {
9493
size = HoleList::min_size();
@@ -122,21 +121,21 @@ impl Heap {
122121
pub unsafe fn extend(&mut self, by: usize) {
123122
let top = self.top();
124123
let layout = Layout::from_size_align(by, 1).unwrap();
125-
self.holes.deallocate(top as *mut u8, layout);
124+
self.holes.deallocate(NonNull::new_unchecked(top as *mut Opaque), layout);
126125
self.size += by;
127126
}
128127
}
129128

130129
unsafe impl Alloc for Heap {
131-
unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
130+
unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
132131
self.allocate_first_fit(layout)
133132
}
134133

135-
unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
134+
unsafe fn dealloc(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
136135
self.deallocate(ptr, layout)
137136
}
138137

139-
fn oom(&mut self, _: AllocErr) -> ! {
138+
fn oom(&mut self) -> ! {
140139
panic!("Out of memory");
141140
}
142141
}
@@ -174,16 +173,18 @@ impl Deref for LockedHeap {
174173
}
175174

176175
#[cfg(feature = "use_spin")]
177-
unsafe impl<'a> Alloc for &'a LockedHeap {
178-
unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
179-
self.0.lock().allocate_first_fit(layout)
176+
unsafe impl GlobalAlloc for LockedHeap {
177+
unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
178+
self.0.lock().allocate_first_fit(layout).ok().map_or(0 as *mut Opaque, |allocation| {
179+
allocation.as_ptr()
180+
})
180181
}
181182

182-
unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
183-
self.0.lock().deallocate(ptr, layout)
183+
unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
184+
self.0.lock().deallocate(NonNull::new_unchecked(ptr), layout)
184185
}
185186

186-
fn oom(&mut self, _: AllocErr) -> ! {
187+
fn oom(&self) -> ! {
187188
panic!("Out of memory");
188189
}
189190
}

src/test.rs

+16-16
Original file line number | Diff line number | Diff line change
@@ -1,6 +1,6 @@
11
use std::prelude::v1::*;
22
use std::mem::{size_of, align_of};
3-
use alloc::allocator::Layout;
3+
use core::alloc::Layout;
44
use super::*;
55

66
fn new_heap() -> Heap {
@@ -46,7 +46,7 @@ fn allocate_double_usize() {
4646
let layout = Layout::from_size_align(size, align_of::<usize>());
4747
let addr = heap.allocate_first_fit(layout.unwrap());
4848
assert!(addr.is_ok());
49-
let addr = addr.unwrap() as usize;
49+
let addr = addr.unwrap().as_ptr() as usize;
5050
assert!(addr == heap.bottom);
5151
let (hole_addr, hole_size) = heap.holes.first_hole().expect("ERROR: no hole left");
5252
assert!(hole_addr == heap.bottom + size);
@@ -64,7 +64,7 @@ fn allocate_and_free_double_usize() {
6464
let layout = Layout::from_size_align(size_of::<usize>() * 2, align_of::<usize>()).unwrap();
6565
let x = heap.allocate_first_fit(layout.clone()).unwrap();
6666
unsafe {
67-
*(x as *mut (usize, usize)) = (0xdeafdeadbeafbabe, 0xdeafdeadbeafbabe);
67+
*(x.as_ptr() as *mut (usize, usize)) = (0xdeafdeadbeafbabe, 0xdeafdeadbeafbabe);
6868

6969
heap.deallocate(x, layout.clone());
7070
assert_eq!((*(heap.bottom as *const Hole)).size, heap.size);
@@ -83,11 +83,11 @@ fn deallocate_right_before() {
8383

8484
unsafe {
8585
heap.deallocate(y, layout.clone());
86-
assert_eq!((*(y as *const Hole)).size, layout.size());
86+
assert_eq!((*(y.as_ptr() as *const Hole)).size, layout.size());
8787
heap.deallocate(x, layout.clone());
88-
assert_eq!((*(x as *const Hole)).size, layout.size() * 2);
88+
assert_eq!((*(x.as_ptr() as *const Hole)).size, layout.size() * 2);
8989
heap.deallocate(z, layout.clone());
90-
assert_eq!((*(x as *const Hole)).size, heap.size);
90+
assert_eq!((*(x.as_ptr() as *const Hole)).size, heap.size);
9191
}
9292
}
9393

@@ -103,11 +103,11 @@ fn deallocate_right_behind() {
103103

104104
unsafe {
105105
heap.deallocate(x, layout.clone());
106-
assert_eq!((*(x as *const Hole)).size, size);
106+
assert_eq!((*(x.as_ptr() as *const Hole)).size, size);
107107
heap.deallocate(y, layout.clone());
108-
assert_eq!((*(x as *const Hole)).size, size * 2);
108+
assert_eq!((*(x.as_ptr() as *const Hole)).size, size * 2);
109109
heap.deallocate(z, layout.clone());
110-
assert_eq!((*(x as *const Hole)).size, heap.size);
110+
assert_eq!((*(x.as_ptr() as *const Hole)).size, heap.size);
111111
}
112112
}
113113

@@ -124,14 +124,14 @@ fn deallocate_middle() {
124124

125125
unsafe {
126126
heap.deallocate(x, layout.clone());
127-
assert_eq!((*(x as *const Hole)).size, size);
127+
assert_eq!((*(x.as_ptr() as *const Hole)).size, size);
128128
heap.deallocate(z, layout.clone());
129-
assert_eq!((*(x as *const Hole)).size, size);
130-
assert_eq!((*(z as *const Hole)).size, size);
129+
assert_eq!((*(x.as_ptr() as *const Hole)).size, size);
130+
assert_eq!((*(z.as_ptr() as *const Hole)).size, size);
131131
heap.deallocate(y, layout.clone());
132-
assert_eq!((*(x as *const Hole)).size, size * 3);
132+
assert_eq!((*(x.as_ptr() as *const Hole)).size, size * 3);
133133
heap.deallocate(a, layout.clone());
134-
assert_eq!((*(x as *const Hole)).size, heap.size);
134+
assert_eq!((*(x.as_ptr() as *const Hole)).size, heap.size);
135135
}
136136
}
137137

@@ -167,9 +167,9 @@ fn allocate_multiple_sizes() {
167167

168168
let x = heap.allocate_first_fit(layout_1.clone()).unwrap();
169169
let y = heap.allocate_first_fit(layout_2.clone()).unwrap();
170-
assert_eq!(y as usize, x as usize + base_size * 2);
170+
assert_eq!(y.as_ptr() as usize, x.as_ptr() as usize + base_size * 2);
171171
let z = heap.allocate_first_fit(layout_3.clone()).unwrap();
172-
assert_eq!(z as usize % (base_size * 4), 0);
172+
assert_eq!(z.as_ptr() as usize % (base_size * 4), 0);
173173

174174
unsafe {
175175
heap.deallocate(x, layout_1.clone());

0 commit comments

Comments (0)