diff --git a/src/hole.rs b/src/hole.rs
index 30713e9..a93b96e 100644
--- a/src/hole.rs
+++ b/src/hole.rs
@@ -1,5 +1,6 @@
+use core::ptr::NonNull;
 use core::mem::size_of;
-use alloc::allocator::{Layout, AllocErr};
+use core::alloc::{Layout, Opaque, AllocErr};
 
 use super::align_up;
 
@@ -42,7 +43,7 @@ impl HoleList {
     /// block is returned.
     /// This function uses the "first fit" strategy, so it uses the first hole that is big
     /// enough. Thus the runtime is in O(n) but it should be reasonably fast for small allocations.
-    pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
         assert!(layout.size() >= Self::min_size());
 
         allocate_first_fit(&mut self.first, layout).map(|allocation| {
@@ -52,7 +53,7 @@
             if let Some(padding) = allocation.back_padding {
                 deallocate(&mut self.first, padding.addr, padding.size);
             }
-            allocation.info.addr as *mut u8
+            NonNull::new(allocation.info.addr as *mut Opaque).unwrap()
         })
     }
 
@@ -62,8 +63,8 @@
     /// This function walks the list and inserts the given block at the correct place. If the freed
     /// block is adjacent to another free block, the blocks are merged again.
     /// This operation is in `O(n)` since the list needs to be sorted by address.
-    pub unsafe fn deallocate(&mut self, ptr: *mut u8, layout: Layout) {
-        deallocate(&mut self.first, ptr as usize, layout.size())
+    pub unsafe fn deallocate(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
+        deallocate(&mut self.first, ptr.as_ptr() as usize, layout.size())
     }
 
     /// Returns the minimal allocation size. Smaller allocations or deallocations are not allowed.
@@ -199,7 +200,7 @@ fn allocate_first_fit(mut previous: &mut Hole, layout: Layout) -> Result<Allocation, AllocErr> {
             }
             None => {
                 // this was the last hole, so no hole is big enough -> allocation not possible
-                return Err(AllocErr::Exhausted { request: layout });
+                return Err(AllocErr);
             }
         }
     }
diff --git a/src/lib.rs b/src/lib.rs
index 9ddba46..7f6967a 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -3,8 +3,6 @@
 #![feature(pointer_methods)]
 #![no_std]
 
-extern crate alloc;
-
 #[cfg(test)]
 #[macro_use]
 extern crate std;
@@ -14,9 +12,10 @@
 extern crate spin;
 
 use hole::{Hole, HoleList};
 use core::mem;
+use core::ptr::NonNull;
 #[cfg(feature = "use_spin")]
 use core::ops::Deref;
-use alloc::allocator::{Alloc, Layout, AllocErr};
+use core::alloc::{Alloc, GlobalAlloc, AllocErr, Layout, Opaque};
 #[cfg(feature = "use_spin")]
 use spin::Mutex;
@@ -70,7 +69,7 @@ impl Heap {
     /// This function scans the list of free memory blocks and uses the first block that is big
     /// enough. The runtime is in O(n) where n is the number of free blocks, but it should be
     /// reasonably fast for small allocations.
-    pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
         let mut size = layout.size();
         if size < HoleList::min_size() {
             size = HoleList::min_size();
         }
@@ -88,7 +87,7 @@ impl Heap {
     /// This function walks the list of free memory blocks and inserts the freed block at the
     /// correct place. If the freed block is adjacent to another free block, the blocks are merged
     /// again. This operation is in `O(n)` since the list needs to be sorted by address.
-    pub unsafe fn deallocate(&mut self, ptr: *mut u8, layout: Layout) {
+    pub unsafe fn deallocate(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
         let mut size = layout.size();
         if size < HoleList::min_size() {
             size = HoleList::min_size();
         }
@@ -122,21 +121,21 @@ impl Heap {
     pub unsafe fn extend(&mut self, by: usize) {
         let top = self.top();
         let layout = Layout::from_size_align(by, 1).unwrap();
-        self.holes.deallocate(top as *mut u8, layout);
+        self.holes.deallocate(NonNull::new_unchecked(top as *mut Opaque), layout);
         self.size += by;
     }
 }
 
 unsafe impl Alloc for Heap {
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
         self.allocate_first_fit(layout)
     }
 
-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+    unsafe fn dealloc(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
         self.deallocate(ptr, layout)
     }
 
-    fn oom(&mut self, _: AllocErr) -> ! {
+    fn oom(&mut self) -> ! {
         panic!("Out of memory");
     }
 }
@@ -174,16 +173,18 @@ impl Deref for LockedHeap {
 }
 
 #[cfg(feature = "use_spin")]
-unsafe impl<'a> Alloc for &'a LockedHeap {
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-        self.0.lock().allocate_first_fit(layout)
+unsafe impl GlobalAlloc for LockedHeap {
+    unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
+        self.0.lock().allocate_first_fit(layout).ok().map_or(0 as *mut Opaque, |allocation| {
+            allocation.as_ptr()
+        })
     }
 
-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-        self.0.lock().deallocate(ptr, layout)
+    unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
+        self.0.lock().deallocate(NonNull::new_unchecked(ptr), layout)
     }
 
-    fn oom(&mut self) -> ! {
+    fn oom(&self) -> ! {
         panic!("Out of memory");
     }
 }
diff --git a/src/test.rs b/src/test.rs
index f60e85a..e731725 100644
--- a/src/test.rs
+++ b/src/test.rs
@@ -1,6 +1,6 @@
 use std::prelude::v1::*;
 use std::mem::{size_of, align_of};
-use alloc::allocator::Layout;
+use core::alloc::Layout;
 use super::*;
 
 fn new_heap() -> Heap {
@@ -46,7 +46,7 @@ fn allocate_double_usize() {
     let layout = Layout::from_size_align(size, align_of::<usize>());
     let addr = heap.allocate_first_fit(layout.unwrap());
     assert!(addr.is_ok());
-    let addr = addr.unwrap() as usize;
+    let addr = addr.unwrap().as_ptr() as usize;
     assert!(addr == heap.bottom);
     let (hole_addr, hole_size) = heap.holes.first_hole().expect("ERROR: no hole left");
     assert!(hole_addr == heap.bottom + size);
@@ -64,7 +64,7 @@ fn allocate_and_free_double_usize() {
     let layout = Layout::from_size_align(size_of::<usize>() * 2, align_of::<usize>()).unwrap();
     let x = heap.allocate_first_fit(layout.clone()).unwrap();
     unsafe {
-        *(x as *mut (usize, usize)) = (0xdeafdeadbeafbabe, 0xdeafdeadbeafbabe);
+        *(x.as_ptr() as *mut (usize, usize)) = (0xdeafdeadbeafbabe, 0xdeafdeadbeafbabe);
         heap.deallocate(x, layout.clone());
 
         assert_eq!((*(heap.bottom as *const Hole)).size, heap.size);
@@ -83,11 +83,11 @@ fn deallocate_right_before() {
 
     unsafe {
         heap.deallocate(y, layout.clone());
-        assert_eq!((*(y as *const Hole)).size, layout.size());
+        assert_eq!((*(y.as_ptr() as *const Hole)).size, layout.size());
         heap.deallocate(x, layout.clone());
-        assert_eq!((*(x as *const Hole)).size, layout.size() * 2);
+        assert_eq!((*(x.as_ptr() as *const Hole)).size, layout.size() * 2);
         heap.deallocate(z, layout.clone());
-        assert_eq!((*(x as *const Hole)).size, heap.size);
+        assert_eq!((*(x.as_ptr() as *const Hole)).size, heap.size);
     }
 }
 
@@ -103,11 +103,11 @@ fn deallocate_right_behind() {
 
     unsafe {
         heap.deallocate(x, layout.clone());
-        assert_eq!((*(x as *const Hole)).size, size);
+        assert_eq!((*(x.as_ptr() as *const Hole)).size, size);
         heap.deallocate(y, layout.clone());
-        assert_eq!((*(x as *const Hole)).size, size * 2);
+        assert_eq!((*(x.as_ptr() as *const Hole)).size, size * 2);
         heap.deallocate(z, layout.clone());
-        assert_eq!((*(x as *const Hole)).size, heap.size);
+        assert_eq!((*(x.as_ptr() as *const Hole)).size, heap.size);
     }
 }
 
@@ -124,14 +124,14 @@ fn deallocate_middle() {
 
     unsafe {
         heap.deallocate(x, layout.clone());
-        assert_eq!((*(x as *const Hole)).size, size);
+        assert_eq!((*(x.as_ptr() as *const Hole)).size, size);
         heap.deallocate(z, layout.clone());
-        assert_eq!((*(x as *const Hole)).size, size);
-        assert_eq!((*(z as *const Hole)).size, size);
+        assert_eq!((*(x.as_ptr() as *const Hole)).size, size);
+        assert_eq!((*(z.as_ptr() as *const Hole)).size, size);
         heap.deallocate(y, layout.clone());
-        assert_eq!((*(x as *const Hole)).size, size * 3);
+        assert_eq!((*(x.as_ptr() as *const Hole)).size, size * 3);
         heap.deallocate(a, layout.clone());
-        assert_eq!((*(x as *const Hole)).size, heap.size);
+        assert_eq!((*(x.as_ptr() as *const Hole)).size, heap.size);
     }
 }
 
@@ -167,9 +167,9 @@ fn allocate_multiple_sizes() {
 
     let x = heap.allocate_first_fit(layout_1.clone()).unwrap();
     let y = heap.allocate_first_fit(layout_2.clone()).unwrap();
-    assert_eq!(y as usize, x as usize + base_size * 2);
+    assert_eq!(y.as_ptr() as usize, x.as_ptr() as usize + base_size * 2);
     let z = heap.allocate_first_fit(layout_3.clone()).unwrap();
-    assert_eq!(z as usize % (base_size * 4), 0);
+    assert_eq!(z.as_ptr() as usize % (base_size * 4), 0);
 
     unsafe {
         heap.deallocate(x, layout_1.clone());
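
Note on the new `GlobalAlloc` impl: because this patch implements `GlobalAlloc` for `LockedHeap` directly (instead of `Alloc for &'a LockedHeap`), the locked heap can be registered with the `#[global_allocator]` attribute. A minimal sketch, assuming the nightly toolchain this diff targets; the heap bounds `HEAP_START` and `HEAP_SIZE` are hypothetical placeholders, not part of the patch:

```rust
#![feature(global_allocator)]
#![no_std]

extern crate linked_list_allocator;

use linked_list_allocator::LockedHeap;

// Register the locked heap as the global allocator; this relies on the
// `GlobalAlloc for LockedHeap` impl added by this patch.
#[global_allocator]
static ALLOCATOR: LockedHeap = LockedHeap::empty();

// Hypothetical heap region; a real kernel would take these from its memory map.
const HEAP_START: usize = 0x4444_4444_0000;
const HEAP_SIZE: usize = 100 * 1024;

unsafe fn init_heap() {
    // `LockedHeap` derefs to `Mutex<Heap>` (with the "use_spin" feature),
    // so `lock()` gives access to the inner `Heap`.
    ALLOCATOR.lock().init(HEAP_START, HEAP_SIZE);
}
```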
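Call sites change the same way the tests do: `allocate_first_fit` now hands back a `NonNull<Opaque>` rather than a `*mut u8`, and `deallocate` expects that same type. A hedged sketch of a caller after this patch; the `demo` function and its layout values are illustrative only:

```rust
use core::alloc::Layout;

use linked_list_allocator::Heap;

fn demo(heap: &mut Heap) {
    // Illustrative layout: 64 bytes, 8-byte aligned.
    let layout = Layout::from_size_align(64, 8).unwrap();
    if let Ok(ptr) = heap.allocate_first_fit(layout.clone()) {
        // `ptr` is a NonNull<Opaque>; cast to a typed pointer before use.
        let bytes = ptr.as_ptr() as *mut u8;
        unsafe {
            *bytes = 0xff;
            // Deallocation takes the NonNull<Opaque> returned by the allocation.
            heap.deallocate(ptr, layout);
        }
    }
}
```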