diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 392731c..9b3115b 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -135,7 +135,7 @@ jobs:
     name: "Miri tests"
     runs-on: ubuntu-latest
     env:
-      MIRIFLAGS: "-Zmiri-disable-isolation -Zmiri-strict-provenance -Zmiri-tag-raw-pointers -Zmiri-ignore-leaks"
+      MIRIFLAGS: "-Zmiri-disable-isolation -Zmiri-strict-provenance -Zmiri-tag-raw-pointers"
     steps:
       - uses: actions/checkout@v1
       - run: rustup toolchain install nightly --profile minimal --component rust-src miri
diff --git a/src/hole.rs b/src/hole.rs
index 5dca909..8dbe660 100644
--- a/src/hole.rs
+++ b/src/hole.rs
@@ -120,31 +120,31 @@ impl Cursor {
         alloc_ptr = aligned_addr;
         alloc_size = required_size;

-        // Okay, time to move onto the back padding. Here, we are opportunistic -
-        // if it fits, we sits. Otherwise we just skip adding the back padding, and
-        // sort of assume that the allocation is actually a bit larger than it
-        // actually needs to be.
-        //
-        // NOTE: Because we always use `HoleList::align_layout`, the size of
-        // the new allocation is always "rounded up" to cover any partial gaps that
-        // would have occurred. For this reason, we DON'T need to "round up"
-        // to account for an unaligned hole spot.
-        let hole_layout = Layout::new::<Hole>();
-        let back_padding_start = align_up(allocation_end, hole_layout.align());
-        let back_padding_end = back_padding_start.wrapping_add(hole_layout.size());
-
-        // Will the proposed new back padding actually fit in the old hole slot?
-        back_padding = if back_padding_end <= hole_end {
-            // Yes, it does! Place a back padding node
-            Some(HoleInfo {
-                addr: back_padding_start,
-                size: (hole_end as usize) - (back_padding_start as usize),
-            })
-        } else {
-            // No, it does not. We are now pretending the allocation now
-            // holds the extra 0..size_of::<Hole>() bytes that are not
-            // big enough to hold what SHOULD be back_padding
+        // Okay, time to move onto the back padding.
+        let back_padding_size = hole_end as usize - allocation_end as usize;
+        back_padding = if back_padding_size == 0 {
             None
+        } else {
+            // NOTE: Because we always use `HoleList::align_layout`, the size of
+            // the new allocation is always "rounded up" to cover any partial gaps that
+            // would have occurred. For this reason, we DON'T need to "round up"
+            // to account for an unaligned hole spot.
+            let hole_layout = Layout::new::<Hole>();
+            let back_padding_start = align_up(allocation_end, hole_layout.align());
+            let back_padding_end = back_padding_start.wrapping_add(hole_layout.size());
+
+            // Will the proposed new back padding actually fit in the old hole slot?
+            if back_padding_end <= hole_end {
+                // Yes, it does! Place a back padding node
+                Some(HoleInfo {
+                    addr: back_padding_start,
+                    size: back_padding_size,
+                })
+            } else {
+                // No, it does not. We don't want to leak any heap bytes, so we
+                // consider this hole unsuitable for the requested allocation.
+                return Err(self);
+            }
         };
     }

@@ -697,34 +697,9 @@ fn deallocate(list: &mut HoleList, addr: *mut u8, size: usize) {
 #[cfg(test)]
 pub mod test {
     use super::HoleList;
-    use crate::{align_down_size, Heap};
+    use crate::{align_down_size, test::new_heap};
     use core::mem::size_of;
-    use std::{alloc::Layout, convert::TryInto, mem::MaybeUninit, prelude::v1::*, ptr::NonNull};
-
-    #[repr(align(128))]
-    struct Chonk<const N: usize> {
-        data: [MaybeUninit<u8>; N],
-    }
-
-    impl<const N: usize> Chonk<N> {
-        pub fn new() -> Self {
-            Self {
-                data: [MaybeUninit::uninit(); N],
-            }
-        }
-    }
-
-    fn new_heap() -> Heap {
-        const HEAP_SIZE: usize = 1000;
-        let heap_space = Box::leak(Box::new(Chonk::<HEAP_SIZE>::new()));
-        let data = &mut heap_space.data;
-        let assumed_location = data.as_mut_ptr().cast();
-
-        let heap = Heap::from_slice(data);
-        assert_eq!(heap.bottom(), assumed_location);
-        assert_eq!(heap.size(), align_down_size(HEAP_SIZE, size_of::<usize>()));
-        heap
-    }
+    use std::{alloc::Layout, convert::TryInto, prelude::v1::*, ptr::NonNull};

     #[test]
     fn cursor() {
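The rewritten branch above changes behavior, not just shape: a leftover region too small to hold a `Hole` node used to be silently folded into the allocation, leaving bytes no hole would ever track again; it now rejects the hole with `return Err(self)` so the caller can move on to the next candidate. A minimal standalone sketch of that decision, with addresses simplified to `usize` and hypothetical stand-ins for the crate's `Hole`, `HoleInfo`, and `align_up` (the function name `decide_back_padding` is invented for illustration):

```rust
use core::alloc::Layout;
use core::ptr::NonNull;

// Hypothetical stand-ins for the crate's internal types, so the sketch
// compiles on its own. The real definitions live in src/hole.rs.
struct Hole {
    size: usize,
    next: Option<NonNull<Hole>>,
}

struct HoleInfo {
    addr: usize,
    size: usize,
}

// Round `addr` up to the next multiple of `align` (a power of two).
fn align_up(addr: usize, align: usize) -> usize {
    (addr + align - 1) & !(align - 1)
}

// The three-way decision the new code makes: no leftover bytes means no
// padding node; a leftover big enough for a `Hole` becomes a back-padding
// node; anything in between makes the hole unsuitable, rather than
// silently over-sizing the allocation as the old code did.
fn decide_back_padding(allocation_end: usize, hole_end: usize) -> Result<Option<HoleInfo>, ()> {
    let back_padding_size = hole_end - allocation_end;
    if back_padding_size == 0 {
        return Ok(None); // the allocation consumes the hole exactly
    }

    let hole_layout = Layout::new::<Hole>();
    let back_padding_start = align_up(allocation_end, hole_layout.align());
    let back_padding_end = back_padding_start + hole_layout.size();

    if back_padding_end <= hole_end {
        // The leftover region can host a real `Hole` node.
        Ok(Some(HoleInfo {
            addr: back_padding_start,
            size: back_padding_size,
        }))
    } else {
        // Too small to track as a hole; reject rather than lose the bytes.
        Err(())
    }
}
```

With this sketch's 16-byte, 8-byte-aligned `Hole` on a 64-bit target, `decide_back_padding(1000, 1000)` is `Ok(None)`, `decide_back_padding(1000, 1024)` yields a 24-byte padding node at 1000, and `decide_back_padding(1000, 1008)` is `Err(())`: the old code would have quietly handed those 8 bytes to the allocation, and they could never be reclaimed by a later deallocate.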
diff --git a/src/test.rs b/src/test.rs
index 59bfc91..1e88c19 100644
--- a/src/test.rs
+++ b/src/test.rs
@@ -1,7 +1,12 @@
 use super::*;
-use core::alloc::Layout;
-use std::mem::{align_of, size_of, MaybeUninit};
-use std::prelude::v1::*;
+use core::{
+    alloc::Layout,
+    ops::{Deref, DerefMut},
+};
+use std::{
+    mem::{align_of, size_of, MaybeUninit},
+    prelude::v1::*,
+};

 #[repr(align(128))]
 struct Chonk<const N: usize> {
@@ -16,22 +21,45 @@ impl<const N: usize> Chonk<N> {
     }
 }

-fn new_heap() -> Heap {
+pub struct OwnedHeap<F> {
+    heap: Heap,
+    _drop: F,
+}
+
+impl<F> Deref for OwnedHeap<F> {
+    type Target = Heap;
+
+    fn deref(&self) -> &Self::Target {
+        &self.heap
+    }
+}
+
+impl<F> DerefMut for OwnedHeap<F> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.heap
+    }
+}
+
+pub fn new_heap() -> OwnedHeap<impl Sized> {
     const HEAP_SIZE: usize = 1000;
-    let heap_space = Box::leak(Box::new(Chonk::<HEAP_SIZE>::new()));
+    let mut heap_space = Box::new(Chonk::<HEAP_SIZE>::new());
     let data = &mut heap_space.data;
     let assumed_location = data.as_mut_ptr().cast();

-    let heap = Heap::from_slice(data);
+    let heap = unsafe { Heap::new(data.as_mut_ptr().cast(), data.len()) };
     assert_eq!(heap.bottom(), assumed_location);
     assert_eq!(heap.size(), align_down_size(HEAP_SIZE, size_of::<usize>()));
-    heap
+
+    let drop = move || {
+        let _ = heap_space;
+    };
+    OwnedHeap { heap, _drop: drop }
 }

-fn new_max_heap() -> Heap {
+fn new_max_heap() -> OwnedHeap<impl Sized> {
     const HEAP_SIZE: usize = 1024;
     const HEAP_SIZE_MAX: usize = 2048;
-    let heap_space = Box::leak(Box::new(Chonk::<HEAP_SIZE_MAX>::new()));
+    let mut heap_space = Box::new(Chonk::<HEAP_SIZE_MAX>::new());
     let data = &mut heap_space.data;
     let start_ptr = data.as_mut_ptr().cast();

@@ -39,7 +67,23 @@
     let heap = unsafe { Heap::new(start_ptr, HEAP_SIZE) };
     assert_eq!(heap.bottom(), start_ptr);
     assert_eq!(heap.size(), HEAP_SIZE);
-    heap
+
+    let drop = move || {
+        let _ = heap_space;
+    };
+    OwnedHeap { heap, _drop: drop }
+}
+
+fn new_heap_skip(ct: usize) -> OwnedHeap<impl Sized> {
+    const HEAP_SIZE: usize = 1000;
+    let mut heap_space = Box::new(Chonk::<HEAP_SIZE>::new());
+    let data = &mut heap_space.data[ct..];
+    let heap = unsafe { Heap::new(data.as_mut_ptr().cast(), data.len()) };
+
+    let drop = move || {
+        let _ = heap_space;
+    };
+    OwnedHeap { heap, _drop: drop }
 }
 #[test]
 fn empty() {
@@ -51,7 +95,15 @@

 #[test]
 fn oom() {
-    let mut heap = new_heap();
+    const HEAP_SIZE: usize = 1000;
+    let mut heap_space = Box::new(Chonk::<HEAP_SIZE>::new());
+    let data = &mut heap_space.data;
+    let assumed_location = data.as_mut_ptr().cast();
+
+    let mut heap = unsafe { Heap::new(data.as_mut_ptr().cast(), data.len()) };
+    assert_eq!(heap.bottom(), assumed_location);
+    assert_eq!(heap.size(), align_down_size(HEAP_SIZE, size_of::<usize>()));
+
     let layout = Layout::from_size_align(heap.size() + 1, align_of::<usize>());
     let addr = heap.allocate_first_fit(layout.unwrap());
     assert!(addr.is_err());
@@ -388,14 +440,6 @@ fn allocate_multiple_unaligned() {
     }
 }

-fn new_heap_skip(ct: usize) -> Heap {
-    const HEAP_SIZE: usize = 1000;
-    let heap_space = Box::leak(Box::new(Chonk::<HEAP_SIZE>::new()));
-    let data = &mut heap_space.data[ct..];
-    let heap = Heap::from_slice(data);
-    heap
-}
-
 #[test]
 fn allocate_usize() {
     let mut heap = new_heap();
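The test-side change is what lets the CI hunk at the top drop `-Zmiri-ignore-leaks`: test heaps are no longer carved out of `Box::leak`ed memory, so Miri sees every backing allocation returned. `OwnedHeap` keeps the `Box<Chonk<N>>` alive by capturing it in a `move` closure, and `Deref`/`DerefMut` forward to the inner `Heap` so call sites stay unchanged. A hedged usage sketch, assuming it sits inside `src/test.rs` next to the helpers above (the test name is invented; `allocate_first_fit` and `deallocate` follow the crate's public API):

```rust
use core::alloc::Layout;

#[test]
fn roundtrip_through_owned_heap() {
    // `new_heap` now returns `OwnedHeap<impl Sized>` instead of a `Heap`
    // backed by leaked memory. Deref coercion keeps the calls unchanged.
    let mut heap = new_heap();

    let layout = Layout::from_size_align(16, 8).unwrap();
    let ptr = heap.allocate_first_fit(layout).unwrap();
    unsafe { heap.deallocate(ptr, layout) };

    // When `heap` goes out of scope, the `_drop` closure is dropped with it,
    // releasing the captured `Box<Chonk<1000>>`; nothing is left for Miri
    // to report as leaked.
}
```

One reading of the design: capturing the box in a closure lets `OwnedHeap<F>` get by with a single opaque parameter (`impl Sized`) in every helper's return type, whereas storing the box directly would thread a `const N: usize` through `OwnedHeap` and pin each helper to its concrete `Chonk` size.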