From 5c0b59f6a834d19de4adf64c6f1f807570ec7b50 Mon Sep 17 00:00:00 2001
From: Philipp Oppermann
Date: Fri, 7 Jul 2017 13:05:50 +0200
Subject: [PATCH 1/3] Implement `allocator::Alloc` for Heap and use
 `allocator::Layout`

---
 src/hole.rs | 44 ++++++++++++++++++++++++++------------------
 src/lib.rs  | 29 ++++++++++++++++++++++++-----
 2 files changed, 50 insertions(+), 23 deletions(-)
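For review, the shape of the API change in one sketch (illustrative only, not
part of the commit; `size` and `align` stand for whatever values a caller used
before):

    // Before: raw size/align pair, failure signalled by None.
    let addr: Option<*mut u8> = heap.allocate_first_fit(size, align);

    // After: a Layout bundles size and alignment, and failure is an
    // explicit AllocErr that carries the rejected request.
    let layout = Layout::from_size_align(size, align).unwrap();
    let addr: Result<*mut u8, AllocErr> = heap.allocate_first_fit(layout);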
diff --git a/src/hole.rs b/src/hole.rs
index c12fbc3..e36cdae 100644
--- a/src/hole.rs
+++ b/src/hole.rs
@@ -1,5 +1,6 @@
 use core::ptr::Unique;
 use core::mem::{self, size_of};
+use alloc::allocator::{Layout, AllocErr};
 
 use super::align_up;
 
@@ -41,14 +42,15 @@ impl HoleList {
     }
 
     /// Searches the list for a big enough hole. A hole is big enough if it can hold an allocation
-    /// of `size` bytes with the given `align`. If such a hole is found in the list, a block of the
-    /// required size is allocated from it. Then the start address of that block is returned.
+    /// of `layout.size()` bytes with the given `layout.align()`. If such a hole is found in the
+    /// list, a block of the required size is allocated from it. Then the start address of that
+    /// block is returned.
     /// This function uses the “first fit” strategy, so it uses the first hole that is big
     /// enough. Thus the runtime is in O(n) but it should be reasonably fast for small allocations.
-    pub fn allocate_first_fit(&mut self, size: usize, align: usize) -> Option<*mut u8> {
-        assert!(size >= Self::min_size());
+    pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+        assert!(layout.size() >= Self::min_size());
 
-        allocate_first_fit(&mut self.first, size, align).map(|allocation| {
+        allocate_first_fit(&mut self.first, layout).map(|allocation| {
             if let Some(padding) = allocation.front_padding {
                 deallocate(&mut self.first, padding.addr, padding.size);
             }
@@ -59,14 +61,14 @@ impl HoleList {
         })
     }
 
-    /// Frees the allocation given by `ptr` and `size`. `ptr` must be a pointer returned by a call
-    /// to the `allocate_first_fit` function with identical size. Undefined behavior may occur for
+    /// Frees the allocation given by `ptr` and `layout`. `ptr` must be a pointer returned by a call
+    /// to the `allocate_first_fit` function with identical layout. Undefined behavior may occur for
     /// invalid arguments.
     /// This function walks the list and inserts the given block at the correct place. If the freed
     /// block is adjacent to another free block, the blocks are merged again.
     /// This operation is in `O(n)` since the list needs to be sorted by address.
-    pub unsafe fn deallocate(&mut self, ptr: *mut u8, size: usize) {
-        deallocate(&mut self.first, ptr as usize, size)
+    pub unsafe fn deallocate(&mut self, ptr: *mut u8, layout: Layout) {
+        deallocate(&mut self.first, ptr as usize, layout.size())
     }
 
     /// Returns the minimal allocation size. Smaller allocations or deallocations are not allowed.
@@ -125,11 +127,14 @@ struct Allocation {
 }
 
 /// Splits the given hole into `(front_padding, hole, back_padding)` if it's big enough to allocate
-/// `required_size` bytes with the `required_align`. Else `None` is returned.
+/// `required_layout.size()` bytes with the `required_layout.align()`. Else `None` is returned.
 /// Front padding occurs if the required alignment is higher than the hole's alignment. Back
 /// padding occurs if the required size is smaller than the size of the aligned hole. All padding
 /// must be at least `HoleList::min_size()` big or the hole is unusable.
-fn split_hole(hole: HoleInfo, required_size: usize, required_align: usize) -> Option<Allocation> {
+fn split_hole(hole: HoleInfo, required_layout: Layout) -> Option<Allocation> {
+    let required_size = required_layout.size();
+    let required_align = required_layout.align();
+
     let (aligned_addr, front_padding) = if hole.addr == align_up(hole.addr, required_align) {
         // hole has already the required alignment
         (hole.addr, None)
@@ -179,21 +184,22 @@ fn split_hole(hole: HoleInfo, required_size: usize, required_align: usize) -> Option<Allocation> {
 }
 
 /// Searches the list starting at the next hole of `previous` for a big enough hole. A hole is big
-/// enough if it can hold an allocation of `size` bytes with the given `align`. When a hole is used
-/// for an allocation, there may be some needed padding before and/or after the allocation. This
-/// padding is returned as part of the `Allocation`. The caller must take care of freeing it again.
+/// enough if it can hold an allocation of `layout.size()` bytes with the given `layout.align()`.
+/// When a hole is used for an allocation, there may be some needed padding before and/or after
+/// the allocation. This padding is returned as part of the `Allocation`. The caller must take
+/// care of freeing it again.
 /// This function uses the “first fit” strategy, so it breaks as soon as a big enough hole is
 /// found (and returns it).
-fn allocate_first_fit(mut previous: &mut Hole, size: usize, align: usize) -> Option<Allocation> {
+fn allocate_first_fit(mut previous: &mut Hole, layout: Layout) -> Result<Allocation, AllocErr> {
     loop {
         let allocation: Option<Allocation> = previous.next
             .as_mut()
-            .and_then(|current| split_hole(unsafe { current.as_ref() }.info(), size, align));
+            .and_then(|current| split_hole(unsafe { current.as_ref() }.info(), layout.clone()));
         match allocation {
             Some(allocation) => {
                 // hole is big enough, so remove it from the list by updating the previous pointer
                 previous.next = previous.next_unwrap().next.take();
-                return Some(allocation);
+                return Ok(allocation);
             }
             None if previous.next.is_some() => {
                 // try next hole
@@ -201,7 +207,9 @@ fn allocate_first_fit(mut previous: &mut Hole, size: usize, align: usize) -> Option<Allocation> {
             }
             None => {
                 // this was the last hole, so no hole is big enough -> allocation not possible
-                return None;
+                return Err(AllocErr::Exhausted {
+                    request: layout,
+                });
             }
         }
     }
diff --git a/src/lib.rs b/src/lib.rs
index 66cc15d..87076b8 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,13 +1,17 @@
 #![feature(unique)]
 #![feature(const_fn)]
+#![feature(alloc, allocator_api)]
 #![no_std]
 
+extern crate alloc;
+
 #[cfg(test)]
 #[macro_use]
 extern crate std;
 
 use hole::{Hole, HoleList};
 use core::mem;
+use alloc::allocator::{Alloc, Layout, AllocErr};
 
 mod hole;
 #[cfg(test)]
@@ -59,13 +63,15 @@ impl Heap {
     /// This function scans the list of free memory blocks and uses the first block that is big
     /// enough. The runtime is in O(n) where n is the number of free blocks, but it should be
     /// reasonably fast for small allocations.
-    pub fn allocate_first_fit(&mut self, mut size: usize, align: usize) -> Option<*mut u8> {
+    pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+        let mut size = layout.size();
         if size < HoleList::min_size() {
             size = HoleList::min_size();
         }
         let size = align_up(size, mem::align_of::<Hole>());
+        let layout = Layout::from_size_align(size, layout.align()).unwrap();
 
-        self.holes.allocate_first_fit(size, align)
+        self.holes.allocate_first_fit(layout)
     }
 
     /// Frees the given allocation. `ptr` must be a pointer returned
@@ -75,13 +81,15 @@ impl Heap {
     /// This function walks the list of free memory blocks and inserts the freed block at the
     /// correct place. If the freed block is adjacent to another free block, the blocks are merged
     /// again. This operation is in `O(n)` since the list needs to be sorted by address.
-    pub unsafe fn deallocate(&mut self, ptr: *mut u8, mut size: usize, _align: usize) {
+    pub unsafe fn deallocate(&mut self, ptr: *mut u8, layout: Layout) {
+        let mut size = layout.size();
         if size < HoleList::min_size() {
             size = HoleList::min_size();
         }
         let size = align_up(size, mem::align_of::<Hole>());
+        let layout = Layout::from_size_align(size, layout.align()).unwrap();
 
-        self.holes.deallocate(ptr, size);
+        self.holes.deallocate(ptr, layout);
     }
 
     /// Returns the bottom address of the heap.
@@ -106,11 +114,22 @@ impl Heap {
     /// The new extended area must be valid
     pub unsafe fn extend(&mut self, by: usize) {
         let top = self.top();
-        self.holes.deallocate(top as *mut u8, by);
+        let layout = Layout::from_size_align(by, 1).unwrap();
+        self.holes.deallocate(top as *mut u8, layout);
         self.size += by;
     }
 }
 
+unsafe impl Alloc for Heap {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+        self.allocate_first_fit(layout)
+    }
+
+    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+        self.deallocate(ptr, layout)
+    }
+}
+
 /// Align downwards. Returns the greatest x with alignment `align`
 /// so that x <= addr. The alignment must be a power of 2.
 pub fn align_down(addr: usize, align: usize) -> usize {
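With the `Alloc` impl in place, `Heap` can be used anywhere the new allocator
API is expected. A minimal usage sketch (illustrative only, not part of the
commit; it assumes a nightly toolchain with the same
`#![feature(alloc, allocator_api)]` gates the patch adds):

    use alloc::allocator::{Alloc, Layout};

    unsafe fn demo(heap: &mut Heap) {
        // 64 bytes, aligned to 8; from_size_align rejects invalid combinations.
        let layout = Layout::from_size_align(64, 8).unwrap();
        // Layout is not Copy on this nightly, hence the clone() for reuse.
        if let Ok(ptr) = heap.alloc(layout.clone()) {
            // ... use the memory at `ptr` ...
            heap.dealloc(ptr, layout);
        }
    }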
From 2298ced470f438a2c8c22980610cdaa7e1fae7ab Mon Sep 17 00:00:00 2001
From: Philipp Oppermann
Date: Fri, 7 Jul 2017 13:07:15 +0200
Subject: [PATCH 2/3] Adjust tests for new Layout-based API

---
 src/test.rs | 155 ++++++++++++++++++++++++++++++----------------------
 1 file changed, 91 insertions(+), 64 deletions(-)

diff --git a/src/test.rs b/src/test.rs
index db76efa..0868de2 100644
--- a/src/test.rs
+++ b/src/test.rs
@@ -1,5 +1,6 @@
 use std::prelude::v1::*;
 use std::mem::{size_of, align_of};
+use alloc::allocator::Layout;
 use super::*;
 
 fn new_heap() -> Heap {
@@ -26,23 +27,25 @@ fn new_max_heap() -> Heap {
 #[test]
 fn empty() {
     let mut heap = Heap::empty();
-    assert!(heap.allocate_first_fit(1, 1).is_none());
+    let layout = Layout::from_size_align(1, 1).unwrap();
+    assert!(heap.allocate_first_fit(layout.clone()).is_err());
 }
 
 #[test]
 fn oom() {
     let mut heap = new_heap();
-    let size = heap.size() + 1;
-    let addr = heap.allocate_first_fit(size, align_of::<usize>());
-    assert!(addr.is_none());
+    let layout = Layout::from_size_align(heap.size() + 1, align_of::<usize>());
+    let addr = heap.allocate_first_fit(layout.unwrap());
+    assert!(addr.is_err());
 }
 
 #[test]
 fn allocate_double_usize() {
     let mut heap = new_heap();
     let size = size_of::<usize>() * 2;
-    let addr = heap.allocate_first_fit(size, align_of::<usize>());
-    assert!(addr.is_some());
+    let layout = Layout::from_size_align(size, align_of::<usize>());
+    let addr = heap.allocate_first_fit(layout.unwrap());
+    assert!(addr.is_ok());
     let addr = addr.unwrap() as usize;
     assert!(addr == heap.bottom);
     let (hole_addr, hole_size) = heap.holes.first_hole().expect("ERROR: no hole left");
@@ -58,11 +61,12 @@ fn allocate_and_free_double_usize() {
     let mut heap = new_heap();
 
-    let x = heap.allocate_first_fit(size_of::<usize>() * 2, align_of::<usize>()).unwrap();
+    let layout = Layout::from_size_align(size_of::<usize>() * 2, align_of::<usize>()).unwrap();
+    let x = heap.allocate_first_fit(layout.clone()).unwrap();
     unsafe {
         *(x as *mut (usize, usize)) = (0xdeafdeadbeafbabe, 0xdeafdeadbeafbabe);
 
-        heap.deallocate(x, size_of::<usize>() * 2, align_of::<usize>());
+        heap.deallocate(x, layout.clone());
         assert_eq!((*(heap.bottom as *const Hole)).size, heap.size);
         assert!((*(heap.bottom as *const Hole)).next.is_none());
     }
 }
@@ -71,18 +75,18 @@
 #[test]
 fn deallocate_right_before() {
     let mut heap = new_heap();
-    let size = size_of::<usize>() * 5;
+    let layout = Layout::from_size_align(size_of::<usize>() * 5, 1).unwrap();
 
-    let x = heap.allocate_first_fit(size, 1).unwrap();
-    let y = heap.allocate_first_fit(size, 1).unwrap();
-    let z = heap.allocate_first_fit(size, 1).unwrap();
+    let x = heap.allocate_first_fit(layout.clone()).unwrap();
+    let y = heap.allocate_first_fit(layout.clone()).unwrap();
+    let z = heap.allocate_first_fit(layout.clone()).unwrap();
 
     unsafe {
-        heap.deallocate(y, size, 1);
-        assert_eq!((*(y as *const Hole)).size, size);
-        heap.deallocate(x, size, 1);
-        assert_eq!((*(x as *const Hole)).size, size * 2);
-        heap.deallocate(z, size, 1);
+        heap.deallocate(y, layout.clone());
+        assert_eq!((*(y as *const Hole)).size, layout.size());
+        heap.deallocate(x, layout.clone());
+        assert_eq!((*(x as *const Hole)).size, layout.size() * 2);
+        heap.deallocate(z, layout.clone());
         assert_eq!((*(x as *const Hole)).size, heap.size);
     }
 }
@@ -91,17 +95,18 @@
 fn deallocate_right_behind() {
     let mut heap = new_heap();
     let size = size_of::<usize>() * 5;
+    let layout = Layout::from_size_align(size, 1).unwrap();
 
-    let x = heap.allocate_first_fit(size, 1).unwrap();
-    let y = heap.allocate_first_fit(size, 1).unwrap();
-    let z = heap.allocate_first_fit(size, 1).unwrap();
+    let x = heap.allocate_first_fit(layout.clone()).unwrap();
+    let y = heap.allocate_first_fit(layout.clone()).unwrap();
+    let z = heap.allocate_first_fit(layout.clone()).unwrap();
 
     unsafe {
-        heap.deallocate(x, size, 1);
+        heap.deallocate(x, layout.clone());
         assert_eq!((*(x as *const Hole)).size, size);
-        heap.deallocate(y, size, 1);
+        heap.deallocate(y, layout.clone());
         assert_eq!((*(x as *const Hole)).size, size * 2);
-        heap.deallocate(z, size, 1);
+        heap.deallocate(z, layout.clone());
         assert_eq!((*(x as *const Hole)).size, heap.size);
     }
 }
@@ -110,21 +115,22 @@
 fn deallocate_middle() {
     let mut heap = new_heap();
     let size = size_of::<usize>() * 5;
+    let layout = Layout::from_size_align(size, 1).unwrap();
 
-    let x = heap.allocate_first_fit(size, 1).unwrap();
-    let y = heap.allocate_first_fit(size, 1).unwrap();
-    let z = heap.allocate_first_fit(size, 1).unwrap();
-    let a = heap.allocate_first_fit(size, 1).unwrap();
+    let x = heap.allocate_first_fit(layout.clone()).unwrap();
+    let y = heap.allocate_first_fit(layout.clone()).unwrap();
+    let z = heap.allocate_first_fit(layout.clone()).unwrap();
+    let a = heap.allocate_first_fit(layout.clone()).unwrap();
 
     unsafe {
-        heap.deallocate(x, size, 1);
+        heap.deallocate(x, layout.clone());
         assert_eq!((*(x as *const Hole)).size, size);
-        heap.deallocate(z, size, 1);
+        heap.deallocate(z, layout.clone());
         assert_eq!((*(x as *const Hole)).size, size);
         assert_eq!((*(z as *const Hole)).size, size);
-        heap.deallocate(y, size, 1);
+        heap.deallocate(y, layout.clone());
         assert_eq!((*(x as *const Hole)).size, size * 3);
-        heap.deallocate(a, size, 1);
+        heap.deallocate(a, layout.clone());
         assert_eq!((*(x as *const Hole)).size, heap.size);
     }
 }
@@ -133,14 +139,16 @@
 fn reallocate_double_usize() {
     let mut heap = new_heap();
 
-    let x = heap.allocate_first_fit(size_of::<usize>() * 2, align_of::<usize>()).unwrap();
+    let layout = Layout::from_size_align(size_of::<usize>() * 2, align_of::<usize>()).unwrap();
+
+    let x = heap.allocate_first_fit(layout.clone()).unwrap();
     unsafe {
-        heap.deallocate(x, size_of::<usize>() * 2, align_of::<usize>());
+        heap.deallocate(x, layout.clone());
     }
 
-    let y = heap.allocate_first_fit(size_of::<usize>() * 2, align_of::<usize>()).unwrap();
+    let y = heap.allocate_first_fit(layout.clone()).unwrap();
     unsafe {
-        heap.deallocate(y, size_of::<usize>() * 2, align_of::<usize>());
+        heap.deallocate(y, layout.clone());
     }
 
     assert_eq!(x, y);
@@ -152,25 +160,30 @@
 fn allocate_multiple_sizes() {
     let mut heap = new_heap();
     let base_size = size_of::<usize>();
    let base_align = align_of::<usize>();
 
-    let x = heap.allocate_first_fit(base_size * 2, base_align).unwrap();
-    let y = heap.allocate_first_fit(base_size * 7, base_align).unwrap();
+    let layout_1 = Layout::from_size_align(base_size * 2, base_align).unwrap();
+    let layout_2 = Layout::from_size_align(base_size * 7, base_align).unwrap();
+    let layout_3 = Layout::from_size_align(base_size * 3, base_align * 4).unwrap();
+    let layout_4 = Layout::from_size_align(base_size * 4, base_align).unwrap();
+
+    let x = heap.allocate_first_fit(layout_1.clone()).unwrap();
+    let y = heap.allocate_first_fit(layout_2.clone()).unwrap();
     assert_eq!(y as usize, x as usize + base_size * 2);
-    let z = heap.allocate_first_fit(base_size * 3, base_align * 4).unwrap();
+    let z = heap.allocate_first_fit(layout_3.clone()).unwrap();
     assert_eq!(z as usize % (base_size * 4), 0);
 
     unsafe {
-        heap.deallocate(x, base_size * 2, base_align);
+        heap.deallocate(x, layout_1.clone());
     }
 
-    let a = heap.allocate_first_fit(base_size * 4, base_align).unwrap();
-    let b = heap.allocate_first_fit(base_size * 2, base_align).unwrap();
+    let a = heap.allocate_first_fit(layout_4.clone()).unwrap();
+    let b = heap.allocate_first_fit(layout_1.clone()).unwrap();
     assert_eq!(b, x);
 
     unsafe {
-        heap.deallocate(y, base_size * 7, base_align);
-        heap.deallocate(z, base_size * 3, base_align * 4);
-        heap.deallocate(a, base_size * 4, base_align);
-        heap.deallocate(b, base_size * 2, base_align);
+        heap.deallocate(y, layout_2);
+        heap.deallocate(z, layout_3);
+        heap.deallocate(a, layout_4);
+        heap.deallocate(b, layout_1);
     }
 }
@@ -178,27 +191,32 @@ fn allocate_usize() {
     let mut heap = new_heap();
 
-    assert!(heap.allocate_first_fit(size_of::<usize>(), 1).is_some());
+    let layout = Layout::from_size_align(size_of::<usize>(), 1).unwrap();
+
+    assert!(heap.allocate_first_fit(layout.clone()).is_ok());
 }
 
 #[test]
 fn allocate_usize_in_bigger_block() {
     let mut heap = new_heap();
 
-    let x = heap.allocate_first_fit(size_of::<usize>() * 2, 1).unwrap();
-    let y = heap.allocate_first_fit(size_of::<usize>() * 2, 1).unwrap();
+    let layout_1 = Layout::from_size_align(size_of::<usize>() * 2, 1).unwrap();
+    let layout_2 = Layout::from_size_align(size_of::<usize>(), 1).unwrap();
+
+    let x = heap.allocate_first_fit(layout_1.clone()).unwrap();
+    let y = heap.allocate_first_fit(layout_1.clone()).unwrap();
     unsafe {
-        heap.deallocate(x, size_of::<usize>() * 2, 1);
+        heap.deallocate(x, layout_1.clone());
     }
 
-    let z = heap.allocate_first_fit(size_of::<usize>(), 1);
-    assert!(z.is_some());
+    let z = heap.allocate_first_fit(layout_2.clone());
+    assert!(z.is_ok());
     let z = z.unwrap();
     assert_eq!(x, z);
 
     unsafe {
-        heap.deallocate(y, size_of::<usize>() * 2, 1);
-        heap.deallocate(z, size_of::<usize>(), 1);
+        heap.deallocate(y, layout_1.clone());
+        heap.deallocate(z, layout_2);
     }
 }
 
 #[test]
@@ -207,10 +225,13 @@
 fn align_from_small_to_big() {
     let mut heap = new_heap();
 
+    let layout_1 = Layout::from_size_align(28, 4).unwrap();
+    let layout_2 = Layout::from_size_align(8, 8).unwrap();
+
     // allocate 28 bytes so that the heap end is only 4 byte aligned
-    assert!(heap.allocate_first_fit(28, 4).is_some());
+    assert!(heap.allocate_first_fit(layout_1.clone()).is_ok());
     // try to allocate a 8 byte aligned block
-    assert!(heap.allocate_first_fit(8, 8).is_some());
+    assert!(heap.allocate_first_fit(layout_2.clone()).is_ok());
 }
 
 #[test]
@@ -222,34 +243,40 @@ fn extend_empty_heap() {
     }
 
     // Try to allocate full heap after extend
-    assert!(heap.allocate_first_fit(2048, 1).is_some());
+    let layout = Layout::from_size_align(2048, 1).unwrap();
+    assert!(heap.allocate_first_fit(layout.clone()).is_ok());
 }
 
 #[test]
 fn extend_full_heap() {
     let mut heap = new_max_heap();
 
+    let layout = Layout::from_size_align(1024, 1).unwrap();
+
     // Allocate full heap, extend and allocate again to the max
-    assert!(heap.allocate_first_fit(1024, 1).is_some());
+    assert!(heap.allocate_first_fit(layout.clone()).is_ok());
     unsafe {
         heap.extend(1024);
     }
-    assert!(heap.allocate_first_fit(1024, 1).is_some());
+    assert!(heap.allocate_first_fit(layout.clone()).is_ok());
 }
 
 #[test]
 fn extend_fragmented_heap() {
     let mut heap = new_max_heap();
 
-    let alloc1 = heap.allocate_first_fit(512, 1);
-    let alloc2 = heap.allocate_first_fit(512, 1);
+    let layout_1 = Layout::from_size_align(512, 1).unwrap();
+    let layout_2 = Layout::from_size_align(1024, 1).unwrap();
+
+    let alloc1 = heap.allocate_first_fit(layout_1.clone());
+    let alloc2 = heap.allocate_first_fit(layout_1.clone());
 
-    assert!(alloc1.is_some());
-    assert!(alloc2.is_some());
+    assert!(alloc1.is_ok());
+    assert!(alloc2.is_ok());
 
     unsafe {
         // Create a hole at the beginning of the heap
-        heap.deallocate(alloc1.unwrap(), 512, 1);
+        heap.deallocate(alloc1.unwrap(), layout_1.clone());
     }
 
     unsafe {
@@ -258,5 +285,5 @@
 
     // We got additional 1024 bytes hole at the end of the heap
     // Try to allocate there
-    assert!(heap.allocate_first_fit(1024, 1).is_some());
+    assert!(heap.allocate_first_fit(layout_2.clone()).is_ok());
 }
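All of the test changes above follow one conversion pattern, shown once here
outside the diff (illustrative): build a `Layout` up front, unwrap it once
(`from_size_align` rejects invalid size/alignment combinations such as a
non-power-of-two alignment), then clone it for every allocate/deallocate pair,
since `Layout` is not `Copy` on this nightly and `deallocate` must receive the
same layout that was allocated:

    let layout = Layout::from_size_align(size_of::<usize>() * 2, align_of::<usize>()).unwrap();
    let x = heap.allocate_first_fit(layout.clone()).unwrap();
    unsafe {
        heap.deallocate(x, layout.clone());
    }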
From 1fa15df84e3a490ad408bbaa881f16530f03d83b Mon Sep 17 00:00:00 2001
From: Philipp Oppermann
Date: Fri, 7 Jul 2017 13:07:35 +0200
Subject: [PATCH 3/3] Run rustfmt

---
 src/hole.rs | 48 ++++++++++++++++++++++++++++--------------------
 src/test.rs |  2 +-
 2 files changed, 29 insertions(+), 21 deletions(-)

diff --git a/src/hole.rs b/src/hole.rs
index e36cdae..6ca882e 100644
--- a/src/hole.rs
+++ b/src/hole.rs
@@ -27,11 +27,13 @@ impl HoleList {
         assert!(size_of::<Hole>() == Self::min_size());
 
         let ptr = hole_addr as *mut Hole;
-        mem::replace(&mut *ptr,
-                     Hole {
-                         size: hole_size,
-                         next: None,
-                     });
+        mem::replace(
+            &mut *ptr,
+            Hole {
+                size: hole_size,
+                next: None,
+            },
+        );
 
         HoleList {
             first: Hole {
@@ -79,7 +81,9 @@ impl HoleList {
 
     /// Returns information about the first hole for test purposes.
     #[cfg(test)]
     pub fn first_hole(&self) -> Option<(usize, usize)> {
-        self.first.next.as_ref().map(|hole| (hole.as_ptr() as usize, unsafe { hole.as_ref().size }))
+        self.first.next.as_ref().map(|hole| {
+            (hole.as_ptr() as usize, unsafe { hole.as_ref().size })
+        })
     }
 }
@@ -141,11 +145,13 @@ fn split_hole(hole: HoleInfo, required_layout: Layout) -> Option<Allocation> {
     } else {
         // the required alignment causes some padding before the allocation
         let aligned_addr = align_up(hole.addr + HoleList::min_size(), required_align);
-        (aligned_addr,
-         Some(HoleInfo {
-             addr: hole.addr,
-             size: aligned_addr - hole.addr,
-         }))
+        (
+            aligned_addr,
+            Some(HoleInfo {
+                addr: hole.addr,
+                size: aligned_addr - hole.addr,
+            }),
+        )
     };
 
     let aligned_hole = {
@@ -192,9 +198,9 @@ fn split_hole(hole: HoleInfo, required_layout: Layout) -> Option<Allocation> {
 /// found (and returns it).
 fn allocate_first_fit(mut previous: &mut Hole, layout: Layout) -> Result<Allocation, AllocErr> {
     loop {
-        let allocation: Option<Allocation> = previous.next
-            .as_mut()
-            .and_then(|current| split_hole(unsafe { current.as_ref() }.info(), layout.clone()));
+        let allocation: Option<Allocation> = previous.next.as_mut().and_then(|current| {
+            split_hole(unsafe { current.as_ref() }.info(), layout.clone())
+        });
         match allocation {
             Some(allocation) => {
                 // hole is big enough, so remove it from the list by updating the previous pointer
@@ -207,9 +213,7 @@ fn allocate_first_fit(mut previous: &mut Hole, layout: Layout) -> Result<Allocation, AllocErr> {
             }
             None => {
                 // this was the last hole, so no hole is big enough -> allocation not possible
-                return Err(AllocErr::Exhausted {
-                    request: layout,
-                });
+                return Err(AllocErr::Exhausted { request: layout });
             }
         }
     }
@@ -233,11 +237,15 @@ fn deallocate(mut hole: &mut Hole, addr: usize, mut size: usize) {
 
             // Each freed block must be handled by the previous hole in memory. Thus the freed
             // address must be always behind the current hole.
-            assert!(hole_addr + hole.size <= addr,
-                    "invalid deallocation (probably a double free)");
+            assert!(
+                hole_addr + hole.size <= addr,
+                "invalid deallocation (probably a double free)"
+            );
 
             // get information about the next block
-            let next_hole_info = hole.next.as_ref().map(|next| unsafe { next.as_ref().info() });
+            let next_hole_info = hole.next
+                .as_ref()
+                .map(|next| unsafe { next.as_ref().info() });
 
             match next_hole_info {
                 Some(next) if hole_addr + hole.size == addr && addr + size == next.addr => {
diff --git a/src/test.rs b/src/test.rs
index 0868de2..f60e85a 100644
--- a/src/test.rs
+++ b/src/test.rs
@@ -61,7 +61,7 @@ fn allocate_and_free_double_usize() {
     let mut heap = new_heap();
 
-    let layout = Layout::from_size_align(size_of::<usize>() * 2, align_of::<usize>()).unwrap();
+    let layout = Layout::from_size_align(size_of::<usize>() * 2, align_of::<usize>()).unwrap();
     let x = heap.allocate_first_fit(layout.clone()).unwrap();
     unsafe {
         *(x as *mut (usize, usize)) = (0xdeafdeadbeafbabe, 0xdeafdeadbeafbabe);
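Both the inherent methods and the `Alloc` impl take `&mut self`, so a heap
shared between call sites (e.g. as a kernel allocator) needs locking around
it. A sketch of such a wrapper (illustrative only; the wrapper type and the
use of the `spin` crate are assumptions, not part of this series):

    extern crate spin;

    pub struct LockedHeap(spin::Mutex<Heap>);

    impl LockedHeap {
        pub unsafe fn alloc(&self, layout: Layout) -> Result<*mut u8, AllocErr> {
            // Lock, delegate to the inherent first-fit allocator, unlock on drop.
            self.0.lock().allocate_first_fit(layout)
        }

        pub unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
            self.0.lock().deallocate(ptr, layout)
        }
    }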