
Commit

Merge pull request #7 from f-bro/new-alloc-api
Update alloc API to latest nightly
japaric authored Aug 4, 2017
2 parents b11b99b + b30462a commit ae3d831
Showing 2 changed files with 55 additions and 101 deletions.
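In short, the crate drops the old `#![allocator]` / `__rust_allocate*` interface and exposes a `CortexMHeap` type that plugs into the new `#[global_allocator]` attribute and `Alloc` trait. A minimal sketch of downstream usage, assembled from the doc-comment example in this diff; the feature attributes, the explicit address-of casts on the linker symbols, and the trailing `loop {}` are assumptions added here, not part of the diff:

```rust
#![feature(collections, global_allocator)]
#![no_std]

extern crate alloc_cortex_m;
extern crate collections;

use collections::Vec;
use alloc_cortex_m::CortexMHeap;

// Registering the allocator replaces the old crate-level `#![allocator]` plumbing.
#[global_allocator]
static ALLOCATOR: CortexMHeap = CortexMHeap::empty();

// These symbols come from a linker script; their *addresses* delimit the heap.
extern "C" {
    static mut _heap_start: usize;
    static mut _heap_end: usize;
}

#[no_mangle]
pub fn main() -> ! {
    unsafe {
        // Initialize the heap BEFORE the first allocation.
        let start = &mut _heap_start as *mut usize as usize;
        let end = &mut _heap_end as *mut usize as usize;
        ALLOCATOR.init(start, end - start);
    }

    let mut xs = Vec::new();
    xs.push(1);

    loop {}
}
```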
Cargo.toml: 2 changes (1 addition, 1 deletion)
@@ -9,4 +9,4 @@ version = "0.2.2"

[dependencies]
cortex-m = "0.1.5"
linked_list_allocator = "0.2.3"
linked_list_allocator = {git = "https://github.com/phil-opp/linked-list-allocator.git"}
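The dependency moves from the crates.io release to the git repository, presumably because the `Layout`-based `allocate_first_fit` / `deallocate` signatures used in src/lib.rs below had not been published yet (an assumption; the commit does not state the reason). A sketch of the two calls the new code makes, with signatures inferred from their usage in this diff:

```rust
#![feature(alloc, allocator_api)]
#![no_std]

extern crate alloc;
extern crate linked_list_allocator;

use alloc::allocator::Layout;
use linked_list_allocator::Heap;

// Both calls now take a `Layout` instead of separate size/align arguments
// (compare the deleted `__rust_*` shims below).
unsafe fn round_trip(heap: &mut Heap, layout: Layout) {
    if let Ok(ptr) = heap.allocate_first_fit(layout.clone()) {
        heap.deallocate(ptr, layout);
    }
}
```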
src/lib.rs: 154 changes (54 additions, 100 deletions)
@@ -8,6 +8,10 @@
//! extern crate collections;
//!
//! use collections::Vec;
//! use alloc_cortex_m::CortexMHeap;
//!
//! #[global_allocator]
//! static ALLOCATOR: CortexMHeap = CortexMHeap::empty();
//!
//! // These symbols come from a linker script
//! extern "C" {
@@ -18,7 +22,7 @@
//! #[no_mangle]
//! pub fn main() -> ! {
//! // Initialize the heap BEFORE you use the allocator
//! unsafe { alloc_cortex_m::init(&mut _heap_start, &mut _heap_end) }
//! unsafe { ALLOCATOR.init(_heap_start, _heap_end - _heap_start) }
//!
//! let mut xs = Vec::new();
//! xs.push(1);
@@ -37,121 +41,71 @@
//! _heap_end = ORIGIN(SRAM) + LENGTH(SRAM) - _stack_size;
//! ```
#![allocator]
#![feature(allocator)]
#![feature(const_fn)]
#![no_std]
#![feature(alloc, allocator_api)]

extern crate cortex_m;
extern crate linked_list_allocator;
extern crate alloc;

use core::{cmp, ptr};
use alloc::allocator::{Alloc, Layout, AllocErr};

use linked_list_allocator::Heap;
use cortex_m::interrupt::Mutex;

/// A global UNINITIALIZED heap allocator
///
/// You must initialize this heap using the
/// [`init`](struct.Heap.html#method.init) method before using the allocator.
static HEAP: Mutex<Heap> = Mutex::new(Heap::empty());

/// Initializes the heap
///
/// This function must be called BEFORE you run any code that makes use of the
/// allocator.
///
/// `start_addr` is the address where the heap will be located.
///
/// `end_addr` points to the end of the heap.
///
/// Note that:
///
/// - The heap grows "upwards", towards larger addresses. Thus `end_addr` must
/// be larger than `start_addr`
///
/// - The size of the heap is `(end_addr as usize) - (start_addr as usize)`. The
/// allocator won't use the byte at `end_addr`.
///
/// # Unsafety
///
/// Obey these or Bad Stuff will happen.
///
/// - This function must be called exactly ONCE.
/// - `end_addr` > `start_addr`
pub unsafe fn init(start_addr: *mut usize, end_addr: *mut usize) {
let start = start_addr as usize;
let end = end_addr as usize;
let size = end - start;
HEAP.lock(|heap| heap.init(start, size));
pub struct CortexMHeap {
heap: Mutex<Heap>,
}

// Rust allocator interface

#[doc(hidden)]
#[no_mangle]
/// Rust allocation function (c.f. malloc)
pub extern "C" fn __rust_allocate(size: usize, align: usize) -> *mut u8 {
HEAP.lock(|heap| {
heap.allocate_first_fit(size, align).expect("out of memory")
})
}
impl CortexMHeap {

#[doc(hidden)]
#[no_mangle]
pub extern fn __rust_allocate_zeroed(size: usize, align: usize) -> *mut u8 {
let ptr = __rust_allocate(size, align);
if !ptr.is_null() {
unsafe {
ptr::write_bytes(ptr, 0, size);
/// Create a new UNINITIALIZED heap allocator
///
/// You must initialize this heap using the
/// [`init`](struct.CortexMHeap.html#method.init) method before using the allocator.
pub const fn empty() -> CortexMHeap {
CortexMHeap {
heap: Mutex::new(Heap::empty()),
}
}
ptr
}

/// Rust de-allocation function (c.f. free)
#[doc(hidden)]
#[no_mangle]
pub extern "C" fn __rust_deallocate(ptr: *mut u8, size: usize, align: usize) {
HEAP.lock(|heap| unsafe { heap.deallocate(ptr, size, align) });
}

/// Rust re-allocation function (c.f. realloc)
#[doc(hidden)]
#[no_mangle]
pub extern "C" fn __rust_reallocate(ptr: *mut u8,
size: usize,
new_size: usize,
align: usize)
-> *mut u8 {

// from: https://github.com/rust-lang/rust/blob/
// c66d2380a810c9a2b3dbb4f93a830b101ee49cc2/
// src/liballoc_system/lib.rs#L98-L101

let new_ptr = __rust_allocate(new_size, align);
unsafe { ptr::copy(ptr, new_ptr, cmp::min(size, new_size)) };
__rust_deallocate(ptr, size, align);
new_ptr
/// Initializes the heap
///
/// This function must be called BEFORE you run any code that makes use of the
/// allocator.
///
/// `start_addr` is the address where the heap will be located.
///
/// `size` is the size of the heap in bytes.
///
/// Note that:
///
/// - The heap grows "upwards", towards larger addresses. The managed region
/// starts at `start_addr` and is `size` bytes long.
///
/// - The allocator won't use any byte at or past `start_addr + size`.
///
/// # Unsafety
///
/// Obey these or Bad Stuff will happen.
///
/// - This function must be called exactly ONCE.
/// - `size > 0`
pub unsafe fn init(&self, start_addr: usize, size: usize) {
self.heap.lock(|heap| heap.init(start_addr, size));
}
}

/// Rust re-allocation function which guarantees not to move the data
/// somewhere else.
#[doc(hidden)]
#[no_mangle]
pub extern "C" fn __rust_reallocate_inplace(_ptr: *mut u8,
size: usize,
_new_size: usize,
_align: usize)
-> usize {
size
}
unsafe impl<'a> Alloc for &'a CortexMHeap {
unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
self.heap.lock(|heap| {
heap.allocate_first_fit(layout)
})
}

/// Some allocators (pool allocators generally) over-allocate. This checks how
/// much space there is at a location. Our allocator doesn't over allocate so
/// this just returns `size`
#[doc(hidden)]
#[no_mangle]
pub extern "C" fn __rust_usable_size(size: usize, _align: usize) -> usize {
size
}
unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
self.heap.lock(|heap| heap.deallocate(ptr, layout));
}
}
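Two points about the new shape: `Alloc` is implemented for `&'a CortexMHeap`, so the methods can be called through a shared reference to the `static` registered with `#[global_allocator]`, and every call goes through the `cortex_m::interrupt::Mutex`, which (in the cortex-m version used here) runs the closure in a critical section. A sketch that exercises the impl directly; the `Layout::from_size_align` constructor is an assumption about this nightly's `allocator_api`, and `heap_start` / `heap_size` stand in for values normally derived from linker symbols:

```rust
#![feature(alloc, allocator_api)]
#![no_std]

extern crate alloc;
extern crate alloc_cortex_m;

use alloc::allocator::{Alloc, Layout};
use alloc_cortex_m::CortexMHeap;

static ALLOCATOR: CortexMHeap = CortexMHeap::empty();

unsafe fn smoke_test(heap_start: usize, heap_size: usize) {
    ALLOCATOR.init(heap_start, heap_size);

    // `Alloc` is implemented for `&CortexMHeap`, so a shared reference to the
    // static is the receiver; each call locks the heap's Mutex internally.
    let mut allocator: &CortexMHeap = &ALLOCATOR;
    let layout = Layout::from_size_align(64, 8).unwrap();
    if let Ok(ptr) = allocator.alloc(layout.clone()) {
        allocator.dealloc(ptr, layout);
    }
}
```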
