
Commit

wip
Zoxc committed Nov 14, 2017
1 parent 24840da commit 5ca1224
Showing 83 changed files with 1,226 additions and 842 deletions.
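
The libarena changes below all follow one pattern: each arena's mutable state (the `ptr`/`end` cells and the chunk list) moves into a private `*Inner` struct, the public type keeps only a `lock` field, and the `unsafe impl Send` moves to the inner type. Presumably (the commit message is only "wip") the goal is to let a shared arena be used from several threads by serializing all access behind the lock. A minimal, self-contained sketch of why that shape works, using `std::sync::Mutex` as a stand-in for the `Lock` type the diff imports (all names here are illustrative, not the arena code itself):

```rust
use std::cell::Cell;
use std::sync::Mutex;

// Inner state built from `Cell` and raw pointers is neither `Send` nor
// `Sync` by default, so a type exposing it directly cannot be shared
// between threads.
struct Inner {
    ptr: Cell<*mut u8>,
}

// As in the diff: assert that the inner state may move to another thread
// once all access to it is serialized.
unsafe impl Send for Inner {}

// The public type holds only the lock. `Mutex<T>` is `Sync` when `T: Send`,
// so `&Wrapper` can now be handed out across threads.
struct Wrapper {
    lock: Mutex<Inner>,
}

fn assert_sync<T: Sync>() {}

fn main() {
    assert_sync::<Wrapper>();
    // assert_sync::<Inner>(); // would not compile: `Cell<*mut u8>` is not `Sync`
    let _ = Wrapper {
        lock: Mutex::new(Inner { ptr: Cell::new(std::ptr::null_mut()) }),
    };
}
```
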
203 changes: 119 additions & 84 deletions src/Cargo.lock

Large diffs are not rendered by default.

3 changes: 3 additions & 0 deletions src/libarena/Cargo.toml
@@ -7,3 +7,6 @@ version = "0.0.0"
name = "arena"
path = "lib.rs"
crate-type = ["dylib"]

[dependencies]
rustc_data_structures = { path = "../librustc_data_structures" }
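
The new dependency is used in this commit only for `rustc_data_structures::lock::Lock`. The lib.rs diff below relies on three operations: `Lock::new`, `.lock()` returning a guard that dereferences to the inner value, and `.get_mut()` for exclusive access from `&mut self`. An illustrative stand-in with that surface, written over `std::sync::Mutex` (this is not the rustc_data_structures implementation, whose behaviour may differ):

```rust
use std::sync::{Mutex, MutexGuard};

/// Illustrative stand-in for `rustc_data_structures::lock::Lock`, exposing
/// only the surface the arena diff uses.
pub struct Lock<T>(Mutex<T>);

impl<T> Lock<T> {
    pub fn new(inner: T) -> Lock<T> {
        Lock(Mutex::new(inner))
    }

    /// Acquire the lock; the guard derefs to `T`, so callers can write
    /// `let this = self.lock.lock();` and then use `this.ptr`, `this.grow(n)`, etc.
    pub fn lock(&self) -> MutexGuard<T> {
        self.0.lock().unwrap()
    }

    /// Exclusive access without locking, as the diff uses from `Drop::drop`.
    pub fn get_mut(&mut self) -> &mut T {
        self.0.get_mut().unwrap()
    }
}
```
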
258 changes: 147 additions & 111 deletions src/libarena/lib.rs
@@ -33,6 +33,9 @@
#![allow(deprecated)]

extern crate alloc;
extern crate rustc_data_structures;

use rustc_data_structures::lock::Lock;

use std::cell::{Cell, RefCell};
use std::cmp;
@@ -46,6 +49,10 @@ use alloc::raw_vec::RawVec;

/// An arena that can hold objects of only one type.
pub struct TypedArena<T> {
lock: Lock<TypedArenaInner<T>>,
}

struct TypedArenaInner<T> {
/// A pointer to the next object to be allocated.
ptr: Cell<*mut T>,

@@ -109,38 +116,102 @@ impl<T> TypedArenaChunk<T> {

const PAGE: usize = 4096;

impl<T> TypedArenaInner<T> {
/// Grows the arena.
#[inline(never)]
#[cold]
fn grow(&self, n: usize) {
unsafe {
let mut chunks = self.chunks.borrow_mut();
let (chunk, mut new_capacity);
if let Some(last_chunk) = chunks.last_mut() {
let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
let currently_used_cap = used_bytes / mem::size_of::<T>();
if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
self.end.set(last_chunk.end());
return;
} else {
new_capacity = last_chunk.storage.cap();
loop {
new_capacity = new_capacity.checked_mul(2).unwrap();
if new_capacity >= currently_used_cap + n {
break;
}
}
}
} else {
let elem_size = cmp::max(1, mem::size_of::<T>());
new_capacity = cmp::max(n, PAGE / elem_size);
}
chunk = TypedArenaChunk::<T>::new(new_capacity);
self.ptr.set(chunk.start());
self.end.set(chunk.end());
chunks.push(chunk);
}
}

// Drops the contents of the last chunk. The last chunk is partially empty, unlike all other
// chunks.
fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
// Determine how much was filled.
let start = last_chunk.start() as usize;
// We obtain the value of the pointer to the first uninitialized element.
let end = self.ptr.get() as usize;
// We then calculate the number of elements to be dropped in the last chunk,
// which is the filled area's length.
let diff = if mem::size_of::<T>() == 0 {
// `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
// the number of zero-sized values in the last and only chunk, just out of caution.
// Recall that `end` was incremented for each allocated value.
end - start
} else {
(end - start) / mem::size_of::<T>()
};
// Pass that to the `destroy` method.
unsafe {
last_chunk.destroy(diff);
}
// Reset the chunk.
self.ptr.set(last_chunk.start());
}
}

impl<T> TypedArena<T> {
/// Creates a new `TypedArena`.
#[inline]
pub fn new() -> TypedArena<T> {
TypedArena {
// We set both `ptr` and `end` to 0 so that the first call to
// alloc() will trigger a grow().
ptr: Cell::new(0 as *mut T),
end: Cell::new(0 as *mut T),
chunks: RefCell::new(vec![]),
_own: PhantomData,
lock: Lock::new(TypedArenaInner {
// We set both `ptr` and `end` to 0 so that the first call to
// alloc() will trigger a grow().
ptr: Cell::new(0 as *mut T),
end: Cell::new(0 as *mut T),
chunks: RefCell::new(vec![]),
_own: PhantomData,
})
}
}

/// Allocates an object in the `TypedArena`, returning a reference to it.
#[inline]
pub fn alloc(&self, object: T) -> &mut T {
if self.ptr == self.end {
self.grow(1)
let this = self.lock.lock();

if this.ptr == this.end {
this.grow(1)
}

unsafe {
if mem::size_of::<T>() == 0 {
self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
this.ptr.set(intrinsics::arith_offset(this.ptr.get() as *mut u8, 1) as *mut T);
let ptr = mem::align_of::<T>() as *mut T;
// Don't drop the object. This `write` is equivalent to `forget`.
ptr::write(ptr, object);
&mut *ptr
} else {
let ptr = self.ptr.get();
let ptr = this.ptr.get();
// Advance the pointer.
self.ptr.set(self.ptr.get().offset(1));
this.ptr.set(this.ptr.get().offset(1));
// Write into uninitialized memory.
ptr::write(ptr, object);
&mut *ptr
@@ -160,61 +231,32 @@ impl<T> TypedArena<T> {
assert!(mem::size_of::<T>() != 0);
assert!(slice.len() != 0);

let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
let this = self.lock.lock();

let available_capacity_bytes = this.end.get() as usize - this.ptr.get() as usize;
let at_least_bytes = slice.len() * mem::size_of::<T>();
if available_capacity_bytes < at_least_bytes {
self.grow(slice.len());
this.grow(slice.len());
}

unsafe {
let start_ptr = self.ptr.get();
let start_ptr = this.ptr.get();
let arena_slice = slice::from_raw_parts_mut(start_ptr, slice.len());
self.ptr.set(start_ptr.offset(arena_slice.len() as isize));
this.ptr.set(start_ptr.offset(arena_slice.len() as isize));
arena_slice.copy_from_slice(slice);
arena_slice
}
}

/// Grows the arena.
#[inline(never)]
#[cold]
fn grow(&self, n: usize) {
unsafe {
let mut chunks = self.chunks.borrow_mut();
let (chunk, mut new_capacity);
if let Some(last_chunk) = chunks.last_mut() {
let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
let currently_used_cap = used_bytes / mem::size_of::<T>();
if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
self.end.set(last_chunk.end());
return;
} else {
new_capacity = last_chunk.storage.cap();
loop {
new_capacity = new_capacity.checked_mul(2).unwrap();
if new_capacity >= currently_used_cap + n {
break;
}
}
}
} else {
let elem_size = cmp::max(1, mem::size_of::<T>());
new_capacity = cmp::max(n, PAGE / elem_size);
}
chunk = TypedArenaChunk::<T>::new(new_capacity);
self.ptr.set(chunk.start());
self.end.set(chunk.end());
chunks.push(chunk);
}
}

/// Clears the arena. Deallocates all but the longest chunk which may be reused.
pub fn clear(&mut self) {
let this = self.lock.lock();

unsafe {
// Clear the last chunk, which is partially filled.
let mut chunks_borrow = self.chunks.borrow_mut();
let mut chunks_borrow = this.chunks.borrow_mut();
if let Some(mut last_chunk) = chunks_borrow.pop() {
self.clear_last_chunk(&mut last_chunk);
this.clear_last_chunk(&mut last_chunk);
// If `T` is ZST, code below has no effect.
for mut chunk in chunks_borrow.drain(..) {
let cap = chunk.storage.cap();
@@ -224,41 +266,18 @@ impl<T> TypedArena<T> {
}
}
}

// Drops the contents of the last chunk. The last chunk is partially empty, unlike all other
// chunks.
fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
// Determine how much was filled.
let start = last_chunk.start() as usize;
// We obtain the value of the pointer to the first uninitialized element.
let end = self.ptr.get() as usize;
// We then calculate the number of elements to be dropped in the last chunk,
// which is the filled area's length.
let diff = if mem::size_of::<T>() == 0 {
// `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
// the number of zero-sized values in the last and only chunk, just out of caution.
// Recall that `end` was incremented for each allocated value.
end - start
} else {
(end - start) / mem::size_of::<T>()
};
// Pass that to the `destroy` method.
unsafe {
last_chunk.destroy(diff);
}
// Reset the chunk.
self.ptr.set(last_chunk.start());
}
}

unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
fn drop(&mut self) {
let this = self.lock.get_mut();

unsafe {
// Determine how much was filled.
let mut chunks_borrow = self.chunks.borrow_mut();
let mut chunks_borrow = this.chunks.borrow_mut();
if let Some(mut last_chunk) = chunks_borrow.pop() {
// Drop the contents of the last chunk.
self.clear_last_chunk(&mut last_chunk);
this.clear_last_chunk(&mut last_chunk);
// The last chunk will be dropped. Destroy all other chunks.
for chunk in chunks_borrow.iter_mut() {
let cap = chunk.storage.cap();
@@ -270,9 +289,13 @@ unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
}
}

unsafe impl<T: Send> Send for TypedArena<T> {}
unsafe impl<T: Send> Send for TypedArenaInner<T> {}

pub struct DroplessArena {
lock: Lock<DroplessArenaInner>,
}

struct DroplessArenaInner {
/// A pointer to the next object to be allocated.
ptr: Cell<*mut u8>,

Expand All @@ -284,26 +307,9 @@ pub struct DroplessArena {
chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
}

impl DroplessArena {
pub fn new() -> DroplessArena {
DroplessArena {
ptr: Cell::new(0 as *mut u8),
end: Cell::new(0 as *mut u8),
chunks: RefCell::new(vec![]),
}
}

pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
let ptr = ptr as *const u8 as *mut u8;
for chunk in &*self.chunks.borrow() {
if chunk.start() <= ptr && ptr < chunk.end() {
return true;
}
}

false
}
unsafe impl Send for DroplessArenaInner {}

impl DroplessArenaInner {
fn align_for<T>(&self) {
let align = mem::align_of::<T>();
let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
@@ -341,23 +347,50 @@ impl DroplessArena {
chunks.push(chunk);
}
}
}

impl DroplessArena {
pub fn new() -> DroplessArena {
DroplessArena {
lock: Lock::new(DroplessArenaInner {
ptr: Cell::new(0 as *mut u8),
end: Cell::new(0 as *mut u8),
chunks: RefCell::new(vec![]),
})
}
}

pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
let this = self.lock.lock();

let ptr = ptr as *const u8 as *mut u8;
for chunk in &*this.chunks.borrow() {
if chunk.start() <= ptr && ptr < chunk.end() {
return true;
}
}

false
}

#[inline]
pub fn alloc<T>(&self, object: T) -> &mut T {
unsafe {
assert!(!mem::needs_drop::<T>());
assert!(mem::size_of::<T>() != 0);

self.align_for::<T>();
let future_end = intrinsics::arith_offset(self.ptr.get(), mem::size_of::<T>() as isize);
if (future_end as *mut u8) >= self.end.get() {
self.grow::<T>(1)
let this = self.lock.lock();

this.align_for::<T>();
let future_end = intrinsics::arith_offset(this.ptr.get(), mem::size_of::<T>() as isize);
if (future_end as *mut u8) >= this.end.get() {
this.grow::<T>(1)
}

let ptr = self.ptr.get();
let ptr = this.ptr.get();
// Set the pointer past ourselves
self.ptr.set(intrinsics::arith_offset(
self.ptr.get(), mem::size_of::<T>() as isize
this.ptr.set(intrinsics::arith_offset(
this.ptr.get(), mem::size_of::<T>() as isize
) as *mut u8);
// Write into uninitialized memory.
ptr::write(ptr as *mut T, object);
@@ -377,19 +410,22 @@ impl DroplessArena {
assert!(!mem::needs_drop::<T>());
assert!(mem::size_of::<T>() != 0);
assert!(slice.len() != 0);
self.align_for::<T>();

let this = self.lock.lock();

this.align_for::<T>();

let future_end = unsafe {
intrinsics::arith_offset(self.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize)
intrinsics::arith_offset(this.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize)
};
if (future_end as *mut u8) >= self.end.get() {
self.grow::<T>(slice.len());
if (future_end as *mut u8) >= this.end.get() {
this.grow::<T>(slice.len());
}

unsafe {
let arena_slice = slice::from_raw_parts_mut(self.ptr.get() as *mut T, slice.len());
self.ptr.set(intrinsics::arith_offset(
self.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize
let arena_slice = slice::from_raw_parts_mut(this.ptr.get() as *mut T, slice.len());
this.ptr.set(intrinsics::arith_offset(
this.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize
) as *mut u8);
arena_slice.copy_from_slice(slice);
arena_slice
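The public allocation API is unchanged by the locking: callers still get `&mut` references out of a shared arena. A minimal usage sketch, assuming the in-tree `arena` crate as modified above (nightly rustc, since the crate uses unstable features):

```rust
extern crate arena;

use arena::{DroplessArena, TypedArena};

fn main() {
    // TypedArena: a single element type; contents are dropped with the arena.
    let typed: TypedArena<Vec<u32>> = TypedArena::new();
    let v: &mut Vec<u32> = typed.alloc(vec![1, 2, 3]);
    v.push(4);

    // DroplessArena: any type with no Drop impl and a nonzero size.
    let dropless = DroplessArena::new();
    let n: &mut u64 = dropless.alloc(42u64);
    *n += 1;
    assert!(dropless.in_arena(n as *const u64));
}
```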