Auto merge of #108693 - Zoxc:arena-opt-funcs, r=cjgillot
Optimize DroplessArena arena allocation

This optimizes `DroplessArena` allocation by always keeping its `end` pointer aligned to `usize`, and by adding `grow_and_alloc` and `grow_and_alloc_raw` functions that grow the arena and allocate in a single cold call, reducing code size.

| Benchmark | Before (time) | After (time) | Change |
|---|---:|---:|---:|
| 🟣 **clap**:check | 1.6968s | 1.6887s | -0.48% |
| 🟣 **hyper**:check | 0.2552s | 0.2551s | -0.03% |
| 🟣 **regex**:check | 0.9613s | 0.9553s | -0.62% |
| 🟣 **syn**:check | 1.5402s | 1.5374s | -0.18% |
| 🟣 **syntex_syntax**:check | 5.9175s | 5.8813s | -0.61% |
| Total | 10.3710s | 10.3178s | -0.51% |
| Summary | 1.0000s | 0.9962s | -0.38% |
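
The scheme is easiest to see in isolation. Below is a minimal, self-contained sketch of a bump-downwards allocator that keeps its `end` offset aligned to `usize`, in the spirit of this change; `MiniArena` and everything in it are invented for illustration (a fixed buffer instead of growable chunks, offsets instead of raw pointers), not the rustc implementation:

```rust
use std::cell::Cell;

const ALIGNMENT: usize = std::mem::align_of::<usize>();

fn align_down(val: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    val & !(align - 1)
}

fn align_up(val: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    (val + align - 1) & !(align - 1)
}

/// A fixed-size bump-down allocator whose `end` offset is always a
/// multiple of ALIGNMENT, mirroring the invariant the commit adds.
struct MiniArena {
    buf: Box<[u8]>,
    end: Cell<usize>, // offset into `buf`, bump-allocated downwards
}

impl MiniArena {
    fn new(size: usize) -> Self {
        let buf = vec![0u8; size].into_boxed_slice();
        // Round the initial end down so the invariant holds from the start.
        let end = align_down(buf.len(), ALIGNMENT);
        MiniArena { buf, end: Cell::new(end) }
    }

    /// Returns the offset of `size` bytes with alignment `align`,
    /// or `None` if the buffer is exhausted.
    fn alloc_bytes(&self, size: usize, align: usize) -> Option<usize> {
        // Rounding the size up keeps `end` a multiple of ALIGNMENT,
        // so the `align_down` below is a no-op for align <= ALIGNMENT.
        let bytes = align_up(size, ALIGNMENT);
        let new_end = align_down(self.end.get().checked_sub(bytes)?, align);
        self.end.set(new_end);
        Some(new_end)
    }
}

fn main() {
    let arena = MiniArena::new(4096);
    let a = arena.alloc_bytes(3, 1).unwrap();
    let b = arena.alloc_bytes(8, 8).unwrap();
    assert_eq!(a % ALIGNMENT, 0);
    assert_eq!(b % 8, 0);
    assert!(b < a, "bump-down: later allocations sit lower");
    println!("arena of {} bytes: a at offset {a}, b at offset {b}", arena.buf.len());
}
```

Because `end` starts aligned and every allocation subtracts a multiple of the alignment, the final `align_down` in `alloc_bytes` is a no-op whenever `align` is at most `ALIGNMENT`, which is exactly the arithmetic the optimizer can now delete.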
bors committed Aug 16, 2023
Commit 07438b0, merging parents 27fb598 and 6f86591.
1 changed file: compiler/rustc_arena/src/lib.rs (70 additions, 17 deletions)
```diff
@@ -11,6 +11,7 @@
     html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
     test(no_crate_inject, attr(deny(warnings)))
 )]
+#![feature(core_intrinsics)]
 #![feature(dropck_eyepatch)]
 #![feature(new_uninit)]
 #![feature(maybe_uninit_slice)]
```
```diff
@@ -30,11 +31,11 @@ use smallvec::SmallVec;
 
 use std::alloc::Layout;
 use std::cell::{Cell, RefCell};
-use std::cmp;
 use std::marker::PhantomData;
 use std::mem::{self, MaybeUninit};
 use std::ptr::{self, NonNull};
 use std::slice;
+use std::{cmp, intrinsics};
 
 #[inline(never)]
 #[cold]
```
```diff
@@ -363,6 +364,22 @@ unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
 
 unsafe impl<T: Send> Send for TypedArena<T> {}
 
+#[inline(always)]
+fn align_down(val: usize, align: usize) -> usize {
+    debug_assert!(align.is_power_of_two());
+    val & !(align - 1)
+}
+
+#[inline(always)]
+fn align_up(val: usize, align: usize) -> usize {
+    debug_assert!(align.is_power_of_two());
+    (val + align - 1) & !(align - 1)
+}
+
+// Pointer alignment is common in compiler types, so keep `DroplessArena`
+// pointer-aligned to optimize away alignment code.
+const DROPLESS_ALIGNMENT: usize = mem::align_of::<usize>();
+
 /// An arena that can hold objects of multiple different types that impl `Copy`
 /// and/or satisfy `!mem::needs_drop`.
 pub struct DroplessArena {
```
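
Both helpers are the standard power-of-two masking tricks, with a `debug_assert!` guarding the precondition. A quick standalone check of what they compute (illustrative, not part of the diff):

```rust
fn align_down(val: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    val & !(align - 1)
}

fn align_up(val: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    (val + align - 1) & !(align - 1)
}

fn main() {
    assert_eq!(align_down(13, 8), 8);  // clear low bits: down to a multiple
    assert_eq!(align_up(13, 8), 16);   // bump up to the next multiple
    assert_eq!(align_up(16, 8), 16);   // already-aligned values are unchanged
    // Aligning down to a smaller power of two never disturbs a larger
    // alignment: a 16-aligned value stays 16-aligned.
    assert_eq!(align_down(48, 8) % 16, 0);
    println!("alignment identities hold");
}
```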
```diff
@@ -375,6 +392,8 @@ pub struct DroplessArena {
     /// start. (This is slightly simpler and faster than allocating upwards,
     /// see <https://fitzgeraldnick.com/2019/11/01/always-bump-downwards.html>.)
     /// When this pointer crosses the start pointer, a new chunk is allocated.
+    ///
+    /// This is kept aligned to DROPLESS_ALIGNMENT.
     end: Cell<*mut u8>,
 
     /// A vector of arena chunks.
```
```diff
@@ -395,9 +414,11 @@ impl Default for DroplessArena {
 }
 
 impl DroplessArena {
     #[inline(never)]
     #[cold]
-    fn grow(&self, additional: usize) {
+    fn grow(&self, layout: Layout) {
+        // Add some padding so we can align `self.end` while
+        // still fitting in a `layout` allocation.
+        let additional = layout.size() + cmp::max(DROPLESS_ALIGNMENT, layout.align()) - 1;
+
         unsafe {
             let mut chunks = self.chunks.borrow_mut();
             let mut new_cap;
```
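
The padding term deserves a second look: rounding the requested size up to DROPLESS_ALIGNMENT can waste up to `DROPLESS_ALIGNMENT - 1` bytes, and aligning the allocation start down to `layout.align()` can waste the rest, so `max(DROPLESS_ALIGNMENT, layout.align()) - 1` extra bytes always suffice, provided the chunk's end address is itself `usize`-aligned (which allocations from the global allocator give in practice; `grow` also defensively aligns `end` down). A brute-force check of that bound on small inputs, written for this writeup rather than taken from the commit:

```rust
const DROPLESS_ALIGNMENT: usize = std::mem::align_of::<usize>();

fn align_down(val: usize, align: usize) -> usize {
    val & !(align - 1)
}

fn align_up(val: usize, align: usize) -> usize {
    (val + align - 1) & !(align - 1)
}

fn main() {
    for size in 1..=64usize {
        for align in [1usize, 2, 4, 8, 16, 32] {
            // The padding computed by `grow`.
            let additional = size + DROPLESS_ALIGNMENT.max(align) - 1;
            // Only usize-aligned chunk ends, per the assumption above.
            for chunk_end in (1024..2048usize).step_by(DROPLESS_ALIGNMENT) {
                let start = chunk_end - additional;
                // `grow`: align the fresh chunk's end down.
                let end = align_down(chunk_end, DROPLESS_ALIGNMENT);
                // `alloc_raw_without_grow`: round the size up, bump down,
                // then align the result to the requested alignment.
                let bytes = align_up(size, DROPLESS_ALIGNMENT);
                let new_end = align_down(end - bytes, align);
                assert!(start <= new_end, "size={size} align={align} misfits");
            }
        }
    }
    println!("padding bound holds for all tested inputs");
}
```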
```diff
@@ -416,13 +437,35 @@
             // Also ensure that this chunk can fit `additional`.
             new_cap = cmp::max(additional, new_cap);
 
-            let mut chunk = ArenaChunk::new(new_cap);
+            let mut chunk = ArenaChunk::new(align_up(new_cap, PAGE));
             self.start.set(chunk.start());
-            self.end.set(chunk.end());
+
+            // Align the end to DROPLESS_ALIGNMENT.
+            let end = align_down(chunk.end().addr(), DROPLESS_ALIGNMENT);
+
+            // Make sure we don't go past `start`. This should not happen since the allocation
+            // should be at least DROPLESS_ALIGNMENT - 1 bytes.
+            debug_assert!(chunk.start().addr() <= end);
+
+            self.end.set(chunk.end().with_addr(end));
+
             chunks.push(chunk);
         }
     }
 
+    #[inline(never)]
+    #[cold]
+    fn grow_and_alloc_raw(&self, layout: Layout) -> *mut u8 {
+        self.grow(layout);
+        self.alloc_raw_without_grow(layout).unwrap()
+    }
+
+    #[inline(never)]
+    #[cold]
+    fn grow_and_alloc<T>(&self) -> *mut u8 {
+        self.grow_and_alloc_raw(Layout::new::<T>())
+    }
+
     /// Allocates a byte slice with specified layout from the current memory
     /// chunk. Returns `None` if there is no free space left to satisfy the
     /// request.
```
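
`grow_and_alloc_raw` exists so that the retry after growing lives behind one `#[inline(never)] #[cold]` call: every inlined copy of the fast path shrinks, and the attribute steers LLVM's branch layout. The same outlining pattern applies anywhere a rare slow path would otherwise bloat an inlined hot path; a generic sketch (an invented example, not from the commit):

```rust
/// Hot path: small enough to inline everywhere it is called.
#[inline]
fn get_or_init(slot: &mut Option<String>) -> &str {
    if slot.is_none() {
        init_slow(slot);
    }
    slot.as_deref().unwrap()
}

/// Cold path: compiled once, out of line, and hinted as unlikely.
#[inline(never)]
#[cold]
fn init_slow(slot: &mut Option<String>) {
    // Stand-in for expensive work such as growing an arena chunk.
    *slot = Some(String::from("initialized"));
}

fn main() {
    let mut slot = None;
    assert_eq!(get_or_init(&mut slot), "initialized");
    assert_eq!(get_or_init(&mut slot), "initialized");
}
```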
```diff
@@ -432,12 +475,17 @@ impl DroplessArena {
         let old_end = self.end.get();
         let end = old_end.addr();
 
-        let align = layout.align();
-        let bytes = layout.size();
+        // Align allocated bytes so that `self.end` stays aligned to DROPLESS_ALIGNMENT.
+        let bytes = align_up(layout.size(), DROPLESS_ALIGNMENT);
+
+        // Tell LLVM that `end` is aligned to DROPLESS_ALIGNMENT.
+        unsafe { intrinsics::assume(end == align_down(end, DROPLESS_ALIGNMENT)) };
 
-        let new_end = end.checked_sub(bytes)? & !(align - 1);
+        let new_end = align_down(end.checked_sub(bytes)?, layout.align());
         if start <= new_end {
             let new_end = old_end.with_addr(new_end);
+            // `new_end` is aligned to DROPLESS_ALIGNMENT since `align_down` preserves
+            // alignment and both `end` and `bytes` are already aligned to it.
             self.end.set(new_end);
             Some(new_end)
         } else {
```
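
The `intrinsics::assume` call (the reason for the new `#![feature(core_intrinsics)]`) feeds the invariant to LLVM: `end` is a multiple of DROPLESS_ALIGNMENT, the rounded-up `bytes` is too, so their difference is as well, and the final `align_down` becomes a provable no-op whenever `layout.align() <= DROPLESS_ALIGNMENT`. Outside nightly rustc, toolchains that postdate this commit expose a similar hint as `std::hint::assert_unchecked`; a sketch of the same reasoning with that API (my phrasing, not the commit's):

```rust
use std::hint;

const ALIGNMENT: usize = std::mem::align_of::<usize>();

fn align_down(val: usize, align: usize) -> usize {
    val & !(align - 1)
}

/// With the hint in place, LLVM can fold `align_down(end - bytes, align)`
/// to just `end - bytes` for any `align <= ALIGNMENT`.
fn bump(end: usize, bytes: usize, align: usize) -> usize {
    // SAFETY: callers uphold the arena's invariant that `end` and `bytes`
    // are multiples of ALIGNMENT; a violated assumption is UB.
    unsafe { hint::assert_unchecked(end % ALIGNMENT == 0 && bytes % ALIGNMENT == 0) };
    align_down(end - bytes, align)
}

fn main() {
    assert_eq!(bump(4096, 16, 8), 4080);
}
```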
```diff
@@ -448,21 +496,26 @@ impl DroplessArena {
     #[inline]
     pub fn alloc_raw(&self, layout: Layout) -> *mut u8 {
         assert!(layout.size() != 0);
-        loop {
-            if let Some(a) = self.alloc_raw_without_grow(layout) {
-                break a;
-            }
-            // No free space left. Allocate a new chunk to satisfy the request.
-            // On failure the grow will panic or abort.
-            self.grow(layout.size());
+        if let Some(a) = self.alloc_raw_without_grow(layout) {
+            return a;
         }
+        // No free space left. Allocate a new chunk to satisfy the request.
+        // On failure the grow will panic or abort.
+        self.grow_and_alloc_raw(layout)
     }
 
     #[inline]
     pub fn alloc<T>(&self, object: T) -> &mut T {
         assert!(!mem::needs_drop::<T>());
         assert!(mem::size_of::<T>() != 0);
 
-        let mem = self.alloc_raw(Layout::for_value::<T>(&object)) as *mut T;
+        let mem = if let Some(a) = self.alloc_raw_without_grow(Layout::for_value::<T>(&object)) {
+            a
+        } else {
+            // No free space left. Allocate a new chunk to satisfy the request.
+            // On failure the grow will panic or abort.
+            self.grow_and_alloc::<T>()
+        } as *mut T;
 
         unsafe {
             // Write into uninitialized memory.
```
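
For completeness, the call shape this all serves (usage as I understand it; `DroplessArena` is internal to the compiler via the `rustc_arena` crate, not a public crates.io API):

```rust
// Inside rustc, with `rustc_arena` available:
fn demo(arena: &rustc_arena::DroplessArena) {
    // `alloc` is monomorphized per type, so `Layout::for_value` is a
    // compile-time constant and the in-chunk fast path reduces to a
    // bounds check plus a pointer bump.
    let x: &mut u64 = arena.alloc(41u64);
    *x += 1;
    assert_eq!(*x, 42);
}
```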
