From 700c66143fa3c57e747a47c011c7a233c006229b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Carlos=20L=C3=B3pez?= Date: Wed, 27 Dec 2023 12:22:14 +0100 Subject: [PATCH] alloc: initial TryBox implementation MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Introduce the alloc submodule and the TryBox type, a fork of the upstream alloc crate and Box type respectively. The new type behaves exactly like Box, except it only supports fallible allocations, meaning that users must always handle potential errors. Users must also explicitly set the allocator to use. Using a specific allocator by default like the standard library does is complicated. The standard library uses whichever allocator is set as global (via the #[global_allocator] macro), which can be accessed via a global instance (alloc::alloc::Global). However, the global instance is gated behind an unstable feature, and the macro cannot be reimplemented because it is a compiler intrinsic. As a workaround, one may add new higher level types in the future which use a specific allocator by default. The new TryBox does not support unsized types (e.g. [T]), since that requires unstable features. On the other hand, it has a few extra methods for convenience like try_default_in(), try_clone() and try_clone_in(). Signed-off-by: Carlos López --- src/alloc/boxed.rs | 715 ++++++++++++++++++++++++++++++++++++++++++++ src/alloc/mod.rs | 403 +++++++++++++++++++++++++ src/alloc/unique.rs | 195 ++++++++++++ src/lib.rs | 1 + 4 files changed, 1314 insertions(+) create mode 100644 src/alloc/boxed.rs create mode 100644 src/alloc/mod.rs create mode 100644 src/alloc/unique.rs diff --git a/src/alloc/boxed.rs b/src/alloc/boxed.rs new file mode 100644 index 0000000000..2f85bf274a --- /dev/null +++ b/src/alloc/boxed.rs @@ -0,0 +1,715 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 +// +// Copyright (C) 2023 SUSE +// +// Authors: Carlos López + +//! The `TryBox` type for heap allocation. +//! +//! 
[`TryBox`], casually referred to as a 'box', provides the simplest form of +//! heap allocation in Rust. Boxes provide ownership for this allocation, and +//! drop their contents when they go out of scope. Boxes also ensure that they +//! never allocate more than `isize::MAX` bytes. +//! +//! This is a downstream version of `Box` with a stabilized allocator API, +//! supporting fallible allocations exclusively. + +use core::alloc::Layout; +use core::borrow::{Borrow, BorrowMut}; +use core::cmp::Ordering; +use core::fmt; +use core::mem::{size_of, ManuallyDrop, MaybeUninit}; +use core::ops::{Deref, DerefMut}; +use core::pin::Pin; +use core::ptr::{self, NonNull}; + +use super::unique::Unique; +use super::{SvsmAlloc as Allocator, TryAllocError}; + +/// A pointer type that uniquely owns a heap allocation of type `T`, generic +/// over any given allocator, and supporting failable allocations. +/// +/// This is a downstream version of `Box` with a stabilized allocator API, +/// supporting fallible allocations exclusively. +pub struct TryBox(Unique, A); + +impl TryBox { + /// Allocates memory in the given allocator then places `x` into it, + /// returning an error if the allocation fails + /// + /// This doesn't actually allocate if `T` is zero-sized. + /// + /// # Examples + /// + /// ``` + /// # use svsm::alloc::{boxed::TryBox, TryAllocError}; + /// use std::alloc::System; + /// + /// let five = TryBox::try_new_in(5, System)?; + /// # Ok::<(), TryAllocError>(()) + /// ``` + #[inline] + pub fn try_new_in(x: T, alloc: A) -> Result { + let mut boxed = Self::try_new_uninit_in(alloc)?; + unsafe { + boxed.as_mut_ptr().write(x); + Ok(boxed.assume_init()) + } + } + + /// Allocates memory in the given allocator then places `x` into it, + /// returning an error if the allocation fails + /// + /// This doesn't actually allocate if `T` is zero-sized. 
+ /// + /// # Examples + /// + /// ``` + /// # use svsm::alloc::{boxed::TryBox, TryAllocError}; + /// use std::alloc::System; + /// + /// let five = TryBox::try_new_in(5, System)?; + /// # Ok::<(), TryAllocError>(()) + /// ``` + pub fn try_new_uninit_in(alloc: A) -> Result, A>, TryAllocError> { + let ptr = if size_of::() == 0 { + NonNull::dangling() + } else { + let layout = Layout::new::>(); + alloc.allocate(layout)?.cast() + }; + unsafe { Ok(TryBox::from_raw_in(ptr.as_ptr(), alloc)) } + } + + /// Constructs a new `TryBox` with uninitialized contents, with the memory + /// being filled with `0` bytes in the provided allocator. + /// + /// See [`MaybeUninit::zeroed`] for examples of correct and incorrect usage + /// of this method. + /// + /// # Examples + /// + /// ``` + /// # use svsm::alloc::{boxed::TryBox, TryAllocError}; + /// use std::alloc::System; + /// + /// let zero = TryBox::::try_new_zeroed_in(System)?; + /// let zero = unsafe { zero.assume_init() }; + /// + /// assert_eq!(*zero, 0); + /// # Ok::<(), TryAllocError>(()) + /// ``` + pub fn try_new_zeroed_in(alloc: A) -> Result, A>, TryAllocError> { + let ptr = if size_of::() == 0 { + NonNull::dangling() + } else { + let layout = Layout::new::>(); + alloc.allocate_zeroed(layout)?.cast() + }; + unsafe { Ok(TryBox::from_raw_in(ptr.as_ptr(), alloc)) } + } + + /// Consumes the `TryBox`, returning the wrapped value. + /// + /// # Examples + /// + /// ``` + /// # use svsm::alloc::{boxed::TryBox, TryAllocError}; + /// + /// use std::alloc::{Layout, System}; + /// + /// let c = TryBox::try_new_in(5, System)?; + /// + /// assert_eq!(TryBox::into_inner(c), 5); + /// # Ok::<(), TryAllocError>(()) + /// ``` + pub fn into_inner(self) -> T { + unsafe { self.0.as_ptr().read() } + } + + /// Constructs a new `Pin>`. If `T` does not implement + /// [`Unpin`], then `x` will be pinned in memory and unable to be + /// moved. 
+ /// + /// Constructing and pinning of the `TryBox` can also be done in two + /// steps: `TryBox::try_pin_in(x, alloc)` does the same as + /// [TryBox::into_pin]\([TryBox::try_new_in]\(x, alloc)?). + /// Consider using [`into_pin`](TryBox::into_pin) if you already have a + /// `TryBox`, or if you want to construct a (pinned) `TryBox` in + /// a different way than with [`TryBox::try_new_in`]. + pub fn try_pin_in(x: T, alloc: A) -> Result, TryAllocError> + where + A: 'static + Allocator, + { + let boxed = Self::try_new_in(x, alloc)?; + Ok(Self::into_pin(boxed)) + } + + /// Constructs a box from a raw pointer in the given allocator. + /// + /// After calling this function, the raw pointer is owned by the + /// resulting `TryBox`. Specifically, the `TryBox` destructor will call + /// the destructor of `T` and free the allocated memory. For this + /// to be safe, the memory must have been allocated in accordance + /// with the memory layout used by `TryBox` . + /// + /// # Safety + /// + /// This function is unsafe because improper use may lead to + /// memory problems. For example, a double-free may occur if the + /// function is called twice on the same raw pointer. 
+ /// + /// + /// # Examples + /// + /// Recreate a `TryBox` which was previously converted to a raw pointer + /// using [`TryBox::into_raw_with_allocator`]: + /// ``` + /// # use svsm::alloc::{boxed::TryBox, TryAllocError}; + /// use std::alloc::System; + /// + /// let x = TryBox::try_new_in(5, System)?; + /// let (ptr, alloc) = TryBox::into_raw_with_allocator(x); + /// let x = unsafe { TryBox::from_raw_in(ptr, alloc) }; + /// # Ok::<(), TryAllocError>(()) + /// ``` + /// Manually create a `TryBox` from scratch by using the system allocator: + /// ``` + /// # use svsm::alloc::{boxed::TryBox, SvsmAlloc, TryAllocError}; + /// use std::alloc::{Layout, System}; + /// + /// unsafe { + /// let ptr = System.allocate(Layout::new::())?.as_ptr() as *mut i32; + /// // In general .write is required to avoid attempting to destruct + /// // the (uninitialized) previous contents of `ptr`, though for this + /// // simple example `*ptr = 5` would have worked as well. + /// ptr.write(5); + /// let x = TryBox::from_raw_in(ptr, System); + /// } + /// # Ok::<(), TryAllocError>(()) + /// ``` + #[inline] + pub unsafe fn from_raw_in(raw: *mut T, alloc: A) -> Self { + Self(unsafe { Unique::new_unchecked(raw) }, alloc) + } + + /// Consumes the `TryBox`, returning a wrapped raw pointer. + /// + /// The pointer will be properly aligned and non-null. + /// + /// After calling this function, the caller is responsible for the + /// memory previously managed by the `TryBox`. In particular, the + /// caller should properly destroy `T` and release the memory, taking + /// into account the memory layout used by `TryBox`. The easiest way to + /// do this is to convert the raw pointer back into a `TryBox` with the + /// [`TryBox::from_raw_in`] function, allowing the `TryBox` destructor to perform + /// the cleanup. + /// + /// Note: this is an associated function, which means that you have + /// to call it as `TryBox::into_raw(b)` instead of `b.into_raw()`. 
This + /// is so that there is no conflict with a method on the inner type. + /// + /// # Examples + /// Converting the raw pointer back into a `TryBox` with [`TryBox::from_raw_in`] + /// for automatic cleanup: + /// ``` + /// # use svsm::alloc::{boxed::TryBox, TryAllocError}; + /// use std::alloc::System; + /// + /// let x = TryBox::try_new_in(String::from("Hello"), System)?; + /// let ptr = TryBox::into_raw(x); + /// let x = unsafe { TryBox::from_raw_in(ptr, System) }; + /// # Ok::<(), TryAllocError>(()) + /// ``` + /// Manual cleanup by explicitly running the destructor and deallocating + /// the memory: + /// ``` + /// # use svsm::alloc::{boxed::TryBox, SvsmAlloc, TryAllocError}; + /// use std::alloc::{Layout, System}; + /// use std::ptr::{self, NonNull}; + /// + /// let x = TryBox::try_new_in(String::from("Hello"), System)?; + /// let p = TryBox::into_raw(x); + /// unsafe { + /// ptr::drop_in_place(p); + /// let non_null = NonNull::new_unchecked(p); + /// System.deallocate(non_null.cast(), Layout::new::()); + /// } + /// # Ok::<(), TryAllocError>(()) + /// ``` + #[inline] + pub fn into_raw(b: Self) -> *mut T { + Self::into_raw_with_allocator(b).0 + } + + /// Consumes the `TryBox`, returning a wrapped raw pointer and the allocator. + /// + /// The pointer will be properly aligned and non-null. + /// + /// After calling this function, the caller is responsible for the + /// memory previously managed by the `TryBox`. In particular, the + /// caller should properly destroy `T` and release the memory, taking + /// into account the memory layout used by `TryBox`. The easiest way to + /// do this is to convert the raw pointer back into a `TryBox` with the + /// [`TryBox::from_raw_in`] function, allowing the `TryBox` destructor to perform + /// the cleanup. + /// + /// Note: this is an associated function, which means that you have + /// to call it as `TryBox::into_raw_with_allocator(b)` instead of `b.into_raw_with_allocator()`. 
This + /// is so that there is no conflict with a method on the inner type. + /// + /// # Examples + /// Converting the raw pointer back into a `TryBox` with [`TryBox::from_raw_in`] + /// for automatic cleanup: + /// ``` + /// # use svsm::alloc::{boxed::TryBox, TryAllocError}; + /// use std::alloc::System; + /// + /// let x = TryBox::try_new_in(String::from("Hello"), System)?; + /// let (ptr, alloc) = TryBox::into_raw_with_allocator(x); + /// let x = unsafe { TryBox::from_raw_in(ptr, alloc) }; + /// # Ok::<(), TryAllocError>(()) + /// ``` + /// Manual cleanup by explicitly running the destructor and deallocating + /// the memory: + /// ``` + /// # use svsm::alloc::{boxed::TryBox, SvsmAlloc, TryAllocError}; + /// + /// use std::alloc::{Layout, System}; + /// use std::ptr::{self, NonNull}; + /// + /// let x = TryBox::try_new_in(String::from("Hello"), System)?; + /// let (ptr, alloc) = TryBox::into_raw_with_allocator(x); + /// unsafe { + /// ptr::drop_in_place(ptr); + /// let non_null = NonNull::new_unchecked(ptr); + /// alloc.deallocate(non_null.cast(), Layout::new::()); + /// } + /// # Ok::<(), TryAllocError>(()) + /// ``` + #[inline] + pub fn into_raw_with_allocator(b: Self) -> (*mut T, A) { + let (leaked, alloc) = TryBox::into_unique(b); + (leaked.as_ptr(), alloc) + } + + #[inline] + fn into_unique(b: Self) -> (Unique, A) { + // TryBox is recognized as a "unique pointer" by Stacked Borrows, but internally it is a + // raw pointer for the type system. Turning it directly into a raw pointer would not be + // recognized as "releasing" the unique pointer to permit aliased raw accesses, + // so all raw pointer methods have to go through `TryBox::leak`. Turning *that* to a raw pointer + // behaves correctly. + let alloc = unsafe { ptr::read(&b.1) }; + (Unique::from(Self::leak(b)), alloc) + } + + /// Returns a reference to the underlying allocator. 
+ /// + /// Note: this is an associated function, which means that you have + /// to call it as `TryBox::allocator(&b)` instead of `b.allocator()`. This + /// is so that there is no conflict with a method on the inner type. + #[inline] + pub const fn allocator(b: &Self) -> &A { + &b.1 + } + + /// Consumes and leaks the `TryBox`, returning a mutable reference, + /// `&'a mut T`. Note that the type `T` must outlive the chosen lifetime + /// `'a`. If the type has only static references, or none at all, then this + /// may be chosen to be `'static`. + /// + /// This function is mainly useful for data that lives for the remainder of + /// the program's life. Dropping the returned reference will cause a memory + /// leak. If this is not acceptable, the reference should first be wrapped + /// with the [`TryBox::from_raw_in`] function producing a `TryBox`. This `TryBox` can + /// then be dropped which will properly destroy `T` and release the + /// allocated memory. + /// + /// Note: this is an associated function, which means that you have + /// to call it as `TryBox::leak(b)` instead of `b.leak()`. This + /// is so that there is no conflict with a method on the inner type. + /// + /// # Examples + /// + /// Simple usage: + /// + /// ``` + /// # use svsm::alloc::{boxed::TryBox, TryAllocError}; + /// use std::alloc::System; + /// + /// let x = TryBox::try_new_in(41, System)?; + /// let static_ref: &'static mut usize = TryBox::leak(x); + /// *static_ref += 1; + /// assert_eq!(*static_ref, 42); + /// + /// // Deallocate + /// let x = unsafe { TryBox::from_raw_in(static_ref, System) }; + /// # Ok::<(), TryAllocError>(()) + /// ``` + #[inline] + pub fn leak<'a>(b: Self) -> &'a mut T + where + A: 'a, + { + unsafe { &mut *ManuallyDrop::new(b).0.as_ptr() } + } + + /// Converts a `TryBox` into a `Pin>`. If `T` does not implement [`Unpin`], then + /// `*boxed` will be pinned in memory and unable to be moved. 
+ /// + /// This conversion does not allocate on the heap and happens in place. + /// + /// This is also available via [`From`]. + /// + /// Constructing and pinning a `TryBox` with TryBox::into_pin([TryBox::try_new_in]\(x, alloc)) + /// can also be written more concisely using [TryBox::try_pin_in]\(x, alloc). + /// This `into_pin` method is useful if you already have a `TryBox`, or you are + /// constructing a (pinned) `TryBox` in a different way than with [`TryBox::try_new_in`]. + /// + /// # Notes + /// + /// It's not recommended that crates add an impl like `From> for Pin`, + /// as it'll introduce an ambiguity when calling `Pin::from`. + /// A demonstration of such a poor impl is shown below. + /// + /// ```compile_fail + /// # use svsm::alloc::boxed::TryBox; + /// # use std::pin::Pin; + /// struct Foo; // A type defined in this crate. + /// impl From> for Pin { + /// fn from(_: TryBox<()>) -> Pin { + /// Pin::new(Foo) + /// } + /// } + /// + /// let foo = TryBox::new(()); + /// let bar = Pin::from(foo); + /// ``` + pub fn into_pin(boxed: Self) -> Pin + where + A: 'static, + { + // It's not possible to move or replace the insides of a `Pin>` + // when `T: !Unpin`, so it's safe to pin it directly without any + // additional requirements. + unsafe { Pin::new_unchecked(boxed) } + } +} + +impl TryBox, A> { + /// Converts to `TryBox`. + /// + /// # Safety + /// + /// As with [`MaybeUninit::assume_init`], + /// it is up to the caller to guarantee that the value + /// really is in an initialized state. + /// Calling this when the content is not yet fully initialized + /// causes immediate undefined behavior. 
+ /// + /// # Examples + /// + /// ``` + /// # use svsm::alloc::{boxed::TryBox, TryAllocError}; + /// use std::alloc::System; + /// + /// let mut five = TryBox::::try_new_uninit_in(System)?; + /// + /// let five = unsafe { + /// // Deferred initialization: + /// five.as_mut_ptr().write(5); + /// + /// five.assume_init() + /// }; + /// + /// assert_eq!(*five, 5); + /// # Ok::<(), TryAllocError>(()) + /// ``` + pub unsafe fn assume_init(self) -> TryBox { + let (raw, alloc) = TryBox::into_raw_with_allocator(self); + unsafe { TryBox::from_raw_in(raw as *mut T, alloc) } + } + + /// Writes the value and converts to `TryBox`. + /// + /// This method converts the box similarly to [`TryBox::assume_init`] but + /// writes `value` into it before conversion thus guaranteeing safety. + /// In some scenarios use of this method may improve performance because + /// the compiler may be able to optimize copying from stack. + /// + /// # Examples + /// + /// ``` + /// # use svsm::alloc::{boxed::TryBox, TryAllocError}; + /// use std::alloc::System; + /// + /// let big_box = TryBox::<[usize; 1024], _>::try_new_uninit_in(System)?; + /// + /// let mut array = [0; 1024]; + /// for (i, place) in array.iter_mut().enumerate() { + /// *place = i; + /// } + /// + /// // The optimizer may be able to elide this copy, so previous code writes + /// // to heap directly. 
+ /// let big_box = TryBox::write(big_box, array); + /// + /// for (i, x) in big_box.iter().enumerate() { + /// assert_eq!(*x, i); + /// } + /// # Ok::<(), TryAllocError>(()) + /// ``` + pub fn write(mut boxed: Self, value: T) -> TryBox { + unsafe { + (*boxed).write(value); + boxed.assume_init() + } + } +} + +impl Drop for TryBox { + fn drop(&mut self) { + let ptr = self.0; + unsafe { + ptr.as_ptr().drop_in_place(); + let layout = Layout::new::(); + if layout.size() != 0 { + self.1.deallocate(From::from(ptr.cast()), layout); + } + } + } +} + +impl TryBox { + /// Allocates memory in the given allocator and places the default value + /// for `T` into it. + pub fn try_default_in(alloc: A) -> Result { + Self::try_new_in(T::default(), alloc) + } +} + +impl TryBox { + /// Returns a new `TryBox` with this box's contents. The new box is + /// allocated with this box's allocator. + pub fn try_clone(&self) -> Result { + let boxed = Self::try_new_uninit_in(self.1.clone())?; + Ok(TryBox::write(boxed, unsafe { self.0.as_ref().clone() })) + } +} + +impl TryBox { + /// Returns a new `TryBox` with this box's contents. The new box is + /// allocated with the given allocator. 
+ pub fn try_clone_in(&self, alloc: A) -> Result { + let boxed = Self::try_new_uninit_in(alloc)?; + Ok(TryBox::write(boxed, unsafe { self.0.as_ref().clone() })) + } +} + +impl PartialEq for TryBox { + #[inline] + fn eq(&self, other: &Self) -> bool { + PartialEq::eq(&**self, &**other) + } +} + +impl PartialOrd for TryBox { + #[inline] + fn partial_cmp(&self, other: &Self) -> Option { + PartialOrd::partial_cmp(&**self, &**other) + } + #[inline] + fn lt(&self, other: &Self) -> bool { + PartialOrd::lt(&**self, &**other) + } + #[inline] + fn le(&self, other: &Self) -> bool { + PartialOrd::le(&**self, &**other) + } + #[inline] + fn ge(&self, other: &Self) -> bool { + PartialOrd::ge(&**self, &**other) + } + #[inline] + fn gt(&self, other: &Self) -> bool { + PartialOrd::gt(&**self, &**other) + } +} + +impl Ord for TryBox { + #[inline] + fn cmp(&self, other: &Self) -> Ordering { + Ord::cmp(&**self, &**other) + } +} + +impl Eq for TryBox {} + +impl From> for Pin> +where + A: 'static, +{ + /// Converts a `TryBox` into a `Pin>`. If `T` does not implement [`Unpin`], then + /// `*boxed` will be pinned in memory and unable to be moved. + /// + /// This conversion does not allocate on the heap and happens in place. + /// + /// This is also available via [`TryBox::into_pin`]. + /// + /// Constructing and pinning a `TryBox` with >>::from([TryBox::try_new_in]\(x, alloc)?) + /// can also be written more concisely using [TryBox::try_pin_in]\(x, alloc)?. + /// This `From` implementation is useful if you already have a `TryBox`, or you are + /// constructing a (pinned) `TryBox` in a different way than with [`TryBox::try_new_in`]. 
+ fn from(boxed: TryBox) -> Self { + TryBox::into_pin(boxed) + } +} + +impl fmt::Display for TryBox { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(&**self, f) + } +} + +impl fmt::Debug for TryBox { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Debug::fmt(&**self, f) + } +} + +impl fmt::Pointer for TryBox { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + // It's not possible to extract the inner Uniq directly from the Box, + // instead we cast it to a *const which aliases the Unique + let ptr: *const T = &**self; + fmt::Pointer::fmt(&ptr, f) + } +} + +impl Deref for TryBox { + type Target = T; + + fn deref(&self) -> &T { + unsafe { &*self.0.as_ptr() } + } +} + +impl DerefMut for TryBox { + fn deref_mut(&mut self) -> &mut T { + unsafe { &mut *self.0.as_ptr() } + } +} + +impl Borrow for TryBox { + fn borrow(&self) -> &T { + unsafe { &*self.0.as_ptr() } + } +} + +impl BorrowMut for TryBox { + fn borrow_mut(&mut self) -> &mut T { + unsafe { &mut *self.0.as_ptr() } + } +} + +impl AsRef for TryBox { + fn as_ref(&self) -> &T { + unsafe { &*self.0.as_ptr() } + } +} + +impl AsMut for TryBox { + fn as_mut(&mut self) -> &mut T { + unsafe { &mut *self.0.as_ptr() } + } +} + +/* Nota bene + * + * We could have chosen not to add this impl, and instead have written a + * function of Pin> to Pin. Such a function would not be sound, + * because Box implements Unpin even when T does not, as a result of + * this impl. + * + * We chose this API instead of the alternative for a few reasons: + * - Logically, it is helpful to understand pinning in regard to the + * memory region being pointed to. For this reason none of the + * standard library pointer types support projecting through a pin + * (Box is the only pointer type in std for which this would be + * safe.) 
+ * - It is in practice very useful to have Box be unconditionally + * Unpin because of trait objects, for which the structural auto + * trait functionality does not apply (e.g., Box would + * otherwise not be Unpin). + * + * Another type with the same semantics as Box but only a conditional + * implementation of `Unpin` (where `T: Unpin`) would be valid/safe, and + * could have a method to project a Pin from it. + */ +impl Unpin for TryBox where A: 'static {} + +#[cfg(test)] +mod tests { + extern crate std; + use super::*; + use std::alloc::System; + + #[test] + fn box_try_new() { + let obj = TryBox::try_new_in(5, System).unwrap(); + assert_eq!(*obj, 5); + } + + #[test] + fn box_try_uninit() { + let mut obj = TryBox::::try_new_uninit_in(System).unwrap(); + // SAFETY: TryBox owns valid memory. Memory is initialized before use. + let init = unsafe { + obj.as_mut_ptr().write(5); + obj.assume_init() + }; + assert_eq!(*init, 5); + } + + #[test] + fn box_try_uninit_write() { + let obj = TryBox::::try_new_uninit_in(System).unwrap(); + let init = TryBox::write(obj, 7); + assert_eq!(*init, 7); + } + + #[test] + fn box_try_zeroed() { + let obj = TryBox::::try_new_zeroed_in(System).unwrap(); + // SAFETY: memory is initialized to zero, which is valid for u32 + let init = unsafe { obj.assume_init() }; + assert_eq!(*init, 0); + } + + #[test] + fn box_nested_deref() { + let inner = TryBox::try_new_in([13; 32], System).unwrap(); + { + let outer = TryBox::try_new_in(inner, System).unwrap(); + assert_eq!(**outer, [13; 32]); + } + } + + #[test] + fn box_try_clone() { + let first = TryBox::try_new_in([13; 32], System).unwrap(); + let second = first.try_clone().unwrap(); + drop(first); + assert_eq!(*second, [13; 32]); + } + + #[test] + fn box_try_clone_mut() { + let mut first = TryBox::try_new_in([13; 32], System).unwrap(); + let second = first.try_clone().unwrap(); + first.fill(14); + assert_eq!(*second, [13; 32]); + assert_eq!(*first, [14; 32]); + } +} diff --git a/src/alloc/mod.rs 
b/src/alloc/mod.rs new file mode 100644 index 0000000000..6c387cccf6 --- /dev/null +++ b/src/alloc/mod.rs @@ -0,0 +1,403 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 +// +// Copyright (C) 2023 SUSE +// +// Authors: Carlos López + +//! An adapted version of the upstream Rust alloc crate with a stabilized allocator API. + +use core::alloc::{Layout, LayoutError}; +use core::ptr::{self, NonNull}; + +pub mod boxed; +mod unique; + +/// A stable version of [`AllocError`](core::alloc::AllocError). +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum TryAllocError { + OutOfMemory, + ZeroSized, + Layout(LayoutError), + CapacityOverflow, +} + +impl From for TryAllocError { + fn from(err: LayoutError) -> Self { + Self::Layout(err) + } +} + +/// A stable version of the [`Allocator`](core::alloc::Allocator) trait. +/// +/// An implementation of `SvsmAlloc` can allocate, grow, shrink, and deallocate arbitrary blocks of +/// data described via [`Layout`][]. +/// +/// `SvsmAlloc` is designed to be implemented on ZSTs, references, or smart pointers because having +/// an allocator like `MyAlloc([u8; N])` cannot be moved, without updating the pointers to the +/// allocated memory. +/// +/// Unlike [`GlobalAlloc`], zero-sized allocations are allowed in `SvsmAlloc`. If an underlying +/// allocator does not support this (like jemalloc) or return a null pointer (such as +/// `libc::malloc`), this must be caught by the implementation. +/// +/// ### Currently allocated memory +/// +/// Some of the methods require that a memory block be *currently allocated* via an allocator. This +/// means that: +/// +/// * the starting address for that memory block was previously returned by [`allocate`], [`grow`], or +/// [`shrink`], and +/// +/// * the memory block has not been subsequently deallocated, where blocks are either deallocated +/// directly by being passed to [`deallocate`] or were changed by being passed to [`grow`] or +/// [`shrink`] that returns `Ok`. 
If `grow` or `shrink` have returned `Err`, the passed pointer +/// remains valid. +/// +/// [`allocate`]: SvsmAlloc::allocate +/// [`grow`]: SvsmAlloc::grow +/// [`shrink`]: SvsmAlloc::shrink +/// [`deallocate`]: SvsmAlloc::deallocate +/// [`GlobalAlloc`]: core::alloc::GlobalAlloc +/// +/// ### Memory fitting +/// +/// Some of the methods require that a layout *fit* a memory block. What it means for a layout to +/// "fit" a memory block means (or equivalently, for a memory block to "fit" a layout) is that the +/// following conditions must hold: +/// +/// * The block must be allocated with the same alignment as [`layout.align()`], and +/// +/// * The provided [`layout.size()`] must fall in the range `min ..= max`, where: +/// - `min` is the size of the layout most recently used to allocate the block, and +/// - `max` is the latest actual size returned from [`allocate`], [`grow`], or [`shrink`]. +/// +/// [`layout.align()`]: Layout::align +/// [`layout.size()`]: Layout::size +/// +/// # Safety +/// +/// * Memory blocks returned from an allocator that are [*currently allocated*] must point to +/// valid memory and retain their validity while they are [*currently allocated*] and at +/// least one of the instance and all of its clones has not been dropped. +/// +/// * copying, cloning, or moving the allocator must not invalidate memory blocks returned from this +/// allocator. A copied or cloned allocator must behave like the same allocator, and +/// +/// * any pointer to a memory block which is [*currently allocated*] may be passed to any other +/// method of the allocator. +/// +/// [*currently allocated*]: #currently-allocated-memory +pub unsafe trait SvsmAlloc { + /// Attempts to allocate a block of memory. + /// + /// On success, returns a [`NonNull<[u8]>`][NonNull] meeting the size and alignment guarantees of `layout`. + /// + /// The returned block may have a larger size than specified by `layout.size()`, and may or may + /// not have its contents initialized. 
+ /// + /// # Errors + /// + /// Returning `Err` indicates that either memory is exhausted or `layout` does not meet + /// allocator's size or alignment constraints. + /// + /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or + /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement + /// this trait atop an underlying native allocation library that aborts on memory exhaustion.) + fn allocate(&self, layout: Layout) -> Result, TryAllocError>; + + /// Behaves like `allocate`, but also ensures that the returned memory is zero-initialized. + /// + /// # Errors + /// + /// Returning `Err` indicates that either memory is exhausted or `layout` does not meet + /// allocator's size or alignment constraints. + /// + /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or + /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement + /// this trait atop an underlying native allocation library that aborts on memory exhaustion.) + fn allocate_zeroed(&self, layout: Layout) -> Result, TryAllocError> { + let ptr = self.allocate(layout)?; + // SAFETY: `alloc` returns a valid memory block + unsafe { ptr.as_ptr().cast::().write_bytes(0, ptr.len()) }; + Ok(ptr) + } + + /// Deallocates the memory referenced by `ptr`. + /// + /// # Safety + /// + /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator, and + /// * `layout` must [*fit*] that block of memory. + /// + /// [*currently allocated*]: #currently-allocated-memory + /// [*fit*]: #memory-fitting + unsafe fn deallocate(&self, ptr: NonNull, layout: Layout); + + /// Attempts to extend the memory block. + /// + /// Returns a new [`NonNull<[u8]>`][NonNull] containing a pointer and the actual size of the allocated + /// memory. The pointer is suitable for holding data described by `new_layout`. 
To accomplish + /// this, the allocator may extend the allocation referenced by `ptr` to fit the new layout. + /// + /// If this returns `Ok`, then ownership of the memory block referenced by `ptr` has been + /// transferred to this allocator. Any access to the old `ptr` is Undefined Behavior, even if the + /// allocation was grown in-place. The newly returned pointer is the only valid pointer + /// for accessing this memory now. + /// + /// If this method returns `Err`, then ownership of the memory block has not been transferred to + /// this allocator, and the contents of the memory block are unaltered. + /// + /// # Safety + /// + /// * `ptr` must denote a block of memory currently allocated via this allocator. + /// * `old_layout` must fit that block of memory (The `new_layout` argument need not fit it.). + /// * `new_layout.size()` must be greater than or equal to `old_layout.size()`. + /// + /// Note that `new_layout.align()` need not be the same as `old_layout.align()`. + /// + /// [*currently allocated*]: #currently-allocated-memory + /// [*fit*]: #memory-fitting + /// + /// # Errors + /// + /// Returns `Err` if the new layout does not meet the allocator's size and alignment + /// constraints of the allocator, or if growing otherwise fails. + /// + /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or + /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement + /// this trait atop an underlying native allocation library that aborts on memory exhaustion.) 
+    unsafe fn grow(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, TryAllocError> {
+        debug_assert!(
+            new_layout.size() >= old_layout.size(),
+            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
+        );
+
+        let new_ptr = self.allocate(new_layout)?;
+
+        // SAFETY: because `new_layout.size()` must be greater than or equal to
+        // `old_layout.size()`, both the old and new memory allocation are valid for reads and
+        // writes for `old_layout.size()` bytes. Also, because the old allocation wasn't yet
+        // deallocated, it cannot overlap `new_ptr`. Thus, the call to `copy_nonoverlapping` is
+        // safe. The safety contract for `dealloc` must be upheld by the caller.
+        unsafe {
+            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr().cast(), old_layout.size());
+            self.deallocate(ptr, old_layout);
+        }
+
+        Ok(new_ptr)
+    }
+
+    /// Behaves like `grow`, but also ensures that the new contents are set to zero before being
+    /// returned.
+    ///
+    /// The memory block will contain the following contents after a successful call to
+    /// `grow_zeroed`:
+    ///   * Bytes `0..old_layout.size()` are preserved from the original allocation.
+    ///   * Bytes `old_layout.size()..old_size` will either be preserved or zeroed, depending on
+    ///     the allocator implementation. `old_size` refers to the size of the memory block prior
+    ///     to the `grow_zeroed` call, which may be larger than the size that was originally
+    ///     requested when it was allocated.
+    ///   * Bytes `old_size..new_size` are zeroed. `new_size` refers to the size of the memory
+    ///     block returned by the `grow_zeroed` call.
+    ///
+    /// # Safety
+    ///
+    /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator.
+    /// * `old_layout` must [*fit*] that block of memory (The `new_layout` argument need not fit it.).
+    /// * `new_layout.size()` must be greater than or equal to `old_layout.size()`.
+    ///
+    /// Note that `new_layout.align()` need not be the same as `old_layout.align()`.
+    ///
+    /// [*currently allocated*]: #currently-allocated-memory
+    /// [*fit*]: #memory-fitting
+    ///
+    /// # Errors
+    ///
+    /// Returns `Err` if the new layout does not meet the allocator's size and alignment
+    /// constraints of the allocator, or if growing otherwise fails.
+    ///
+    /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or
+    /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement
+    /// this trait atop an underlying native allocation library that aborts on memory exhaustion.)
+    unsafe fn grow_zeroed(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, TryAllocError> {
+        debug_assert!(
+            new_layout.size() >= old_layout.size(),
+            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
+        );
+
+        let new_ptr = self.allocate_zeroed(new_layout)?;
+
+        // SAFETY: because `new_layout.size()` must be greater than or equal to
+        // `old_layout.size()`, both the old and new memory allocation are valid for reads and
+        // writes for `old_layout.size()` bytes. Also, because the old allocation wasn't yet
+        // deallocated, it cannot overlap `new_ptr`. Thus, the call to `copy_nonoverlapping` is
+        // safe. The safety contract for `dealloc` must be upheld by the caller.
+        unsafe {
+            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr().cast(), old_layout.size());
+            self.deallocate(ptr, old_layout);
+        }
+
+        Ok(new_ptr)
+    }
+
+    /// Attempts to shrink the memory block.
+    ///
+    /// Returns a new [`NonNull<[u8]>`][NonNull] containing a pointer and the actual size of the allocated
+    /// memory. The pointer is suitable for holding data described by `new_layout`. To accomplish
+    /// this, the allocator may shrink the allocation referenced by `ptr` to fit the new layout.
+    ///
+    /// If this returns `Ok`, then ownership of the memory block referenced by `ptr` has been
+    /// transferred to this allocator. Any access to the old `ptr` is Undefined Behavior, even if the
+    /// allocation was shrunk in-place. The newly returned pointer is the only valid pointer
+    /// for accessing this memory now.
+    ///
+    /// If this method returns `Err`, then ownership of the memory block has not been transferred to
+    /// this allocator, and the contents of the memory block are unaltered.
+    ///
+    /// # Safety
+    ///
+    /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator.
+    /// * `old_layout` must [*fit*] that block of memory (The `new_layout` argument need not fit it.).
+    /// * `new_layout.size()` must be smaller than or equal to `old_layout.size()`.
+    ///
+    /// Note that `new_layout.align()` need not be the same as `old_layout.align()`.
+    ///
+    /// [*currently allocated*]: #currently-allocated-memory
+    /// [*fit*]: #memory-fitting
+    ///
+    /// # Errors
+    ///
+    /// Returns `Err` if the new layout does not meet the allocator's size and alignment
+    /// constraints of the allocator, or if shrinking otherwise fails.
+    ///
+    /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or
+    /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement
+    /// this trait atop an underlying native allocation library that aborts on memory exhaustion.)
+    ///
+    /// Clients wishing to abort computation in response to an allocation error are encouraged to
+    /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar.
+    ///
+    /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
+    unsafe fn shrink(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, TryAllocError> {
+        debug_assert!(
+            new_layout.size() <= old_layout.size(),
+            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
+        );
+
+        let new_ptr = self.allocate(new_layout)?;
+
+        // SAFETY: because `new_layout.size()` must be lower than or equal to
+        // `old_layout.size()`, both the old and new memory allocation are valid for reads and
+        // writes for `new_layout.size()` bytes. Also, because the old allocation wasn't yet
+        // deallocated, it cannot overlap `new_ptr`. Thus, the call to `copy_nonoverlapping` is
+        // safe. The safety contract for `dealloc` must be upheld by the caller.
+        unsafe {
+            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr().cast(), new_layout.size());
+            self.deallocate(ptr, old_layout);
+        }
+
+        Ok(new_ptr)
+    }
+
+    /// Creates a "by reference" adapter for this instance of `SvsmAlloc`.
+    ///
+    /// The returned adapter also implements `SvsmAlloc` and will simply borrow this.
+    #[inline(always)]
+    fn by_ref(&self) -> &Self
+    where
+        Self: Sized,
+    {
+        self
+    }
+}
+
+unsafe impl<A> SvsmAlloc for &A
+where
+    A: SvsmAlloc + ?Sized,
+{
+    #[inline]
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, TryAllocError> {
+        (**self).allocate(layout)
+    }
+
+    #[inline]
+    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, TryAllocError> {
+        (**self).allocate_zeroed(layout)
+    }
+
+    #[inline]
+    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+        // SAFETY: the safety contract must be upheld by the caller
+        unsafe { (**self).deallocate(ptr, layout) }
+    }
+
+    #[inline]
+    unsafe fn grow(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, TryAllocError> {
+        // SAFETY: the safety contract must be upheld by the caller
+        unsafe { (**self).grow(ptr, old_layout, new_layout) }
+    }
+
+    #[inline]
+    unsafe fn grow_zeroed(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, TryAllocError> {
+        // SAFETY: the safety contract must be upheld by the caller
+        unsafe { (**self).grow_zeroed(ptr, old_layout, new_layout) }
+    }
+
+    #[inline]
+    unsafe fn shrink(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, TryAllocError> {
+        // SAFETY: the safety contract must be upheld by the caller
+        unsafe { (**self).shrink(ptr, old_layout, new_layout) }
+    }
+}
+
+#[cfg(not(target_os = "none"))]
+extern crate std;
+#[cfg(not(target_os = "none"))]
+use core::alloc::GlobalAlloc;
+#[cfg(not(target_os = "none"))]
+unsafe impl SvsmAlloc for std::alloc::System {
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, TryAllocError> {
+        let size = layout.size();
+        if size == 0 {
+            return Err(TryAllocError::ZeroSized);
+        }
+        // SAFETY: size is nonzero
+        let raw_ptr = unsafe { self.alloc(layout) };
+        let ptr = NonNull::new(raw_ptr).ok_or(TryAllocError::OutOfMemory)?;
+        Ok(NonNull::slice_from_raw_parts(ptr, size))
+    }
+
+    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+        self.dealloc(ptr.as_ptr(), layout)
+    }
+}
diff --git a/src/alloc/unique.rs b/src/alloc/unique.rs
new file mode 100644
index 0000000000..0440129db2
--- /dev/null
+++ b/src/alloc/unique.rs
@@ -0,0 +1,195 @@
+// SPDX-License-Identifier: MIT OR Apache-2.0
+//
+// Copyright (C) 2023 SUSE
+//
+// Authors: Carlos López <carlos.lopez@suse.com>
+
+use core::convert::From;
+use core::fmt;
+use core::marker::PhantomData;
+use core::ptr::NonNull;
+
+/// A wrapper around a raw non-null `*mut T` that indicates that the possessor
+/// of this wrapper owns the referent. Useful for building abstractions like
+/// `Box<T>`, `Vec<T>`, `String`, and `HashMap<K, V>`.
+///
+/// Unlike `*mut T`, `Unique<T>` behaves "as if" it were an instance of `T`.
+/// It implements `Send`/`Sync` if `T` is `Send`/`Sync`. It also implies
+/// the kind of strong aliasing guarantees an instance of `T` can expect:
+/// the referent of the pointer should not be modified without a unique path to
+/// its owning Unique.
+///
+/// If you're uncertain of whether it's correct to use `Unique` for your purposes,
+/// consider using `NonNull<T>`, which has weaker semantics.
+///
+/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
+/// is never dereferenced. This is so that enums may use this forbidden value
+/// as a discriminant -- `Option<Unique<T>>` has the same size as `Unique<T>`.
+/// However the pointer may still dangle if it isn't dereferenced.
+///
+/// Unlike `*mut T`, `Unique<T>` is covariant over `T`. This should always be correct
+/// for any type which upholds Unique's aliasing requirements.
+#[repr(transparent)]
+// Lang item used experimentally by Miri to define the semantics of `Unique`.
+pub struct Unique<T: ?Sized> {
+    pointer: NonNull<T>,
+    // NOTE: this marker has no consequences for variance, but is necessary
+    // for dropck to understand that we logically own a `T`.
+    //
+    // For details, see:
+    // https://github.com/rust-lang/rfcs/blob/master/text/0769-sound-generic-drop.md#phantom-data
+    _marker: PhantomData<T>,
+}
+
+/// `Unique` pointers are `Send` if `T` is `Send` because the data they
+/// reference is unaliased. Note that this aliasing invariant is
+/// unenforced by the type system; the abstraction using the
+/// `Unique` must enforce it.
+unsafe impl<T: Send + ?Sized> Send for Unique<T> {}
+
+/// `Unique` pointers are `Sync` if `T` is `Sync` because the data they
+/// reference is unaliased. Note that this aliasing invariant is
+/// unenforced by the type system; the abstraction using the
+/// `Unique` must enforce it.
+unsafe impl<T: Sync + ?Sized> Sync for Unique<T> {}
+
+impl<T: Sized> Unique<T> {
+    /// Creates a new `Unique` that is dangling, but well-aligned.
+    ///
+    /// This is useful for initializing types which lazily allocate, like
+    /// `Vec::new` does.
+    ///
+    /// Note that the pointer value may potentially represent a valid pointer to
+    /// a `T`, which means this must not be used as a "not yet initialized"
+    /// sentinel value. Types that lazily allocate must track initialization by
+    /// some other means.
+    #[must_use]
+    #[inline]
+    pub const fn dangling() -> Self {
+        // FIXME(const-hack) replace with `From`
+        Unique {
+            pointer: NonNull::dangling(),
+            _marker: PhantomData,
+        }
+    }
+}
+
+impl<T: ?Sized> Unique<T> {
+    /// Creates a new `Unique`.
+    ///
+    /// # Safety
+    ///
+    /// `ptr` must be non-null.
+    #[inline]
+    pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
+        // SAFETY: the caller must guarantee that `ptr` is non-null.
+        unsafe {
+            Unique {
+                pointer: NonNull::new_unchecked(ptr),
+                _marker: PhantomData,
+            }
+        }
+    }
+
+    /// Creates a new `Unique` if `ptr` is non-null.
+    #[inline]
+    pub fn new(ptr: *mut T) -> Option<Self> {
+        NonNull::new(ptr).map(|pointer| Unique {
+            pointer,
+            _marker: PhantomData,
+        })
+    }
+
+    /// Acquires the underlying `*mut` pointer.
+    #[must_use = "`self` will be dropped if the result is not used"]
+    #[inline]
+    pub const fn as_ptr(self) -> *mut T {
+        self.pointer.as_ptr()
+    }
+
+    /// Dereferences the content.
+    ///
+    /// The resulting lifetime is bound to self so this behaves "as if"
+    /// it were actually an instance of T that is getting borrowed. If a longer
+    /// (unbound) lifetime is needed, use `&*my_ptr.as_ptr()`.
+    #[must_use]
+    #[inline]
+    pub const unsafe fn as_ref(&self) -> &T {
+        // SAFETY: the caller must guarantee that `self` meets all the
+        // requirements for a reference.
+        unsafe { self.pointer.as_ref() }
+    }
+
+    /// Mutably dereferences the content.
+    ///
+    /// The resulting lifetime is bound to self so this behaves "as if"
+    /// it were actually an instance of T that is getting borrowed. If a longer
+    /// (unbound) lifetime is needed, use `&mut *my_ptr.as_ptr()`.
+    #[must_use]
+    #[inline]
+    pub unsafe fn as_mut(&mut self) -> &mut T {
+        // SAFETY: the caller must guarantee that `self` meets all the
+        // requirements for a mutable reference.
+        unsafe { self.pointer.as_mut() }
+    }
+
+    /// Casts to a pointer of another type.
+    #[must_use = "`self` will be dropped if the result is not used"]
+    #[inline]
+    pub const fn cast<U>(self) -> Unique<U> {
+        // FIXME(const-hack): replace with `From`
+        // SAFETY: is `NonNull`
+        unsafe { Unique::new_unchecked(self.pointer.cast().as_ptr()) }
+    }
+}
+
+impl<T: ?Sized> Clone for Unique<T> {
+    #[inline]
+    fn clone(&self) -> Self {
+        *self
+    }
+}
+
+impl<T: ?Sized> Copy for Unique<T> {}
+
+impl<T: ?Sized> fmt::Debug for Unique<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Pointer::fmt(&self.as_ptr(), f)
+    }
+}
+
+impl<T: ?Sized> fmt::Pointer for Unique<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Pointer::fmt(&self.as_ptr(), f)
+    }
+}
+
+impl<T: ?Sized> From<&mut T> for Unique<T> {
+    /// Converts a `&mut T` to a `Unique<T>`.
+    ///
+    /// This conversion is infallible since references cannot be null.
+    #[inline]
+    fn from(reference: &mut T) -> Self {
+        Self::from(NonNull::from(reference))
+    }
+}
+
+impl<T: ?Sized> From<NonNull<T>> for Unique<T> {
+    /// Converts a `NonNull<T>` to a `Unique<T>`.
+    ///
+    /// This conversion is infallible since `NonNull` cannot be null.
+    #[inline]
+    fn from(pointer: NonNull<T>) -> Self {
+        Unique {
+            pointer,
+            _marker: PhantomData,
+        }
+    }
+}
+
+impl<T: ?Sized> From<Unique<T>> for NonNull<T> {
+    #[inline]
+    fn from(unique: Unique<T>) -> Self {
+        unique.pointer
+    }
+}
diff --git a/src/lib.rs b/src/lib.rs
index fd0f3adf70..b1fbc42405 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -14,6 +14,7 @@
 pub mod acpi;
 pub mod address;
+pub mod alloc;
 pub mod config;
 pub mod console;
 pub mod cpu;