From 2931e0fd63541d6e39ffa9ec75ffc9b614b73a16 Mon Sep 17 00:00:00 2001
From: Ralf Jung
Date: Sun, 3 Jul 2022 23:18:27 -0400
Subject: [PATCH] handle Box with allocators

---
 src/stacked_borrows.rs         | 32 +++++++++----
 tests/pass/box-custom-alloc.rs | 87 ++++++++++++++++++++++++++++++++++
 2 files changed, 110 insertions(+), 9 deletions(-)
 create mode 100644 tests/pass/box-custom-alloc.rs

diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs
index efc38fdae3..9969fbdbcd 100644
--- a/src/stacked_borrows.rs
+++ b/src/stacked_borrows.rs
@@ -976,27 +976,30 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx>
                 // Raw pointers need to be enabled.
                 ty::RawPtr(tym) if kind == RetagKind::Raw =>
                     Some((RefKind::Raw { mutable: tym.mutbl == Mutability::Mut }, false)),
-                // Boxes do not get a protector: protectors reflect that references outlive the call
-                // they were passed in to; that's just not the case for boxes.
-                ty::Adt(..) if ty.is_box() => Some((RefKind::Unique { two_phase: false }, false)),
+                // Boxes are handled separately due to that allocator situation.
                 _ => None,
             }
         }
 
         // We need a visitor to visit all references. However, that requires
-        // a `MPlaceTy` (or `OpTy), so we have a fast path for reference types that
+        // a `MPlaceTy` (or `OpTy`), so we have a fast path for reference types that
         // avoids allocating.
-        if let Some((mutbl, protector)) = qualify(place.layout.ty, kind) {
+        if let Some((ref_kind, protector)) = qualify(place.layout.ty, kind) {
             // Fast path.
             let val = this.read_immediate(&this.place_to_op(place)?)?;
-            let val = this.retag_reference(&val, mutbl, protector)?;
+            let val = this.retag_reference(&val, ref_kind, protector)?;
             this.write_immediate(*val, place)?;
             return Ok(());
         }
 
         // If we don't want to recurse, we are already done.
-        if !this.machine.stacked_borrows.as_mut().unwrap().get_mut().retag_fields {
+        // EXCEPT if this is a `Box`, then we have to recurse because allocators.
+        // (Yes this means we technically also recursively retag the allocator itself even if field
+        // retagging is not enabled. *shrug*)
+        if !this.machine.stacked_borrows.as_mut().unwrap().get_mut().retag_fields
+            && !place.layout.ty.ty_adt_def().is_some_and(|adt| adt.is_box())
+        {
             return Ok(());
         }
 
@@ -1034,10 +1037,21 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx>
             self.ecx
         }
 
+        fn visit_box(&mut self, place: &MPlaceTy<'tcx, Tag>) -> InterpResult<'tcx> {
+            // Boxes do not get a protector: protectors reflect that references outlive the call
+            // they were passed in to; that's just not the case for boxes.
+            let (ref_kind, protector) = (RefKind::Unique { two_phase: false }, false);
+
+            let val = self.ecx.read_immediate(&place.into())?;
+            let val = self.ecx.retag_reference(&val, ref_kind, protector)?;
+            self.ecx.write_immediate(*val, &place.into())?;
+            Ok(())
+        }
+
         fn visit_value(&mut self, place: &MPlaceTy<'tcx, Tag>) -> InterpResult<'tcx> {
-            if let Some((mutbl, protector)) = qualify(place.layout.ty, self.kind) {
+            if let Some((ref_kind, protector)) = qualify(place.layout.ty, self.kind) {
                 let val = self.ecx.read_immediate(&place.into())?;
-                let val = self.ecx.retag_reference(&val, mutbl, protector)?;
+                let val = self.ecx.retag_reference(&val, ref_kind, protector)?;
                 self.ecx.write_immediate(*val, &place.into())?;
             } else if matches!(place.layout.ty.kind(), ty::RawPtr(..)) {
                 // Wide raw pointers *do* have fields and their types are strange.
diff --git a/tests/pass/box-custom-alloc.rs b/tests/pass/box-custom-alloc.rs
new file mode 100644
index 0000000000..ef432a86d4
--- /dev/null
+++ b/tests/pass/box-custom-alloc.rs
@@ -0,0 +1,87 @@
+#![allow(incomplete_features)] // for trait upcasting
+#![feature(allocator_api, trait_upcasting)]
+
+use std::alloc::Layout;
+use std::alloc::{AllocError, Allocator};
+use std::cell::Cell;
+use std::mem::MaybeUninit;
+use std::ptr::{self, NonNull};
+
+struct OnceAlloc<'a> {
+    space: Cell<&'a mut [MaybeUninit<u8>]>,
+}
+
+unsafe impl<'shared, 'a: 'shared> Allocator for &'shared OnceAlloc<'a> {
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        let space = self.space.replace(&mut []);
+
+        let (ptr, len) = (space.as_mut_ptr(), space.len());
+
+        if ptr.align_offset(layout.align()) != 0 || len < layout.size() {
+            return Err(AllocError);
+        }
+
+        let slice_ptr = ptr::slice_from_raw_parts_mut(ptr as *mut u8, len);
+        unsafe { Ok(NonNull::new_unchecked(slice_ptr)) }
+    }
+
+    unsafe fn deallocate(&self, _ptr: NonNull<u8>, _layout: Layout) {}
+}
+
+trait MyTrait {
+    fn hello(&self) -> u8;
+}
+
+impl MyTrait for [u8; 1] {
+    fn hello(&self) -> u8 {
+        self[0]
+    }
+}
+
+trait TheTrait: MyTrait {}
+
+impl TheTrait for [u8; 1] {}
+
+/// `Box` is a `ScalarPair` where the 2nd component is the allocator.
+fn test1() {
+    let mut space = vec![MaybeUninit::new(0); 1];
+    let once_alloc = OnceAlloc { space: Cell::new(&mut space[..]) };
+
+    let boxed = Box::new_in([42u8; 1], &once_alloc);
+    let _val = *boxed;
+    let with_dyn: Box<dyn TheTrait, &OnceAlloc> = boxed;
+    assert_eq!(42, with_dyn.hello());
+    let with_dyn: Box<dyn MyTrait, &OnceAlloc> = with_dyn; // upcast
+    assert_eq!(42, with_dyn.hello());
+}
+
+// Make the allocator itself so big that the Box is not even a ScalarPair any more.
+struct OnceAllocRef<'s, 'a>(&'s OnceAlloc<'a>, u64);
+
+unsafe impl<'shared, 'a: 'shared> Allocator for OnceAllocRef<'shared, 'a> {
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        self.0.allocate(layout)
+    }
+
+    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+        self.0.deallocate(ptr, layout)
+    }
+}
+
+/// `Box` is an `Aggregate`.
+fn test2() {
+    let mut space = vec![MaybeUninit::new(0); 1];
+    let once_alloc = OnceAlloc { space: Cell::new(&mut space[..]) };
+
+    let boxed = Box::new_in([42u8; 1], OnceAllocRef(&once_alloc, 0));
+    let _val = *boxed;
+    let with_dyn: Box<dyn TheTrait, OnceAllocRef> = boxed;
+    assert_eq!(42, with_dyn.hello());
+    let with_dyn: Box<dyn MyTrait, OnceAllocRef> = with_dyn; // upcast
+    assert_eq!(42, with_dyn.hello());
+}
+
+fn main() {
+    test1();
+    test2();
+}
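
Side note, not part of the patch: a minimal nightly-only sketch of the kind of program this change is about, i.e. a `Box` whose allocator is an ordinary value stored alongside the pointer, which retagging now has to traverse. The `Tagged` allocator below is a hypothetical stand-in that just forwards to `Global`.

#![feature(allocator_api)]

use std::alloc::{AllocError, Allocator, Global, Layout};
use std::ptr::NonNull;

// Hypothetical allocator for illustration: forwards to `Global`,
// but carries a field, so `Box<T, Tagged>` stores the allocator inline.
struct Tagged(u64);

unsafe impl Allocator for Tagged {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        Global.allocate(layout)
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        Global.deallocate(ptr, layout)
    }
}

fn main() {
    let b = Box::new_in(17u32, Tagged(0));
    assert_eq!(*b, 17);
    assert_eq!(Box::allocator(&b).0, 0); // the allocator value lives in the Box itself
}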