Auto merge of #76634 - RalfJung:miri-guaranteed-eq-ne, r=oli-obk
move guaranteed{ne,eq} implementation to compile-time machine

Currently, Miri needs a special hack to avoid using the core engine implementation of these intrinsics. That seems silly, so let's move them to the CTFE machine, which is the only machine that wants to use them.

I also added a reference to #73722 as a warning to anyone who wants to adjust `guaranteed_eq`.
bors committed Sep 17, 2020
2 parents 95386b6 + c321276 commit 7bdb5de
Showing 2 changed files with 70 additions and 44 deletions.
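
Before the diff, a note on what these intrinsics mean for users: `ptr_guaranteed_eq`/`ptr_guaranteed_ne` back the unstable const-comparable raw-pointer methods, and they answer `false` whenever the result is simply not known at compile time. A rough sketch of the observable behavior under the logic in this commit, assuming a nightly toolchain of that era with the unstable `const_raw_ptr_comparison` feature and the `guaranteed_eq`/`guaranteed_ne` pointer methods (those names are recalled context, not part of this diff; the constant names are illustrative):

```rust
#![feature(const_raw_ptr_comparison)]

// `&42` is promoted to a static allocation, so `P` is a valid, in-bounds pointer.
const P: *const i32 = &42;

// An in-bounds pointer can never be null, so CTFE can give a definitive answer: `true`.
const KNOWN_NONNULL: bool = P.guaranteed_ne(std::ptr::null());

// Pointer/pointer comparisons are answered conservatively by the logic in this commit
// (`false` means "not known to be equal", not "known unequal"), because some
// allocations (functions, vtables) do not have stable addresses; see #73722.
const NOT_KNOWN_EQUAL: bool = P.guaranteed_eq(P);

fn main() {
    assert!(KNOWN_NONNULL);
    let _ = NOT_KNOWN_EQUAL; // evaluates to `false` under this commit's conservative rules
}
```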
71 changes: 68 additions & 3 deletions compiler/rustc_mir/src/const_eval/machine.rs
@@ -11,7 +11,7 @@ use rustc_ast::Mutability;
use rustc_hir::def_id::DefId;
use rustc_middle::mir::AssertMessage;
use rustc_session::Limit;
use rustc_span::symbol::Symbol;
use rustc_span::symbol::{sym, Symbol};

use crate::interpret::{
    self, compile_time_machine, AllocId, Allocation, Frame, GlobalId, ImmTy, InterpCx,
@@ -176,6 +176,38 @@ impl interpret::MayLeak for ! {
}
}

impl<'mir, 'tcx: 'mir> CompileTimeEvalContext<'mir, 'tcx> {
    fn guaranteed_eq(&mut self, a: Scalar, b: Scalar) -> bool {
        match (a, b) {
            // Comparisons between integers are always known.
            (Scalar::Raw { .. }, Scalar::Raw { .. }) => a == b,
            // Equality with integers can never be known for sure.
            (Scalar::Raw { .. }, Scalar::Ptr(_)) | (Scalar::Ptr(_), Scalar::Raw { .. }) => false,
            // FIXME: return `true` for when both sides are the same pointer, *except* that
            // some things (like functions and vtables) do not have stable addresses
            // so we need to be careful around them (see e.g. #73722).
            (Scalar::Ptr(_), Scalar::Ptr(_)) => false,
        }
    }

    fn guaranteed_ne(&mut self, a: Scalar, b: Scalar) -> bool {
        match (a, b) {
            // Comparisons between integers are always known.
            (Scalar::Raw { .. }, Scalar::Raw { .. }) => a != b,
            // Comparisons of abstract pointers with null pointers are known if the pointer
            // is in bounds, because if they are in bounds, the pointer can't be null.
            (Scalar::Raw { data: 0, .. }, Scalar::Ptr(ptr))
            | (Scalar::Ptr(ptr), Scalar::Raw { data: 0, .. }) => !self.memory.ptr_may_be_null(ptr),
            // Inequality with integers other than null can never be known for sure.
            (Scalar::Raw { .. }, Scalar::Ptr(_)) | (Scalar::Ptr(_), Scalar::Raw { .. }) => false,
            // FIXME: return `true` for at least some comparisons where we can reliably
            // determine the result of runtime inequality tests at compile-time.
            // Examples include comparison of addresses in different static items.
            (Scalar::Ptr(_), Scalar::Ptr(_)) => false,
        }
    }
}

impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, 'tcx> {
    compile_time_machine!(<'mir, 'tcx>);

@@ -234,12 +266,45 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
        ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
        _unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        // Shared intrinsics.
        if ecx.emulate_intrinsic(instance, args, ret)? {
            return Ok(());
        }
        // An intrinsic that we do not support
        let intrinsic_name = ecx.tcx.item_name(instance.def_id());
        Err(ConstEvalErrKind::NeedsRfc(format!("calling intrinsic `{}`", intrinsic_name)).into())

        // CTFE-specific intrinsics.
        let (dest, ret) = match ret {
            None => {
                return Err(ConstEvalErrKind::NeedsRfc(format!(
                    "calling intrinsic `{}`",
                    intrinsic_name
                ))
                .into());
            }
            Some(p) => p,
        };
        match intrinsic_name {
            sym::ptr_guaranteed_eq | sym::ptr_guaranteed_ne => {
                let a = ecx.read_immediate(args[0])?.to_scalar()?;
                let b = ecx.read_immediate(args[1])?.to_scalar()?;
                let cmp = if intrinsic_name == sym::ptr_guaranteed_eq {
                    ecx.guaranteed_eq(a, b)
                } else {
                    ecx.guaranteed_ne(a, b)
                };
                ecx.write_scalar(Scalar::from_bool(cmp), dest)?;
            }
            _ => {
                return Err(ConstEvalErrKind::NeedsRfc(format!(
                    "calling intrinsic `{}`",
                    intrinsic_name
                ))
                .into());
            }
        }

        ecx.go_to_block(ret);
        Ok(())
    }

    fn assert_panic(
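
That is the whole `machine.rs` side. As a reading aid, the decision procedure in the new `guaranteed_eq`/`guaranteed_ne` methods above can be modeled as a small self-contained program; `AbstractValue` and `in_bounds` below are illustrative stand-ins, not the interpreter's real `Scalar` type or memory API:

```rust
/// Simplified stand-in for the interpreter's `Scalar`: either a raw integer or an
/// abstract pointer into some allocation, with a flag for "known to be in bounds".
#[derive(Clone, Copy)]
enum AbstractValue {
    Raw { data: u128 },
    Ptr { in_bounds: bool },
}

/// Mirrors `guaranteed_eq`: returns `true` only if equality is certain at compile time.
fn guaranteed_eq(a: AbstractValue, b: AbstractValue) -> bool {
    match (a, b) {
        // Integer/integer comparisons are always decidable.
        (AbstractValue::Raw { data: x }, AbstractValue::Raw { data: y }) => x == y,
        // A pointer's runtime address is unknown, so equality with an integer
        // can never be guaranteed...
        (AbstractValue::Raw { .. }, AbstractValue::Ptr { .. })
        | (AbstractValue::Ptr { .. }, AbstractValue::Raw { .. }) => false,
        // ...and pointer/pointer equality is answered conservatively, because some
        // allocations (functions, vtables) have no stable address (#73722).
        (AbstractValue::Ptr { .. }, AbstractValue::Ptr { .. }) => false,
    }
}

/// Mirrors `guaranteed_ne`: returns `true` only if inequality is certain at compile time.
fn guaranteed_ne(a: AbstractValue, b: AbstractValue) -> bool {
    match (a, b) {
        (AbstractValue::Raw { data: x }, AbstractValue::Raw { data: y }) => x != y,
        // An in-bounds pointer cannot be null, so comparison against 0 is decidable.
        (AbstractValue::Raw { data: 0 }, AbstractValue::Ptr { in_bounds })
        | (AbstractValue::Ptr { in_bounds }, AbstractValue::Raw { data: 0 }) => in_bounds,
        // Any other integer might or might not be the pointer's eventual address.
        (AbstractValue::Raw { .. }, AbstractValue::Ptr { .. })
        | (AbstractValue::Ptr { .. }, AbstractValue::Raw { .. }) => false,
        // Pointers into two different allocations could sometimes be decided,
        // but the commit leaves that as a FIXME and stays conservative.
        (AbstractValue::Ptr { .. }, AbstractValue::Ptr { .. }) => false,
    }
}

fn main() {
    let p = AbstractValue::Ptr { in_bounds: true };
    assert!(guaranteed_eq(AbstractValue::Raw { data: 1 }, AbstractValue::Raw { data: 1 }));
    assert!(guaranteed_ne(AbstractValue::Raw { data: 0 }, p));
    assert!(!guaranteed_eq(p, p)); // conservatively "don't know"
}
```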
43 changes: 2 additions & 41 deletions compiler/rustc_mir/src/interpret/intrinsics.rs
@@ -88,6 +88,8 @@ crate fn eval_nullary_intrinsic<'tcx>(

impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
    /// Returns `true` if emulation happened.
    /// Here we implement the intrinsics that are common to all Miri instances; individual machines can add their own
    /// intrinsic handling.
    pub fn emulate_intrinsic(
        &mut self,
        instance: ty::Instance<'tcx>,
@@ -328,16 +330,6 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                let offset_ptr = ptr.ptr_wrapping_signed_offset(offset_bytes, self);
                self.write_scalar(offset_ptr, dest)?;
            }
            sym::ptr_guaranteed_eq | sym::ptr_guaranteed_ne => {
                let a = self.read_immediate(args[0])?.to_scalar()?;
                let b = self.read_immediate(args[1])?.to_scalar()?;
                let cmp = if intrinsic_name == sym::ptr_guaranteed_eq {
                    self.guaranteed_eq(a, b)
                } else {
                    self.guaranteed_ne(a, b)
                };
                self.write_scalar(Scalar::from_bool(cmp), dest)?;
            }
            sym::ptr_offset_from => {
                let a = self.read_immediate(args[0])?.to_scalar()?;
                let b = self.read_immediate(args[1])?.to_scalar()?;
@@ -448,37 +440,6 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
        Ok(true)
    }

    fn guaranteed_eq(&mut self, a: Scalar<M::PointerTag>, b: Scalar<M::PointerTag>) -> bool {
        match (a, b) {
            // Comparisons between integers are always known.
            (Scalar::Raw { .. }, Scalar::Raw { .. }) => a == b,
            // Equality with integers can never be known for sure.
            (Scalar::Raw { .. }, Scalar::Ptr(_)) | (Scalar::Ptr(_), Scalar::Raw { .. }) => false,
            // FIXME: return `true` for when both sides are the same pointer, *except* that
            // some things (like functions and vtables) do not have stable addresses
            // so we need to be careful around them.
            (Scalar::Ptr(_), Scalar::Ptr(_)) => false,
        }
    }

    fn guaranteed_ne(&mut self, a: Scalar<M::PointerTag>, b: Scalar<M::PointerTag>) -> bool {
        match (a, b) {
            // Comparisons between integers are always known.
            (Scalar::Raw { .. }, Scalar::Raw { .. }) => a != b,
            // Comparisons of abstract pointers with null pointers are known if the pointer
            // is in bounds, because if they are in bounds, the pointer can't be null.
            (Scalar::Raw { data: 0, .. }, Scalar::Ptr(ptr))
            | (Scalar::Ptr(ptr), Scalar::Raw { data: 0, .. }) => !self.memory.ptr_may_be_null(ptr),
            // Inequality with integers other than null can never be known for sure.
            (Scalar::Raw { .. }, Scalar::Ptr(_)) | (Scalar::Ptr(_), Scalar::Raw { .. }) => false,
            // FIXME: return `true` for at least some comparisons where we can reliably
            // determine the result of runtime inequality tests at compile-time.
            // Examples include comparison of addresses in static items, for these we can
            // give reliable results.
            (Scalar::Ptr(_), Scalar::Ptr(_)) => false,
        }
    }

    pub fn exact_div(
        &mut self,
        a: ImmTy<'tcx, M::PointerTag>,
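
Taken together, the two files implement a layering: `emulate_intrinsic` remains the shared core for every interpreter instance, and each `Machine` adds its own intrinsics in its `call_intrinsic` hook, which is where `ptr_guaranteed_eq`/`ptr_guaranteed_ne` now live for CTFE. A minimal, hypothetical sketch of that layering (types, names, and signatures below are invented for illustration and are not rustc's):

```rust
/// Shared core: returns `Ok(true)` if it emulated the intrinsic itself.
fn emulate_common_intrinsic(name: &str) -> Result<bool, String> {
    match name {
        // Handled identically for every machine (CTFE, Miri, ...).
        "size_of" | "min_align_of" | "wrapping_add" => Ok(true),
        // Not a shared intrinsic: let the machine decide.
        _ => Ok(false),
    }
}

trait Machine {
    /// Called for every intrinsic; machine-specific intrinsics are layered
    /// on top of the shared core.
    fn call_intrinsic(&mut self, name: &str) -> Result<(), String>;
}

/// The compile-time (CTFE) machine adds the pointer-comparison intrinsics on top.
struct CompileTimeMachine;

impl Machine for CompileTimeMachine {
    fn call_intrinsic(&mut self, name: &str) -> Result<(), String> {
        // Shared intrinsics.
        if emulate_common_intrinsic(name)? {
            return Ok(());
        }
        // CTFE-specific intrinsics.
        match name {
            "ptr_guaranteed_eq" | "ptr_guaranteed_ne" => Ok(()), // handled only here now
            _ => Err(format!("calling intrinsic `{}` needs an RFC", name)),
        }
    }
}

fn main() {
    let mut machine = CompileTimeMachine;
    assert!(machine.call_intrinsic("ptr_guaranteed_eq").is_ok());
    assert!(machine.call_intrinsic("frobnicate").is_err());
}
```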
