Skip to content

fix handling of base address for TypeId allocations #144187

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit (branch names omitted in this capture)
Jul 21, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 5 additions & 13 deletions compiler/rustc_const_eval/src/const_eval/machine.rs
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Nothing really changes here, but I looked at the code to ensure it is correct for TypeId and realized some ways in which it, and the comments, could be improved.

Original file line number Diff line number Diff line change
Expand Up @@ -279,23 +279,15 @@ impl<'tcx> CompileTimeInterpCx<'tcx> {
fn guaranteed_cmp(&mut self, a: Scalar, b: Scalar) -> InterpResult<'tcx, u8> {
interp_ok(match (a, b) {
// Comparisons between integers are always known.
(Scalar::Int { .. }, Scalar::Int { .. }) => {
if a == b {
1
} else {
0
}
}
// Comparisons of abstract pointers with null pointers are known if the pointer
// is in bounds, because if they are in bounds, the pointer can't be null.
// Inequality with integers other than null can never be known for sure.
(Scalar::Int(int), ptr @ Scalar::Ptr(..))
| (ptr @ Scalar::Ptr(..), Scalar::Int(int))
(Scalar::Int(a), Scalar::Int(b)) => (a == b) as u8,
// Comparisons of null with an arbitrary scalar can be known if `scalar_may_be_null`
// indicates that the scalar can definitely *not* be null.
(Scalar::Int(int), ptr) | (ptr, Scalar::Int(int))
if int.is_null() && !self.scalar_may_be_null(ptr)? =>
{
0
}
// Equality with integers can never be known for sure.
// Other ways of comparing integers and pointers can never be known for sure.
(Scalar::Int { .. }, Scalar::Ptr(..)) | (Scalar::Ptr(..), Scalar::Int { .. }) => 2,
// FIXME: return a `1` for when both sides are the same pointer, *except* that
// some things (like functions and vtables) do not have stable addresses
Expand Down
16 changes: 13 additions & 3 deletions compiler/rustc_const_eval/src/interpret/memory.rs
Original file line number Diff line number Diff line change
Expand Up @@ -67,8 +67,10 @@ pub enum AllocKind {
LiveData,
/// A function allocation (that fn ptrs point to).
Function,
/// A "virtual" allocation, used for vtables and TypeId.
Virtual,
/// A vtable allocation.
VTable,
/// A TypeId allocation.
TypeId,
/// A dead allocation.
Dead,
}
Expand Down Expand Up @@ -952,7 +954,8 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
let kind = match global_alloc {
GlobalAlloc::Static { .. } | GlobalAlloc::Memory { .. } => AllocKind::LiveData,
GlobalAlloc::Function { .. } => bug!("We already checked function pointers above"),
GlobalAlloc::VTable { .. } | GlobalAlloc::TypeId { .. } => AllocKind::Virtual,
GlobalAlloc::VTable { .. } => AllocKind::VTable,
GlobalAlloc::TypeId { .. } => AllocKind::TypeId,
};
return AllocInfo::new(size, align, kind, mutbl);
}
Expand Down Expand Up @@ -1617,6 +1620,13 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
match self.ptr_try_get_alloc_id(ptr, 0) {
Ok((alloc_id, offset, _)) => {
let info = self.get_alloc_info(alloc_id);
if matches!(info.kind, AllocKind::TypeId) {
// We *could* actually precisely answer this question since here,
// the offset *is* the integer value. But the entire point of making
// this a pointer is not to leak the integer value, so we say everything
// might be null.
return interp_ok(true);
}
// If the pointer is in-bounds (including "at the end"), it is definitely not null.
if offset <= info.size {
return interp_ok(false);
Expand Down
58 changes: 34 additions & 24 deletions src/tools/miri/src/alloc_addresses/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -116,21 +116,26 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
let this = self.eval_context_ref();
let info = this.get_alloc_info(alloc_id);

// Miri's address assignment leaks state across thread boundaries, which is incompatible
// with GenMC execution. So we instead let GenMC assign addresses to allocations.
if let Some(genmc_ctx) = this.machine.data_race.as_genmc_ref() {
let addr = genmc_ctx.handle_alloc(&this.machine, info.size, info.align, memory_kind)?;
return interp_ok(addr);
}

let mut rng = this.machine.rng.borrow_mut();
// This is either called immediately after allocation (and then cached), or when
// adjusting `tcx` pointers (which never get freed). So assert that we are looking
// at a live allocation. This also ensures that we never re-assign an address to an
// allocation that previously had an address, but then was freed and the address
// information was removed.
assert!(!matches!(info.kind, AllocKind::Dead));

// TypeId allocations always have a "base address" of 0 (i.e., the relative offset is the
// hash fragment and therefore equal to the actual integer value).
if matches!(info.kind, AllocKind::TypeId) {
return interp_ok(0);
}

// Miri's address assignment leaks state across thread boundaries, which is incompatible
// with GenMC execution. So we instead let GenMC assign addresses to allocations.
if let Some(genmc_ctx) = this.machine.data_race.as_genmc_ref() {
let addr = genmc_ctx.handle_alloc(&this.machine, info.size, info.align, memory_kind)?;
return interp_ok(addr);
}

// This allocation does not have a base address yet, pick or reuse one.
if !this.machine.native_lib.is_empty() {
// In native lib mode, we use the "real" address of the bytes for this allocation.
Expand All @@ -157,7 +162,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
this.get_alloc_bytes_unchecked_raw(alloc_id)?
}
}
AllocKind::Function | AllocKind::Virtual => {
AllocKind::Function | AllocKind::VTable => {
// Allocate some dummy memory to get a unique address for this function/vtable.
let alloc_bytes = MiriAllocBytes::from_bytes(
&[0u8; 1],
Expand All @@ -169,12 +174,13 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
std::mem::forget(alloc_bytes);
ptr
}
AllocKind::Dead => unreachable!(),
AllocKind::TypeId | AllocKind::Dead => unreachable!(),
};
// We don't have to expose this pointer yet, we do that in `prepare_for_native_call`.
return interp_ok(base_ptr.addr().to_u64());
}
// We are not in native lib mode, so we control the addresses ourselves.
let mut rng = this.machine.rng.borrow_mut();
if let Some((reuse_addr, clock)) = global_state.reuse.take_addr(
&mut *rng,
info.size,
Expand Down Expand Up @@ -295,21 +301,25 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
// Store address in cache.
global_state.base_addr.try_insert(alloc_id, base_addr).unwrap();

// Also maintain the opposite mapping in `int_to_ptr_map`, ensuring we keep it sorted.
// We have a fast-path for the common case that this address is bigger than all previous ones.
let pos = if global_state
.int_to_ptr_map
.last()
.is_some_and(|(last_addr, _)| *last_addr < base_addr)
{
global_state.int_to_ptr_map.len()
} else {
global_state
// Also maintain the opposite mapping in `int_to_ptr_map`, ensuring we keep it
// sorted. We have a fast-path for the common case that this address is bigger than
// all previous ones. We skip this for allocations at address 0; those can't be
// real, they must be TypeId "fake allocations".
if base_addr != 0 {
let pos = if global_state
.int_to_ptr_map
.binary_search_by_key(&base_addr, |(addr, _)| *addr)
.unwrap_err()
};
global_state.int_to_ptr_map.insert(pos, (base_addr, alloc_id));
.last()
.is_some_and(|(last_addr, _)| *last_addr < base_addr)
{
global_state.int_to_ptr_map.len()
} else {
global_state
.int_to_ptr_map
.binary_search_by_key(&base_addr, |(addr, _)| *addr)
.unwrap_err()
};
global_state.int_to_ptr_map.insert(pos, (base_addr, alloc_id));
}

interp_ok(base_addr)
}
Expand Down
4 changes: 2 additions & 2 deletions src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -650,7 +650,7 @@ trait EvalContextPrivExt<'tcx, 'ecx>: crate::MiriInterpCxExt<'tcx> {
dcx.log_protector();
}
},
AllocKind::Function | AllocKind::Virtual | AllocKind::Dead => {
AllocKind::Function | AllocKind::VTable | AllocKind::TypeId | AllocKind::Dead => {
// No stacked borrows on these allocations.
}
}
Expand Down Expand Up @@ -1021,7 +1021,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
trace!("Stacked Borrows tag {tag:?} exposed in {alloc_id:?}");
alloc_extra.borrow_tracker_sb().borrow_mut().exposed_tags.insert(tag);
}
AllocKind::Function | AllocKind::Virtual | AllocKind::Dead => {
AllocKind::Function | AllocKind::VTable | AllocKind::TypeId | AllocKind::Dead => {
// No stacked borrows on these allocations.
}
}
Expand Down
2 changes: 1 addition & 1 deletion src/tools/miri/src/borrow_tracker/tree_borrows/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -673,7 +673,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
trace!("Tree Borrows tag {tag:?} exposed in {alloc_id:?}");
alloc_extra.borrow_tracker_tb().borrow_mut().expose_tag(tag);
}
AllocKind::Function | AllocKind::Virtual | AllocKind::Dead => {
AllocKind::Function | AllocKind::VTable | AllocKind::TypeId | AllocKind::Dead => {
// No tree borrows on these allocations.
}
}
Expand Down
Loading