const-eval: full support for pointer fragments #144081

Status: Open. Wants to merge 1 commit into base: master.
11 changes: 6 additions & 5 deletions compiler/rustc_const_eval/messages.ftl
@@ -57,7 +57,7 @@ const_eval_const_context = {$kind ->
}

const_eval_const_heap_ptr_in_final = encountered `const_allocate` pointer in final value that was not made global
-    .note = use `const_make_global` to make allocated pointers immutable before returning
+    .note = use `const_make_global` to turn allocated pointers into immutable globals before returning

const_eval_const_make_global_ptr_already_made_global = attempting to call `const_make_global` twice on the same allocation {$alloc}

@@ -231,6 +231,9 @@ const_eval_mutable_borrow_escaping =

const_eval_mutable_ptr_in_final = encountered mutable pointer in final value of {const_eval_intern_kind}

+const_eval_partial_pointer_in_final = encountered partial pointer in final value of {const_eval_intern_kind}
+    .note = while pointers can be broken apart into individual bytes during const-evaluation, only complete pointers (with all their bytes in the right order) are supported in the final value

const_eval_nested_static_in_thread_local = #[thread_local] does not support implicit nested statics, please create explicit static items and refer to them instead

const_eval_non_const_await =
@@ -299,10 +302,8 @@ const_eval_panic = evaluation panicked: {$msg}

const_eval_panic_non_str = argument to `panic!()` in a const context must have type `&str`

-const_eval_partial_pointer_copy =
-    unable to copy parts of a pointer from memory at {$ptr}
-const_eval_partial_pointer_overwrite =
-    unable to overwrite parts of a pointer in memory at {$ptr}
+const_eval_partial_pointer_read =
+    unable to read parts of a pointer from memory at {$ptr}
const_eval_pointer_arithmetic_overflow =
overflowing pointer arithmetic: the total offset in bytes does not fit in an `isize`

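For context on the new `const_eval_partial_pointer_in_final` message: the following is a hypothetical const (not taken from this PR's tests; assumes a 64-bit target) whose final value keeps only half of a pointer's bytes. With this PR the byte-level reads succeed during evaluation, but interning rejects the leftover fragments:

```rust
// Illustrative only. A reference is 8 bytes on a 64-bit target; we reinterpret
// it as raw bytes and let just half of them escape into the final value.
// Evaluating the byte reads now works (pointer fragments are supported), but
// the final value contains a partial pointer, which interning reports via the
// new `InternError::PartialPointer` path.
const HALF_PTR: [u8; 4] = {
    let r: &i32 = &42;
    let bytes: [u8; 8] = unsafe { core::mem::transmute(r) };
    [bytes[0], bytes[1], bytes[2], bytes[3]]
};
```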
7 changes: 7 additions & 0 deletions compiler/rustc_const_eval/src/const_eval/eval_queries.rs
@@ -117,6 +117,13 @@ fn eval_body_using_ecx<'tcx, R: InterpretationResult<'tcx>>(
ecx.tcx.dcx().emit_err(errors::ConstHeapPtrInFinal { span: ecx.tcx.span }),
)));
}
+        Err(InternError::PartialPointer) => {
+            throw_inval!(AlreadyReported(ReportedErrorInfo::non_const_eval_error(
+                ecx.tcx
+                    .dcx()
+                    .emit_err(errors::PartialPtrInFinal { span: ecx.tcx.span, kind: intern_kind }),
+            )));
+        }
}

interp_ok(R::make_result(ret, ecx))
16 changes: 12 additions & 4 deletions compiler/rustc_const_eval/src/errors.rs
@@ -51,6 +51,15 @@ pub(crate) struct ConstHeapPtrInFinal {
pub span: Span,
}

+#[derive(Diagnostic)]
+#[diag(const_eval_partial_pointer_in_final)]
+#[note]
+pub(crate) struct PartialPtrInFinal {
+    #[primary_span]
+    pub span: Span,
+    pub kind: InternKind,
+}

#[derive(Diagnostic)]
#[diag(const_eval_unstable_in_stable_exposed)]
pub(crate) struct UnstableInStableExposed {
@@ -832,8 +841,7 @@ impl ReportErrorExt for UnsupportedOpInfo {
UnsupportedOpInfo::Unsupported(s) => s.clone().into(),
UnsupportedOpInfo::ExternTypeField => const_eval_extern_type_field,
UnsupportedOpInfo::UnsizedLocal => const_eval_unsized_local,
-            UnsupportedOpInfo::OverwritePartialPointer(_) => const_eval_partial_pointer_overwrite,
-            UnsupportedOpInfo::ReadPartialPointer(_) => const_eval_partial_pointer_copy,
+            UnsupportedOpInfo::ReadPartialPointer(_) => const_eval_partial_pointer_read,
UnsupportedOpInfo::ReadPointerAsInt(_) => const_eval_read_pointer_as_int,
UnsupportedOpInfo::ThreadLocalStatic(_) => const_eval_thread_local_static,
UnsupportedOpInfo::ExternStatic(_) => const_eval_extern_static,
@@ -844,7 +852,7 @@
use UnsupportedOpInfo::*;

use crate::fluent_generated::*;
-        if let ReadPointerAsInt(_) | OverwritePartialPointer(_) | ReadPartialPointer(_) = self {
+        if let ReadPointerAsInt(_) | ReadPartialPointer(_) = self {
diag.help(const_eval_ptr_as_bytes_1);
diag.help(const_eval_ptr_as_bytes_2);
}
@@ -856,7 +864,7 @@
| UnsupportedOpInfo::ExternTypeField
| Unsupported(_)
| ReadPointerAsInt(_) => {}
-            OverwritePartialPointer(ptr) | ReadPartialPointer(ptr) => {
+            ReadPartialPointer(ptr) => {
diag.arg("ptr", ptr);
}
ThreadLocalStatic(did) | ExternStatic(did) => rustc_middle::ty::tls::with(|tcx| {
28 changes: 14 additions & 14 deletions compiler/rustc_const_eval/src/interpret/intern.rs
@@ -79,11 +79,18 @@ fn intern_shallow<'tcx, M: CompileTimeMachine<'tcx>>(
// `const_make_global`. We want to error here, but we have to first put the
// allocation back into the `alloc_map` to keep things in a consistent state.
ecx.memory.alloc_map.insert(alloc_id, (kind, alloc));
ecx.tcx.dcx().delayed_bug("non-global heap allocation in const value");
return Err(InternError::ConstAllocNotGlobal);
}
}
MemoryKind::Stack | MemoryKind::CallerLocation => {}
}
+    if !alloc.provenance_merge_bytes(&ecx.tcx) {
+        // Per-byte provenance is not supported by backends, so we cannot accept it here.
+        ecx.memory.alloc_map.insert(alloc_id, (kind, alloc));
+        ecx.tcx.dcx().delayed_bug("partial pointer in const value");
+        return Err(InternError::PartialPointer);
+    }

// Set allocation mutability as appropriate. This is used by LLVM to put things into
// read-only memory, and also by Miri when evaluating other globals that
@@ -166,6 +173,7 @@ pub enum InternError {
BadMutablePointer,
DanglingPointer,
ConstAllocNotGlobal,
+    PartialPointer,
}

/// Intern `ret` and everything it references.
@@ -225,17 +233,14 @@ pub fn intern_const_alloc_recursive<'tcx, M: CompileTimeMachine<'tcx>>(
alloc.1.mutability = base_mutability;
alloc.1.provenance().ptrs().iter().map(|&(_, prov)| prov).collect()
} else {
-        intern_shallow(ecx, base_alloc_id, base_mutability, Some(&mut disambiguator))
-            .unwrap()
-            .collect()
+        intern_shallow(ecx, base_alloc_id, base_mutability, Some(&mut disambiguator))?.collect()
};
// We need to distinguish "has just been interned" from "was already in `tcx`",
// so we track this in a separate set.
let mut just_interned: FxHashSet<_> = std::iter::once(base_alloc_id).collect();
// Whether we encountered a bad mutable pointer.
// We want to first report "dangling" and then "mutable", so we need to delay reporting these
// errors.
-    let mut result = Ok(());
let mut found_bad_mutable_ptr = false;

// Keep interning as long as there are things to intern.
@@ -310,28 +315,23 @@
// okay with losing some potential for immutability here. This can anyway only affect
// `static mut`.
just_interned.insert(alloc_id);
-            match intern_shallow(ecx, alloc_id, inner_mutability, Some(&mut disambiguator)) {
-                Ok(nested) => todo.extend(nested),
-                Err(err) => {
-                    ecx.tcx.dcx().delayed_bug("error during const interning");
-                    result = Err(err);
-                }
-            }
+            let next = intern_shallow(ecx, alloc_id, inner_mutability, Some(&mut disambiguator))?;
+            todo.extend(next);
}
-    if found_bad_mutable_ptr && result.is_ok() {
+    if found_bad_mutable_ptr {
// We found a mutable pointer inside a const where inner allocations should be immutable,
// and there was no other error. This should usually never happen! However, this can happen
// in unleash-miri mode, so report it as a normal error then.
if ecx.tcx.sess.opts.unstable_opts.unleash_the_miri_inside_of_you {
-            result = Err(InternError::BadMutablePointer);
+            return Err(InternError::BadMutablePointer);
} else {
span_bug!(
ecx.tcx.span,
"the static const safety checks accepted a mutable pointer they should not have accepted"
);
}
}
-    result
+    Ok(())
}

/// Intern `ret`. This function assumes that `ret` references no other allocation.
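The load-bearing addition here is the `provenance_merge_bytes` check: before an allocation is interned, per-byte provenance must fold back into whole-pointer provenance, since codegen backends cannot represent loose fragments. A minimal standalone sketch of that merging rule, with made-up types standing in for rustc's real provenance map:

```rust
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct AllocId(u64);

/// One byte of provenance: the allocation it points into, and which byte of
/// the original pointer it was.
#[derive(Clone, Copy, Debug)]
struct ProvByte {
    alloc: AllocId,
    index: u8,
}

/// Fragments merge back into a single pointer only if every byte is present,
/// in order, and from the same allocation (the idea behind `provenance_merge_bytes`).
fn merge_bytes(frags: &[ProvByte], ptr_size: usize) -> Option<AllocId> {
    if frags.len() != ptr_size {
        return None; // missing bytes: a partial pointer
    }
    let alloc = frags[0].alloc;
    for (i, b) in frags.iter().enumerate() {
        if b.alloc != alloc || b.index as usize != i {
            return None; // mixed allocations or out-of-order bytes
        }
    }
    Some(alloc)
}

fn main() {
    let whole: Vec<ProvByte> =
        (0u8..8).map(|i| ProvByte { alloc: AllocId(1), index: i }).collect();
    assert_eq!(merge_bytes(&whole, 8), Some(AllocId(1)));
    // A truncated run of fragments cannot be merged back into a pointer.
    assert_eq!(merge_bytes(&whole[..4], 8), None);
}
```

In the real code the check runs per allocation during `intern_shallow`; failure surfaces as the new `InternError::PartialPointer` variant and ultimately as `const_eval_partial_pointer_in_final`.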
44 changes: 13 additions & 31 deletions compiler/rustc_const_eval/src/interpret/memory.rs
@@ -1309,29 +1309,20 @@ impl<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>
}

/// Mark the given sub-range (relative to this allocation reference) as uninitialized.
-    pub fn write_uninit(&mut self, range: AllocRange) -> InterpResult<'tcx> {
+    pub fn write_uninit(&mut self, range: AllocRange) {
let range = self.range.subrange(range);

-        self.alloc
-            .write_uninit(&self.tcx, range)
-            .map_err(|e| e.to_interp_error(self.alloc_id))
-            .into()
+        self.alloc.write_uninit(&self.tcx, range);
}

/// Mark the entire referenced range as uninitialized
-    pub fn write_uninit_full(&mut self) -> InterpResult<'tcx> {
-        self.alloc
-            .write_uninit(&self.tcx, self.range)
-            .map_err(|e| e.to_interp_error(self.alloc_id))
-            .into()
+    pub fn write_uninit_full(&mut self) {
+        self.alloc.write_uninit(&self.tcx, self.range);
}

/// Remove all provenance in the reference range.
-    pub fn clear_provenance(&mut self) -> InterpResult<'tcx> {
-        self.alloc
-            .clear_provenance(&self.tcx, self.range)
-            .map_err(|e| e.to_interp_error(self.alloc_id))
-            .into()
+    pub fn clear_provenance(&mut self) {
+        self.alloc.clear_provenance(&self.tcx, self.range);
}
}

@@ -1422,11 +1413,8 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {

// Side-step AllocRef and directly access the underlying bytes more efficiently.
// (We are staying inside the bounds here and all bytes do get overwritten so all is good.)
-        let alloc_id = alloc_ref.alloc_id;
-        let bytes = alloc_ref
-            .alloc
-            .get_bytes_unchecked_for_overwrite(&alloc_ref.tcx, alloc_ref.range)
-            .map_err(move |e| e.to_interp_error(alloc_id))?;
+        let bytes =
+            alloc_ref.alloc.get_bytes_unchecked_for_overwrite(&alloc_ref.tcx, alloc_ref.range);
// `zip` would stop when the first iterator ends; we want to definitely
// cover all of `bytes`.
for dest in bytes {
@@ -1508,10 +1496,8 @@
// `get_bytes_mut` will clear the provenance, which is correct,
// since we don't want to keep any provenance at the target.
// This will also error if copying partial provenance is not supported.
-            let provenance = src_alloc
-                .provenance()
-                .prepare_copy(src_range, dest_offset, num_copies, self)
-                .map_err(|e| e.to_interp_error(src_alloc_id))?;
+            let provenance =
+                src_alloc.provenance().prepare_copy(src_range, dest_offset, num_copies, self);
// Prepare a copy of the initialization mask.
let init = src_alloc.init_mask().prepare_copy(src_range);

@@ -1529,10 +1515,8 @@
dest_range,
)?;
// Yes we do overwrite all bytes in `dest_bytes`.
-            let dest_bytes = dest_alloc
-                .get_bytes_unchecked_for_overwrite_ptr(&tcx, dest_range)
-                .map_err(|e| e.to_interp_error(dest_alloc_id))?
-                .as_mut_ptr();
+            let dest_bytes =
+                dest_alloc.get_bytes_unchecked_for_overwrite_ptr(&tcx, dest_range).as_mut_ptr();

if init.no_bytes_init() {
// Fast path: If all bytes are `uninit` then there is nothing to copy. The target range
Expand All @@ -1541,9 +1525,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
// This also avoids writing to the target bytes so that the backing allocation is never
// touched if the bytes stay uninitialized for the whole interpreter execution. On contemporary
// operating system this can avoid physically allocating the page.
-            dest_alloc
-                .write_uninit(&tcx, dest_range)
-                .map_err(|e| e.to_interp_error(dest_alloc_id))?;
+            dest_alloc.write_uninit(&tcx, dest_range);
// `write_uninit` also resets the provenance, so we are done.
return interp_ok(());
}
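Making `prepare_copy` and the overwrite path infallible is what retires the old `const_eval_partial_pointer_copy` and `const_eval_partial_pointer_overwrite` errors: copying pointers in smaller-than-pointer chunks can now succeed. A hypothetical const that this is meant to unblock (illustrative names, not from this PR; assumes a toolchain where raw-pointer reads and writes are allowed in const):

```rust
use core::mem::{size_of, MaybeUninit};

// Illustrative only: a hand-rolled byte-wise copy of a reference during const
// evaluation. Each copied byte carries a pointer fragment; the destination
// becomes a usable pointer again once all fragments arrive in order.
const COPIED: &i32 = {
    let src: &i32 = &17;
    let mut dst = MaybeUninit::<&i32>::uninit();
    unsafe {
        let s = (&src as *const &i32).cast::<u8>();
        let d = dst.as_mut_ptr().cast::<u8>();
        let mut i = 0;
        while i < size_of::<&i32>() {
            *d.add(i) = *s.add(i); // moves one provenance fragment per byte
            i += 1;
        }
        dst.assume_init()
    }
};

fn main() {
    assert_eq!(*COPIED, 17);
}
```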
8 changes: 4 additions & 4 deletions compiler/rustc_const_eval/src/interpret/place.rs
@@ -700,7 +700,7 @@ where

match value {
Immediate::Scalar(scalar) => {
-                alloc.write_scalar(alloc_range(Size::ZERO, scalar.size()), scalar)
+                alloc.write_scalar(alloc_range(Size::ZERO, scalar.size()), scalar)?;
}
Immediate::ScalarPair(a_val, b_val) => {
let BackendRepr::ScalarPair(a, b) = layout.backend_repr else {
@@ -720,10 +720,10 @@
alloc.write_scalar(alloc_range(Size::ZERO, a_val.size()), a_val)?;
alloc.write_scalar(alloc_range(b_offset, b_val.size()), b_val)?;
// We don't have to reset padding here, `write_immediate` will anyway do a validation run.
-                interp_ok(())
}
Immediate::Uninit => alloc.write_uninit_full(),
}
+        interp_ok(())
}

pub fn write_uninit(
@@ -743,7 +743,7 @@
// Zero-sized access
return interp_ok(());
};
-            alloc.write_uninit_full()?;
+            alloc.write_uninit_full();
}
}
interp_ok(())
@@ -767,7 +767,7 @@
// Zero-sized access
return interp_ok(());
};
-            alloc.clear_provenance()?;
+            alloc.clear_provenance();
}
}
interp_ok(())
4 changes: 2 additions & 2 deletions compiler/rustc_const_eval/src/interpret/validity.rs
@@ -949,7 +949,7 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
let padding_size = offset - padding_cleared_until;
let range = alloc_range(padding_start, padding_size);
trace!("reset_padding on {}: resetting padding range {range:?}", mplace.layout.ty);
-                alloc.write_uninit(range)?;
+                alloc.write_uninit(range);
}
padding_cleared_until = offset + size;
}
@@ -1239,7 +1239,7 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValueVisitor<'tcx, M> for ValidityVisitor<'rt,
if self.reset_provenance_and_padding {
// We can't share this with above as above, we might be looking at read-only memory.
let mut alloc = self.ecx.get_ptr_alloc_mut(mplace.ptr(), size)?.expect("we already excluded size 0");
-            alloc.clear_provenance()?;
+            alloc.clear_provenance();
// Also, mark this as containing data, not padding.
self.add_data_range(mplace.ptr(), size);
}