Rollup of 9 pull requests #120711

Closed. Wants to merge 29 commits.

Commits
7a2b663
interning doesn't check alignment anymore, because it doesn't do any …
oli-obk Jan 24, 2024
b6d0225
prefer instrumentation over entry/exit tracing statements
oli-obk Jan 24, 2024
a73c448
Prefer external iteration now that we don't actually recurse anymore
oli-obk Jan 24, 2024
a57a00e
separately intern the outermost alloc from the rest
oli-obk Jan 24, 2024
5d46b98
Document base vs nested alloc interning
oli-obk Jan 25, 2024
0f55e1b
Simplify `impl_zeroable_primitive` macro.
reitermarkus Jan 31, 2024
a5042de
Make `NonZero` constructors generic.
reitermarkus Jan 22, 2024
3cc601a
Switch OwnedStore handle count to AtomicU32
GnomedDev Jan 31, 2024
2e212b7
coverage: Split out counter increment sites from BCB node/edge counters
Zalathar Jan 25, 2024
f5d6eb3
hir: Stop keeping prefixes for most of `use` list stems
petrochenkov Jan 30, 2024
285d8c2
Suggest `[tail @ ..]` on `[..tail]` and `[...tail]` where `tail` is u…
fmease Feb 3, 2024
e8c3cbf
Make `Diagnostic::is_error` return false for `Level::FailureNote`.
nnethercote Jan 30, 2024
5dd0431
Tighten the assertion in `downgrade_to_delayed_bug`.
nnethercote Jan 31, 2024
c367386
Refactor `emit_diagnostic`.
nnethercote Jan 31, 2024
59e0bc2
Split `Level::DelayedBug` in two.
nnethercote Jan 31, 2024
d9508a1
Make `Emitter::emit_diagnostic` consuming.
nnethercote Feb 2, 2024
c94769a
Clarify order of operations during interning
oli-obk Feb 5, 2024
411967c
Zip together `place_ty` and `place_validity`
Nadrieril Jan 31, 2024
6cac1c4
Track `is_top_level` via `PlaceInfo`
Nadrieril Jan 31, 2024
aa3f1a2
Update test output.
reitermarkus Feb 6, 2024
ee000a0
Rollup merge of #120302 - oli-obk:const_intern_cleanups, r=RalfJung
matthiaskrgr Feb 6, 2024
4ee56d7
Rollup merge of #120520 - nnethercote:rename-good-path, r=oli-obk
matthiaskrgr Feb 6, 2024
3ad89af
Rollup merge of #120521 - reitermarkus:generic-nonzero-constructors, …
matthiaskrgr Feb 6, 2024
04d155f
Rollup merge of #120527 - GnomedDev:atomicu32-handle, r=petrochenkov
matthiaskrgr Feb 6, 2024
99baa1d
Rollup merge of #120564 - Zalathar:increment-site, r=oli-obk
matthiaskrgr Feb 6, 2024
a55fdcc
Rollup merge of #120575 - nnethercote:simplify-codegen-diag-handling,…
matthiaskrgr Feb 6, 2024
fae1071
Rollup merge of #120597 - fmease:sugg-on-js-style-spread-op-in-pat, r…
matthiaskrgr Feb 6, 2024
faad0e1
Rollup merge of #120609 - petrochenkov:nousestem2, r=compiler-errors
matthiaskrgr Feb 6, 2024
00794ce
Rollup merge of #120633 - Nadrieril:place_info, r=compiler-errors
matthiaskrgr Feb 6, 2024
19 changes: 13 additions & 6 deletions compiler/rustc_ast_lowering/src/item.rs
@@ -498,8 +498,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
}
}

let res =
self.expect_full_res_from_use(id).map(|res| self.lower_res(res)).collect();
let res = self.lower_import_res(id, path.span);
let path = self.lower_use_path(res, &path, ParamMode::Explicit);
hir::ItemKind::Use(path, hir::UseKind::Single)
}
@@ -535,7 +534,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
// for that we return the `{}` import (called the
// `ListStem`).

let prefix = Path { segments, span: prefix.span.to(path.span), tokens: None };
let span = prefix.span.to(path.span);
let prefix = Path { segments, span, tokens: None };

// Add all the nested `PathListItem`s to the HIR.
for &(ref use_tree, id) in trees {
@@ -569,9 +569,16 @@ impl<'hir> LoweringContext<'_, 'hir> {
});
}

let res =
self.expect_full_res_from_use(id).map(|res| self.lower_res(res)).collect();
let path = self.lower_use_path(res, &prefix, ParamMode::Explicit);
let path = if trees.is_empty() && !prefix.segments.is_empty() {
// For empty lists we need to lower the prefix so it is checked for things
// like stability later.
let res = self.lower_import_res(id, span);
self.lower_use_path(res, &prefix, ParamMode::Explicit)
} else {
// For non-empty lists we can just drop all the data, the prefix is already
// present in HIR as a part of nested imports.
self.arena.alloc(hir::UsePath { res: smallvec![], segments: &[], span })
};
hir::ItemKind::Use(path, hir::UseKind::ListStem)
}
}
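The empty-versus-non-empty distinction above is easier to see on the surface syntax. A minimal illustration in plain Rust (not compiler-internal code) of the two `use` list shapes the lowering distinguishes:

// Both items produce a "list stem" for the shared prefix during lowering.
// For the empty list the prefix itself must still be lowered and checked
// (e.g. for stability); for a non-empty list the prefix is already
// represented in HIR by the nested imports, so its data can be dropped.
use std::collections::{};                 // empty list: prefix kept
use std::collections::{HashMap, HashSet}; // non-empty list: prefix dropped

fn main() {
    let _map: HashMap<u32, u32> = HashMap::new();
    let _set: HashSet<u32> = HashSet::new();
}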
12 changes: 9 additions & 3 deletions compiler/rustc_ast_lowering/src/lib.rs
@@ -64,7 +64,7 @@ use rustc_middle::ty::{ResolverAstLowering, TyCtxt};
use rustc_session::parse::{add_feature_diagnostics, feature_err};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{DesugaringKind, Span, DUMMY_SP};
use smallvec::SmallVec;
use smallvec::{smallvec, SmallVec};
use std::collections::hash_map::Entry;
use thin_vec::ThinVec;

@@ -750,8 +750,14 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
self.resolver.get_partial_res(id).map_or(Res::Err, |pr| pr.expect_full_res())
}

fn expect_full_res_from_use(&mut self, id: NodeId) -> impl Iterator<Item = Res<NodeId>> {
self.resolver.get_import_res(id).present_items()
fn lower_import_res(&mut self, id: NodeId, span: Span) -> SmallVec<[Res; 3]> {
let res = self.resolver.get_import_res(id).present_items();
let res: SmallVec<_> = res.map(|res| self.lower_res(res)).collect();
if res.is_empty() {
self.dcx().span_delayed_bug(span, "no resolution for an import");
return smallvec![Res::Err];
}
res
}

fn make_lang_item_qpath(&mut self, lang_item: hir::LangItem, span: Span) -> hir::QPath<'hir> {
1 change: 1 addition & 0 deletions compiler/rustc_ast_lowering/src/path.rs
@@ -196,6 +196,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
p: &Path,
param_mode: ParamMode,
) -> &'hir hir::UsePath<'hir> {
assert!((1..=3).contains(&res.len()));
self.arena.alloc(hir::UsePath {
res,
segments: self.arena.alloc_from_iter(p.segments.iter().map(|segment| {
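The new assertion bounds the resolution count at three because a single import can resolve in at most the type, value, and macro namespaces. A self-contained sketch of how one `use` item carries more than one resolution:

// A unit struct puts its name in two namespaces at once, so a single
// `use` of it carries two resolutions; the bound of three is reached
// when a macro with the same name is importable as well.
mod m {
    pub struct S; // `S` is a type, and its constructor is a value
}

use m::S; // one import, two resolutions

fn main() {
    let _value: S = S; // uses both the type and the value namespace entries
}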
2 changes: 1 addition & 1 deletion compiler/rustc_codegen_ssa/src/back/write.rs
@@ -1810,7 +1810,7 @@ impl Translate for SharedEmitter {
}

impl Emitter for SharedEmitter {
fn emit_diagnostic(&mut self, diag: &rustc_errors::Diagnostic) {
fn emit_diagnostic(&mut self, diag: rustc_errors::Diagnostic) {
let args: FxHashMap<DiagnosticArgName, DiagnosticArgValue> =
diag.args().map(|(name, arg)| (name.clone(), arg.clone())).collect();
drop(self.sender.send(SharedEmitterMessage::Diagnostic(Diagnostic {
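The signature change from `&rustc_errors::Diagnostic` to `rustc_errors::Diagnostic` makes the emitter consuming. A minimal sketch (illustrative types, not rustc's API) of why a by-value signature helps:

use std::sync::mpsc::{channel, Sender};

// Stand-in for the real diagnostic type.
struct Diag {
    message: String,
}

struct ChannelEmitter {
    sender: Sender<String>,
}

impl ChannelEmitter {
    // Taking `diag` by value lets the emitter move fields out instead of
    // cloning them, which a `&Diag` signature would force.
    fn emit_diagnostic(&mut self, diag: Diag) {
        let _ = self.sender.send(diag.message); // move, no clone
    }
}

fn main() {
    let (tx, rx) = channel();
    let mut emitter = ChannelEmitter { sender: tx };
    emitter.emit_diagnostic(Diag { message: "oops".into() });
    assert_eq!(rx.recv().unwrap(), "oops");
}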
8 changes: 2 additions & 6 deletions compiler/rustc_const_eval/src/const_eval/eval_queries.rs
@@ -1,5 +1,3 @@
use std::mem;

use either::{Left, Right};

use rustc_hir::def::DefKind;
@@ -24,12 +22,13 @@ use crate::interpret::{
};

// Returns a pointer to where the result lives
#[instrument(level = "trace", skip(ecx, body), ret)]
fn eval_body_using_ecx<'mir, 'tcx>(
ecx: &mut CompileTimeEvalContext<'mir, 'tcx>,
cid: GlobalId<'tcx>,
body: &'mir mir::Body<'tcx>,
) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
debug!("eval_body_using_ecx: {:?}, {:?}", cid, ecx.param_env);
trace!(?ecx.param_env);
let tcx = *ecx.tcx;
assert!(
cid.promoted.is_some()
@@ -75,11 +74,8 @@ fn eval_body_using_ecx<'mir, 'tcx>(
None => InternKind::Constant,
}
};
let check_alignment = mem::replace(&mut ecx.machine.check_alignment, CheckAlignment::No); // interning doesn't need to respect alignment
intern_const_alloc_recursive(ecx, intern_kind, &ret)?;
ecx.machine.check_alignment = check_alignment;

debug!("eval_body_using_ecx done: {:?}", ret);
Ok(ret)
}

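The hunk above replaces a hand-written entry log with `#[instrument]`. A small sketch of that tracing pattern, assuming the `tracing` and `tracing-subscriber` crates are available:

use tracing::{instrument, trace};

// `#[instrument]` opens a span recording the function name and arguments
// on entry, and `ret` records the return value, replacing manual
// `debug!("f: {:?}", ...)` entry/exit statements.
#[instrument(level = "trace", skip(body), ret)]
fn eval(id: u32, body: Vec<u8>) -> usize {
    trace!(?id); // extra fields can still be emitted inside the span
    body.len()
}

fn main() {
    tracing_subscriber::fmt().with_max_level(tracing::Level::TRACE).init();
    eval(7, vec![1, 2, 3]);
}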
116 changes: 59 additions & 57 deletions compiler/rustc_const_eval/src/interpret/intern.rs
@@ -41,13 +41,12 @@ pub trait CompileTimeMachine<'mir, 'tcx: 'mir, T> = Machine<
/// allocation is interned immutably; if it is `Mutability::Mut`, then the allocation *must be*
/// already mutable (as a sanity check).
///
/// `recursive_alloc` is called for all recursively encountered allocations.
/// Returns an iterator over all relocations referred to by this allocation.
fn intern_shallow<'rt, 'mir, 'tcx, T, M: CompileTimeMachine<'mir, 'tcx, T>>(
ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
alloc_id: AllocId,
mutability: Mutability,
mut recursive_alloc: impl FnMut(&InterpCx<'mir, 'tcx, M>, CtfeProvenance),
) -> Result<(), ()> {
) -> Result<impl Iterator<Item = CtfeProvenance> + 'tcx, ()> {
trace!("intern_shallow {:?}", alloc_id);
// remove allocation
let Some((_kind, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) else {
@@ -65,14 +64,10 @@ fn intern_shallow<'rt, 'mir, 'tcx, T, M: CompileTimeMachine<'mir, 'tcx, T>>(
assert_eq!(alloc.mutability, Mutability::Mut);
}
}
// record child allocations
for &(_, prov) in alloc.provenance().ptrs().iter() {
recursive_alloc(ecx, prov);
}
// link the alloc id to the actual allocation
let alloc = ecx.tcx.mk_const_alloc(alloc);
ecx.tcx.set_alloc_id_memory(alloc_id, alloc);
Ok(())
Ok(alloc.0.0.provenance().ptrs().iter().map(|&(_, prov)| prov))
}

/// How a constant value should be interned.
@@ -128,12 +123,16 @@ pub fn intern_const_alloc_recursive<
}
};

// Initialize recursive interning.
// Intern the base allocation, and initialize todo list for recursive interning.
let base_alloc_id = ret.ptr().provenance.unwrap().alloc_id();
let mut todo = vec![(base_alloc_id, base_mutability)];
// First we intern the base allocation, as it requires a different mutability.
// This gives us the initial set of nested allocations, which will then all be processed
// recursively in the loop below.
let mut todo: Vec<_> =
intern_shallow(ecx, base_alloc_id, base_mutability).unwrap().map(|prov| prov).collect();
// We need to distinguish "has just been interned" from "was already in `tcx`",
// so we track this in a separate set.
let mut just_interned = FxHashSet::default();
let mut just_interned: FxHashSet<_> = std::iter::once(base_alloc_id).collect();
// Whether we encountered a bad mutable pointer.
// We want to first report "dangling" and then "mutable", so we need to delay reporting these
// errors.
@@ -147,52 +146,56 @@ pub fn intern_const_alloc_recursive<
// raw pointers, so we cannot rely on validation to catch them -- and since interning runs
// before validation, and interning doesn't know the type of anything, this means we can't show
// better errors. Maybe we should consider doing validation before interning in the future.
while let Some((alloc_id, mutability)) = todo.pop() {
while let Some(prov) = todo.pop() {
let alloc_id = prov.alloc_id();
// Crucially, we check this *before* checking whether the `alloc_id`
// has already been interned. The point of this check is to ensure that when
// there are multiple pointers to the same allocation, they are *all* immutable.
// Therefore it would be bad if we only checked the first pointer to any given
// allocation.
// (It is likely not possible to actually have multiple pointers to the same allocation,
// so alternatively we could also check that and ICE if there are multiple such pointers.)
if intern_kind != InternKind::Promoted
&& inner_mutability == Mutability::Not
&& !prov.immutable()
{
if ecx.tcx.try_get_global_alloc(alloc_id).is_some()
&& !just_interned.contains(&alloc_id)
{
// This is a pointer to some memory from another constant. We encounter mutable
// pointers to such memory since we do not always track immutability through
// these "global" pointers. Allowing them is harmless; the point of these checks
// during interning is to justify why we intern the *new* allocations immutably,
// so we can completely ignore existing allocations. We also don't need to add
// this to the todo list, since after all it is already interned.
continue;
}
// Found a mutable pointer inside a const where inner allocations should be
// immutable. We exclude promoteds from this, since things like `&mut []` and
// `&None::<Cell<i32>>` lead to promotion that can produce mutable pointers. We rely
// on the promotion analysis not screwing up to ensure that it is sound to intern
// promoteds as immutable.
found_bad_mutable_pointer = true;
}
if ecx.tcx.try_get_global_alloc(alloc_id).is_some() {
// Already interned.
debug_assert!(!ecx.memory.alloc_map.contains_key(&alloc_id));
continue;
}
just_interned.insert(alloc_id);
intern_shallow(ecx, alloc_id, mutability, |ecx, prov| {
let alloc_id = prov.alloc_id();
if intern_kind != InternKind::Promoted
&& inner_mutability == Mutability::Not
&& !prov.immutable()
{
if ecx.tcx.try_get_global_alloc(alloc_id).is_some()
&& !just_interned.contains(&alloc_id)
{
// This is a pointer to some memory from another constant. We encounter mutable
// pointers to such memory since we do not always track immutability through
// these "global" pointers. Allowing them is harmless; the point of these checks
// during interning is to justify why we intern the *new* allocations immutably,
// so we can completely ignore existing allocations. We also don't need to add
// this to the todo list, since after all it is already interned.
return;
}
// Found a mutable pointer inside a const where inner allocations should be
// immutable. We exclude promoteds from this, since things like `&mut []` and
// `&None::<Cell<i32>>` lead to promotion that can produce mutable pointers. We rely
// on the promotion analysis not screwing up to ensure that it is sound to intern
// promoteds as immutable.
found_bad_mutable_pointer = true;
}
// We always intern with `inner_mutability`, and furthermore we ensured above that if
// that is "immutable", then there are *no* mutable pointers anywhere in the newly
// interned memory -- justifying that we can indeed intern immutably. However this also
// means we can *not* easily intern immutably here if `prov.immutable()` is true and
// `inner_mutability` is `Mut`: there might be other pointers to that allocation, and
// we'd have to somehow check that they are *all* immutable before deciding that this
// allocation can be made immutable. In the future we could consider analyzing all
// pointers before deciding which allocations can be made immutable; but for now we are
// okay with losing some potential for immutability here. This can anyway only affect
// `static mut`.
todo.push((alloc_id, inner_mutability));
})
.map_err(|()| {
// We always intern with `inner_mutability`, and furthermore we ensured above that if
// that is "immutable", then there are *no* mutable pointers anywhere in the newly
// interned memory -- justifying that we can indeed intern immutably. However this also
// means we can *not* easily intern immutably here if `prov.immutable()` is true and
// `inner_mutability` is `Mut`: there might be other pointers to that allocation, and
// we'd have to somehow check that they are *all* immutable before deciding that this
// allocation can be made immutable. In the future we could consider analyzing all
// pointers before deciding which allocations can be made immutable; but for now we are
// okay with losing some potential for immutability here. This can anyway only affect
// `static mut`.
todo.extend(intern_shallow(ecx, alloc_id, inner_mutability).map_err(|()| {
ecx.tcx.dcx().emit_err(DanglingPtrInFinal { span: ecx.tcx.span, kind: intern_kind })
})?;
})?);
}
if found_bad_mutable_pointer {
return Err(ecx
@@ -220,13 +223,13 @@ pub fn intern_const_alloc_for_constprop<
return Ok(());
}
// Move allocation to `tcx`.
intern_shallow(ecx, alloc_id, Mutability::Not, |_ecx, _| {
for _ in intern_shallow(ecx, alloc_id, Mutability::Not).map_err(|()| err_ub!(DeadLocal))? {
// We are not doing recursive interning, so we don't currently support provenance.
// (If this assertion ever triggers, we should just implement a
// proper recursive interning loop -- or just call `intern_const_alloc_recursive`.
panic!("`intern_const_alloc_for_constprop` called on allocation with nested provenance")
})
.map_err(|()| err_ub!(DeadLocal).into())
}
Ok(())
}

impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
@@ -247,15 +250,14 @@ impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
let dest = self.allocate(layout, MemoryKind::Stack)?;
f(self, &dest.clone().into())?;
let alloc_id = dest.ptr().provenance.unwrap().alloc_id(); // this was just allocated, it must have provenance
intern_shallow(self, alloc_id, Mutability::Not, |ecx, prov| {
for prov in intern_shallow(self, alloc_id, Mutability::Not).unwrap() {
// We are not doing recursive interning, so we don't currently support provenance.
// (If this assertion ever triggers, we should just implement a
// proper recursive interning loop -- or just call `intern_const_alloc_recursive`.
if !ecx.tcx.try_get_global_alloc(prov.alloc_id()).is_some() {
if !self.tcx.try_get_global_alloc(prov.alloc_id()).is_some() {
panic!("`intern_with_temp_alloc` with nested allocations");
}
})
.unwrap();
}
Ok(alloc_id)
}
}
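Taken together, the changes in this file replace recursion through a callback with external iteration: `intern_shallow` now returns the nested provenance, and the caller drains a worklist. A minimal sketch of that shape, with illustrative names and a toy allocation graph:

use std::collections::{HashMap, HashSet};

// Each allocation id maps to the ids its pointers refer to.
fn children(graph: &HashMap<u32, Vec<u32>>, id: u32) -> Vec<u32> {
    graph.get(&id).cloned().unwrap_or_default()
}

fn intern_all(base: u32, graph: &HashMap<u32, Vec<u32>>) -> HashSet<u32> {
    // Intern the base first (it may need a different mutability); its
    // children seed the todo list.
    let mut interned: HashSet<u32> = std::iter::once(base).collect();
    let mut todo: Vec<u32> = children(graph, base);

    while let Some(id) = todo.pop() {
        if !interned.insert(id) {
            continue; // already interned, akin to the `try_get_global_alloc` check
        }
        todo.extend(children(graph, id)); // external iteration, no recursion
    }
    interned
}

fn main() {
    let graph = HashMap::from([(0, vec![1, 2]), (1, vec![2]), (2, vec![])]);
    assert_eq!(intern_all(0, &graph).len(), 3);
}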
2 changes: 1 addition & 1 deletion compiler/rustc_error_messages/src/lib.rs
@@ -378,7 +378,7 @@ impl From<Cow<'static, str>> for DiagnosticMessage {
}
}

/// A workaround for "good path" ICEs when formatting types in disabled lints.
/// A workaround for good_path_delayed_bug ICEs when formatting types in disabled lints.
///
/// Delays formatting until `.into(): DiagnosticMessage` is used.
pub struct DelayDm<F>(pub F);
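`DelayDm` defers message formatting until the message is actually rendered. A sketch of the idea with simplified types (the real type converts into `DiagnosticMessage`, not `String`):

// Wrapping a closure delays the possibly expensive formatting until the
// conversion happens, so disabled lints never pay for it.
struct DelayDm<F>(pub F);

impl<F: FnOnce() -> String> From<DelayDm<F>> for String {
    fn from(delayed: DelayDm<F>) -> String {
        (delayed.0)() // formatting happens here, on demand
    }
}

fn main() {
    let msg = DelayDm(|| format!("expensive type: {:?}", vec![1, 2, 3]));
    let rendered: String = msg.into(); // only now is the string built
    assert!(rendered.contains("expensive"));
}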
22 changes: 13 additions & 9 deletions compiler/rustc_errors/src/annotate_snippet_emitter_writer.rs
@@ -44,15 +44,15 @@ impl Translate for AnnotateSnippetEmitter {

impl Emitter for AnnotateSnippetEmitter {
/// The entry point for the diagnostics generation
fn emit_diagnostic(&mut self, diag: &Diagnostic) {
fn emit_diagnostic(&mut self, mut diag: Diagnostic) {
let fluent_args = to_fluent_args(diag.args());

let mut children = diag.children.clone();
let (mut primary_span, suggestions) = self.primary_span_formatted(diag, &fluent_args);
let mut suggestions = diag.suggestions.unwrap_or(vec![]);
self.primary_span_formatted(&mut diag.span, &mut suggestions, &fluent_args);

self.fix_multispans_in_extern_macros_and_render_macro_backtrace(
&mut primary_span,
&mut children,
&mut diag.span,
&mut diag.children,
&diag.level,
self.macro_backtrace,
);
@@ -62,9 +62,9 @@ impl Emitter for AnnotateSnippetEmitter {
&diag.messages,
&fluent_args,
&diag.code,
&primary_span,
&children,
suggestions,
&diag.span,
&diag.children,
&suggestions,
);
}

@@ -85,7 +85,11 @@ fn source_string(file: Lrc<SourceFile>, line: &Line) -> String {
/// Maps `Diagnostic::Level` to `snippet::AnnotationType`
fn annotation_type_for_level(level: Level) -> AnnotationType {
match level {
Level::Bug | Level::DelayedBug(_) | Level::Fatal | Level::Error => AnnotationType::Error,
Level::Bug
| Level::Fatal
| Level::Error
| Level::DelayedBug
| Level::GoodPathDelayedBug => AnnotationType::Error,
Level::ForceWarning(_) | Level::Warning => AnnotationType::Warning,
Level::Note | Level::OnceNote => AnnotationType::Note,
Level::Help | Level::OnceHelp => AnnotationType::Help,
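The widened match arm reflects the commit that splits `Level::DelayedBug` in two. A toy sketch of why exhaustive matching pays off here, using illustrative enums rather than rustc's:

#[derive(Debug, PartialEq)]
enum AnnotationType { Error, Warning, Note, Help }

enum Level { Bug, Fatal, Error, DelayedBug, GoodPathDelayedBug, Warning, Note, Help }

// Because the match is exhaustive, adding a new `Level` variant (such as
// the split-out `GoodPathDelayedBug`) fails to compile until every
// mapping like this one is updated.
fn annotation_type_for_level(level: Level) -> AnnotationType {
    match level {
        Level::Bug
        | Level::Fatal
        | Level::Error
        | Level::DelayedBug
        | Level::GoodPathDelayedBug => AnnotationType::Error,
        Level::Warning => AnnotationType::Warning,
        Level::Note => AnnotationType::Note,
        Level::Help => AnnotationType::Help,
    }
}

fn main() {
    assert_eq!(annotation_type_for_level(Level::DelayedBug), AnnotationType::Error);
}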