diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 540e1eb157e2f..caf97abf78d9e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -361,8 +361,8 @@ jobs: os: macos-13 - name: dist-aarch64-apple env: - SCRIPT: "./x.py dist bootstrap --include-default-paths --host=aarch64-apple-darwin --target=aarch64-apple-darwin" - RUST_CONFIGURE_ARGS: "--enable-full-tools --enable-sanitizers --enable-profiler --set rust.jemalloc --set llvm.ninja=false --set rust.lto=thin" + SCRIPT: "./x.py dist bootstrap --include-default-paths --stage 2" + RUST_CONFIGURE_ARGS: "--build=x86_64-apple-darwin --host=aarch64-apple-darwin --target=aarch64-apple-darwin --enable-full-tools --enable-sanitizers --enable-profiler --disable-docs --set rust.jemalloc --set llvm.ninja=false" RUSTC_RETRY_LINKER_ON_SEGFAULT: 1 SELECT_XCODE: /Applications/Xcode_13.4.1.app USE_XCODE_CLANG: 1 @@ -372,20 +372,8 @@ jobs: NO_DEBUG_ASSERTIONS: 1 NO_OVERFLOW_CHECKS: 1 DIST_REQUIRE_ALL_TOOLS: 1 - os: macos-13-xlarge - - name: aarch64-apple - env: - SCRIPT: "./x.py --stage 2 test --host=aarch64-apple-darwin --target=aarch64-apple-darwin" - RUST_CONFIGURE_ARGS: "--enable-sanitizers --enable-profiler --set rust.jemalloc --set llvm.ninja=false" - RUSTC_RETRY_LINKER_ON_SEGFAULT: 1 - SELECT_XCODE: /Applications/Xcode_13.4.1.app - USE_XCODE_CLANG: 1 - MACOSX_DEPLOYMENT_TARGET: 11.0 - MACOSX_STD_DEPLOYMENT_TARGET: 11.0 - NO_LLVM_ASSERTIONS: 1 - NO_DEBUG_ASSERTIONS: 1 - NO_OVERFLOW_CHECKS: 1 - os: macos-13-xlarge + JEMALLOC_SYS_WITH_LG_PAGE: 14 + os: macos-13 - name: x86_64-msvc env: RUST_CONFIGURE_ARGS: "--build=x86_64-pc-windows-msvc --enable-profiler" diff --git a/Cargo.lock b/Cargo.lock index 5d78e29de0e08..b8192e333fe91 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -285,9 +285,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.0" +version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" +checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" [[package]] name = "block-buffer" @@ -537,7 +537,7 @@ checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b" [[package]] name = "clippy" -version = "0.1.76" +version = "0.1.77" dependencies = [ "anstream", "clippy_config", @@ -565,7 +565,7 @@ dependencies = [ [[package]] name = "clippy_config" -version = "0.1.76" +version = "0.1.77" dependencies = [ "rustc-semver", "serde", @@ -588,7 +588,7 @@ dependencies = [ [[package]] name = "clippy_lints" -version = "0.1.76" +version = "0.1.77" dependencies = [ "arrayvec", "cargo_metadata 0.15.4", @@ -613,7 +613,7 @@ dependencies = [ [[package]] name = "clippy_utils" -version = "0.1.76" +version = "0.1.77" dependencies = [ "arrayvec", "clippy_config", @@ -984,7 +984,7 @@ checksum = "a0afaad2b26fa326569eb264b1363e8ae3357618c43982b3f285f0774ce76b69" [[package]] name = "declare_clippy_lint" -version = "0.1.76" +version = "0.1.77" dependencies = [ "itertools", "quote", @@ -3370,7 +3370,7 @@ dependencies = [ name = "rustc_abi" version = "0.0.0" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.4.1", "rand", "rand_xoshiro", "rustc_data_structures", @@ -3401,7 +3401,7 @@ dependencies = [ name = "rustc_ast" version = "0.0.0" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.4.1", "memchr", "rustc_data_structures", "rustc_index", @@ -3552,7 +3552,7 @@ dependencies = [ name = "rustc_codegen_llvm" version = "0.0.0" 
dependencies = [ - "bitflags 1.3.2", + "bitflags 2.4.1", "itertools", "libc", "measureme", @@ -3587,7 +3587,7 @@ name = "rustc_codegen_ssa" version = "0.0.0" dependencies = [ "ar_archive_writer", - "bitflags 1.3.2", + "bitflags 2.4.1", "cc", "itertools", "jobserver", @@ -3654,7 +3654,7 @@ name = "rustc_data_structures" version = "0.0.0" dependencies = [ "arrayvec", - "bitflags 1.3.2", + "bitflags 2.4.1", "elsa", "ena", "indexmap", @@ -4121,7 +4121,7 @@ dependencies = [ name = "rustc_metadata" version = "0.0.0" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.4.1", "libloading 0.7.4", "odht", "rustc_ast", @@ -4151,7 +4151,7 @@ dependencies = [ name = "rustc_middle" version = "0.0.0" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.4.1", "derive_more", "either", "field-offset", @@ -4286,7 +4286,7 @@ dependencies = [ name = "rustc_parse" version = "0.0.0" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.4.1", "rustc_ast", "rustc_ast_pretty", "rustc_data_structures", @@ -4424,7 +4424,7 @@ dependencies = [ name = "rustc_resolve" version = "0.0.0" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.4.1", "pulldown-cmark", "rustc_arena", "rustc_ast", @@ -4463,7 +4463,7 @@ dependencies = [ name = "rustc_session" version = "0.0.0" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.4.1", "getopts", "libc", "rustc_ast", @@ -4521,7 +4521,7 @@ dependencies = [ name = "rustc_symbol_mangling" version = "0.0.0" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.4.1", "punycode", "rustc-demangle", "rustc_data_structures", @@ -4539,7 +4539,7 @@ dependencies = [ name = "rustc_target" version = "0.0.0" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.4.1", "object", "rustc_abi", "rustc_data_structures", @@ -4563,6 +4563,7 @@ checksum = "8ba09476327c4b70ccefb6180f046ef588c26a24cf5d269a9feba316eb4f029f" name = "rustc_trait_selection" version = "0.0.0" dependencies = [ + "itertools", "rustc_ast", "rustc_attr", "rustc_data_structures", @@ -4637,7 +4638,7 @@ dependencies = [ name = "rustc_type_ir" version = "0.0.0" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.4.1", "derivative", "rustc_data_structures", "rustc_index", @@ -4767,7 +4768,7 @@ version = "0.38.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "745ecfa778e66b2b63c88a61cb36e0eea109e803b0b86bf9879fbc77c70e86ed" dependencies = [ - "bitflags 2.4.0", + "bitflags 2.4.1", "errno", "libc", "linux-raw-sys", @@ -5205,9 +5206,9 @@ dependencies = [ [[package]] name = "sysinfo" -version = "0.29.2" +version = "0.29.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9557d0845b86eea8182f7b10dff120214fb6cd9fd937b6f4917714e546a38695" +checksum = "cd727fc423c2060f6c92d9534cef765c65a6ed3f428a03d7def74a8c4348e666" dependencies = [ "cfg-if", "core-foundation-sys", diff --git a/compiler/rustc_abi/Cargo.toml b/compiler/rustc_abi/Cargo.toml index e549724b1c0a5..5031e7a6705f0 100644 --- a/compiler/rustc_abi/Cargo.toml +++ b/compiler/rustc_abi/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" [dependencies] # tidy-alphabetical-start -bitflags = "1.2.1" +bitflags = "2.4.1" rand = { version = "0.8.4", default-features = false, optional = true } rand_xoshiro = { version = "0.6.0", optional = true } rustc_data_structures = { path = "../rustc_data_structures", optional = true } diff --git a/compiler/rustc_abi/src/lib.rs b/compiler/rustc_abi/src/lib.rs index eb42803f93e4e..549927d58987c 100644 --- a/compiler/rustc_abi/src/lib.rs +++ b/compiler/rustc_abi/src/lib.rs @@ -29,10 +29,12 @@ pub use layout::LayoutCalculator; 
/// instead of implementing everything in `rustc_middle`. pub trait HashStableContext {} +#[derive(Clone, Copy, PartialEq, Eq, Default)] +#[cfg_attr(feature = "nightly", derive(Encodable, Decodable, HashStable_Generic))] +pub struct ReprFlags(u8); + bitflags! { - #[derive(Default)] - #[cfg_attr(feature = "nightly", derive(Encodable, Decodable, HashStable_Generic))] - pub struct ReprFlags: u8 { + impl ReprFlags: u8 { const IS_C = 1 << 0; const IS_SIMD = 1 << 1; const IS_TRANSPARENT = 1 << 2; @@ -42,11 +44,12 @@ bitflags! { // the seed stored in `ReprOptions.layout_seed` const RANDOMIZE_LAYOUT = 1 << 4; // Any of these flags being set prevent field reordering optimisation. - const IS_UNOPTIMISABLE = ReprFlags::IS_C.bits - | ReprFlags::IS_SIMD.bits - | ReprFlags::IS_LINEAR.bits; + const IS_UNOPTIMISABLE = ReprFlags::IS_C.bits() + | ReprFlags::IS_SIMD.bits() + | ReprFlags::IS_LINEAR.bits(); } } +rustc_data_structures::external_bitflags_debug! { ReprFlags } #[derive(Copy, Clone, Debug, Eq, PartialEq)] #[cfg_attr(feature = "nightly", derive(Encodable, Decodable, HashStable_Generic))] diff --git a/compiler/rustc_ast/Cargo.toml b/compiler/rustc_ast/Cargo.toml index 59e3d85589a2e..937ee4bcd4266 100644 --- a/compiler/rustc_ast/Cargo.toml +++ b/compiler/rustc_ast/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" [dependencies] # tidy-alphabetical-start -bitflags = "1.2.1" +bitflags = "2.4.1" memchr = "2.5.0" rustc_data_structures = { path = "../rustc_data_structures" } rustc_index = { path = "../rustc_index" } diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs index 3496cfc38c84e..1812cc335a4f6 100644 --- a/compiler/rustc_ast/src/ast.rs +++ b/compiler/rustc_ast/src/ast.rs @@ -2171,9 +2171,10 @@ pub enum InlineAsmRegOrRegClass { RegClass(Symbol), } +#[derive(Clone, Copy, PartialEq, Eq, Hash, Encodable, Decodable, HashStable_Generic)] +pub struct InlineAsmOptions(u16); bitflags::bitflags! { - #[derive(Encodable, Decodable, HashStable_Generic)] - pub struct InlineAsmOptions: u16 { + impl InlineAsmOptions: u16 { const PURE = 1 << 0; const NOMEM = 1 << 1; const READONLY = 1 << 2; @@ -2186,6 +2187,12 @@ bitflags::bitflags! { } } +impl std::fmt::Debug for InlineAsmOptions { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + bitflags::parser::to_writer(self, f) + } +} + #[derive(Clone, PartialEq, Encodable, Decodable, Debug, Hash, HashStable_Generic)] pub enum InlineAsmTemplatePiece { String(String), @@ -2481,15 +2488,6 @@ pub enum Const { No, } -impl From for Const { - fn from(constness: BoundConstness) -> Self { - match constness { - BoundConstness::Maybe(span) => Self::Yes(span), - BoundConstness::Never => Self::No, - } - } -} - /// Item defaultness. /// For details see the [RFC #2532](https://github.com/rust-lang/rfcs/pull/2532). #[derive(Copy, Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] @@ -2543,6 +2541,8 @@ impl BoundPolarity { pub enum BoundConstness { /// `Type: Trait` Never, + /// `Type: const Trait` + Always(Span), /// `Type: ~const Trait` Maybe(Span), } @@ -2551,6 +2551,7 @@ impl BoundConstness { pub fn as_str(self) -> &'static str { match self { Self::Never => "", + Self::Always(_) => "const", Self::Maybe(_) => "~const", } } diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs index b0cd2ec981592..d62462b1ae33b 100644 --- a/compiler/rustc_ast/src/token.rs +++ b/compiler/rustc_ast/src/token.rs @@ -528,15 +528,6 @@ impl Token { } } - /// Returns `true` if the token can appear at the start of a generic bound. 
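// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the diff): the bitflags 2.x pattern this PR
// adopts for `ReprFlags` and `InlineAsmOptions`. The struct is declared
// outside the macro so ordinary derives can be attached to it, `bits` becomes
// a method, and `Debug` is written by hand via `bitflags::parser::to_writer`.
// `ExampleFlags` is a placeholder name, not a compiler type.
use std::fmt;

#[derive(Clone, Copy, PartialEq, Eq, Default)]
pub struct ExampleFlags(u8);

bitflags::bitflags! {
    impl ExampleFlags: u8 {
        const A = 1 << 0;
        const B = 1 << 1;
        // In bitflags 2.x, `bits()` is a method rather than a public field.
        const BOTH = ExampleFlags::A.bits() | ExampleFlags::B.bits();
    }
}

impl fmt::Debug for ExampleFlags {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        bitflags::parser::to_writer(self, f)
    }
}
// ---------------------------------------------------------------------------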
- pub fn can_begin_bound(&self) -> bool { - self.is_path_start() - || self.is_lifetime() - || self.is_keyword(kw::For) - || self == &Question - || self == &OpenDelim(Delimiter::Parenthesis) - } - /// Returns `true` if the token can appear at the start of an item. pub fn can_begin_item(&self) -> bool { match self.kind { diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs index 4c0c496584eb8..053468ff936af 100644 --- a/compiler/rustc_ast/src/tokenstream.rs +++ b/compiler/rustc_ast/src/tokenstream.rs @@ -26,7 +26,7 @@ use rustc_span::{sym, Span, Symbol, DUMMY_SP}; use smallvec::{smallvec, SmallVec}; use std::borrow::Cow; -use std::{cmp, fmt, iter, mem}; +use std::{cmp, fmt, iter}; /// When the main Rust parser encounters a syntax-extension invocation, it /// parses the arguments to the invocation as a token tree. This is a very @@ -81,14 +81,6 @@ impl TokenTree { } } - /// Modify the `TokenTree`'s span in-place. - pub fn set_span(&mut self, span: Span) { - match self { - TokenTree::Token(token, _) => token.span = span, - TokenTree::Delimited(dspan, ..) => *dspan = DelimSpan::from_single(span), - } - } - /// Create a `TokenTree::Token` with alone spacing. pub fn token_alone(kind: TokenKind, span: Span) -> TokenTree { TokenTree::Token(Token::new(kind, span), Spacing::Alone) @@ -461,19 +453,6 @@ impl TokenStream { t1.next().is_none() && t2.next().is_none() } - /// Applies the supplied function to each `TokenTree` and its index in `self`, returning a new `TokenStream` - /// - /// It is equivalent to `TokenStream::new(self.trees().cloned().enumerate().map(|(i, tt)| f(i, tt)).collect())`. - pub fn map_enumerated_owned( - mut self, - mut f: impl FnMut(usize, TokenTree) -> TokenTree, - ) -> TokenStream { - let owned = Lrc::make_mut(&mut self.0); // clone if necessary - // rely on vec's in-place optimizations to avoid another allocation - *owned = mem::take(owned).into_iter().enumerate().map(|(i, tree)| f(i, tree)).collect(); - self - } - /// Create a token stream containing a single token with alone spacing. The /// spacing used for the final token in a constructed stream doesn't matter /// because it's never used. 
In practice we arbitrarily use diff --git a/compiler/rustc_ast_lowering/messages.ftl b/compiler/rustc_ast_lowering/messages.ftl index 6bde4f2d8fa5a..fd94e7e9341d4 100644 --- a/compiler/rustc_ast_lowering/messages.ftl +++ b/compiler/rustc_ast_lowering/messages.ftl @@ -56,6 +56,9 @@ ast_lowering_functional_record_update_destructuring_assignment = functional record updates are not allowed in destructuring assignments .suggestion = consider removing the trailing pattern +ast_lowering_generic_param_default_in_binder = + defaults for generic parameters are not allowed in `for<...>` binders + ast_lowering_generic_type_with_parentheses = parenthesized type parameters may only be used with a `Fn` trait .label = only `Fn` traits may use parentheses diff --git a/compiler/rustc_ast_lowering/src/errors.rs b/compiler/rustc_ast_lowering/src/errors.rs index 11bb559719b9f..710690d0d86a4 100644 --- a/compiler/rustc_ast_lowering/src/errors.rs +++ b/compiler/rustc_ast_lowering/src/errors.rs @@ -395,3 +395,10 @@ pub enum BadReturnTypeNotation { span: Span, }, } + +#[derive(Diagnostic)] +#[diag(ast_lowering_generic_param_default_in_binder)] +pub(crate) struct GenericParamDefaultInBinder { + #[primary_span] + pub span: Span, +} diff --git a/compiler/rustc_ast_lowering/src/expr.rs b/compiler/rustc_ast_lowering/src/expr.rs index 7e63826647835..ba858d49acf6e 100644 --- a/compiler/rustc_ast_lowering/src/expr.rs +++ b/compiler/rustc_ast_lowering/src/expr.rs @@ -183,14 +183,6 @@ impl<'hir> LoweringContext<'_, 'hir> { self.arena.alloc_from_iter(arms.iter().map(|x| self.lower_arm(x))), hir::MatchSource::Normal, ), - ExprKind::Gen(capture_clause, block, GenBlockKind::Async) => self.make_async_expr( - *capture_clause, - e.id, - None, - e.span, - hir::CoroutineSource::Block, - |this| this.with_new_scopes(e.span, |this| this.lower_block_expr(block)), - ), ExprKind::Await(expr, await_kw_span) => self.lower_expr_await(*await_kw_span, expr), ExprKind::Closure(box Closure { binder, @@ -226,6 +218,22 @@ impl<'hir> LoweringContext<'_, 'hir> { *fn_arg_span, ), }, + ExprKind::Gen(capture_clause, block, genblock_kind) => { + let desugaring_kind = match genblock_kind { + GenBlockKind::Async => hir::CoroutineDesugaring::Async, + GenBlockKind::Gen => hir::CoroutineDesugaring::Gen, + GenBlockKind::AsyncGen => hir::CoroutineDesugaring::AsyncGen, + }; + self.make_desugared_coroutine_expr( + *capture_clause, + e.id, + None, + e.span, + desugaring_kind, + hir::CoroutineSource::Block, + |this| this.with_new_scopes(e.span, |this| this.lower_block_expr(block)), + ) + } ExprKind::Block(blk, opt_label) => { let opt_label = self.lower_label(*opt_label); hir::ExprKind::Block(self.lower_block(blk, opt_label.is_some()), opt_label) @@ -313,23 +321,6 @@ impl<'hir> LoweringContext<'_, 'hir> { rest, ) } - ExprKind::Gen(capture_clause, block, GenBlockKind::Gen) => self.make_gen_expr( - *capture_clause, - e.id, - None, - e.span, - hir::CoroutineSource::Block, - |this| this.with_new_scopes(e.span, |this| this.lower_block_expr(block)), - ), - ExprKind::Gen(capture_clause, block, GenBlockKind::AsyncGen) => self - .make_async_gen_expr( - *capture_clause, - e.id, - None, - e.span, - hir::CoroutineSource::Block, - |this| this.with_new_scopes(e.span, |this| this.lower_block_expr(block)), - ), ExprKind::Yield(opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()), ExprKind::Err => { hir::ExprKind::Err(self.dcx().span_delayed_bug(e.span, "lowered ExprKind::Err")) @@ -555,7 +546,7 @@ impl<'hir> LoweringContext<'_, 'hir> { fn lower_arm(&mut self, arm: &Arm) 
-> hir::Arm<'hir> { let pat = self.lower_pat(&arm.pat); - let mut guard = arm.guard.as_ref().map(|cond| { + let guard = arm.guard.as_ref().map(|cond| { if let ExprKind::Let(pat, scrutinee, span, is_recovered) = &cond.kind { hir::Guard::IfLet(self.arena.alloc(hir::Let { hir_id: self.next_id(), @@ -587,10 +578,8 @@ impl<'hir> LoweringContext<'_, 'hir> { } } else if let Some(body) = &arm.body { self.dcx().emit_err(NeverPatternWithBody { span: body.span }); - guard = None; } else if let Some(g) = &arm.guard { self.dcx().emit_err(NeverPatternWithGuard { span: g.span }); - guard = None; } // We add a fake `loop {}` arm body so that it typecks to `!`. @@ -612,214 +601,91 @@ impl<'hir> LoweringContext<'_, 'hir> { hir::Arm { hir_id, pat, guard, body, span } } - /// Lower an `async` construct to a coroutine that implements `Future`. + /// Lower/desugar a coroutine construct. /// - /// This results in: - /// - /// ```text - /// static move? |_task_context| -> { - /// - /// } - /// ``` - pub(super) fn make_async_expr( - &mut self, - capture_clause: CaptureBy, - closure_node_id: NodeId, - ret_ty: Option>, - span: Span, - async_coroutine_source: hir::CoroutineSource, - body: impl FnOnce(&mut Self) -> hir::Expr<'hir>, - ) -> hir::ExprKind<'hir> { - let output = ret_ty.unwrap_or_else(|| hir::FnRetTy::DefaultReturn(self.lower_span(span))); - - // Resume argument type: `ResumeTy` - let unstable_span = self.mark_span_with_reason( - DesugaringKind::Async, - self.lower_span(span), - Some(self.allow_gen_future.clone()), - ); - let resume_ty = hir::QPath::LangItem(hir::LangItem::ResumeTy, unstable_span); - let input_ty = hir::Ty { - hir_id: self.next_id(), - kind: hir::TyKind::Path(resume_ty), - span: unstable_span, - }; - - // The closure/coroutine `FnDecl` takes a single (resume) argument of type `input_ty`. - let fn_decl = self.arena.alloc(hir::FnDecl { - inputs: arena_vec![self; input_ty], - output, - c_variadic: false, - implicit_self: hir::ImplicitSelfKind::None, - lifetime_elision_allowed: false, - }); - - // Lower the argument pattern/ident. The ident is used again in the `.await` lowering. - let (pat, task_context_hid) = self.pat_ident_binding_mode( - span, - Ident::with_dummy_span(sym::_task_context), - hir::BindingAnnotation::MUT, - ); - let param = hir::Param { - hir_id: self.next_id(), - pat, - ty_span: self.lower_span(span), - span: self.lower_span(span), - }; - let params = arena_vec![self; param]; - - let body = self.lower_body(move |this| { - this.coroutine_kind = Some(hir::CoroutineKind::Desugared( - hir::CoroutineDesugaring::Async, - async_coroutine_source, - )); - - let old_ctx = this.task_context; - this.task_context = Some(task_context_hid); - let res = body(this); - this.task_context = old_ctx; - (params, res) - }); - - // `static |_task_context| -> { body }`: - hir::ExprKind::Closure(self.arena.alloc(hir::Closure { - def_id: self.local_def_id(closure_node_id), - binder: hir::ClosureBinder::Default, - capture_clause, - bound_generic_params: &[], - fn_decl, - body, - fn_decl_span: self.lower_span(span), - fn_arg_span: None, - movability: Some(hir::Movability::Static), - constness: hir::Constness::NotConst, - })) - } - - /// Lower a `gen` construct to a generator that implements `Iterator`. + /// In particular, this creates the correct async resume argument and `_task_context`. /// /// This results in: /// /// ```text - /// static move? |()| -> () { + /// static move? 
|<_task_context?>| -> { /// /// } /// ``` - pub(super) fn make_gen_expr( + pub(super) fn make_desugared_coroutine_expr( &mut self, capture_clause: CaptureBy, closure_node_id: NodeId, - _yield_ty: Option>, + return_ty: Option>, span: Span, + desugaring_kind: hir::CoroutineDesugaring, coroutine_source: hir::CoroutineSource, body: impl FnOnce(&mut Self) -> hir::Expr<'hir>, ) -> hir::ExprKind<'hir> { - let output = hir::FnRetTy::DefaultReturn(self.lower_span(span)); - - // The closure/generator `FnDecl` takes a single (resume) argument of type `input_ty`. - let fn_decl = self.arena.alloc(hir::FnDecl { - inputs: &[], - output, - c_variadic: false, - implicit_self: hir::ImplicitSelfKind::None, - lifetime_elision_allowed: false, - }); - - let body = self.lower_body(move |this| { - this.coroutine_kind = Some(hir::CoroutineKind::Desugared( - hir::CoroutineDesugaring::Gen, - coroutine_source, - )); - - let res = body(this); - (&[], res) - }); - - // `static |()| -> () { body }`: - hir::ExprKind::Closure(self.arena.alloc(hir::Closure { - def_id: self.local_def_id(closure_node_id), - binder: hir::ClosureBinder::Default, - capture_clause, - bound_generic_params: &[], - fn_decl, - body, - fn_decl_span: self.lower_span(span), - fn_arg_span: None, - movability: Some(Movability::Movable), - constness: hir::Constness::NotConst, - })) - } + let coroutine_kind = hir::CoroutineKind::Desugared(desugaring_kind, coroutine_source); + + // The `async` desugaring takes a resume argument and maintains a `task_context`, + // whereas a generator does not. + let (inputs, params, task_context): (&[_], &[_], _) = match desugaring_kind { + hir::CoroutineDesugaring::Async | hir::CoroutineDesugaring::AsyncGen => { + // Resume argument type: `ResumeTy` + let unstable_span = self.mark_span_with_reason( + DesugaringKind::Async, + self.lower_span(span), + Some(self.allow_gen_future.clone()), + ); + let resume_ty = self.make_lang_item_qpath(hir::LangItem::ResumeTy, unstable_span); + let input_ty = hir::Ty { + hir_id: self.next_id(), + kind: hir::TyKind::Path(resume_ty), + span: unstable_span, + }; + let inputs = arena_vec![self; input_ty]; - /// Lower a `async gen` construct to a generator that implements `AsyncIterator`. - /// - /// This results in: - /// - /// ```text - /// static move? |_task_context| -> () { - /// - /// } - /// ``` - pub(super) fn make_async_gen_expr( - &mut self, - capture_clause: CaptureBy, - closure_node_id: NodeId, - _yield_ty: Option>, - span: Span, - async_coroutine_source: hir::CoroutineSource, - body: impl FnOnce(&mut Self) -> hir::Expr<'hir>, - ) -> hir::ExprKind<'hir> { - let output = hir::FnRetTy::DefaultReturn(self.lower_span(span)); + // Lower the argument pattern/ident. The ident is used again in the `.await` lowering. 
+ let (pat, task_context_hid) = self.pat_ident_binding_mode( + span, + Ident::with_dummy_span(sym::_task_context), + hir::BindingAnnotation::MUT, + ); + let param = hir::Param { + hir_id: self.next_id(), + pat, + ty_span: self.lower_span(span), + span: self.lower_span(span), + }; + let params = arena_vec![self; param]; - // Resume argument type: `ResumeTy` - let unstable_span = self.mark_span_with_reason( - DesugaringKind::Async, - self.lower_span(span), - Some(self.allow_gen_future.clone()), - ); - let resume_ty = hir::QPath::LangItem(hir::LangItem::ResumeTy, unstable_span); - let input_ty = hir::Ty { - hir_id: self.next_id(), - kind: hir::TyKind::Path(resume_ty), - span: unstable_span, + (inputs, params, Some(task_context_hid)) + } + hir::CoroutineDesugaring::Gen => (&[], &[], None), }; - // The closure/coroutine `FnDecl` takes a single (resume) argument of type `input_ty`. + let output = + return_ty.unwrap_or_else(|| hir::FnRetTy::DefaultReturn(self.lower_span(span))); + let fn_decl = self.arena.alloc(hir::FnDecl { - inputs: arena_vec![self; input_ty], + inputs, output, c_variadic: false, implicit_self: hir::ImplicitSelfKind::None, lifetime_elision_allowed: false, }); - // Lower the argument pattern/ident. The ident is used again in the `.await` lowering. - let (pat, task_context_hid) = self.pat_ident_binding_mode( - span, - Ident::with_dummy_span(sym::_task_context), - hir::BindingAnnotation::MUT, - ); - let param = hir::Param { - hir_id: self.next_id(), - pat, - ty_span: self.lower_span(span), - span: self.lower_span(span), - }; - let params = arena_vec![self; param]; - let body = self.lower_body(move |this| { - this.coroutine_kind = Some(hir::CoroutineKind::Desugared( - hir::CoroutineDesugaring::AsyncGen, - async_coroutine_source, - )); + this.coroutine_kind = Some(coroutine_kind); let old_ctx = this.task_context; - this.task_context = Some(task_context_hid); + if task_context.is_some() { + this.task_context = task_context; + } let res = body(this); this.task_context = old_ctx; + (params, res) }); - // `static |_task_context| -> { body }`: + // `static |<_task_context?>| -> { }`: hir::ExprKind::Closure(self.arena.alloc(hir::Closure { def_id: self.local_def_id(closure_node_id), binder: hir::ClosureBinder::Default, @@ -829,7 +695,7 @@ impl<'hir> LoweringContext<'_, 'hir> { body, fn_decl_span: self.lower_span(span), fn_arg_span: None, - movability: Some(hir::Movability::Static), + kind: hir::ClosureKind::Coroutine(coroutine_kind), constness: hir::Constness::NotConst, })) } @@ -898,7 +764,7 @@ impl<'hir> LoweringContext<'_, 'hir> { let is_async_gen = match self.coroutine_kind { Some(hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _)) => false, Some(hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _)) => true, - Some(hir::CoroutineKind::Coroutine) + Some(hir::CoroutineKind::Coroutine(_)) | Some(hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _)) | None => { return hir::ExprKind::Err(self.dcx().emit_err(AwaitOnlyInAsyncFnAndBlocks { @@ -1086,7 +952,7 @@ impl<'hir> LoweringContext<'_, 'hir> { ) -> hir::ExprKind<'hir> { let (binder_clause, generic_params) = self.lower_closure_binder(binder); - let (body_id, coroutine_option) = self.with_new_scopes(fn_decl_span, move |this| { + let (body_id, closure_kind) = self.with_new_scopes(fn_decl_span, move |this| { let mut coroutine_kind = None; let body_id = this.lower_fn_body(decl, |this| { let e = this.lower_expr_mut(body); @@ -1094,7 +960,7 @@ impl<'hir> LoweringContext<'_, 'hir> { e }); let 
coroutine_option = - this.coroutine_movability_for_fn(decl, fn_decl_span, coroutine_kind, movability); + this.closure_movability_for_fn(decl, fn_decl_span, coroutine_kind, movability); (body_id, coroutine_option) }); @@ -1111,26 +977,26 @@ impl<'hir> LoweringContext<'_, 'hir> { body: body_id, fn_decl_span: self.lower_span(fn_decl_span), fn_arg_span: Some(self.lower_span(fn_arg_span)), - movability: coroutine_option, + kind: closure_kind, constness: self.lower_constness(constness), }); hir::ExprKind::Closure(c) } - fn coroutine_movability_for_fn( + fn closure_movability_for_fn( &mut self, decl: &FnDecl, fn_decl_span: Span, coroutine_kind: Option, movability: Movability, - ) -> Option { + ) -> hir::ClosureKind { match coroutine_kind { - Some(hir::CoroutineKind::Coroutine) => { + Some(hir::CoroutineKind::Coroutine(_)) => { if decl.inputs.len() > 1 { self.dcx().emit_err(CoroutineTooManyParameters { fn_decl_span }); } - Some(movability) + hir::ClosureKind::Coroutine(hir::CoroutineKind::Coroutine(movability)) } Some( hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _) @@ -1143,7 +1009,7 @@ impl<'hir> LoweringContext<'_, 'hir> { if movability == Movability::Static { self.dcx().emit_err(ClosureCannotBeStatic { fn_decl_span }); } - None + hir::ClosureKind::Closure } } } @@ -1204,11 +1070,12 @@ impl<'hir> LoweringContext<'_, 'hir> { None }; - let async_body = this.make_async_expr( + let async_body = this.make_desugared_coroutine_expr( capture_clause, inner_closure_id, async_ret_ty, body.span, + hir::CoroutineDesugaring::Async, hir::CoroutineSource::Closure, |this| this.with_new_scopes(fn_decl_span, |this| this.lower_expr_mut(body)), ); @@ -1235,7 +1102,7 @@ impl<'hir> LoweringContext<'_, 'hir> { body, fn_decl_span: self.lower_span(fn_decl_span), fn_arg_span: Some(self.lower_span(fn_arg_span)), - movability: None, + kind: hir::ClosureKind::Closure, constness: hir::Constness::NotConst, }); hir::ExprKind::Closure(c) @@ -1655,7 +1522,7 @@ impl<'hir> LoweringContext<'_, 'hir> { self.dcx().emit_err(AsyncCoroutinesNotSupported { span }), ); } - Some(hir::CoroutineKind::Coroutine) | None => { + Some(hir::CoroutineKind::Coroutine(_)) => { if !self.tcx.features().coroutines { rustc_session::parse::feature_err( &self.tcx.sess.parse_sess, @@ -1665,7 +1532,19 @@ impl<'hir> LoweringContext<'_, 'hir> { ) .emit(); } - self.coroutine_kind = Some(hir::CoroutineKind::Coroutine); + false + } + None => { + if !self.tcx.features().coroutines { + rustc_session::parse::feature_err( + &self.tcx.sess.parse_sess, + sym::coroutines, + span, + "yield syntax is experimental", + ) + .emit(); + } + self.coroutine_kind = Some(hir::CoroutineKind::Coroutine(Movability::Movable)); false } }; @@ -2115,11 +1994,9 @@ impl<'hir> LoweringContext<'_, 'hir> { lang_item: hir::LangItem, name: Symbol, ) -> hir::Expr<'hir> { + let qpath = self.make_lang_item_qpath(lang_item, self.lower_span(span)); let path = hir::ExprKind::Path(hir::QPath::TypeRelative( - self.arena.alloc(self.ty( - span, - hir::TyKind::Path(hir::QPath::LangItem(lang_item, self.lower_span(span))), - )), + self.arena.alloc(self.ty(span, hir::TyKind::Path(qpath))), self.arena.alloc(hir::PathSegment::new( Ident::new(name, span), self.next_id(), diff --git a/compiler/rustc_ast_lowering/src/item.rs b/compiler/rustc_ast_lowering/src/item.rs index 81457018b37af..c618953461cf6 100644 --- a/compiler/rustc_ast_lowering/src/item.rs +++ b/compiler/rustc_ast_lowering/src/item.rs @@ -339,9 +339,14 @@ impl<'hir> LoweringContext<'_, 'hir> { let itctx = ImplTraitContext::Universal; 
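// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the diff): what the unified
// `make_desugared_coroutine_expr` above produces, per its doc comment. The
// shapes below are schematic surface syntax, not literal HIR:
//
//     async move { fut.await }
//         ~> static move |_task_context: ResumeTy| -> /* return ty */ { /* lowered body */ }
//
//     gen { yield 1 }
//         ~> static || { /* lowered body */ }              // no resume argument
//
//     async gen { yield fut.await }
//         ~> static move |_task_context: ResumeTy| { /* lowered body */ }
//
// The async and async-gen flavours get the `ResumeTy` resume argument and a
// `_task_context` binding; plain `gen` gets neither. All three closures are
// now tagged with `hir::ClosureKind::Coroutine(..)` instead of carrying a
// separate `movability` field.
// ---------------------------------------------------------------------------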
let (generics, (trait_ref, lowered_ty)) = self.lower_generics(ast_generics, *constness, id, &itctx, |this| { + let constness = match *constness { + Const::Yes(span) => BoundConstness::Maybe(span), + Const::No => BoundConstness::Never, + }; + let trait_ref = trait_ref.as_ref().map(|trait_ref| { this.lower_trait_ref( - *constness, + constness, trait_ref, &ImplTraitContext::Disallowed(ImplTraitPosition::Trait), ) @@ -952,11 +957,7 @@ impl<'hir> LoweringContext<'_, 'hir> { params: &'hir [hir::Param<'hir>], value: hir::Expr<'hir>, ) -> hir::BodyId { - let body = hir::Body { - coroutine_kind: self.coroutine_kind, - params, - value: self.arena.alloc(value), - }; + let body = hir::Body { params, value: self.arena.alloc(value) }; let id = body.id(); debug_assert_eq!(id.hir_id.owner, self.current_hir_id_owner); self.bodies.push((id.hir_id.local_id, self.arena.alloc(body))); @@ -1208,33 +1209,20 @@ impl<'hir> LoweringContext<'_, 'hir> { this.expr_block(body) }; - // FIXME(gen_blocks): Consider unifying the `make_*_expr` functions. - let coroutine_expr = match coroutine_kind { - CoroutineKind::Async { .. } => this.make_async_expr( - CaptureBy::Value { move_kw: rustc_span::DUMMY_SP }, - closure_id, - None, - body.span, - hir::CoroutineSource::Fn, - mkbody, - ), - CoroutineKind::Gen { .. } => this.make_gen_expr( - CaptureBy::Value { move_kw: rustc_span::DUMMY_SP }, - closure_id, - None, - body.span, - hir::CoroutineSource::Fn, - mkbody, - ), - CoroutineKind::AsyncGen { .. } => this.make_async_gen_expr( - CaptureBy::Value { move_kw: rustc_span::DUMMY_SP }, - closure_id, - None, - body.span, - hir::CoroutineSource::Fn, - mkbody, - ), + let desugaring_kind = match coroutine_kind { + CoroutineKind::Async { .. } => hir::CoroutineDesugaring::Async, + CoroutineKind::Gen { .. } => hir::CoroutineDesugaring::Gen, + CoroutineKind::AsyncGen { .. } => hir::CoroutineDesugaring::AsyncGen, }; + let coroutine_expr = this.make_desugared_coroutine_expr( + CaptureBy::Value { move_kw: rustc_span::DUMMY_SP }, + closure_id, + None, + body.span, + desugaring_kind, + hir::CoroutineSource::Fn, + mkbody, + ); let hir_id = this.lower_node_id(closure_id); this.maybe_forward_track_caller(body.span, fn_id, hir_id); @@ -1253,11 +1241,13 @@ impl<'hir> LoweringContext<'_, 'hir> { coroutine_kind: Option, ) -> (&'hir hir::Generics<'hir>, hir::FnSig<'hir>) { let header = self.lower_fn_header(sig.header); + // Don't pass along the user-provided constness of trait associated functions; we don't want to + // synthesize a host effect param for them. We reject `const` on them during AST validation. 
+ let constness = if kind == FnDeclKind::Inherent { sig.header.constness } else { Const::No }; let itctx = ImplTraitContext::Universal; - let (generics, decl) = - self.lower_generics(generics, sig.header.constness, id, &itctx, |this| { - this.lower_fn_decl(&sig.decl, id, sig.span, kind, coroutine_kind) - }); + let (generics, decl) = self.lower_generics(generics, constness, id, &itctx, |this| { + this.lower_fn_decl(&sig.decl, id, sig.span, kind, coroutine_kind) + }); (generics, hir::FnSig { header, decl, span: self.lower_span(sig.span) }) } diff --git a/compiler/rustc_ast_lowering/src/lib.rs b/compiler/rustc_ast_lowering/src/lib.rs index e395411628886..fb59770d48a2b 100644 --- a/compiler/rustc_ast_lowering/src/lib.rs +++ b/compiler/rustc_ast_lowering/src/lib.rs @@ -33,6 +33,7 @@ #![allow(internal_features)] #![feature(rustdoc_internals)] #![doc(rust_logo)] +#![feature(if_let_guard)] #![feature(box_patterns)] #![feature(let_chains)] #![recursion_limit = "256"] @@ -770,6 +771,10 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { self.resolver.get_import_res(id).present_items() } + fn make_lang_item_qpath(&mut self, lang_item: hir::LangItem, span: Span) -> hir::QPath<'hir> { + hir::QPath::Resolved(None, self.make_lang_item_path(lang_item, span, None)) + } + fn make_lang_item_path( &mut self, lang_item: hir::LangItem, @@ -787,7 +792,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { hir_id: self.next_id(), res, args, - infer_args: false, + infer_args: args.is_none(), }]), }) } @@ -1324,7 +1329,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { span: t.span, }, itctx, - ast::Const::No, + ast::BoundConstness::Never, ); let bounds = this.arena.alloc_from_iter([bound]); let lifetime_bound = this.elided_dyn_bound(t.span); @@ -1429,19 +1434,21 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { let (bounds, lifetime_bound) = self.with_dyn_type_scope(true, |this| { let bounds = this.arena.alloc_from_iter(bounds.iter().filter_map(|bound| match bound { - GenericBound::Trait( - ty, - TraitBoundModifiers { - polarity: BoundPolarity::Positive | BoundPolarity::Negative(_), - constness, - }, - ) => Some(this.lower_poly_trait_ref(ty, itctx, (*constness).into())), - // We can safely ignore constness here, since AST validation - // will take care of invalid modifier combinations. - GenericBound::Trait( - _, - TraitBoundModifiers { polarity: BoundPolarity::Maybe(_), .. }, - ) => None, + // We can safely ignore constness here since AST validation + // takes care of rejecting invalid modifier combinations and + // const trait bounds in trait object types. + GenericBound::Trait(ty, modifiers) => match modifiers.polarity { + BoundPolarity::Positive | BoundPolarity::Negative(_) => { + Some(this.lower_poly_trait_ref( + ty, + itctx, + // Still, don't pass along the constness here; we don't want to + // synthesize any host effect args, it'd only cause problems. 
+ ast::BoundConstness::Never, + )) + } + BoundPolarity::Maybe(_) => None, + }, GenericBound::Outlives(lifetime) => { if lifetime_bound.is_none() { lifetime_bound = Some(this.lower_lifetime(lifetime)); @@ -2111,7 +2118,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { param: &GenericParam, source: hir::GenericParamSource, ) -> hir::GenericParam<'hir> { - let (name, kind) = self.lower_generic_param_kind(param); + let (name, kind) = self.lower_generic_param_kind(param, source); let hir_id = self.lower_node_id(param.id); self.lower_attrs(hir_id, ¶m.attrs); @@ -2130,6 +2137,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { fn lower_generic_param_kind( &mut self, param: &GenericParam, + source: hir::GenericParamSource, ) -> (hir::ParamName, hir::GenericParamKind<'hir>) { match ¶m.kind { GenericParamKind::Lifetime => { @@ -2148,22 +2156,51 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { (param_name, kind) } GenericParamKind::Type { default, .. } => { - let kind = hir::GenericParamKind::Type { - default: default.as_ref().map(|x| { + // Not only do we deny type param defaults in binders but we also map them to `None` + // since later compiler stages cannot handle them (and shouldn't need to be able to). + let default = default + .as_ref() + .filter(|_| match source { + hir::GenericParamSource::Generics => true, + hir::GenericParamSource::Binder => { + self.dcx().emit_err(errors::GenericParamDefaultInBinder { + span: param.span(), + }); + + false + } + }) + .map(|def| { self.lower_ty( - x, + def, &ImplTraitContext::Disallowed(ImplTraitPosition::GenericDefault), ) - }), - synthetic: false, - }; + }); + + let kind = hir::GenericParamKind::Type { default, synthetic: false }; (hir::ParamName::Plain(self.lower_ident(param.ident)), kind) } GenericParamKind::Const { ty, kw_span: _, default } => { let ty = self .lower_ty(ty, &ImplTraitContext::Disallowed(ImplTraitPosition::GenericDefault)); - let default = default.as_ref().map(|def| self.lower_anon_const(def)); + + // Not only do we deny const param defaults in binders but we also map them to `None` + // since later compiler stages cannot handle them (and shouldn't need to be able to). + let default = default + .as_ref() + .filter(|_| match source { + hir::GenericParamSource::Generics => true, + hir::GenericParamSource::Binder => { + self.dcx().emit_err(errors::GenericParamDefaultInBinder { + span: param.span(), + }); + + false + } + }) + .map(|def| self.lower_anon_const(def)); + ( hir::ParamName::Plain(self.lower_ident(param.ident)), hir::GenericParamKind::Const { ty, default, is_host_effect: false }, @@ -2174,7 +2211,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { fn lower_trait_ref( &mut self, - constness: ast::Const, + constness: ast::BoundConstness, p: &TraitRef, itctx: &ImplTraitContext, ) -> hir::TraitRef<'hir> { @@ -2197,7 +2234,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { &mut self, p: &PolyTraitRef, itctx: &ImplTraitContext, - constness: ast::Const, + constness: ast::BoundConstness, ) -> hir::PolyTraitRef<'hir> { let bound_generic_params = self.lower_lifetime_binder(p.trait_ref.ref_id, &p.bound_generic_params); @@ -2322,9 +2359,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { &mut self, modifiers: TraitBoundModifiers, ) -> hir::TraitBoundModifier { + // Invalid modifier combinations will cause an error during AST validation. + // Arbitrarily pick a placeholder for them to make compilation proceed. 
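// ---------------------------------------------------------------------------
// Illustrative summary (not part of the diff) of the bound-constness handling
// after this change; the effects/const-trait machinery is unstable, so treat
// this as a sketch rather than a guarantee:
//
//     T: Trait         -> hir::TraitBoundModifier::None        (no host-effect arg)
//     T: const Trait   -> hir::TraitBoundModifier::Const       (host arg lowered to the literal `false`)
//     T: ~const Trait  -> hir::TraitBoundModifier::MaybeConst  (host arg lowered to the enclosing `host` param)
//     T: ?Trait        -> hir::TraitBoundModifier::Maybe
//
// matching the new `ast::BoundConstness::{Never, Always, Maybe}` variants and
// the reworked `GenericArgsCtor::push_constness` below (host-effect args are
// only pushed when the `effects` feature is enabled).
// ---------------------------------------------------------------------------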
match (modifiers.constness, modifiers.polarity) { (BoundConstness::Never, BoundPolarity::Positive) => hir::TraitBoundModifier::None, - (BoundConstness::Never, BoundPolarity::Maybe(_)) => hir::TraitBoundModifier::Maybe, + (_, BoundPolarity::Maybe(_)) => hir::TraitBoundModifier::Maybe, (BoundConstness::Never, BoundPolarity::Negative(_)) => { if self.tcx.features().negative_bounds { hir::TraitBoundModifier::Negative @@ -2332,15 +2371,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { hir::TraitBoundModifier::None } } - (BoundConstness::Maybe(_), BoundPolarity::Positive) => { - hir::TraitBoundModifier::MaybeConst - } - // Invalid modifier combinations will cause an error during AST validation. - // Arbitrarily pick a placeholder for compilation to proceed. - (BoundConstness::Maybe(_), BoundPolarity::Maybe(_)) => hir::TraitBoundModifier::Maybe, - (BoundConstness::Maybe(_), BoundPolarity::Negative(_)) => { - hir::TraitBoundModifier::MaybeConst - } + (BoundConstness::Always(_), _) => hir::TraitBoundModifier::Const, + (BoundConstness::Maybe(_), _) => hir::TraitBoundModifier::MaybeConst, } } @@ -2558,45 +2590,62 @@ struct GenericArgsCtor<'hir> { } impl<'hir> GenericArgsCtor<'hir> { - fn push_constness(&mut self, lcx: &mut LoweringContext<'_, 'hir>, constness: ast::Const) { + fn push_constness( + &mut self, + lcx: &mut LoweringContext<'_, 'hir>, + constness: ast::BoundConstness, + ) { if !lcx.tcx.features().effects { return; } - // if bound is non-const, don't add host effect param - let ast::Const::Yes(span) = constness else { return }; + let (span, body) = match constness { + BoundConstness::Never => return, + BoundConstness::Always(span) => { + let span = lcx.lower_span(span); - let span = lcx.lower_span(span); + let body = hir::ExprKind::Lit( + lcx.arena.alloc(hir::Lit { node: LitKind::Bool(false), span }), + ); - let id = lcx.next_node_id(); - let hir_id = lcx.next_id(); + (span, body) + } + BoundConstness::Maybe(span) => { + let span = lcx.lower_span(span); - let Some(host_param_id) = lcx.host_param_id else { - lcx.dcx().span_delayed_bug( - span, - "no host param id for call in const yet no errors reported", - ); - return; - }; + let Some(host_param_id) = lcx.host_param_id else { + lcx.dcx().span_delayed_bug( + span, + "no host param id for call in const yet no errors reported", + ); + return; + }; - let body = lcx.lower_body(|lcx| { - (&[], { let hir_id = lcx.next_id(); let res = Res::Def(DefKind::ConstParam, host_param_id.to_def_id()); - let expr_kind = hir::ExprKind::Path(hir::QPath::Resolved( + let body = hir::ExprKind::Path(hir::QPath::Resolved( None, lcx.arena.alloc(hir::Path { span, res, - segments: arena_vec![lcx; hir::PathSegment::new(Ident { - name: sym::host, - span, - }, hir_id, res)], + segments: arena_vec![ + lcx; + hir::PathSegment::new( + Ident { name: sym::host, span }, + hir_id, + res + ) + ], }), )); - lcx.expr(span, expr_kind) - }) - }); + + (span, body) + } + }; + let body = lcx.lower_body(|lcx| (&[], lcx.expr(span, body))); + + let id = lcx.next_node_id(); + let hir_id = lcx.next_id(); let def_id = lcx.create_def( lcx.current_hir_id_owner.def_id, diff --git a/compiler/rustc_ast_lowering/src/path.rs b/compiler/rustc_ast_lowering/src/path.rs index 130eb3521c3f4..c679ee56fcd8b 100644 --- a/compiler/rustc_ast_lowering/src/path.rs +++ b/compiler/rustc_ast_lowering/src/path.rs @@ -25,7 +25,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { param_mode: ParamMode, itctx: &ImplTraitContext, // constness of the impl/bound if this is a trait path - constness: Option, + constness: Option, 
) -> hir::QPath<'hir> { let qself_position = qself.as_ref().map(|q| q.position); let qself = qself.as_ref().map(|q| self.lower_ty(&q.ty, itctx)); @@ -179,7 +179,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { param_mode: ParamMode, parenthesized_generic_args: ParenthesizedGenericArgs, itctx: &ImplTraitContext, - constness: Option, + constness: Option, ) -> hir::PathSegment<'hir> { debug!("path_span: {:?}, lower_path_segment(segment: {:?})", path_span, segment); let (mut generic_args, infer_args) = if let Some(generic_args) = segment.args.as_deref() { diff --git a/compiler/rustc_ast_passes/messages.ftl b/compiler/rustc_ast_passes/messages.ftl index ea3cd3e4bee2c..feea02c679ce1 100644 --- a/compiler/rustc_ast_passes/messages.ftl +++ b/compiler/rustc_ast_passes/messages.ftl @@ -46,6 +46,8 @@ ast_passes_const_and_c_variadic = functions cannot be both `const` and C-variadi .const = `const` because of this .variadic = C-variadic because of this +ast_passes_const_bound_trait_object = const trait bounds are not allowed in trait object types + ast_passes_const_without_body = free constant item without body .suggestion = provide a definition for the constant @@ -231,8 +233,21 @@ ast_passes_tilde_const_disallowed = `~const` is not allowed here .item = this item cannot have `~const` trait bounds ast_passes_trait_fn_const = - functions in traits cannot be declared const - .label = functions in traits cannot be const + functions in {$in_impl -> + [true] trait impls + *[false] traits + } cannot be declared const + .label = functions in {$in_impl -> + [true] trait impls + *[false] traits + } cannot be const + .const_context_label = this declares all associated functions implicitly const + .remove_const_sugg = remove the `const`{$requires_multiple_changes -> + [true] {" ..."} + *[false] {""} + } + .make_impl_const_sugg = ... and declare the impl to be const instead + .make_trait_const_sugg = ... and declare the trait to be a `#[const_trait]` instead ast_passes_trait_object_single_bound = only a single explicit lifetime bound is permitted diff --git a/compiler/rustc_ast_passes/src/ast_validation.rs b/compiler/rustc_ast_passes/src/ast_validation.rs index 3600e4960afd2..b69d4cccaf04f 100644 --- a/compiler/rustc_ast_passes/src/ast_validation.rs +++ b/compiler/rustc_ast_passes/src/ast_validation.rs @@ -46,6 +46,21 @@ enum DisallowTildeConstContext<'a> { Item, } +enum TraitOrTraitImpl<'a> { + Trait { span: Span, constness: Option }, + TraitImpl { constness: Const, polarity: ImplPolarity, trait_ref: &'a TraitRef }, +} + +impl<'a> TraitOrTraitImpl<'a> { + fn constness(&self) -> Option { + match self { + Self::Trait { constness: Some(span), .. } + | Self::TraitImpl { constness: Const::Yes(span), .. } => Some(*span), + _ => None, + } + } +} + struct AstValidator<'a> { session: &'a Session, features: &'a Features, @@ -53,11 +68,7 @@ struct AstValidator<'a> { /// The span of the `extern` in an `extern { ... }` block, if any. extern_mod: Option<&'a Item>, - /// Are we inside a trait impl? - in_trait_impl: bool, - - /// Are we inside a const trait defn or impl? 
- in_const_trait_or_impl: bool, + outer_trait_or_trait_impl: Option>, has_proc_macro_decls: bool, @@ -78,24 +89,28 @@ struct AstValidator<'a> { impl<'a> AstValidator<'a> { fn with_in_trait_impl( &mut self, - is_in: bool, - constness: Option, + trait_: Option<(Const, ImplPolarity, &'a TraitRef)>, f: impl FnOnce(&mut Self), ) { - let old = mem::replace(&mut self.in_trait_impl, is_in); - let old_const = mem::replace( - &mut self.in_const_trait_or_impl, - matches!(constness, Some(Const::Yes(_))), + let old = mem::replace( + &mut self.outer_trait_or_trait_impl, + trait_.map(|(constness, polarity, trait_ref)| TraitOrTraitImpl::TraitImpl { + constness, + polarity, + trait_ref, + }), ); f(self); - self.in_trait_impl = old; - self.in_const_trait_or_impl = old_const; + self.outer_trait_or_trait_impl = old; } - fn with_in_trait(&mut self, is_const: bool, f: impl FnOnce(&mut Self)) { - let old = mem::replace(&mut self.in_const_trait_or_impl, is_const); + fn with_in_trait(&mut self, span: Span, constness: Option, f: impl FnOnce(&mut Self)) { + let old = mem::replace( + &mut self.outer_trait_or_trait_impl, + Some(TraitOrTraitImpl::Trait { span, constness }), + ); f(self); - self.in_const_trait_or_impl = old; + self.outer_trait_or_trait_impl = old; } fn with_banned_impl_trait(&mut self, f: impl FnOnce(&mut Self)) { @@ -291,10 +306,48 @@ impl<'a> AstValidator<'a> { } } - fn check_trait_fn_not_const(&self, constness: Const) { - if let Const::Yes(span) = constness { - self.dcx().emit_err(errors::TraitFnConst { span }); - } + fn check_trait_fn_not_const(&self, constness: Const, parent: &TraitOrTraitImpl<'a>) { + let Const::Yes(span) = constness else { + return; + }; + + let make_impl_const_sugg = if self.features.const_trait_impl + && let TraitOrTraitImpl::TraitImpl { + constness: Const::No, + polarity: ImplPolarity::Positive, + trait_ref, + } = parent + { + Some(trait_ref.path.span.shrink_to_lo()) + } else { + None + }; + + let make_trait_const_sugg = if self.features.const_trait_impl + && let TraitOrTraitImpl::Trait { span, constness: None } = parent + { + Some(span.shrink_to_lo()) + } else { + None + }; + + let parent_constness = parent.constness(); + self.dcx().emit_err(errors::TraitFnConst { + span, + in_impl: matches!(parent, TraitOrTraitImpl::TraitImpl { .. }), + const_context_label: parent_constness, + remove_const_sugg: ( + self.session.source_map().span_extend_while(span, |c| c == ' ').unwrap_or(span), + match parent_constness { + Some(_) => rustc_errors::Applicability::MachineApplicable, + None => rustc_errors::Applicability::MaybeIncorrect, + }, + ), + requires_multiple_changes: make_impl_const_sugg.is_some() + || make_trait_const_sugg.is_some(), + make_impl_const_sugg, + make_trait_const_sugg, + }); } fn check_fn_decl(&self, fn_decl: &FnDecl, self_semantic: SelfSemantic) { @@ -817,7 +870,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> { self_ty, items, }) => { - self.with_in_trait_impl(true, Some(*constness), |this| { + self.with_in_trait_impl(Some((*constness, *polarity, t)), |this| { this.visibility_not_permitted( &item.vis, errors::VisibilityNotPermittedNote::TraitImpl, @@ -963,8 +1016,9 @@ impl<'a> Visitor<'a> for AstValidator<'a> { } } ItemKind::Trait(box Trait { is_auto, generics, bounds, items, .. 
}) => { - let is_const_trait = attr::contains_name(&item.attrs, sym::const_trait); - self.with_in_trait(is_const_trait, |this| { + let is_const_trait = + attr::find_by_name(&item.attrs, sym::const_trait).map(|attr| attr.span); + self.with_in_trait(item.span, is_const_trait, |this| { if *is_auto == IsAuto::Yes { // Auto traits cannot have generics, super traits nor contain items. this.deny_generic_params(generics, item.ident.span); @@ -977,8 +1031,9 @@ impl<'a> Visitor<'a> for AstValidator<'a> { // context for the supertraits. this.visit_vis(&item.vis); this.visit_ident(item.ident); - let disallowed = - (!is_const_trait).then(|| DisallowTildeConstContext::Trait(item.span)); + let disallowed = is_const_trait + .is_none() + .then(|| DisallowTildeConstContext::Trait(item.span)); this.with_tilde_const(disallowed, |this| { this.visit_generics(generics); walk_list!(this, visit_param_bound, bounds, BoundKind::SuperTraits) @@ -1207,6 +1262,9 @@ impl<'a> Visitor<'a> for AstValidator<'a> { (BoundKind::TraitObject, BoundConstness::Never, BoundPolarity::Maybe(_)) => { self.dcx().emit_err(errors::OptionalTraitObject { span: poly.span }); } + (BoundKind::TraitObject, BoundConstness::Always(_), BoundPolarity::Positive) => { + self.dcx().emit_err(errors::ConstBoundTraitObject { span: poly.span }); + } (_, BoundConstness::Maybe(span), BoundPolarity::Positive) if let Some(reason) = &self.disallow_tilde_const => { @@ -1237,8 +1295,8 @@ impl<'a> Visitor<'a> for AstValidator<'a> { } ( _, - BoundConstness::Maybe(_), - BoundPolarity::Maybe(_) | BoundPolarity::Negative(_), + BoundConstness::Always(_) | BoundConstness::Maybe(_), + BoundPolarity::Negative(_) | BoundPolarity::Maybe(_), ) => { self.dcx().emit_err(errors::IncompatibleTraitBoundModifiers { span: bound.span(), @@ -1339,7 +1397,12 @@ impl<'a> Visitor<'a> for AstValidator<'a> { let tilde_const_allowed = matches!(fk.header(), Some(FnHeader { constness: ast::Const::Yes(_), .. })) - || matches!(fk.ctxt(), Some(FnCtxt::Assoc(_)) if self.in_const_trait_or_impl); + || matches!(fk.ctxt(), Some(FnCtxt::Assoc(_))) + && self + .outer_trait_or_trait_impl + .as_ref() + .and_then(TraitOrTraitImpl::constness) + .is_some(); let disallowed = (!tilde_const_allowed).then(|| DisallowTildeConstContext::Fn(fk)); self.with_tilde_const(disallowed, |this| visit::walk_fn(this, fk)); @@ -1350,7 +1413,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> { self.check_nomangle_item_asciionly(item.ident, item.span); } - if ctxt == AssocCtxt::Trait || !self.in_trait_impl { + if ctxt == AssocCtxt::Trait || self.outer_trait_or_trait_impl.is_none() { self.check_defaultness(item.span, item.kind.defaultness()); } @@ -1398,10 +1461,10 @@ impl<'a> Visitor<'a> for AstValidator<'a> { ); } - if ctxt == AssocCtxt::Trait || self.in_trait_impl { + if let Some(parent) = &self.outer_trait_or_trait_impl { self.visibility_not_permitted(&item.vis, errors::VisibilityNotPermittedNote::TraitImpl); if let AssocItemKind::Fn(box Fn { sig, .. }) = &item.kind { - self.check_trait_fn_not_const(sig.header.constness); + self.check_trait_fn_not_const(sig.header.constness, parent); } } @@ -1411,7 +1474,11 @@ impl<'a> Visitor<'a> for AstValidator<'a> { match &item.kind { AssocItemKind::Fn(box Fn { sig, generics, body, .. 
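// ---------------------------------------------------------------------------
// Hypothetical user code (not from this PR) showing what the reworked
// `TraitFnConst` diagnostic and the new `ConstBoundTraitObject` error fire on;
// the exact surface syntax assumes the relevant unstable features:
//
//     trait Foo { fn foo(); }
//     impl Foo for () {
//         const fn foo() {}   // error: functions in trait impls cannot be declared const
//     }
//     // With `const_trait_impl` enabled, the new suggestions offer to remove
//     // the `const`, to write `impl const Foo for ()`, or to mark the trait
//     // `#[const_trait]` instead.
//
//     fn takes(_: &dyn const Foo) {}   // error: const trait bounds are not
//                                      // allowed in trait object types
// ---------------------------------------------------------------------------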
}) - if self.in_const_trait_or_impl + if self + .outer_trait_or_trait_impl + .as_ref() + .and_then(TraitOrTraitImpl::constness) + .is_some() || ctxt == AssocCtxt::Trait || matches!(sig.header.constness, Const::Yes(_)) => { @@ -1427,8 +1494,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> { ); self.visit_fn(kind, item.span, item.id); } - _ => self - .with_in_trait_impl(false, None, |this| visit::walk_assoc_item(this, item, ctxt)), + _ => self.with_in_trait_impl(None, |this| visit::walk_assoc_item(this, item, ctxt)), } } } @@ -1544,8 +1610,7 @@ pub fn check_crate( session, features, extern_mod: None, - in_trait_impl: false, - in_const_trait_or_impl: false, + outer_trait_or_trait_impl: None, has_proc_macro_decls: false, outer_impl_trait: None, disallow_tilde_const: Some(DisallowTildeConstContext::Item), diff --git a/compiler/rustc_ast_passes/src/errors.rs b/compiler/rustc_ast_passes/src/errors.rs index a5b842b320e09..42ada39f51589 100644 --- a/compiler/rustc_ast_passes/src/errors.rs +++ b/compiler/rustc_ast_passes/src/errors.rs @@ -1,7 +1,7 @@ //! Errors emitted by ast_passes. use rustc_ast::ParamKindOrd; -use rustc_errors::AddToDiagnostic; +use rustc_errors::{AddToDiagnostic, Applicability}; use rustc_macros::{Diagnostic, Subdiagnostic}; use rustc_span::{symbol::Ident, Span, Symbol}; @@ -49,6 +49,24 @@ pub struct TraitFnConst { #[primary_span] #[label] pub span: Span, + pub in_impl: bool, + #[label(ast_passes_const_context_label)] + pub const_context_label: Option, + #[suggestion(ast_passes_remove_const_sugg, code = "")] + pub remove_const_sugg: (Span, Applicability), + pub requires_multiple_changes: bool, + #[suggestion( + ast_passes_make_impl_const_sugg, + code = "const ", + applicability = "maybe-incorrect" + )] + pub make_impl_const_sugg: Option, + #[suggestion( + ast_passes_make_trait_const_sugg, + code = "#[const_trait]\n", + applicability = "maybe-incorrect" + )] + pub make_trait_const_sugg: Option, } #[derive(Diagnostic)] @@ -540,6 +558,13 @@ pub struct OptionalTraitObject { pub span: Span, } +#[derive(Diagnostic)] +#[diag(ast_passes_const_bound_trait_object)] +pub struct ConstBoundTraitObject { + #[primary_span] + pub span: Span, +} + #[derive(Diagnostic)] #[diag(ast_passes_tilde_const_disallowed)] pub struct TildeConstDisallowed { diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs index 12d37cf5a7a4f..f4b424259deef 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state.rs @@ -1096,14 +1096,22 @@ impl<'a> State<'a> { ast::StmtKind::Item(item) => self.print_item(item), ast::StmtKind::Expr(expr) => { self.space_if_not_bol(); - self.print_expr_outer_attr_style(expr, false, FixupContext::default()); + self.print_expr_outer_attr_style( + expr, + false, + FixupContext { stmt: true, ..FixupContext::default() }, + ); if classify::expr_requires_semi_to_be_stmt(expr) { self.word(";"); } } ast::StmtKind::Semi(expr) => { self.space_if_not_bol(); - self.print_expr_outer_attr_style(expr, false, FixupContext::default()); + self.print_expr_outer_attr_style( + expr, + false, + FixupContext { stmt: true, ..FixupContext::default() }, + ); self.word(";"); } ast::StmtKind::Empty => { @@ -1155,7 +1163,11 @@ impl<'a> State<'a> { ast::StmtKind::Expr(expr) if i == blk.stmts.len() - 1 => { self.maybe_print_comment(st.span.lo()); self.space_if_not_bol(); - self.print_expr_outer_attr_style(expr, false, FixupContext::default()); + self.print_expr_outer_attr_style( + expr, + false, + FixupContext { stmt: 
true, ..FixupContext::default() }, + ); self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi())); } _ => self.print_stmt(st), @@ -1561,7 +1573,7 @@ impl<'a> State<'a> { GenericBound::Trait(tref, modifier) => { match modifier.constness { ast::BoundConstness::Never => {} - ast::BoundConstness::Maybe(_) => { + ast::BoundConstness::Always(_) | ast::BoundConstness::Maybe(_) => { self.word_space(modifier.constness.as_str()); } } diff --git a/compiler/rustc_ast_pretty/src/pprust/state/expr.rs b/compiler/rustc_ast_pretty/src/pprust/state/expr.rs index f868beec81221..ff154a009ed05 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state/expr.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state/expr.rs @@ -4,6 +4,7 @@ use ast::ForLoopKind; use itertools::{Itertools, Position}; use rustc_ast::ptr::P; use rustc_ast::token; +use rustc_ast::util::classify; use rustc_ast::util::literal::escape_byte_str_symbol; use rustc_ast::util::parser::{self, AssocOp, Fixity}; use rustc_ast::{self as ast, BlockCheckMode}; @@ -15,6 +16,61 @@ use std::fmt::Write; #[derive(Copy, Clone, Debug)] pub(crate) struct FixupContext { + /// Print expression such that it can be parsed back as a statement + /// consisting of the original expression. + /// + /// The effect of this is for binary operators in statement position to set + /// `leftmost_subexpression_in_stmt` when printing their left-hand operand. + /// + /// ```ignore (illustrative) + /// (match x {}) - 1; // match needs parens when LHS of binary operator + /// + /// match x {}; // not when its own statement + /// ``` + pub stmt: bool, + + /// This is the difference between: + /// + /// ```ignore (illustrative) + /// (match x {}) - 1; // subexpression needs parens + /// + /// let _ = match x {} - 1; // no parens + /// ``` + /// + /// There are 3 distinguishable contexts in which `print_expr` might be + /// called with the expression `$match` as its argument, where `$match` + /// represents an expression of kind `ExprKind::Match`: + /// + /// - stmt=false leftmost_subexpression_in_stmt=false + /// + /// Example: `let _ = $match - 1;` + /// + /// No parentheses required. + /// + /// - stmt=false leftmost_subexpression_in_stmt=true + /// + /// Example: `$match - 1;` + /// + /// Must parenthesize `($match)`, otherwise parsing back the output as a + /// statement would terminate the statement after the closing brace of + /// the match, parsing `-1;` as a separate statement. + /// + /// - stmt=true leftmost_subexpression_in_stmt=false + /// + /// Example: `$match;` + /// + /// No parentheses required. + pub leftmost_subexpression_in_stmt: bool, + + /// This is the difference between: + /// + /// ```ignore (illustrative) + /// if let _ = (Struct {}) {} // needs parens + /// + /// match () { + /// () if let _ = Struct {} => {} // no parens + /// } + /// ``` pub parenthesize_exterior_struct_lit: bool, } @@ -22,7 +78,11 @@ pub(crate) struct FixupContext { /// in a targetted fashion where needed. impl Default for FixupContext { fn default() -> Self { - FixupContext { parenthesize_exterior_struct_lit: false } + FixupContext { + stmt: false, + leftmost_subexpression_in_stmt: false, + parenthesize_exterior_struct_lit: false, + } } } @@ -76,7 +136,8 @@ impl<'a> State<'a> { /// Prints an expr using syntax that's acceptable in a condition position, such as the `cond` in /// `if cond { ... }`. 
fn print_expr_as_cond(&mut self, expr: &ast::Expr) { - let fixup = FixupContext { parenthesize_exterior_struct_lit: true }; + let fixup = + FixupContext { parenthesize_exterior_struct_lit: true, ..FixupContext::default() }; self.print_expr_cond_paren(expr, Self::cond_needs_par(expr), fixup) } @@ -99,26 +160,25 @@ impl<'a> State<'a> { &mut self, expr: &ast::Expr, needs_par: bool, - fixup: FixupContext, + mut fixup: FixupContext, ) { if needs_par { self.popen(); + + // If we are surrounding the whole cond in parentheses, such as: + // + // if (return Struct {}) {} + // + // then there is no need for parenthesizing the individual struct + // expressions within. On the other hand if the whole cond is not + // parenthesized, then print_expr must parenthesize exterior struct + // literals. + // + // if x == (Struct {}) {} + // + fixup = FixupContext::default(); } - // If we are surrounding the whole cond in parentheses, such as: - // - // if (return Struct {}) {} - // - // then there is no need for parenthesizing the individual struct - // expressions within. On the other hand if the whole cond is not - // parenthesized, then print_expr must parenthesize exterior struct - // literals. - // - // if x == (Struct {}) {} - // - let fixup = FixupContext { - parenthesize_exterior_struct_lit: fixup.parenthesize_exterior_struct_lit && !needs_par, - }; self.print_expr(expr, fixup); if needs_par { @@ -234,7 +294,32 @@ impl<'a> State<'a> { _ => parser::PREC_POSTFIX, }; - self.print_expr_maybe_paren(func, prec, fixup); + // Independent of parenthesization related to precedence, we must + // parenthesize `func` if this is a statement context in which without + // parentheses, a statement boundary would occur inside `func` or + // immediately after `func`. + // + // Suppose `func` represents `match () { _ => f }`. We must produce: + // + // (match () { _ => f })(); + // + // instead of: + // + // match () { _ => f } (); + // + // because the latter is valid syntax but with the incorrect meaning. + // It's a match-expression followed by tuple-expression, not a function + // call. + self.print_expr_maybe_paren( + func, + prec, + FixupContext { + stmt: false, + leftmost_subexpression_in_stmt: fixup.stmt || fixup.leftmost_subexpression_in_stmt, + ..fixup + }, + ); + self.print_call_post(args) } @@ -245,7 +330,17 @@ impl<'a> State<'a> { base_args: &[P], fixup: FixupContext, ) { + // Unlike in `print_expr_call`, no change to fixup here because + // statement boundaries never occur in front of a `.` (or `?`) token. + // + // match () { _ => f }.method(); + // + // Parenthesizing only for precedence and not with regard to statement + // boundaries, `$receiver.method()` can be parsed back as a statement + // containing an expression if and only if `$receiver` can be parsed as + // a statement containing an expression. self.print_expr_maybe_paren(receiver, parser::PREC_POSTFIX, fixup); + self.word("."); self.print_ident(segment.ident); if let Some(args) = &segment.args { @@ -289,22 +384,36 @@ impl<'a> State<'a> { (&ast::ExprKind::Let { .. }, _) if !parser::needs_par_as_let_scrutinee(prec) => { parser::PREC_FORCE_PAREN } - // For a binary expression like `(match () { _ => a }) OP b`, the parens are required - // otherwise the parser would interpret `match () { _ => a }` as a statement, - // with the remaining `OP b` not making sense. So we force parens. 
- (&ast::ExprKind::Match(..), _) => parser::PREC_FORCE_PAREN, _ => left_prec, }; - self.print_expr_maybe_paren(lhs, left_prec, fixup); + self.print_expr_maybe_paren( + lhs, + left_prec, + FixupContext { + stmt: false, + leftmost_subexpression_in_stmt: fixup.stmt || fixup.leftmost_subexpression_in_stmt, + ..fixup + }, + ); + self.space(); self.word_space(op.node.as_str()); - self.print_expr_maybe_paren(rhs, right_prec, fixup) + + self.print_expr_maybe_paren( + rhs, + right_prec, + FixupContext { stmt: false, leftmost_subexpression_in_stmt: false, ..fixup }, + ); } fn print_expr_unary(&mut self, op: ast::UnOp, expr: &ast::Expr, fixup: FixupContext) { self.word(op.as_str()); - self.print_expr_maybe_paren(expr, parser::PREC_PREFIX, fixup) + self.print_expr_maybe_paren( + expr, + parser::PREC_PREFIX, + FixupContext { stmt: false, leftmost_subexpression_in_stmt: false, ..fixup }, + ); } fn print_expr_addr_of( @@ -322,7 +431,11 @@ impl<'a> State<'a> { self.print_mutability(mutability, true); } } - self.print_expr_maybe_paren(expr, parser::PREC_PREFIX, fixup) + self.print_expr_maybe_paren( + expr, + parser::PREC_PREFIX, + FixupContext { stmt: false, leftmost_subexpression_in_stmt: false, ..fixup }, + ); } pub(super) fn print_expr(&mut self, expr: &ast::Expr, fixup: FixupContext) { @@ -333,7 +446,7 @@ impl<'a> State<'a> { &mut self, expr: &ast::Expr, is_inline: bool, - fixup: FixupContext, + mut fixup: FixupContext, ) { self.maybe_print_comment(expr.span.lo()); @@ -345,7 +458,27 @@ impl<'a> State<'a> { } self.ibox(INDENT_UNIT); + + // The Match subexpression in `match x {} - 1` must be parenthesized if + // it is the leftmost subexpression in a statement: + // + // (match x {}) - 1; + // + // But not otherwise: + // + // let _ = match x {} - 1; + // + // Same applies to a small set of other expression kinds which eagerly + // terminate a statement which opens with them. + let needs_par = + fixup.leftmost_subexpression_in_stmt && !classify::expr_requires_semi_to_be_stmt(expr); + if needs_par { + self.popen(); + fixup = FixupContext::default(); + } + self.ann.pre(self, AnnNode::Expr(expr)); + match &expr.kind { ast::ExprKind::Array(exprs) => { self.print_expr_vec(exprs); @@ -386,7 +519,16 @@ impl<'a> State<'a> { } ast::ExprKind::Cast(expr, ty) => { let prec = AssocOp::As.precedence() as i8; - self.print_expr_maybe_paren(expr, prec, fixup); + self.print_expr_maybe_paren( + expr, + prec, + FixupContext { + stmt: false, + leftmost_subexpression_in_stmt: fixup.stmt + || fixup.leftmost_subexpression_in_stmt, + ..fixup + }, + ); self.space(); self.word_space("as"); self.print_type(ty); @@ -508,31 +650,71 @@ impl<'a> State<'a> { self.print_block_with_attrs(blk, attrs); } ast::ExprKind::Await(expr, _) => { + // Same fixups as ExprKind::MethodCall. self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX, fixup); self.word(".await"); } ast::ExprKind::Assign(lhs, rhs, _) => { + // Same fixups as ExprKind::Binary. let prec = AssocOp::Assign.precedence() as i8; - self.print_expr_maybe_paren(lhs, prec + 1, fixup); + self.print_expr_maybe_paren( + lhs, + prec + 1, + FixupContext { + stmt: false, + leftmost_subexpression_in_stmt: fixup.stmt + || fixup.leftmost_subexpression_in_stmt, + ..fixup + }, + ); self.space(); self.word_space("="); - self.print_expr_maybe_paren(rhs, prec, fixup); + self.print_expr_maybe_paren( + rhs, + prec, + FixupContext { stmt: false, leftmost_subexpression_in_stmt: false, ..fixup }, + ); } ast::ExprKind::AssignOp(op, lhs, rhs) => { + // Same fixups as ExprKind::Binary. 
let prec = AssocOp::Assign.precedence() as i8; - self.print_expr_maybe_paren(lhs, prec + 1, fixup); + self.print_expr_maybe_paren( + lhs, + prec + 1, + FixupContext { + stmt: false, + leftmost_subexpression_in_stmt: fixup.stmt + || fixup.leftmost_subexpression_in_stmt, + ..fixup + }, + ); self.space(); self.word(op.node.as_str()); self.word_space("="); - self.print_expr_maybe_paren(rhs, prec, fixup); + self.print_expr_maybe_paren( + rhs, + prec, + FixupContext { stmt: false, leftmost_subexpression_in_stmt: false, ..fixup }, + ); } ast::ExprKind::Field(expr, ident) => { + // Same fixups as ExprKind::MethodCall. self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX, fixup); self.word("."); self.print_ident(*ident); } ast::ExprKind::Index(expr, index, _) => { - self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX, fixup); + // Same fixups as ExprKind::Call. + self.print_expr_maybe_paren( + expr, + parser::PREC_POSTFIX, + FixupContext { + stmt: false, + leftmost_subexpression_in_stmt: fixup.stmt + || fixup.leftmost_subexpression_in_stmt, + ..fixup + }, + ); self.word("["); self.print_expr(index, FixupContext::default()); self.word("]"); @@ -544,14 +726,31 @@ impl<'a> State<'a> { // a "normal" binop gets parenthesized. (`LOr` is the lowest-precedence binop.) let fake_prec = AssocOp::LOr.precedence() as i8; if let Some(e) = start { - self.print_expr_maybe_paren(e, fake_prec, fixup); + self.print_expr_maybe_paren( + e, + fake_prec, + FixupContext { + stmt: false, + leftmost_subexpression_in_stmt: fixup.stmt + || fixup.leftmost_subexpression_in_stmt, + ..fixup + }, + ); } match limits { ast::RangeLimits::HalfOpen => self.word(".."), ast::RangeLimits::Closed => self.word("..="), } if let Some(e) = end { - self.print_expr_maybe_paren(e, fake_prec, fixup); + self.print_expr_maybe_paren( + e, + fake_prec, + FixupContext { + stmt: false, + leftmost_subexpression_in_stmt: false, + ..fixup + }, + ); } } ast::ExprKind::Underscore => self.word("_"), @@ -565,7 +764,15 @@ impl<'a> State<'a> { } if let Some(expr) = opt_expr { self.space(); - self.print_expr_maybe_paren(expr, parser::PREC_JUMP, fixup); + self.print_expr_maybe_paren( + expr, + parser::PREC_JUMP, + FixupContext { + stmt: false, + leftmost_subexpression_in_stmt: false, + ..fixup + }, + ); } } ast::ExprKind::Continue(opt_label) => { @@ -579,7 +786,15 @@ impl<'a> State<'a> { self.word("return"); if let Some(expr) = result { self.word(" "); - self.print_expr_maybe_paren(expr, parser::PREC_JUMP, fixup); + self.print_expr_maybe_paren( + expr, + parser::PREC_JUMP, + FixupContext { + stmt: false, + leftmost_subexpression_in_stmt: false, + ..fixup + }, + ); } } ast::ExprKind::Yeet(result) => { @@ -588,13 +803,25 @@ impl<'a> State<'a> { self.word("yeet"); if let Some(expr) = result { self.word(" "); - self.print_expr_maybe_paren(expr, parser::PREC_JUMP, fixup); + self.print_expr_maybe_paren( + expr, + parser::PREC_JUMP, + FixupContext { + stmt: false, + leftmost_subexpression_in_stmt: false, + ..fixup + }, + ); } } ast::ExprKind::Become(result) => { self.word("become"); self.word(" "); - self.print_expr_maybe_paren(result, parser::PREC_JUMP, fixup); + self.print_expr_maybe_paren( + result, + parser::PREC_JUMP, + FixupContext { stmt: false, leftmost_subexpression_in_stmt: false, ..fixup }, + ); } ast::ExprKind::InlineAsm(a) => { // FIXME: This should have its own syntax, distinct from a macro invocation. 
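A standalone sketch (independent of the compiler sources above; `f` and the literal values are placeholders) of the statement-boundary cases these fixups guard against: a `match` that is the leftmost subexpression of a statement must be parenthesized, while the same expression on the right-hand side of a `let` needs no parentheses.

    fn demo() -> i32 {
        let f = || 1;
        // Leftmost subexpression of a statement: parentheses are required,
        // otherwise the parser ends the statement at the closing brace and
        // reads `- 1;` or `();` as a separate statement.
        (match () { _ => 2 }) - 1;
        (match () { _ => f })();
        // Not in statement position: no parentheses needed.
        let x = match () { _ => 2 } - 1;
        x
    }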
@@ -644,10 +871,19 @@ impl<'a> State<'a> { if let Some(expr) = e { self.space(); - self.print_expr_maybe_paren(expr, parser::PREC_JUMP, fixup); + self.print_expr_maybe_paren( + expr, + parser::PREC_JUMP, + FixupContext { + stmt: false, + leftmost_subexpression_in_stmt: false, + ..fixup + }, + ); } } ast::ExprKind::Try(e) => { + // Same fixups as ExprKind::MethodCall. self.print_expr_maybe_paren(e, parser::PREC_POSTFIX, fixup); self.word("?") } @@ -663,7 +899,13 @@ impl<'a> State<'a> { self.pclose() } } + self.ann.post(self, AnnNode::Expr(expr)); + + if needs_par { + self.pclose(); + } + self.end(); } @@ -704,7 +946,7 @@ impl<'a> State<'a> { } _ => { self.end(); // Close the ibox for the pattern. - self.print_expr(body, FixupContext::default()); + self.print_expr(body, FixupContext { stmt: true, ..FixupContext::default() }); self.word(","); } } diff --git a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs index da6fffc167c72..d824260f47c18 100644 --- a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs @@ -482,7 +482,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { span: Span, use_spans: UseSpans<'tcx>, ) -> DiagnosticBuilder<'cx> { - // We need all statements in the body where the binding was assigned to to later find all + // We need all statements in the body where the binding was assigned to later find all // the branching code paths where the binding *wasn't* assigned to. let inits = &self.move_data.init_path_map[mpi]; let move_path = &self.move_data.move_paths[mpi]; @@ -848,8 +848,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { move_spans.var_subdiag(None, &mut err, None, |kind, var_span| { use crate::session_diagnostics::CaptureVarCause::*; match kind { - Some(_) => MoveUseInCoroutine { var_span }, - None => MoveUseInClosure { var_span }, + hir::ClosureKind::Coroutine(_) => MoveUseInCoroutine { var_span }, + hir::ClosureKind::Closure => MoveUseInClosure { var_span }, } }); @@ -893,10 +893,12 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { let place = &borrow.borrowed_place; let desc_place = self.describe_any_place(place.as_ref()); match kind { - Some(_) => { + hir::ClosureKind::Coroutine(_) => { BorrowUsePlaceCoroutine { place: desc_place, var_span, is_single_var: true } } - None => BorrowUsePlaceClosure { place: desc_place, var_span, is_single_var: true }, + hir::ClosureKind::Closure => { + BorrowUsePlaceClosure { place: desc_place, var_span, is_single_var: true } + } } }); @@ -1040,12 +1042,12 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { |kind, var_span| { use crate::session_diagnostics::CaptureVarCause::*; match kind { - Some(_) => BorrowUsePlaceCoroutine { + hir::ClosureKind::Coroutine(_) => BorrowUsePlaceCoroutine { place: desc_place, var_span, is_single_var: true, }, - None => BorrowUsePlaceClosure { + hir::ClosureKind::Closure => BorrowUsePlaceClosure { place: desc_place, var_span, is_single_var: true, @@ -1124,12 +1126,12 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { borrow_spans.var_subdiag(None, &mut err, Some(gen_borrow_kind), |kind, var_span| { use crate::session_diagnostics::CaptureVarCause::*; match kind { - Some(_) => BorrowUsePlaceCoroutine { + hir::ClosureKind::Coroutine(_) => BorrowUsePlaceCoroutine { place: desc_place, var_span, is_single_var: false, }, - None => { + hir::ClosureKind::Closure => { BorrowUsePlaceClosure { place: desc_place, var_span, is_single_var: false } } } @@ -1144,10 +1146,12 @@ impl<'cx, 'tcx> 
MirBorrowckCtxt<'cx, 'tcx> { let borrow_place = &issued_borrow.borrowed_place; let borrow_place_desc = self.describe_any_place(borrow_place.as_ref()); match kind { - Some(_) => { + hir::ClosureKind::Coroutine(_) => { FirstBorrowUsePlaceCoroutine { place: borrow_place_desc, var_span } } - None => FirstBorrowUsePlaceClosure { place: borrow_place_desc, var_span }, + hir::ClosureKind::Closure => { + FirstBorrowUsePlaceClosure { place: borrow_place_desc, var_span } + } } }, ); @@ -1159,8 +1163,12 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { |kind, var_span| { use crate::session_diagnostics::CaptureVarCause::*; match kind { - Some(_) => SecondBorrowUsePlaceCoroutine { place: desc_place, var_span }, - None => SecondBorrowUsePlaceClosure { place: desc_place, var_span }, + hir::ClosureKind::Coroutine(_) => { + SecondBorrowUsePlaceCoroutine { place: desc_place, var_span } + } + hir::ClosureKind::Closure => { + SecondBorrowUsePlaceClosure { place: desc_place, var_span } + } } }, ); @@ -1651,7 +1659,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { fn visit_expr(&mut self, e: &'hir hir::Expr<'hir>) { if e.span.contains(self.capture_span) { if let hir::ExprKind::Closure(&hir::Closure { - movability: None, + kind: hir::ClosureKind::Closure, body, fn_arg_span, fn_decl: hir::FnDecl { inputs, .. }, @@ -1686,7 +1694,11 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { && let Some(init) = local.init { if let hir::Expr { - kind: hir::ExprKind::Closure(&hir::Closure { movability: None, .. }), + kind: + hir::ExprKind::Closure(&hir::Closure { + kind: hir::ClosureKind::Closure, + .. + }), .. } = init && init.span.contains(self.capture_span) @@ -2537,7 +2549,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { } } } - CoroutineKind::Coroutine => "coroutine", + CoroutineKind::Coroutine(_) => "coroutine", }, None => "closure", }; @@ -2838,8 +2850,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { loan_spans.var_subdiag(None, &mut err, Some(loan.kind), |kind, var_span| { use crate::session_diagnostics::CaptureVarCause::*; match kind { - Some(_) => BorrowUseInCoroutine { var_span }, - None => BorrowUseInClosure { var_span }, + hir::ClosureKind::Coroutine(_) => BorrowUseInCoroutine { var_span }, + hir::ClosureKind::Closure => BorrowUseInClosure { var_span }, } }); @@ -2854,8 +2866,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { loan_spans.var_subdiag(None, &mut err, Some(loan.kind), |kind, var_span| { use crate::session_diagnostics::CaptureVarCause::*; match kind { - Some(_) => BorrowUseInCoroutine { var_span }, - None => BorrowUseInClosure { var_span }, + hir::ClosureKind::Coroutine(_) => BorrowUseInCoroutine { var_span }, + hir::ClosureKind::Closure => BorrowUseInClosure { var_span }, } }); @@ -3055,7 +3067,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { ) -> Option> { // Define a fallback for when we can't match a closure. 
let fallback = || { - let is_closure = self.infcx.tcx.is_closure(self.mir_def_id().to_def_id()); + let is_closure = self.infcx.tcx.is_closure_or_coroutine(self.mir_def_id().to_def_id()); if is_closure { None } else { @@ -3265,7 +3277,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { sig: ty::PolyFnSig<'tcx>, ) -> Option> { debug!("annotate_fn_sig: did={:?} sig={:?}", did, sig); - let is_closure = self.infcx.tcx.is_closure(did.to_def_id()); + let is_closure = self.infcx.tcx.is_closure_or_coroutine(did.to_def_id()); let fn_hir_id = self.infcx.tcx.local_def_id_to_hir_id(did); let fn_decl = self.infcx.tcx.hir().fn_decl_by_hir_id(fn_hir_id)?; diff --git a/compiler/rustc_borrowck/src/diagnostics/mod.rs b/compiler/rustc_borrowck/src/diagnostics/mod.rs index 65dee9d0e0098..b31325485db99 100644 --- a/compiler/rustc_borrowck/src/diagnostics/mod.rs +++ b/compiler/rustc_borrowck/src/diagnostics/mod.rs @@ -370,7 +370,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { ty::Array(ty, _) | ty::Slice(ty) => { self.describe_field_from_ty(ty, field, variant_index, including_tuple_field) } - ty::Closure(def_id, _) | ty::Coroutine(def_id, _, _) => { + ty::Closure(def_id, _) | ty::Coroutine(def_id, _) => { // We won't be borrowck'ing here if the closure came from another crate, // so it's safe to call `expect_local`. // @@ -505,7 +505,7 @@ pub(super) enum UseSpans<'tcx> { /// The access is caused by capturing a variable for a closure. ClosureUse { /// This is true if the captured variable was from a coroutine. - coroutine_kind: Option, + closure_kind: hir::ClosureKind, /// The span of the args of the closure, including the `move` keyword if /// it's present. args_span: Span, @@ -572,9 +572,13 @@ impl UseSpans<'_> { } } + // FIXME(coroutines): Make this just return the `ClosureKind` directly? pub(super) fn coroutine_kind(self) -> Option { match self { - UseSpans::ClosureUse { coroutine_kind, .. } => coroutine_kind, + UseSpans::ClosureUse { + closure_kind: hir::ClosureKind::Coroutine(coroutine_kind), + .. + } => Some(coroutine_kind), _ => None, } } @@ -599,9 +603,9 @@ impl UseSpans<'_> { ) { use crate::InitializationRequiringAction::*; use CaptureVarPathUseCause::*; - if let UseSpans::ClosureUse { coroutine_kind, path_span, .. } = self { - match coroutine_kind { - Some(_) => { + if let UseSpans::ClosureUse { closure_kind, path_span, .. } = self { + match closure_kind { + hir::ClosureKind::Coroutine(_) => { err.subdiagnostic(match action { Borrow => BorrowInCoroutine { path_span }, MatchOn | Use => UseInCoroutine { path_span }, @@ -609,7 +613,7 @@ impl UseSpans<'_> { PartialAssignment => AssignPartInCoroutine { path_span }, }); } - None => { + hir::ClosureKind::Closure => { err.subdiagnostic(match action { Borrow => BorrowInClosure { path_span }, MatchOn | Use => UseInClosure { path_span }, @@ -627,9 +631,9 @@ impl UseSpans<'_> { dcx: Option<&rustc_errors::DiagCtxt>, err: &mut Diagnostic, kind: Option, - f: impl FnOnce(Option, Span) -> CaptureVarCause, + f: impl FnOnce(hir::ClosureKind, Span) -> CaptureVarCause, ) { - if let UseSpans::ClosureUse { coroutine_kind, capture_kind_span, path_span, .. } = self { + if let UseSpans::ClosureUse { closure_kind, capture_kind_span, path_span, .. 
} = self { if capture_kind_span != path_span { err.subdiagnostic(match kind { Some(kd) => match kd { @@ -645,7 +649,7 @@ impl UseSpans<'_> { None => CaptureVarKind::Move { kind_span: capture_kind_span }, }); }; - let diag = f(coroutine_kind, path_span); + let diag = f(closure_kind, path_span); match dcx { Some(hd) => err.eager_subdiagnostic(hd, diag), None => err.subdiagnostic(diag), @@ -656,7 +660,9 @@ impl UseSpans<'_> { /// Returns `false` if this place is not used in a closure. pub(super) fn for_closure(&self) -> bool { match *self { - UseSpans::ClosureUse { coroutine_kind, .. } => coroutine_kind.is_none(), + UseSpans::ClosureUse { closure_kind, .. } => { + matches!(closure_kind, hir::ClosureKind::Closure) + } _ => false, } } @@ -664,7 +670,10 @@ impl UseSpans<'_> { /// Returns `false` if this place is not used in a coroutine. pub(super) fn for_coroutine(&self) -> bool { match *self { - UseSpans::ClosureUse { coroutine_kind, .. } => coroutine_kind.is_some(), + // FIXME(coroutines): Do we want this to apply to synthetic coroutines? + UseSpans::ClosureUse { closure_kind, .. } => { + matches!(closure_kind, hir::ClosureKind::Coroutine(..)) + } _ => false, } } @@ -783,15 +792,14 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { debug!("move_spans: moved_place={:?} location={:?} stmt={:?}", moved_place, location, stmt); if let StatementKind::Assign(box (_, Rvalue::Aggregate(kind, places))) = &stmt.kind - && let AggregateKind::Closure(def_id, _) | AggregateKind::Coroutine(def_id, _, _) = - **kind + && let AggregateKind::Closure(def_id, _) | AggregateKind::Coroutine(def_id, _) = **kind { debug!("move_spans: def_id={:?} places={:?}", def_id, places); let def_id = def_id.expect_local(); - if let Some((args_span, coroutine_kind, capture_kind_span, path_span)) = + if let Some((args_span, closure_kind, capture_kind_span, path_span)) = self.closure_span(def_id, moved_place, places) { - return ClosureUse { coroutine_kind, args_span, capture_kind_span, path_span }; + return ClosureUse { closure_kind, args_span, capture_kind_span, path_span }; } } @@ -803,11 +811,11 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { | FakeReadCause::ForLet(Some(closure_def_id)) => { debug!("move_spans: def_id={:?} place={:?}", closure_def_id, place); let places = &[Operand::Move(place)]; - if let Some((args_span, coroutine_kind, capture_kind_span, path_span)) = + if let Some((args_span, closure_kind, capture_kind_span, path_span)) = self.closure_span(closure_def_id, moved_place, IndexSlice::from_raw(places)) { return ClosureUse { - coroutine_kind, + closure_kind, args_span, capture_kind_span, path_span, @@ -919,7 +927,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { if let StatementKind::Assign(box (_, Rvalue::Aggregate(kind, places))) = &stmt.kind { let (&def_id, is_coroutine) = match kind { box AggregateKind::Closure(def_id, _) => (def_id, false), - box AggregateKind::Coroutine(def_id, _, _) => (def_id, true), + box AggregateKind::Coroutine(def_id, _) => (def_id, true), _ => continue, }; let def_id = def_id.expect_local(); @@ -928,10 +936,10 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { "borrow_spans: def_id={:?} is_coroutine={:?} places={:?}", def_id, is_coroutine, places ); - if let Some((args_span, coroutine_kind, capture_kind_span, path_span)) = + if let Some((args_span, closure_kind, capture_kind_span, path_span)) = self.closure_span(def_id, Place::from(target).as_ref(), places) { - return ClosureUse { coroutine_kind, args_span, capture_kind_span, path_span }; + return ClosureUse { closure_kind, args_span, 
capture_kind_span, path_span }; } else { return OtherUse(use_span); } @@ -953,7 +961,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { def_id: LocalDefId, target_place: PlaceRef<'tcx>, places: &IndexSlice>, - ) -> Option<(Span, Option, Span, Span)> { + ) -> Option<(Span, hir::ClosureKind, Span, Span)> { debug!( "closure_span: def_id={:?} target_place={:?} places={:?}", def_id, target_place, places @@ -961,7 +969,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { let hir_id = self.infcx.tcx.local_def_id_to_hir_id(def_id); let expr = &self.infcx.tcx.hir().expect_expr(hir_id).kind; debug!("closure_span: hir_id={:?} expr={:?}", hir_id, expr); - if let hir::ExprKind::Closure(&hir::Closure { body, fn_decl_span, .. }) = expr { + if let hir::ExprKind::Closure(&hir::Closure { kind, fn_decl_span, .. }) = expr { for (captured_place, place) in self.infcx.tcx.closure_captures(def_id).iter().zip(places) { @@ -970,12 +978,9 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { if target_place == place.as_ref() => { debug!("closure_span: found captured local {:?}", place); - let body = self.infcx.tcx.hir().body(body); - let coroutine_kind = body.coroutine_kind(); - return Some(( fn_decl_span, - coroutine_kind, + kind, captured_place.get_capture_kind_span(self.infcx.tcx), captured_place.get_path_span(self.infcx.tcx), )); @@ -1242,8 +1247,12 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { // another message for the same span if !is_loop_message { move_spans.var_subdiag(None, err, None, |kind, var_span| match kind { - Some(_) => CaptureVarCause::PartialMoveUseInCoroutine { var_span, is_partial }, - None => CaptureVarCause::PartialMoveUseInClosure { var_span, is_partial }, + hir::ClosureKind::Coroutine(_) => { + CaptureVarCause::PartialMoveUseInCoroutine { var_span, is_partial } + } + hir::ClosureKind::Closure => { + CaptureVarCause::PartialMoveUseInClosure { var_span, is_partial } + } }) } } diff --git a/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs b/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs index 506933c470e25..3b3d440df97b5 100644 --- a/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs @@ -1030,8 +1030,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { let hir = self.infcx.tcx.hir(); if let InstanceDef::Item(def_id) = source.instance && let Some(Node::Expr(hir::Expr { hir_id, kind, .. })) = hir.get_if_local(def_id) - && let ExprKind::Closure(closure) = kind - && closure.movability == None + && let ExprKind::Closure(hir::Closure { kind: hir::ClosureKind::Closure, .. }) = kind && let Some(Node::Expr(expr)) = hir.find_parent(*hir_id) { let mut cur_expr = expr; diff --git a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs index 8387eaed61c79..b3450b09cdf9e 100644 --- a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs @@ -1041,13 +1041,15 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { } hir::ExprKind::Closure(hir::Closure { capture_clause: hir::CaptureBy::Ref, - body, + kind, .. 
}) => { - let body = map.body(*body); if !matches!( - body.coroutine_kind, - Some(hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _)) + kind, + hir::ClosureKind::Coroutine(hir::CoroutineKind::Desugared( + hir::CoroutineDesugaring::Async, + _ + ),) ) { closure_span = Some(expr.span.shrink_to_lo()); } diff --git a/compiler/rustc_borrowck/src/diagnostics/region_name.rs b/compiler/rustc_borrowck/src/diagnostics/region_name.rs index 73dc7a9600f1a..4cb49362863fc 100644 --- a/compiler/rustc_borrowck/src/diagnostics/region_name.rs +++ b/compiler/rustc_borrowck/src/diagnostics/region_name.rs @@ -674,7 +674,7 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> { let (return_span, mir_description, hir_ty) = match tcx.hir_node(mir_hir_id) { hir::Node::Expr(hir::Expr { - kind: hir::ExprKind::Closure(&hir::Closure { fn_decl, body, fn_decl_span, .. }), + kind: hir::ExprKind::Closure(&hir::Closure { fn_decl, kind, fn_decl_span, .. }), .. }) => { let (mut span, mut hir_ty) = match fn_decl.output { @@ -683,62 +683,86 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> { } hir::FnRetTy::Return(hir_ty) => (fn_decl.output.span(), Some(hir_ty)), }; - let mir_description = match hir.body(body).coroutine_kind { - Some(hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, src)) => { - match src { - hir::CoroutineSource::Block => " of async block", - hir::CoroutineSource::Closure => " of async closure", - hir::CoroutineSource::Fn => { - let parent_item = - tcx.hir_node_by_def_id(hir.get_parent_item(mir_hir_id).def_id); - let output = &parent_item - .fn_decl() - .expect("coroutine lowered from async fn should be in fn") - .output; - span = output.span(); - if let hir::FnRetTy::Return(ret) = output { - hir_ty = Some(self.get_future_inner_return_ty(*ret)); - } - " of async function" - } + let mir_description = match kind { + hir::ClosureKind::Coroutine(hir::CoroutineKind::Desugared( + hir::CoroutineDesugaring::Async, + hir::CoroutineSource::Block, + )) => " of async block", + + hir::ClosureKind::Coroutine(hir::CoroutineKind::Desugared( + hir::CoroutineDesugaring::Async, + hir::CoroutineSource::Closure, + )) => " of async closure", + + hir::ClosureKind::Coroutine(hir::CoroutineKind::Desugared( + hir::CoroutineDesugaring::Async, + hir::CoroutineSource::Fn, + )) => { + let parent_item = + tcx.hir_node_by_def_id(hir.get_parent_item(mir_hir_id).def_id); + let output = &parent_item + .fn_decl() + .expect("coroutine lowered from async fn should be in fn") + .output; + span = output.span(); + if let hir::FnRetTy::Return(ret) = output { + hir_ty = Some(self.get_future_inner_return_ty(*ret)); } + " of async function" } - Some(hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, src)) => { - match src { - hir::CoroutineSource::Block => " of gen block", - hir::CoroutineSource::Closure => " of gen closure", - hir::CoroutineSource::Fn => { - let parent_item = - tcx.hir_node_by_def_id(hir.get_parent_item(mir_hir_id).def_id); - let output = &parent_item - .fn_decl() - .expect("coroutine lowered from gen fn should be in fn") - .output; - span = output.span(); - " of gen function" - } - } + + hir::ClosureKind::Coroutine(hir::CoroutineKind::Desugared( + hir::CoroutineDesugaring::Gen, + hir::CoroutineSource::Block, + )) => " of gen block", + + hir::ClosureKind::Coroutine(hir::CoroutineKind::Desugared( + hir::CoroutineDesugaring::Gen, + hir::CoroutineSource::Closure, + )) => " of gen closure", + + hir::ClosureKind::Coroutine(hir::CoroutineKind::Desugared( + hir::CoroutineDesugaring::Gen, + hir::CoroutineSource::Fn, + )) => { + let 
parent_item = + tcx.hir_node_by_def_id(hir.get_parent_item(mir_hir_id).def_id); + let output = &parent_item + .fn_decl() + .expect("coroutine lowered from gen fn should be in fn") + .output; + span = output.span(); + " of gen function" } - Some(hir::CoroutineKind::Desugared( + hir::ClosureKind::Coroutine(hir::CoroutineKind::Desugared( hir::CoroutineDesugaring::AsyncGen, - src, - )) => match src { - hir::CoroutineSource::Block => " of async gen block", - hir::CoroutineSource::Closure => " of async gen closure", - hir::CoroutineSource::Fn => { - let parent_item = - tcx.hir_node_by_def_id(hir.get_parent_item(mir_hir_id).def_id); - let output = &parent_item - .fn_decl() - .expect("coroutine lowered from async gen fn should be in fn") - .output; - span = output.span(); - " of async gen function" - } - }, - Some(hir::CoroutineKind::Coroutine) => " of coroutine", - None => " of closure", + hir::CoroutineSource::Block, + )) => " of async gen block", + + hir::ClosureKind::Coroutine(hir::CoroutineKind::Desugared( + hir::CoroutineDesugaring::AsyncGen, + hir::CoroutineSource::Closure, + )) => " of async gen closure", + + hir::ClosureKind::Coroutine(hir::CoroutineKind::Desugared( + hir::CoroutineDesugaring::AsyncGen, + hir::CoroutineSource::Fn, + )) => { + let parent_item = + tcx.hir_node_by_def_id(hir.get_parent_item(mir_hir_id).def_id); + let output = &parent_item + .fn_decl() + .expect("coroutine lowered from async gen fn should be in fn") + .output; + span = output.span(); + " of async gen function" + } + + hir::ClosureKind::Coroutine(hir::CoroutineKind::Coroutine(_)) => { + " of coroutine" + } + hir::ClosureKind::Closure => " of closure", }; (span, mir_description, hir_ty) } diff --git a/compiler/rustc_borrowck/src/lib.rs b/compiler/rustc_borrowck/src/lib.rs index af21847cffd1e..495b255583c2a 100644 --- a/compiler/rustc_borrowck/src/lib.rs +++ b/compiler/rustc_borrowck/src/lib.rs @@ -274,11 +274,12 @@ fn do_mir_borrowck<'tcx>( // The first argument is the coroutine type passed by value if let Some(local) = body.local_decls.raw.get(1) // Get the interior types and args which typeck computed - && let ty::Coroutine(_, _, hir::Movability::Static) = local.ty.kind() + && let ty::Coroutine(def_id, _) = *local.ty.kind() + && tcx.coroutine_movability(def_id) == hir::Movability::Movable { - false - } else { true + } else { + false }; for (idx, move_data) in promoted_move_data { @@ -1306,7 +1307,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { // moved into the closure and subsequently used by the closure, // in order to populate our used_mut set. match **aggregate_kind { - AggregateKind::Closure(def_id, _) | AggregateKind::Coroutine(def_id, _, _) => { + AggregateKind::Closure(def_id, _) | AggregateKind::Coroutine(def_id, _) => { let def_id = def_id.expect_local(); let BorrowCheckResult { used_mut_upvars, .. } = self.infcx.tcx.mir_borrowck(def_id); @@ -1612,7 +1613,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { | ty::FnPtr(_) | ty::Dynamic(_, _, _) | ty::Closure(_, _) - | ty::Coroutine(_, _, _) + | ty::Coroutine(_, _) | ty::CoroutineWitness(..) 
| ty::Never | ty::Tuple(_) @@ -1636,7 +1637,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { return; } } - ty::Closure(_, _) | ty::Coroutine(_, _, _) | ty::Tuple(_) => (), + ty::Closure(_, _) | ty::Coroutine(_, _) | ty::Tuple(_) => (), ty::Bool | ty::Char | ty::Int(_) diff --git a/compiler/rustc_borrowck/src/type_check/input_output.rs b/compiler/rustc_borrowck/src/type_check/input_output.rs index f717d91c35c60..5bd7cc9514ca2 100644 --- a/compiler/rustc_borrowck/src/type_check/input_output.rs +++ b/compiler/rustc_borrowck/src/type_check/input_output.rs @@ -22,7 +22,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { #[instrument(skip(self, body), level = "debug")] pub(super) fn check_signature_annotation(&mut self, body: &Body<'tcx>) { let mir_def_id = body.source.def_id().expect_local(); - if !self.tcx().is_closure(mir_def_id.to_def_id()) { + if !self.tcx().is_closure_or_coroutine(mir_def_id.to_def_id()) { return; } let user_provided_poly_sig = self.tcx().closure_user_provided_sig(mir_def_id); diff --git a/compiler/rustc_borrowck/src/type_check/mod.rs b/compiler/rustc_borrowck/src/type_check/mod.rs index 8a862953fba21..80575e30a8d23 100644 --- a/compiler/rustc_borrowck/src/type_check/mod.rs +++ b/compiler/rustc_borrowck/src/type_check/mod.rs @@ -762,7 +762,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { let (variant, args) = match base_ty { PlaceTy { ty, variant_index: Some(variant_index) } => match *ty.kind() { ty::Adt(adt_def, args) => (adt_def.variant(variant_index), args), - ty::Coroutine(def_id, args, _) => { + ty::Coroutine(def_id, args) => { let mut variants = args.as_coroutine().state_tys(def_id, tcx); let Some(mut variant) = variants.nth(variant_index.into()) else { bug!( @@ -790,7 +790,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { }), }; } - ty::Coroutine(_, args, _) => { + ty::Coroutine(_, args) => { // Only prefix fields (upvars and current state) are // accessible without a variant index. return match args.as_coroutine().prefix_tys().get(field.index()) { @@ -1784,7 +1784,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { }), } } - AggregateKind::Coroutine(_, args, _) => { + AggregateKind::Coroutine(_, args) => { // It doesn't make sense to look at a field beyond the prefix; // these require a variant index, and are not initialized in // aggregate rvalues. @@ -2392,7 +2392,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { AggregateKind::Array(_) => None, AggregateKind::Tuple => None, AggregateKind::Closure(_, _) => None, - AggregateKind::Coroutine(_, _, _) => None, + AggregateKind::Coroutine(_, _) => None, }, } } @@ -2620,7 +2620,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { // desugaring. A closure gets desugared to a struct, and // these extra requirements are basically like where // clauses on the struct. 
- AggregateKind::Closure(def_id, args) | AggregateKind::Coroutine(def_id, args, _) => { + AggregateKind::Closure(def_id, args) | AggregateKind::Coroutine(def_id, args) => { (def_id, self.prove_closure_bounds(tcx, def_id.expect_local(), args, location)) } diff --git a/compiler/rustc_borrowck/src/universal_regions.rs b/compiler/rustc_borrowck/src/universal_regions.rs index 2b83c7871396a..a02304a2f8b30 100644 --- a/compiler/rustc_borrowck/src/universal_regions.rs +++ b/compiler/rustc_borrowck/src/universal_regions.rs @@ -14,7 +14,6 @@ use rustc_data_structures::fx::FxHashMap; use rustc_errors::Diagnostic; -use rustc_hir as hir; use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_hir::lang_items::LangItem; use rustc_hir::BodyOwnerKind; @@ -94,7 +93,7 @@ pub enum DefiningTy<'tcx> { /// The MIR is a coroutine. The signature is that coroutines take /// no parameters and return the result of /// `ClosureArgs::coroutine_return_ty`. - Coroutine(DefId, GenericArgsRef<'tcx>, hir::Movability), + Coroutine(DefId, GenericArgsRef<'tcx>), /// The MIR is a fn item with the given `DefId` and args. The signature /// of the function can be bound then with the `fn_sig` query. @@ -118,7 +117,7 @@ impl<'tcx> DefiningTy<'tcx> { pub fn upvar_tys(self) -> &'tcx ty::List> { match self { DefiningTy::Closure(_, args) => args.as_closure().upvar_tys(), - DefiningTy::Coroutine(_, args, _) => args.as_coroutine().upvar_tys(), + DefiningTy::Coroutine(_, args) => args.as_coroutine().upvar_tys(), DefiningTy::FnDef(..) | DefiningTy::Const(..) | DefiningTy::InlineConst(..) => { ty::List::empty() } @@ -354,7 +353,7 @@ impl<'tcx> UniversalRegions<'tcx> { err.note(format!("late-bound region is {:?}", self.to_region_vid(r))); }); } - DefiningTy::Coroutine(def_id, args, _) => { + DefiningTy::Coroutine(def_id, args) => { let v = with_no_trimmed_paths!( args[tcx.generics_of(def_id).parent_count..] 
.iter() @@ -527,7 +526,7 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> { debug!("build: local regions = {}..{}", first_local_index, num_universals); let yield_ty = match defining_ty { - DefiningTy::Coroutine(_, args, _) => Some(args.as_coroutine().yield_ty()), + DefiningTy::Coroutine(_, args) => Some(args.as_coroutine().yield_ty()), _ => None, }; @@ -562,9 +561,7 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> { match *defining_ty.kind() { ty::Closure(def_id, args) => DefiningTy::Closure(def_id, args), - ty::Coroutine(def_id, args, movability) => { - DefiningTy::Coroutine(def_id, args, movability) - } + ty::Coroutine(def_id, args) => DefiningTy::Coroutine(def_id, args), ty::FnDef(def_id, args) => DefiningTy::FnDef(def_id, args), _ => span_bug!( tcx.def_span(self.mir_def), @@ -620,7 +617,7 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> { let identity_args = GenericArgs::identity_for_item(tcx, typeck_root_def_id); let fr_args = match defining_ty { DefiningTy::Closure(_, args) - | DefiningTy::Coroutine(_, args, _) + | DefiningTy::Coroutine(_, args) | DefiningTy::InlineConst(_, args) => { // In the case of closures, we rely on the fact that // the first N elements in the ClosureArgs are @@ -685,11 +682,11 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> { ) } - DefiningTy::Coroutine(def_id, args, movability) => { + DefiningTy::Coroutine(def_id, args) => { assert_eq!(self.mir_def.to_def_id(), def_id); let resume_ty = args.as_coroutine().resume_ty(); let output = args.as_coroutine().return_ty(); - let coroutine_ty = Ty::new_coroutine(tcx, def_id, args, movability); + let coroutine_ty = Ty::new_coroutine(tcx, def_id, args); let inputs_and_output = self.infcx.tcx.mk_type_list(&[coroutine_ty, resume_ty, output]); ty::Binder::dummy(inputs_and_output) diff --git a/compiler/rustc_codegen_cranelift/.github/workflows/main.yml b/compiler/rustc_codegen_cranelift/.github/workflows/main.yml index 05dc28d074530..9bbb18fc37fca 100644 --- a/compiler/rustc_codegen_cranelift/.github/workflows/main.yml +++ b/compiler/rustc_codegen_cranelift/.github/workflows/main.yml @@ -175,14 +175,10 @@ jobs: path: build/cg_clif key: ${{ runner.os }}-x86_64-unknown-linux-gnu-cargo-build-target-${{ hashFiles('rust-toolchain', '**/Cargo.lock') }} - - name: Cache cargo bin dir - uses: actions/cache@v3 - with: - path: ~/.cargo/bin - key: ${{ runner.os }}-${{ matrix.env.TARGET_TRIPLE }}-cargo-bin-dir-${{ hashFiles('rust-toolchain', '**/Cargo.lock') }} - - name: Install hyperfine - run: cargo install hyperfine || true + run: | + sudo apt update + sudo apt install -y hyperfine - name: Prepare dependencies run: ./y.sh prepare @@ -257,14 +253,14 @@ jobs: - name: Upload prebuilt cg_clif if: matrix.os == 'windows-latest' || matrix.env.TARGET_TRIPLE != 'x86_64-pc-windows-gnu' - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: cg_clif-${{ matrix.env.TARGET_TRIPLE }} path: cg_clif.tar.xz - name: Upload prebuilt cg_clif (cross compile) if: matrix.os != 'windows-latest' && matrix.env.TARGET_TRIPLE == 'x86_64-pc-windows-gnu' - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: cg_clif-${{ runner.os }}-cross-x86_64-mingw path: cg_clif.tar.xz @@ -283,7 +279,7 @@ jobs: - uses: actions/checkout@v3 - name: Download all built artifacts - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: path: artifacts/ diff --git a/compiler/rustc_codegen_cranelift/.github/workflows/rustc.yml b/compiler/rustc_codegen_cranelift/.github/workflows/rustc.yml 
index cb5dd51fee310..8085dc58263cc 100644 --- a/compiler/rustc_codegen_cranelift/.github/workflows/rustc.yml +++ b/compiler/rustc_codegen_cranelift/.github/workflows/rustc.yml @@ -43,6 +43,11 @@ jobs: path: build/cg_clif key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain', '**/Cargo.lock') }} + - name: Install ripgrep + run: | + sudo apt update + sudo apt install -y ripgrep + - name: Prepare dependencies run: ./y.sh prepare diff --git a/compiler/rustc_codegen_cranelift/rust-toolchain b/compiler/rustc_codegen_cranelift/rust-toolchain index e1e1760c5977f..a086c0293601f 100644 --- a/compiler/rustc_codegen_cranelift/rust-toolchain +++ b/compiler/rustc_codegen_cranelift/rust-toolchain @@ -1,3 +1,3 @@ [toolchain] -channel = "nightly-2023-12-24" +channel = "nightly-2023-12-31" components = ["rust-src", "rustc-dev", "llvm-tools"] diff --git a/compiler/rustc_codegen_cranelift/scripts/setup_rust_fork.sh b/compiler/rustc_codegen_cranelift/scripts/setup_rust_fork.sh index 731828caae2c4..684a5d0729355 100644 --- a/compiler/rustc_codegen_cranelift/scripts/setup_rust_fork.sh +++ b/compiler/rustc_codegen_cranelift/scripts/setup_rust_fork.sh @@ -1,7 +1,7 @@ #!/usr/bin/env bash set -e -# CG_CLIF_FORCE_GNU_AS will force usage of as instead of the LLVM backend of rustc as we +# CG_CLIF_FORCE_GNU_AS will force usage of as instead of the LLVM backend of rustc as # the LLVM backend isn't compiled in here. export CG_CLIF_FORCE_GNU_AS=1 @@ -11,20 +11,19 @@ export CG_CLIF_FORCE_GNU_AS=1 CG_CLIF_STDLIB_REMAP_PATH_PREFIX=/rustc/FAKE_PREFIX ./y.sh build echo "[SETUP] Rust fork" -git clone https://github.com/rust-lang/rust.git --filter=tree:0 || true +git clone --quiet https://github.com/rust-lang/rust.git --filter=tree:0 || true pushd rust git fetch -git checkout -- . -git checkout "$(rustc -V | cut -d' ' -f3 | tr -d '(')" +git checkout --no-progress -- . 
+git checkout --no-progress "$(rustc -V | cut -d' ' -f3 | tr -d '(')" + +git submodule update --quiet --init src/tools/cargo library/backtrace library/stdarch git -c user.name=Dummy -c user.email=dummy@example.com -c commit.gpgSign=false \ am ../patches/*-stdlib-*.patch cat > config.toml < {} Linkage::Static => { let name = crate_info.crate_name[&cnum]; - let mut err = sess.struct_err(format!("Can't load static lib {}", name)); + let mut err = sess.dcx().struct_err(format!("Can't load static lib {}", name)); err.note("rustc_codegen_cranelift can only load dylibs in JIT mode."); err.emit(); } diff --git a/compiler/rustc_codegen_cranelift/src/global_asm.rs b/compiler/rustc_codegen_cranelift/src/global_asm.rs index af99239d81593..da07b66c762ee 100644 --- a/compiler/rustc_codegen_cranelift/src/global_asm.rs +++ b/compiler/rustc_codegen_cranelift/src/global_asm.rs @@ -154,6 +154,8 @@ pub(crate) fn compile_global_asm( } } else { let mut child = Command::new(std::env::current_exe().unwrap()) + // Avoid a warning about the jobserver fd not being passed + .env_remove("CARGO_MAKEFLAGS") .arg("--target") .arg(&config.target) .arg("--crate-type") diff --git a/compiler/rustc_codegen_cranelift/src/value_and_place.rs b/compiler/rustc_codegen_cranelift/src/value_and_place.rs index 838c73fa21365..f016e6950d48d 100644 --- a/compiler/rustc_codegen_cranelift/src/value_and_place.rs +++ b/compiler/rustc_codegen_cranelift/src/value_and_place.rs @@ -974,8 +974,8 @@ pub(crate) fn assert_assignable<'tcx>( } } } - (&ty::Coroutine(def_id_a, args_a, mov_a), &ty::Coroutine(def_id_b, args_b, mov_b)) - if def_id_a == def_id_b && mov_a == mov_b => + (&ty::Coroutine(def_id_a, args_a), &ty::Coroutine(def_id_b, args_b)) + if def_id_a == def_id_b => { let mut types_a = args_a.types(); let mut types_b = args_b.types(); diff --git a/compiler/rustc_codegen_gcc/src/asm.rs b/compiler/rustc_codegen_gcc/src/asm.rs index ddd67a994c942..78e8e32b97299 100644 --- a/compiler/rustc_codegen_gcc/src/asm.rs +++ b/compiler/rustc_codegen_gcc/src/asm.rs @@ -634,6 +634,7 @@ fn reg_to_gcc(reg: InlineAsmRegOrRegClass) -> ConstraintOrRegister { } InlineAsmRegClass::Wasm(WasmInlineAsmRegClass::local) => "r", InlineAsmRegClass::S390x(S390xInlineAsmRegClass::reg) => "r", + InlineAsmRegClass::S390x(S390xInlineAsmRegClass::reg_addr) => "a", InlineAsmRegClass::S390x(S390xInlineAsmRegClass::freg) => "f", InlineAsmRegClass::Err => unreachable!(), } @@ -704,7 +705,9 @@ fn dummy_output_type<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, reg: InlineAsmRegCl InlineAsmRegClass::SpirV(SpirVInlineAsmRegClass::reg) => { bug!("LLVM backend does not support SPIR-V") }, - InlineAsmRegClass::S390x(S390xInlineAsmRegClass::reg) => cx.type_i32(), + InlineAsmRegClass::S390x( + S390xInlineAsmRegClass::reg | S390xInlineAsmRegClass::reg_addr + ) => cx.type_i32(), InlineAsmRegClass::S390x(S390xInlineAsmRegClass::freg) => cx.type_f64(), InlineAsmRegClass::Err => unreachable!(), } diff --git a/compiler/rustc_codegen_gcc/src/builder.rs b/compiler/rustc_codegen_gcc/src/builder.rs index b8a8c144dc90b..42e61b3ccb5ad 100644 --- a/compiler/rustc_codegen_gcc/src/builder.rs +++ b/compiler/rustc_codegen_gcc/src/builder.rs @@ -1296,7 +1296,7 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> { } // Atomic Operations - fn atomic_cmpxchg(&mut self, dst: RValue<'gcc>, cmp: RValue<'gcc>, src: RValue<'gcc>, order: AtomicOrdering, failure_order: AtomicOrdering, weak: bool) -> RValue<'gcc> { + fn atomic_cmpxchg(&mut self, dst: RValue<'gcc>, cmp: RValue<'gcc>, src: 
RValue<'gcc>, order: AtomicOrdering, failure_order: AtomicOrdering, weak: bool) -> (RValue<'gcc>, RValue<'gcc>) { let expected = self.current_func().new_local(None, cmp.get_type(), "expected"); self.llbb().add_assignment(None, expected, cmp); // NOTE: gcc doesn't support a failure memory model that is stronger than the success @@ -1310,20 +1310,12 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> { }; let success = self.compare_exchange(dst, expected, src, order, failure_order, weak); - let pair_type = self.cx.type_struct(&[src.get_type(), self.bool_type], false); - let result = self.current_func().new_local(None, pair_type, "atomic_cmpxchg_result"); - let align = Align::from_bits(64).expect("align"); // TODO(antoyo): use good align. + // NOTE: since success contains the call to the intrinsic, it must be added to the basic block before + // expected so that we store expected after the call. + let success_var = self.current_func().new_local(None, self.bool_type, "success"); + self.llbb().add_assignment(None, success_var, success); - let value_type = result.to_rvalue().get_type(); - if let Some(struct_type) = value_type.is_struct() { - self.store(success, result.access_field(None, struct_type.get_field(1)).get_address(None), align); - // NOTE: since success contains the call to the intrinsic, it must be stored before - // expected so that we store expected after the call. - self.store(expected.to_rvalue(), result.access_field(None, struct_type.get_field(0)).get_address(None), align); - } - // TODO(antoyo): handle when value is not a struct. - - result.to_rvalue() + (expected.to_rvalue(), success_var.to_rvalue()) } fn atomic_rmw(&mut self, op: AtomicRmwBinOp, dst: RValue<'gcc>, src: RValue<'gcc>, order: AtomicOrdering) -> RValue<'gcc> { diff --git a/compiler/rustc_codegen_gcc/src/type_of.rs b/compiler/rustc_codegen_gcc/src/type_of.rs index 479a814788a54..e5c0b2de4ca46 100644 --- a/compiler/rustc_codegen_gcc/src/type_of.rs +++ b/compiler/rustc_codegen_gcc/src/type_of.rs @@ -98,7 +98,7 @@ fn uncached_gcc_type<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, layout: TyAndLayout write!(&mut name, "::{}", def.variant(index).name).unwrap(); } } - if let (&ty::Coroutine(_, _, _), &Variants::Single { index }) = + if let (&ty::Coroutine(_, _), &Variants::Single { index }) = (layout.ty.kind(), &layout.variants) { write!(&mut name, "::{}", ty::CoroutineArgs::variant_name(index)).unwrap(); diff --git a/compiler/rustc_codegen_llvm/Cargo.toml b/compiler/rustc_codegen_llvm/Cargo.toml index 7122c055e7ea7..c12142e302d3d 100644 --- a/compiler/rustc_codegen_llvm/Cargo.toml +++ b/compiler/rustc_codegen_llvm/Cargo.toml @@ -8,7 +8,7 @@ test = false [dependencies] # tidy-alphabetical-start -bitflags = "1.0" +bitflags = "2.4.1" itertools = "0.11" libc = "0.2" measureme = "10.0.0" diff --git a/compiler/rustc_codegen_llvm/src/asm.rs b/compiler/rustc_codegen_llvm/src/asm.rs index 1323261ae9240..a413466093bed 100644 --- a/compiler/rustc_codegen_llvm/src/asm.rs +++ b/compiler/rustc_codegen_llvm/src/asm.rs @@ -690,6 +690,7 @@ fn reg_to_llvm(reg: InlineAsmRegOrRegClass, layout: Option<&TyAndLayout<'_>>) -> InlineAsmRegClass::Avr(AvrInlineAsmRegClass::reg_iw) => "w", InlineAsmRegClass::Avr(AvrInlineAsmRegClass::reg_ptr) => "e", InlineAsmRegClass::S390x(S390xInlineAsmRegClass::reg) => "r", + InlineAsmRegClass::S390x(S390xInlineAsmRegClass::reg_addr) => "a", InlineAsmRegClass::S390x(S390xInlineAsmRegClass::freg) => "f", InlineAsmRegClass::Msp430(Msp430InlineAsmRegClass::reg) => "r", 
InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg) => "r", @@ -867,7 +868,9 @@ fn dummy_output_type<'ll>(cx: &CodegenCx<'ll, '_>, reg: InlineAsmRegClass) -> &' InlineAsmRegClass::Avr(AvrInlineAsmRegClass::reg_pair) => cx.type_i16(), InlineAsmRegClass::Avr(AvrInlineAsmRegClass::reg_iw) => cx.type_i16(), InlineAsmRegClass::Avr(AvrInlineAsmRegClass::reg_ptr) => cx.type_i16(), - InlineAsmRegClass::S390x(S390xInlineAsmRegClass::reg) => cx.type_i32(), + InlineAsmRegClass::S390x( + S390xInlineAsmRegClass::reg | S390xInlineAsmRegClass::reg_addr, + ) => cx.type_i32(), InlineAsmRegClass::S390x(S390xInlineAsmRegClass::freg) => cx.type_f64(), InlineAsmRegClass::Msp430(Msp430InlineAsmRegClass::reg) => cx.type_i16(), InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg) => cx.type_i32(), diff --git a/compiler/rustc_codegen_llvm/src/attributes.rs b/compiler/rustc_codegen_llvm/src/attributes.rs index 3cc33b8343403..b3fa7b7cd445c 100644 --- a/compiler/rustc_codegen_llvm/src/attributes.rs +++ b/compiler/rustc_codegen_llvm/src/attributes.rs @@ -481,7 +481,7 @@ pub fn from_fn_attrs<'ll, 'tcx>( // `+multivalue` feature because the purpose of the wasm abi is to match // the WebAssembly specification, which has this feature. This won't be // needed when LLVM enables this `multivalue` feature by default. - if !cx.tcx.is_closure(instance.def_id()) { + if !cx.tcx.is_closure_or_coroutine(instance.def_id()) { let abi = cx.tcx.fn_sig(instance.def_id()).skip_binder().abi(); if abi == Abi::Wasm { function_features.push("+multivalue".to_string()); diff --git a/compiler/rustc_codegen_llvm/src/builder.rs b/compiler/rustc_codegen_llvm/src/builder.rs index 8f60175a6031c..4e5fe290bb1f7 100644 --- a/compiler/rustc_codegen_llvm/src/builder.rs +++ b/compiler/rustc_codegen_llvm/src/builder.rs @@ -1072,7 +1072,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { order: rustc_codegen_ssa::common::AtomicOrdering, failure_order: rustc_codegen_ssa::common::AtomicOrdering, weak: bool, - ) -> &'ll Value { + ) -> (&'ll Value, &'ll Value) { let weak = if weak { llvm::True } else { llvm::False }; unsafe { let value = llvm::LLVMBuildAtomicCmpXchg( @@ -1085,7 +1085,9 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { llvm::False, // SingleThreaded ); llvm::LLVMSetWeak(value, weak); - value + let val = self.extract_value(value, 0); + let success = self.extract_value(value, 1); + (val, success) } } fn atomic_rmw( diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs index 33bfde03a31c3..51df14df644e0 100644 --- a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs +++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs @@ -58,11 +58,6 @@ pub fn finalize(cx: &CodegenCx<'_, '_>) { return; } - // The entries of the map are only used to get a list of all files with - // coverage info. In the end the list of files is passed into - // `GlobalFileTable::new()` which internally do `.sort_unstable_by()`, so - // the iteration order here does not matter. 
- #[allow(rustc::potential_query_instability)] let function_coverage_entries = function_coverage_map .into_iter() .map(|(instance, function_coverage)| (instance, function_coverage.into_finished())) diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs index 0befbb5a39be3..733a77d24c2a4 100644 --- a/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs +++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs @@ -10,7 +10,7 @@ use rustc_codegen_ssa::traits::{ BaseTypeMethods, BuilderMethods, ConstMethods, CoverageInfoBuilderMethods, MiscMethods, StaticMethods, }; -use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; use rustc_llvm::RustString; use rustc_middle::bug; use rustc_middle::mir::coverage::CoverageKind; @@ -30,7 +30,7 @@ const VAR_ALIGN_BYTES: usize = 8; pub struct CrateCoverageContext<'ll, 'tcx> { /// Coverage data for each instrumented function identified by DefId. pub(crate) function_coverage_map: - RefCell<FxHashMap<Instance<'tcx>, FunctionCoverageCollector<'tcx>>>, + RefCell<FxIndexMap<Instance<'tcx>, FunctionCoverageCollector<'tcx>>>, pub(crate) pgo_func_name_var_map: RefCell<FxHashMap<Instance<'tcx>, &'ll llvm::Value>>, } @@ -44,8 +44,8 @@ impl<'ll, 'tcx> CrateCoverageContext<'ll, 'tcx> { pub fn take_function_coverage_map( &self, - ) -> FxHashMap<Instance<'tcx>, FunctionCoverageCollector<'tcx>> { - self.function_coverage_map.replace(FxHashMap::default()) + ) -> FxIndexMap<Instance<'tcx>, FunctionCoverageCollector<'tcx>> { - self.function_coverage_map.replace(FxHashMap::default()) + self.function_coverage_map.replace(FxIndexMap::default()) } } diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs index 59c075a3d3e04..76c9ac6614a30 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs @@ -1066,7 +1066,7 @@ fn build_upvar_field_di_nodes<'ll, 'tcx>( closure_or_coroutine_di_node: &'ll DIType, ) -> SmallVec<&'ll DIType> { let (&def_id, up_var_tys) = match closure_or_coroutine_ty.kind() { - ty::Coroutine(def_id, args, _) => (def_id, args.as_coroutine().prefix_tys()), + ty::Coroutine(def_id, args) => (def_id, args.as_coroutine().prefix_tys()), ty::Closure(def_id, args) => (def_id, args.as_closure().upvar_tys()), _ => { bug!( diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs index 4a2861af44c1d..4792b0798dfb8 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs @@ -679,7 +679,7 @@ fn build_union_fields_for_direct_tag_coroutine<'ll, 'tcx>( }; let (coroutine_def_id, coroutine_args) = match coroutine_type_and_layout.ty.kind() { - &ty::Coroutine(def_id, args, _) => (def_id, args.as_coroutine()), + &ty::Coroutine(def_id, args) => (def_id, args.as_coroutine()), _ => unreachable!(), }; diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs index eef8dbb33b49f..7f671d1d06129 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs @@ -336,7 +336,7 @@ pub fn build_coroutine_variant_struct_type_di_node<'ll, 'tcx>( let variant_layout = coroutine_type_and_layout.for_variant(cx, variant_index); let coroutine_args = match coroutine_type_and_layout.ty.kind() { - ty::Coroutine(_, args, _) => args.as_coroutine(), + ty::Coroutine(_, args)
=> args.as_coroutine(), _ => unreachable!(), }; diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs index cba4e3811d51b..3dbe820b8ff9b 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs @@ -135,7 +135,7 @@ pub(super) fn build_coroutine_di_node<'ll, 'tcx>( unique_type_id: UniqueTypeId<'tcx>, ) -> DINodeCreationResult<'ll> { let coroutine_type = unique_type_id.expect_ty(); - let &ty::Coroutine(coroutine_def_id, _, _) = coroutine_type.kind() else { + let &ty::Coroutine(coroutine_def_id, _) = coroutine_type.kind() else { bug!("build_coroutine_di_node() called with non-coroutine type: `{:?}`", coroutine_type) }; diff --git a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs index 81702baa8c053..aefca6b34f577 100644 --- a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs +++ b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs @@ -722,7 +722,7 @@ pub mod debuginfo { // These values **must** match with LLVMRustDIFlags!! bitflags! { #[repr(transparent)] - #[derive(Default)] + #[derive(Clone, Copy, Default)] pub struct DIFlags: u32 { const FlagZero = 0; const FlagPrivate = 1; @@ -751,7 +751,7 @@ pub mod debuginfo { // These values **must** match with LLVMRustDISPFlags!! bitflags! { #[repr(transparent)] - #[derive(Default)] + #[derive(Clone, Copy, Default)] pub struct DISPFlags: u32 { const SPFlagZero = 0; const SPFlagVirtual = 1; diff --git a/compiler/rustc_codegen_llvm/src/type_of.rs b/compiler/rustc_codegen_llvm/src/type_of.rs index 624ce6d8813e7..57b46382c9676 100644 --- a/compiler/rustc_codegen_llvm/src/type_of.rs +++ b/compiler/rustc_codegen_llvm/src/type_of.rs @@ -54,7 +54,7 @@ fn uncached_llvm_type<'a, 'tcx>( write!(&mut name, "::{}", def.variant(index).name).unwrap(); } } - if let (&ty::Coroutine(_, _, _), &Variants::Single { index }) = + if let (&ty::Coroutine(_, _), &Variants::Single { index }) = (layout.ty.kind(), &layout.variants) { write!(&mut name, "::{}", ty::CoroutineArgs::variant_name(index)).unwrap(); diff --git a/compiler/rustc_codegen_ssa/Cargo.toml b/compiler/rustc_codegen_ssa/Cargo.toml index 3f2ed257d0830..7d2f5bb193a39 100644 --- a/compiler/rustc_codegen_ssa/Cargo.toml +++ b/compiler/rustc_codegen_ssa/Cargo.toml @@ -6,7 +6,7 @@ edition = "2021" [dependencies] # tidy-alphabetical-start ar_archive_writer = "0.1.5" -bitflags = "1.2.1" +bitflags = "2.4.1" cc = "1.0.69" itertools = "0.11" jobserver = "0.1.27" diff --git a/compiler/rustc_codegen_ssa/src/back/link.rs b/compiler/rustc_codegen_ssa/src/back/link.rs index 4ff497f2fdd3f..215649f33ff1d 100644 --- a/compiler/rustc_codegen_ssa/src/back/link.rs +++ b/compiler/rustc_codegen_ssa/src/back/link.rs @@ -1186,15 +1186,22 @@ mod win { } } -fn add_sanitizer_libraries(sess: &Session, crate_type: CrateType, linker: &mut dyn Linker) { - // On macOS the runtimes are distributed as dylibs which should be linked to - // both executables and dynamic shared objects. Everywhere else the runtimes - // are currently distributed as static libraries which should be linked to - // executables only. +fn add_sanitizer_libraries( + sess: &Session, + flavor: LinkerFlavor, + crate_type: CrateType, + linker: &mut dyn Linker, +) { + // On macOS and Windows using MSVC the runtimes are distributed as dylibs + // which should be linked to both executables and dynamic libraries. 
+ // Everywhere else the runtimes are currently distributed as static + // libraries which should be linked to executables only. let needs_runtime = !sess.target.is_like_android && match crate_type { CrateType::Executable => true, - CrateType::Dylib | CrateType::Cdylib | CrateType::ProcMacro => sess.target.is_like_osx, + CrateType::Dylib | CrateType::Cdylib | CrateType::ProcMacro => { + sess.target.is_like_osx || sess.target.is_like_msvc + } CrateType::Rlib | CrateType::Staticlib => false, }; @@ -1204,26 +1211,31 @@ fn add_sanitizer_libraries(sess: &Session, crate_type: CrateType, linker: &mut d let sanitizer = sess.opts.unstable_opts.sanitizer; if sanitizer.contains(SanitizerSet::ADDRESS) { - link_sanitizer_runtime(sess, linker, "asan"); + link_sanitizer_runtime(sess, flavor, linker, "asan"); } if sanitizer.contains(SanitizerSet::LEAK) { - link_sanitizer_runtime(sess, linker, "lsan"); + link_sanitizer_runtime(sess, flavor, linker, "lsan"); } if sanitizer.contains(SanitizerSet::MEMORY) { - link_sanitizer_runtime(sess, linker, "msan"); + link_sanitizer_runtime(sess, flavor, linker, "msan"); } if sanitizer.contains(SanitizerSet::THREAD) { - link_sanitizer_runtime(sess, linker, "tsan"); + link_sanitizer_runtime(sess, flavor, linker, "tsan"); } if sanitizer.contains(SanitizerSet::HWADDRESS) { - link_sanitizer_runtime(sess, linker, "hwasan"); + link_sanitizer_runtime(sess, flavor, linker, "hwasan"); } if sanitizer.contains(SanitizerSet::SAFESTACK) { - link_sanitizer_runtime(sess, linker, "safestack"); + link_sanitizer_runtime(sess, flavor, linker, "safestack"); } } -fn link_sanitizer_runtime(sess: &Session, linker: &mut dyn Linker, name: &str) { +fn link_sanitizer_runtime( + sess: &Session, + flavor: LinkerFlavor, + linker: &mut dyn Linker, + name: &str, +) { fn find_sanitizer_runtime(sess: &Session, filename: &str) -> PathBuf { let session_tlib = filesearch::make_target_lib_path(&sess.sysroot, sess.opts.target_triple.triple()); @@ -1254,6 +1266,10 @@ fn link_sanitizer_runtime(sess: &Session, linker: &mut dyn Linker, name: &str) { let rpath = path.to_str().expect("non-utf8 component in path"); linker.args(&["-Wl,-rpath", "-Xlinker", rpath]); linker.link_dylib(&filename, false, true); + } else if sess.target.is_like_msvc && flavor == LinkerFlavor::Msvc(Lld::No) && name == "asan" { + // MSVC provides the `/INFERASANLIBS` argument to automatically find the + // compatible ASAN library. + linker.arg("/INFERASANLIBS"); } else { let filename = format!("librustc{channel}_rt.{name}.a"); let path = find_sanitizer_runtime(sess, &filename).join(&filename); @@ -2076,7 +2092,7 @@ fn linker_with_args<'a>( ); // Sanitizer libraries. - add_sanitizer_libraries(sess, crate_type, cmd); + add_sanitizer_libraries(sess, flavor, crate_type, cmd); // Object code from the current crate. 
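Editor's note, purely illustrative: the hunks above thread the LinkerFlavor through so the MSVC linker can be asked to infer the AddressSanitizer import library via `/INFERASANLIBS`, while macOS keeps linking the runtime as a dylib and other targets keep the static archive. The following standalone sketch (hypothetical enum and function names, heavily simplified from the Session/Target/Linker machinery) is only meant to show that three-way choice:

// Illustrative stand-alone sketch, not compiler code.
#[derive(Debug, PartialEq)]
enum SanitizerRuntimeLink {
    /// macOS-style: runtime ships as a dylib, linked together with an rpath entry.
    Dylib { filename: String },
    /// MSVC linker + AddressSanitizer: let `link.exe` pick the import library.
    MsvcInferAsanLibs,
    /// Everywhere else: a static archive bundled in the sysroot.
    StaticArchive { filename: String },
}

fn choose_runtime_link(
    is_like_osx: bool,
    is_like_msvc: bool,
    using_msvc_linker: bool,
    channel: &str,
    name: &str,
) -> SanitizerRuntimeLink {
    if is_like_osx {
        SanitizerRuntimeLink::Dylib { filename: format!("rustc{channel}_rt.{name}") }
    } else if is_like_msvc && using_msvc_linker && name == "asan" {
        SanitizerRuntimeLink::MsvcInferAsanLibs
    } else {
        SanitizerRuntimeLink::StaticArchive { filename: format!("librustc{channel}_rt.{name}.a") }
    }
}

fn main() {
    assert_eq!(
        choose_runtime_link(false, true, true, "-nightly", "asan"),
        SanitizerRuntimeLink::MsvcInferAsanLibs
    );
    assert_eq!(
        choose_runtime_link(false, false, false, "-nightly", "tsan"),
        SanitizerRuntimeLink::StaticArchive { filename: "librustc-nightly_rt.tsan.a".into() }
    );
}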
// Take careful note of the ordering of the arguments we pass to the linker diff --git a/compiler/rustc_codegen_ssa/src/codegen_attrs.rs b/compiler/rustc_codegen_ssa/src/codegen_attrs.rs index f5f2416abb6dc..63fd7b42f7ba1 100644 --- a/compiler/rustc_codegen_ssa/src/codegen_attrs.rs +++ b/compiler/rustc_codegen_ssa/src/codegen_attrs.rs @@ -232,7 +232,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { } sym::thread_local => codegen_fn_attrs.flags |= CodegenFnAttrFlags::THREAD_LOCAL, sym::track_caller => { - let is_closure = tcx.is_closure(did.to_def_id()); + let is_closure = tcx.is_closure_or_coroutine(did.to_def_id()); if !is_closure && let Some(fn_sig) = fn_sig() @@ -277,7 +277,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { } } sym::target_feature => { - if !tcx.is_closure(did.to_def_id()) + if !tcx.is_closure_or_coroutine(did.to_def_id()) && let Some(fn_sig) = fn_sig() && fn_sig.skip_binder().unsafety() == hir::Unsafety::Normal { @@ -531,7 +531,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs { // would result in this closure being compiled without the inherited target features, but this // is probably a poor usage of `#[inline(always)]` and easily avoided by not using the attribute. if tcx.features().target_feature_11 - && tcx.is_closure(did.to_def_id()) + && tcx.is_closure_or_coroutine(did.to_def_id()) && codegen_fn_attrs.inline != InlineAttr::Always { let owner_id = tcx.parent(did.to_def_id()); diff --git a/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs b/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs index 1b01fe0365462..4f9f70648bd88 100644 --- a/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs +++ b/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs @@ -585,7 +585,7 @@ fn coroutine_kind_label(coroutine_kind: Option) -> &'static str { Some(CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, CoroutineSource::Fn)) => { "async_gen_fn" } - Some(CoroutineKind::Coroutine) => "coroutine", + Some(CoroutineKind::Coroutine(_)) => "coroutine", None => "closure", } } diff --git a/compiler/rustc_codegen_ssa/src/lib.rs b/compiler/rustc_codegen_ssa/src/lib.rs index 9b60f0844a067..0d88df632803f 100644 --- a/compiler/rustc_codegen_ssa/src/lib.rs +++ b/compiler/rustc_codegen_ssa/src/lib.rs @@ -110,6 +110,7 @@ pub enum ModuleKind { } bitflags::bitflags! 
{ + #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct MemFlags: u8 { const VOLATILE = 1 << 0; const NONTEMPORAL = 1 << 1; diff --git a/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs b/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs index 533803ea7ff2b..8530bf9e2b363 100644 --- a/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs +++ b/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs @@ -335,7 +335,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { cmp = bx.ptrtoint(cmp, bx.type_isize()); src = bx.ptrtoint(src, bx.type_isize()); } - let pair = bx.atomic_cmpxchg( + let (val, success) = bx.atomic_cmpxchg( dst, cmp, src, @@ -343,8 +343,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { parse_ordering(bx, failure), weak, ); - let val = bx.extract_value(pair, 0); - let success = bx.extract_value(pair, 1); let val = bx.from_immediate(val); let success = bx.from_immediate(success); diff --git a/compiler/rustc_codegen_ssa/src/traits/builder.rs b/compiler/rustc_codegen_ssa/src/traits/builder.rs index aa411f002a0c6..1c5c78e6ca200 100644 --- a/compiler/rustc_codegen_ssa/src/traits/builder.rs +++ b/compiler/rustc_codegen_ssa/src/traits/builder.rs @@ -296,7 +296,7 @@ pub trait BuilderMethods<'a, 'tcx>: order: AtomicOrdering, failure_order: AtomicOrdering, weak: bool, - ) -> Self::Value; + ) -> (Self::Value, Self::Value); fn atomic_rmw( &mut self, op: AtomicRmwBinOp, diff --git a/compiler/rustc_const_eval/src/interpret/discriminant.rs b/compiler/rustc_const_eval/src/interpret/discriminant.rs index d9f583c1d1f68..bb8c17cf7791d 100644 --- a/compiler/rustc_const_eval/src/interpret/discriminant.rs +++ b/compiler/rustc_const_eval/src/interpret/discriminant.rs @@ -171,7 +171,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { ty::Adt(adt, _) => { adt.discriminants(*self.tcx).find(|(_, var)| var.val == discr_bits) } - ty::Coroutine(def_id, args, _) => { + ty::Coroutine(def_id, args) => { let args = args.as_coroutine(); args.discriminants(def_id, *self.tcx).find(|(_, var)| var.val == discr_bits) } diff --git a/compiler/rustc_const_eval/src/interpret/intrinsics.rs b/compiler/rustc_const_eval/src/interpret/intrinsics.rs index c29f23b913f68..1e9e7d94596f8 100644 --- a/compiler/rustc_const_eval/src/interpret/intrinsics.rs +++ b/compiler/rustc_const_eval/src/interpret/intrinsics.rs @@ -85,7 +85,7 @@ pub(crate) fn eval_nullary_intrinsic<'tcx>( | ty::FnPtr(_) | ty::Dynamic(_, _, _) | ty::Closure(_, _) - | ty::Coroutine(_, _, _) + | ty::Coroutine(_, _) | ty::CoroutineWitness(..) | ty::Never | ty::Tuple(_) diff --git a/compiler/rustc_const_eval/src/interpret/validity.rs b/compiler/rustc_const_eval/src/interpret/validity.rs index 07500f74477b4..8b44b87647dab 100644 --- a/compiler/rustc_const_eval/src/interpret/validity.rs +++ b/compiler/rustc_const_eval/src/interpret/validity.rs @@ -217,7 +217,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, ' // Now we know we are projecting to a field, so figure out which one. match layout.ty.kind() { // coroutines and closures. - ty::Closure(def_id, _) | ty::Coroutine(def_id, _, _) => { + ty::Closure(def_id, _) | ty::Coroutine(def_id, _) => { let mut name = None; // FIXME this should be more descriptive i.e. 
CapturePlace instead of CapturedVar // https://github.com/rust-lang/project-rfc-2229/issues/46 diff --git a/compiler/rustc_const_eval/src/transform/check_consts/check.rs b/compiler/rustc_const_eval/src/transform/check_consts/check.rs index c1ab62ac0b86e..92955c4ed14ca 100644 --- a/compiler/rustc_const_eval/src/transform/check_consts/check.rs +++ b/compiler/rustc_const_eval/src/transform/check_consts/check.rs @@ -938,8 +938,17 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> { TerminatorKind::InlineAsm { .. } => self.check_op(ops::InlineAsm), - TerminatorKind::CoroutineDrop | TerminatorKind::Yield { .. } => { - self.check_op(ops::Coroutine(hir::CoroutineKind::Coroutine)) + TerminatorKind::Yield { .. } => self.check_op(ops::Coroutine( + self.tcx + .coroutine_kind(self.body.source.def_id()) + .expect("Only expected to have a yield in a coroutine"), + )), + + TerminatorKind::CoroutineDrop => { + span_bug!( + self.body.source_info(location).span, + "We should not encounter TerminatorKind::CoroutineDrop after coroutine transform" + ); } TerminatorKind::UnwindTerminate(_) => { diff --git a/compiler/rustc_const_eval/src/transform/check_consts/mod.rs b/compiler/rustc_const_eval/src/transform/check_consts/mod.rs index fbc95072802f9..98276ff2e68d6 100644 --- a/compiler/rustc_const_eval/src/transform/check_consts/mod.rs +++ b/compiler/rustc_const_eval/src/transform/check_consts/mod.rs @@ -72,7 +72,7 @@ impl<'mir, 'tcx> ConstCx<'mir, 'tcx> { pub fn fn_sig(&self) -> PolyFnSig<'tcx> { let did = self.def_id().to_def_id(); - if self.tcx.is_closure(did) { + if self.tcx.is_closure_or_coroutine(did) { let ty = self.tcx.type_of(did).instantiate_identity(); let ty::Closure(_, args) = ty.kind() else { bug!("type_of closure not ty::Closure") }; args.as_closure().sig() diff --git a/compiler/rustc_const_eval/src/transform/validate.rs b/compiler/rustc_const_eval/src/transform/validate.rs index 68ded1d324fe8..b249ffb84b3a8 100644 --- a/compiler/rustc_const_eval/src/transform/validate.rs +++ b/compiler/rustc_const_eval/src/transform/validate.rs @@ -694,7 +694,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { }; check_equal(self, location, f_ty); } - &ty::Coroutine(def_id, args, _) => { + &ty::Coroutine(def_id, args) => { let f_ty = if let Some(var) = parent_ty.variant_index { let gen_body = if def_id == self.body.source.def_id() { self.body diff --git a/compiler/rustc_const_eval/src/util/type_name.rs b/compiler/rustc_const_eval/src/util/type_name.rs index a82b65b19a882..976e42ad76836 100644 --- a/compiler/rustc_const_eval/src/util/type_name.rs +++ b/compiler/rustc_const_eval/src/util/type_name.rs @@ -51,7 +51,7 @@ impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> { | ty::FnDef(def_id, args) | ty::Alias(ty::Projection | ty::Opaque, ty::AliasTy { def_id, args, .. 
}) | ty::Closure(def_id, args) - | ty::Coroutine(def_id, args, _) => self.print_def_path(def_id, args), + | ty::Coroutine(def_id, args) => self.print_def_path(def_id, args), ty::Foreign(def_id) => self.print_def_path(def_id, &[]), ty::Alias(ty::Weak, _) => bug!("type_name: unexpected weak projection"), diff --git a/compiler/rustc_data_structures/Cargo.toml b/compiler/rustc_data_structures/Cargo.toml index 4732783a12d95..23949deaade87 100644 --- a/compiler/rustc_data_structures/Cargo.toml +++ b/compiler/rustc_data_structures/Cargo.toml @@ -6,7 +6,7 @@ edition = "2021" [dependencies] # tidy-alphabetical-start arrayvec = { version = "0.7", default-features = false } -bitflags = "1.2.1" +bitflags = "2.4.1" elsa = "=1.7.1" ena = "0.14.2" indexmap = { version = "2.0.0" } diff --git a/compiler/rustc_data_structures/src/hashes.rs b/compiler/rustc_data_structures/src/hashes.rs index ad068cdbc9841..291ee5bbe26e3 100644 --- a/compiler/rustc_data_structures/src/hashes.rs +++ b/compiler/rustc_data_structures/src/hashes.rs @@ -25,7 +25,7 @@ impl Hash64 { pub const ZERO: Hash64 = Hash64 { inner: 0 }; #[inline] - pub(crate) fn new(n: u64) -> Self { + pub fn new(n: u64) -> Self { Self { inner: n } } diff --git a/compiler/rustc_data_structures/src/lib.rs b/compiler/rustc_data_structures/src/lib.rs index 3ef87684fa5d7..93b4032c31089 100644 --- a/compiler/rustc_data_structures/src/lib.rs +++ b/compiler/rustc_data_structures/src/lib.rs @@ -150,3 +150,14 @@ pub fn make_display(f: impl Fn(&mut fmt::Formatter<'_>) -> fmt::Result) -> impl // See comments in src/librustc_middle/lib.rs #[doc(hidden)] pub fn __noop_fix_for_27438() {} + +#[macro_export] +macro_rules! external_bitflags_debug { + ($Name:ident) => { + impl ::std::fmt::Debug for $Name { + fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { + ::bitflags::parser::to_writer(self, f) + } + } + }; +} diff --git a/compiler/rustc_data_structures/src/profiling.rs b/compiler/rustc_data_structures/src/profiling.rs index ef7375a732064..e29d4811980c7 100644 --- a/compiler/rustc_data_structures/src/profiling.rs +++ b/compiler/rustc_data_structures/src/profiling.rs @@ -101,6 +101,7 @@ use parking_lot::RwLock; use smallvec::SmallVec; bitflags::bitflags! { + #[derive(Clone, Copy)] struct EventFilter: u16 { const GENERIC_ACTIVITIES = 1 << 0; const QUERY_PROVIDERS = 1 << 1; @@ -114,14 +115,14 @@ bitflags::bitflags! 
{ const INCR_RESULT_HASHING = 1 << 8; const ARTIFACT_SIZES = 1 << 9; - const DEFAULT = Self::GENERIC_ACTIVITIES.bits | - Self::QUERY_PROVIDERS.bits | - Self::QUERY_BLOCKED.bits | - Self::INCR_CACHE_LOADS.bits | - Self::INCR_RESULT_HASHING.bits | - Self::ARTIFACT_SIZES.bits; + const DEFAULT = Self::GENERIC_ACTIVITIES.bits() | + Self::QUERY_PROVIDERS.bits() | + Self::QUERY_BLOCKED.bits() | + Self::INCR_CACHE_LOADS.bits() | + Self::INCR_RESULT_HASHING.bits() | + Self::ARTIFACT_SIZES.bits(); - const ARGS = Self::QUERY_KEYS.bits | Self::FUNCTION_ARGS.bits; + const ARGS = Self::QUERY_KEYS.bits() | Self::FUNCTION_ARGS.bits(); } } diff --git a/compiler/rustc_driver_impl/src/lib.rs b/compiler/rustc_driver_impl/src/lib.rs index ca6b0afc76a9b..2041ffefe77a1 100644 --- a/compiler/rustc_driver_impl/src/lib.rs +++ b/compiler/rustc_driver_impl/src/lib.rs @@ -12,6 +12,7 @@ #![feature(lazy_cell)] #![feature(let_chains)] #![feature(panic_update_hook)] +#![feature(result_flattening)] #![recursion_limit = "256"] #![deny(rustc::untranslatable_diagnostic)] #![deny(rustc::diagnostic_outside_of_impl)] @@ -1249,8 +1250,7 @@ pub fn catch_fatal_errors R, R>(f: F) -> Result interface::Result<()>) -> i32 { - let result = catch_fatal_errors(f).and_then(|result| result); - match result { + match catch_fatal_errors(f).flatten() { Ok(()) => EXIT_SUCCESS, Err(_) => EXIT_FAILURE, } diff --git a/compiler/rustc_driver_impl/src/pretty.rs b/compiler/rustc_driver_impl/src/pretty.rs index 7cd63bc6422c3..e5a7d5501151a 100644 --- a/compiler/rustc_driver_impl/src/pretty.rs +++ b/compiler/rustc_driver_impl/src/pretty.rs @@ -146,7 +146,7 @@ impl<'a> pprust_ast::PpAnn for AstHygieneAnn<'a> { } pprust_ast::AnnNode::Crate(_) => { s.s.hardbreak(); - let verbose = self.sess.verbose(); + let verbose = self.sess.verbose_internals(); s.synth_comment(rustc_span::hygiene::debug_hygiene_data(verbose)); s.s.hardbreak_if_not_bol(); } diff --git a/compiler/rustc_error_codes/src/error_codes/E0379.md b/compiler/rustc_error_codes/src/error_codes/E0379.md index ab438e4144712..35f546cfdb737 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0379.md +++ b/compiler/rustc_error_codes/src/error_codes/E0379.md @@ -6,6 +6,10 @@ Erroneous code example: trait Foo { const fn bar() -> u32; // error! } + +impl Foo for () { + const fn bar() -> u32 { 0 } // error! +} ``` Trait methods cannot be declared `const` by design. 
For more information, see diff --git a/compiler/rustc_error_codes/src/error_codes/E0453.md b/compiler/rustc_error_codes/src/error_codes/E0453.md index 11789db8f365a..86ca0d9eca9de 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0453.md +++ b/compiler/rustc_error_codes/src/error_codes/E0453.md @@ -8,8 +8,8 @@ Example of erroneous code: #[allow(non_snake_case)] fn main() { - let MyNumber = 2; // error: allow(non_snake_case) overruled by outer - // forbid(non_snake_case) + // error: allow(non_snake_case) incompatible with previous forbid + let MyNumber = 2; } ``` diff --git a/compiler/rustc_errors/src/diagnostic.rs b/compiler/rustc_errors/src/diagnostic.rs index c226b2d41bdef..49431fb7b3f18 100644 --- a/compiler/rustc_errors/src/diagnostic.rs +++ b/compiler/rustc_errors/src/diagnostic.rs @@ -3,7 +3,7 @@ use crate::{ CodeSuggestion, DiagnosticBuilder, DiagnosticMessage, EmissionGuarantee, Level, MultiSpan, SubdiagnosticMessage, Substitution, SubstitutionPart, SuggestionStyle, }; -use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; use rustc_error_messages::fluent_value_from_str_list_sep_by_and; use rustc_error_messages::FluentValue; use rustc_lint_defs::{Applicability, LintExpectationId}; @@ -259,7 +259,7 @@ impl Diagnostic { pub(crate) fn update_unstable_expectation_id( &mut self, - unstable_to_stable: &FxHashMap, + unstable_to_stable: &FxIndexMap, ) { if let Level::Expect(expectation_id) | Level::Warning(Some(expectation_id)) = &mut self.level diff --git a/compiler/rustc_errors/src/lib.rs b/compiler/rustc_errors/src/lib.rs index e436591fdd99b..6707e17e90f4a 100644 --- a/compiler/rustc_errors/src/lib.rs +++ b/compiler/rustc_errors/src/lib.rs @@ -55,7 +55,7 @@ pub use termcolor::{Color, ColorSpec, WriteColor}; use crate::diagnostic_impls::{DelayedAtWithNewline, DelayedAtWithoutNewline}; use emitter::{is_case_difference, DynEmitter, Emitter, EmitterWriter}; use registry::Registry; -use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet}; +use rustc_data_structures::fx::{FxHashSet, FxIndexMap, FxIndexSet}; use rustc_data_structures::stable_hasher::{Hash128, StableHasher}; use rustc_data_structures::sync::{Lock, Lrc}; use rustc_data_structures::AtomicRef; @@ -1318,7 +1318,7 @@ impl DiagCtxt { pub fn update_unstable_expectation_id( &self, - unstable_to_stable: &FxHashMap, + unstable_to_stable: &FxIndexMap, ) { let mut inner = self.inner.borrow_mut(); let diags = std::mem::take(&mut inner.unstable_expect_diagnostics); @@ -1698,7 +1698,7 @@ pub enum Level { /// internal overflows, some file operation errors. /// /// Its `EmissionGuarantee` is `FatalAbort`, except in the non-aborting "almost fatal" case - /// that is occasionaly used, where it is `FatalError`. + /// that is occasionally used, where it is `FatalError`. Fatal, /// An error in the code being compiled, which prevents compilation from finishing. 
This is the diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs index e9736d6f2c8ae..e9797abcbdf24 100644 --- a/compiler/rustc_expand/src/mbe/macro_rules.rs +++ b/compiler/rustc_expand/src/mbe/macro_rules.rs @@ -10,7 +10,7 @@ use crate::mbe::transcribe::transcribe; use rustc_ast as ast; use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind, TokenKind::*}; -use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree}; +use rustc_ast::tokenstream::{DelimSpan, TokenStream}; use rustc_ast::{NodeId, DUMMY_NODE_ID}; use rustc_ast_pretty::pprust; use rustc_attr::{self as attr, TransparencyError}; @@ -213,7 +213,7 @@ fn expand_macro<'cx>( let arm_span = rhses[i].span(); // rhs has holes ( `$id` and `$(...)` that need filled) - let mut tts = match transcribe(cx, &named_matches, rhs, rhs_span, transparency) { + let tts = match transcribe(cx, &named_matches, rhs, rhs_span, transparency) { Ok(tts) => tts, Err(mut err) => { err.emit(); @@ -221,37 +221,6 @@ fn expand_macro<'cx>( } }; - // Replace all the tokens for the corresponding positions in the macro, to maintain - // proper positions in error reporting, while maintaining the macro_backtrace. - if tts.len() == rhs.tts.len() { - tts = tts.map_enumerated_owned(|i, mut tt| { - let rhs_tt = &rhs.tts[i]; - let ctxt = tt.span().ctxt(); - match (&mut tt, rhs_tt) { - // preserve the delim spans if able - ( - TokenTree::Delimited(target_sp, ..), - mbe::TokenTree::Delimited(source_sp, ..), - ) => { - target_sp.open = source_sp.open.with_ctxt(ctxt); - target_sp.close = source_sp.close.with_ctxt(ctxt); - } - ( - TokenTree::Delimited(target_sp, ..), - mbe::TokenTree::MetaVar(source_sp, ..), - ) => { - target_sp.open = source_sp.with_ctxt(ctxt); - target_sp.close = source_sp.with_ctxt(ctxt).shrink_to_hi(); - } - _ => { - let sp = rhs_tt.span().with_ctxt(ctxt); - tt.set_span(sp); - } - } - tt - }); - } - if cx.trace_macros() { let msg = format!("to `{}`", pprust::tts_to_string(&tts)); trace_macros_note(&mut cx.expansions, sp, msg); diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs index f2a9875ffd28a..c969ca7ef89b3 100644 --- a/compiler/rustc_expand/src/mbe/transcribe.rs +++ b/compiler/rustc_expand/src/mbe/transcribe.rs @@ -4,7 +4,7 @@ use crate::errors::{ NoSyntaxVarsExprRepeat, VarStillRepeating, }; use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, MatchedTokenTree, NamedMatch}; -use crate::mbe::{self, MetaVarExpr}; +use crate::mbe::{self, KleeneOp, MetaVarExpr}; use rustc_ast::mut_visit::{self, MutVisitor}; use rustc_ast::token::{self, Delimiter, Token, TokenKind}; use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree}; @@ -42,6 +42,7 @@ enum Frame<'a> { tts: &'a [mbe::TokenTree], idx: usize, sep: Option, + kleene_op: KleeneOp, }, } @@ -207,7 +208,7 @@ pub(super) fn transcribe<'a>( // Is the repetition empty? if len == 0 { - if seq.kleene.op == mbe::KleeneOp::OneOrMore { + if seq.kleene.op == KleeneOp::OneOrMore { // FIXME: this really ought to be caught at macro definition // time... It happens when the Kleene operator in the matcher and // the body for the same meta-variable do not match. 
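Editor's note: with the span-remapping loop removed from `expand_macro`, span handling moves into transcription, and the `KleeneOp::OneOrMore` check above fires in exactly the situation the FIXME describes: the matcher and the transcriber use different Kleene operators for the same metavariable and the invocation matches zero repetitions. A minimal, intentionally rejected example of that mismatch:

// Matcher allows zero-or-more `$x`, but the transcriber demands one-or-more,
// so an empty invocation has nothing to repeat and is reported at expansion time.
macro_rules! idents {
    ($($x:ident)*) => {
        $(
            fn $x() {}
        )+
    };
}

idents!(foo bar); // fine: expands to `fn foo() {}` and `fn bar() {}`
idents!();        // error: this must repeat at least once

fn main() {
    foo();
    bar();
}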
@@ -227,6 +228,7 @@ pub(super) fn transcribe<'a>( idx: 0, sep: seq.separator.clone(), tts: &delimited.tts, + kleene_op: seq.kleene.op, }); } } @@ -243,7 +245,7 @@ pub(super) fn transcribe<'a>( MatchedTokenTree(tt) => { // `tt`s are emitted into the output stream directly as "raw tokens", // without wrapping them into groups. - result.push(tt.clone()); + result.push(maybe_use_metavar_location(cx, &stack, sp, tt)); } MatchedNonterminal(nt) => { // Other variables are emitted into the output stream as groups with @@ -308,6 +310,62 @@ pub(super) fn transcribe<'a>( } } +/// Usually metavariables `$var` produce interpolated tokens, which have an additional place for +/// keeping both the original span and the metavariable span. For `tt` metavariables that's not the +/// case however, and there's no place for keeping a second span. So we try to give the single +/// produced span a location that would be most useful in practice (the hygiene part of the span +/// must not be changed). +/// +/// Different locations are useful for different purposes: +/// - The original location is useful when we need to report a diagnostic for the original token in +/// isolation, without combining it with any surrounding tokens. This case occurs, but it is not +/// very common in practice. +/// - The metavariable location is useful when we need to somehow combine the token span with spans +/// of its surrounding tokens. This is the most common way to use token spans. +/// +/// So this function replaces the original location with the metavariable location in all cases +/// except these two: +/// - The metavariable is an element of undelimited sequence `$($tt)*`. +/// These are typically used for passing larger amounts of code, and tokens in that code usually +/// combine with each other and not with tokens outside of the sequence. +/// - The metavariable span comes from a different crate, then we prefer the more local span. +/// +/// FIXME: Find a way to keep both original and metavariable spans for all tokens without +/// regressing compilation time too much. Several experiments for adding such spans were made in +/// the past (PR #95580, #118517, #118671) and all showed some regressions. +fn maybe_use_metavar_location( + cx: &ExtCtxt<'_>, + stack: &[Frame<'_>], + metavar_span: Span, + orig_tt: &TokenTree, +) -> TokenTree { + let undelimited_seq = matches!( + stack.last(), + Some(Frame::Sequence { + tts: [_], + sep: None, + kleene_op: KleeneOp::ZeroOrMore | KleeneOp::OneOrMore, + .. + }) + ); + if undelimited_seq || cx.source_map().is_imported(metavar_span) { + return orig_tt.clone(); + } + + match orig_tt { + TokenTree::Token(Token { kind, span }, spacing) => { + let span = metavar_span.with_ctxt(span.ctxt()); + TokenTree::Token(Token { kind: kind.clone(), span }, *spacing) + } + TokenTree::Delimited(dspan, dspacing, delimiter, tts) => { + let open = metavar_span.shrink_to_lo().with_ctxt(dspan.open.ctxt()); + let close = metavar_span.shrink_to_hi().with_ctxt(dspan.close.ctxt()); + let dspan = DelimSpan::from_pair(open, close); + TokenTree::Delimited(dspan, *dspacing, *delimiter, tts.clone()) + } + } +} + /// Lookup the meta-var named `ident` and return the matched token tree from the invocation using /// the set of matches `interpolations`. 
/// diff --git a/compiler/rustc_feature/src/builtin_attrs.rs b/compiler/rustc_feature/src/builtin_attrs.rs index 5523543cd4fb9..4442b67df6e28 100644 --- a/compiler/rustc_feature/src/builtin_attrs.rs +++ b/compiler/rustc_feature/src/builtin_attrs.rs @@ -36,6 +36,8 @@ const GATED_CFGS: &[GatedCfg] = &[ (sym::sanitize, sym::cfg_sanitize, cfg_fn!(cfg_sanitize)), (sym::version, sym::cfg_version, cfg_fn!(cfg_version)), (sym::relocation_model, sym::cfg_relocation_model, cfg_fn!(cfg_relocation_model)), + (sym::sanitizer_cfi_generalize_pointers, sym::cfg_sanitizer_cfi, cfg_fn!(cfg_sanitizer_cfi)), + (sym::sanitizer_cfi_normalize_integers, sym::cfg_sanitizer_cfi, cfg_fn!(cfg_sanitizer_cfi)), ]; /// Find a gated cfg determined by the `pred`icate which is given the cfg's name. diff --git a/compiler/rustc_feature/src/unstable.rs b/compiler/rustc_feature/src/unstable.rs index 60586f54fd584..e6faad7438457 100644 --- a/compiler/rustc_feature/src/unstable.rs +++ b/compiler/rustc_feature/src/unstable.rs @@ -371,6 +371,8 @@ declare_features! ( (unstable, cfg_relocation_model, "1.73.0", Some(114929)), /// Allows the use of `#[cfg(sanitize = "option")]`; set when -Zsanitizer is used. (unstable, cfg_sanitize, "1.41.0", Some(39699)), + /// Allows `cfg(sanitizer_cfi_generalize_pointers)` and `cfg(sanitizer_cfi_normalize_integers)`. + (unstable, cfg_sanitizer_cfi, "CURRENT_RUSTC_VERSION", Some(89653)), /// Allows `cfg(target_abi = "...")`. (unstable, cfg_target_abi, "1.55.0", Some(80970)), /// Allows `cfg(target(abi = "..."))`. @@ -523,7 +525,7 @@ declare_features! ( /// Allows the `#[must_not_suspend]` attribute. (unstable, must_not_suspend, "1.57.0", Some(83310)), /// Allows using `#[naked]` on functions. - (unstable, naked_functions, "1.9.0", Some(32408)), + (unstable, naked_functions, "1.9.0", Some(90957)), /// Allows specifying the as-needed link modifier (unstable, native_link_modifiers_as_needed, "1.53.0", Some(81490)), /// Allow negative trait implementations. diff --git a/compiler/rustc_hir/src/def_path_hash_map.rs b/compiler/rustc_hir/src/def_path_hash_map.rs index 8bfb47af26f1a..9a6dee1e511df 100644 --- a/compiler/rustc_hir/src/def_path_hash_map.rs +++ b/compiler/rustc_hir/src/def_path_hash_map.rs @@ -1,21 +1,22 @@ -use rustc_data_structures::fingerprint::Fingerprint; -use rustc_span::def_id::{DefIndex, DefPathHash}; +use rustc_data_structures::stable_hasher::Hash64; +use rustc_span::def_id::DefIndex; #[derive(Clone, Default)] pub struct Config; impl odht::Config for Config { - type Key = DefPathHash; + // This hash-map is single-crate, so we only need to key by the local hash. + type Key = Hash64; type Value = DefIndex; - type EncodedKey = [u8; 16]; + type EncodedKey = [u8; 8]; type EncodedValue = [u8; 4]; type H = odht::UnHashFn; #[inline] - fn encode_key(k: &DefPathHash) -> [u8; 16] { - k.0.to_le_bytes() + fn encode_key(k: &Hash64) -> [u8; 8] { + k.as_u64().to_le_bytes() } #[inline] @@ -24,8 +25,8 @@ impl odht::Config for Config { } #[inline] - fn decode_key(k: &[u8; 16]) -> DefPathHash { - DefPathHash(Fingerprint::from_le_bytes(*k)) + fn decode_key(k: &[u8; 8]) -> Hash64 { + Hash64::new(u64::from_le_bytes(*k)) } #[inline] diff --git a/compiler/rustc_hir/src/definitions.rs b/compiler/rustc_hir/src/definitions.rs index 2ab9a6ef32ce5..9fb1fc19bf4e4 100644 --- a/compiler/rustc_hir/src/definitions.rs +++ b/compiler/rustc_hir/src/definitions.rs @@ -20,27 +20,42 @@ use std::hash::Hash; /// Internally the `DefPathTable` holds a tree of `DefKey`s, where each `DefKey` /// stores the `DefIndex` of its parent. 
/// There is one `DefPathTable` for each crate. -#[derive(Clone, Default, Debug)] +#[derive(Debug)] pub struct DefPathTable { + stable_crate_id: StableCrateId, index_to_key: IndexVec, - def_path_hashes: IndexVec, + // We do only store the local hash, as all the definitions are from the current crate. + def_path_hashes: IndexVec, def_path_hash_to_index: DefPathHashMap, } impl DefPathTable { + fn new(stable_crate_id: StableCrateId) -> DefPathTable { + DefPathTable { + stable_crate_id, + index_to_key: Default::default(), + def_path_hashes: Default::default(), + def_path_hash_to_index: Default::default(), + } + } + fn allocate(&mut self, key: DefKey, def_path_hash: DefPathHash) -> DefIndex { + // Assert that all DefPathHashes correctly contain the local crate's StableCrateId. + debug_assert_eq!(self.stable_crate_id, def_path_hash.stable_crate_id()); + let local_hash = def_path_hash.local_hash(); + let index = { let index = DefIndex::from(self.index_to_key.len()); debug!("DefPathTable::insert() - {:?} <-> {:?}", key, index); self.index_to_key.push(key); index }; - self.def_path_hashes.push(def_path_hash); + self.def_path_hashes.push(local_hash); debug_assert!(self.def_path_hashes.len() == self.index_to_key.len()); // Check for hash collisions of DefPathHashes. These should be // exceedingly rare. - if let Some(existing) = self.def_path_hash_to_index.insert(&def_path_hash, &index) { + if let Some(existing) = self.def_path_hash_to_index.insert(&local_hash, &index) { let def_path1 = DefPath::make(LOCAL_CRATE, existing, |idx| self.def_key(idx)); let def_path2 = DefPath::make(LOCAL_CRATE, index, |idx| self.def_key(idx)); @@ -58,13 +73,6 @@ impl DefPathTable { ); } - // Assert that all DefPathHashes correctly contain the local crate's - // StableCrateId - #[cfg(debug_assertions)] - if let Some(root) = self.def_path_hashes.get(CRATE_DEF_INDEX) { - assert!(def_path_hash.stable_crate_id() == root.stable_crate_id()); - } - index } @@ -73,19 +81,19 @@ impl DefPathTable { self.index_to_key[index] } + #[instrument(level = "trace", skip(self), ret)] #[inline(always)] pub fn def_path_hash(&self, index: DefIndex) -> DefPathHash { let hash = self.def_path_hashes[index]; - debug!("def_path_hash({:?}) = {:?}", index, hash); - hash + DefPathHash::new(self.stable_crate_id, hash) } pub fn enumerated_keys_and_path_hashes( &self, - ) -> impl Iterator + ExactSizeIterator + '_ { + ) -> impl Iterator + ExactSizeIterator + '_ { self.index_to_key .iter_enumerated() - .map(move |(index, key)| (index, key, &self.def_path_hashes[index])) + .map(move |(index, key)| (index, key, self.def_path_hash(index))) } } @@ -96,9 +104,6 @@ impl DefPathTable { pub struct Definitions { table: DefPathTable, next_disambiguator: UnordMap<(LocalDefId, DefPathData), u32>, - - /// The [StableCrateId] of the local crate. - stable_crate_id: StableCrateId, } /// A unique identifier that we can use to lookup a definition @@ -329,11 +334,11 @@ impl Definitions { let def_path_hash = key.compute_stable_hash(parent_hash); // Create the root definition. - let mut table = DefPathTable::default(); + let mut table = DefPathTable::new(stable_crate_id); let root = LocalDefId { local_def_index: table.allocate(key, def_path_hash) }; assert_eq!(root.local_def_index, CRATE_DEF_INDEX); - Definitions { table, next_disambiguator: Default::default(), stable_crate_id } + Definitions { table, next_disambiguator: Default::default() } } /// Adds a definition with a parent definition. 
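Editor's note: both the odht map above and the reworked `DefPathTable` rely on every `DefPathHash` in a crate sharing the same `StableCrateId` half, so only the per-definition 64-bit half needs storing and the full hash can be rebuilt on lookup. A self-contained sketch of that idea follows; the 128-bit layout and type names here are hypothetical stand-ins, not the compiler's `Fingerprint`:

// Purely illustrative: a DefPathHash-like value whose high half identifies the
// crate and whose low half is the per-definition hash. A per-crate table can
// then store just the low halves and reconstruct full hashes on demand.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct StableCrateId(u64);

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Hash64(u64);

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct DefPathHash(u128);

impl DefPathHash {
    fn new(krate: StableCrateId, local: Hash64) -> Self {
        DefPathHash(((krate.0 as u128) << 64) | local.0 as u128)
    }
    fn stable_crate_id(self) -> StableCrateId {
        StableCrateId((self.0 >> 64) as u64)
    }
    fn local_hash(self) -> Hash64 {
        Hash64(self.0 as u64)
    }
}

struct DefPathTableSketch {
    stable_crate_id: StableCrateId,
    // One entry per definition; only the local half is kept, halving the table size.
    def_path_hashes: Vec<Hash64>,
}

impl DefPathTableSketch {
    fn def_path_hash(&self, index: usize) -> DefPathHash {
        DefPathHash::new(self.stable_crate_id, self.def_path_hashes[index])
    }
}

fn main() {
    let table = DefPathTableSketch {
        stable_crate_id: StableCrateId(0xC0FFEE),
        def_path_hashes: vec![Hash64(0xDEAD_BEEF)],
    };
    let full = table.def_path_hash(0);
    assert_eq!(full.stable_crate_id(), StableCrateId(0xC0FFEE));
    assert_eq!(full.local_hash(), Hash64(0xDEAD_BEEF));
}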
@@ -375,10 +380,10 @@ impl Definitions { hash: DefPathHash, err: &mut dyn FnMut() -> !, ) -> LocalDefId { - debug_assert!(hash.stable_crate_id() == self.stable_crate_id); + debug_assert!(hash.stable_crate_id() == self.table.stable_crate_id); self.table .def_path_hash_to_index - .get(&hash) + .get(&hash.local_hash()) .map(|local_def_index| LocalDefId { local_def_index }) .unwrap_or_else(|| err()) } diff --git a/compiler/rustc_hir/src/hir.rs b/compiler/rustc_hir/src/hir.rs index 452f5d0b7ace9..2c34fc13919bc 100644 --- a/compiler/rustc_hir/src/hir.rs +++ b/compiler/rustc_hir/src/hir.rs @@ -420,9 +420,15 @@ pub enum GenericArgsParentheses { /// A modifier on a trait bound. #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, HashStable_Generic)] pub enum TraitBoundModifier { + /// `Type: Trait` None, + /// `Type: !Trait` Negative, + /// `Type: ?Trait` Maybe, + /// `Type: const Trait` + Const, + /// `Type: ~const Trait` MaybeConst, } @@ -945,7 +951,18 @@ pub struct Closure<'hir> { pub fn_decl_span: Span, /// The span of the argument block `|...|` pub fn_arg_span: Option, - pub movability: Option, + pub kind: ClosureKind, +} + +#[derive(Clone, PartialEq, Eq, Debug, Copy, Hash, HashStable_Generic, Encodable, Decodable)] +pub enum ClosureKind { + /// This is a plain closure expression. + Closure, + /// This is a coroutine expression -- i.e. a closure expression in which + /// we've found a `yield`. These can arise either from "plain" coroutine + /// usage (e.g. `let x = || { yield (); }`) or from a desugared expression + /// (e.g. `async` and `gen` blocks). + Coroutine(CoroutineKind), } /// A block of statements `{ .. }`, which may have a label (in this case the @@ -1335,17 +1352,12 @@ pub struct BodyId { pub struct Body<'hir> { pub params: &'hir [Param<'hir>], pub value: &'hir Expr<'hir>, - pub coroutine_kind: Option, } impl<'hir> Body<'hir> { pub fn id(&self) -> BodyId { BodyId { hir_id: self.value.hir_id } } - - pub fn coroutine_kind(&self) -> Option { - self.coroutine_kind - } } /// The type of source expression that caused this coroutine to be created. @@ -1355,7 +1367,18 @@ pub enum CoroutineKind { Desugared(CoroutineDesugaring, CoroutineSource), /// A coroutine literal created via a `yield` inside a closure. - Coroutine, + Coroutine(Movability), +} + +impl CoroutineKind { + pub fn movability(self) -> Movability { + match self { + CoroutineKind::Desugared(CoroutineDesugaring::Async, _) + | CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _) => Movability::Static, + CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => Movability::Movable, + CoroutineKind::Coroutine(mov) => mov, + } + } } impl fmt::Display for CoroutineKind { @@ -1365,7 +1388,7 @@ impl fmt::Display for CoroutineKind { d.fmt(f)?; k.fmt(f) } - CoroutineKind::Coroutine => f.write_str("coroutine"), + CoroutineKind::Coroutine(_) => f.write_str("coroutine"), } } } @@ -2087,12 +2110,6 @@ pub enum YieldSource { Yield, } -impl YieldSource { - pub fn is_await(&self) -> bool { - matches!(self, YieldSource::Await { .. 
}) - } -} - impl fmt::Display for YieldSource { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(match self { @@ -3661,7 +3678,7 @@ mod size_asserts { use super::*; // tidy-alphabetical-start static_assert_size!(Block<'_>, 48); - static_assert_size!(Body<'_>, 32); + static_assert_size!(Body<'_>, 24); static_assert_size!(Expr<'_>, 64); static_assert_size!(ExprKind<'_>, 48); static_assert_size!(FnDecl<'_>, 40); diff --git a/compiler/rustc_hir/src/intravisit.rs b/compiler/rustc_hir/src/intravisit.rs index 67e058a3219ec..e58e4c8fe0edb 100644 --- a/compiler/rustc_hir/src/intravisit.rs +++ b/compiler/rustc_hir/src/intravisit.rs @@ -757,7 +757,7 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr<'v>) capture_clause: _, fn_decl_span: _, fn_arg_span: _, - movability: _, + kind: _, constness: _, }) => { walk_list!(visitor, visit_generic_param, bound_generic_params); diff --git a/compiler/rustc_hir_analysis/messages.ftl b/compiler/rustc_hir_analysis/messages.ftl index 139e1c0ac5fdc..d8b6b9a1272fb 100644 --- a/compiler/rustc_hir_analysis/messages.ftl +++ b/compiler/rustc_hir_analysis/messages.ftl @@ -70,7 +70,7 @@ hir_analysis_coercion_between_struct_same_note = expected coercion between the s hir_analysis_coercion_between_struct_single_note = expected a single field to be coerced, none found hir_analysis_const_bound_for_non_const_trait = - ~const can only be applied to `#[const_trait]` traits + `{$modifier}` can only be applied to `#[const_trait]` traits hir_analysis_const_impl_for_non_const_trait = const `impl` for trait `{$trait_name}` which is not marked with `#[const_trait]` diff --git a/compiler/rustc_hir_analysis/src/astconv/bounds.rs b/compiler/rustc_hir_analysis/src/astconv/bounds.rs index 6e71cf16ee8b4..91b3807d74462 100644 --- a/compiler/rustc_hir_analysis/src/astconv/bounds.rs +++ b/compiler/rustc_hir_analysis/src/astconv/bounds.rs @@ -112,6 +112,9 @@ impl<'tcx> dyn AstConv<'tcx> + '_ { match ast_bound { hir::GenericBound::Trait(poly_trait_ref, modifier) => { let (constness, polarity) = match modifier { + hir::TraitBoundModifier::Const => { + (ty::BoundConstness::Const, ty::ImplPolarity::Positive) + } hir::TraitBoundModifier::MaybeConst => { (ty::BoundConstness::ConstIfConst, ty::ImplPolarity::Positive) } diff --git a/compiler/rustc_hir_analysis/src/astconv/mod.rs b/compiler/rustc_hir_analysis/src/astconv/mod.rs index 8197fea5b298e..092df257dbfae 100644 --- a/compiler/rustc_hir_analysis/src/astconv/mod.rs +++ b/compiler/rustc_hir_analysis/src/astconv/mod.rs @@ -560,11 +560,14 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { inferred_params: vec![], infer_args, }; - if let ty::BoundConstness::ConstIfConst = constness + if let ty::BoundConstness::Const | ty::BoundConstness::ConstIfConst = constness && generics.has_self && !tcx.has_attr(def_id, sym::const_trait) { - let e = tcx.dcx().emit_err(crate::errors::ConstBoundForNonConstTrait { span }); + let e = tcx.dcx().emit_err(crate::errors::ConstBoundForNonConstTrait { + span, + modifier: constness.as_str(), + }); arg_count.correct = Err(GenericArgCountMismatch { reported: Some(e), invalid_args: vec![] }); } diff --git a/compiler/rustc_hir_analysis/src/check/check.rs b/compiler/rustc_hir_analysis/src/check/check.rs index d2e96ac74df84..5ccb7ac389641 100644 --- a/compiler/rustc_hir_analysis/src/check/check.rs +++ b/compiler/rustc_hir_analysis/src/check/check.rs @@ -8,7 +8,7 @@ use rustc_attr as attr; use rustc_errors::{ErrorGuaranteed, MultiSpan}; use rustc_hir as hir; use rustc_hir::def::{CtorKind, 
DefKind}; -use rustc_hir::def_id::LocalModDefId; +use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_hir::Node; use rustc_infer::infer::{RegionVariableOrigin, TyCtxtInferExt}; use rustc_infer::traits::{Obligation, TraitEngineExt as _}; @@ -198,8 +198,8 @@ fn check_static_inhabited(tcx: TyCtxt<'_>, def_id: LocalDefId) { /// Checks that an opaque type does not contain cycles and does not use `Self` or `T::Foo` /// projections that would result in "inheriting lifetimes". -fn check_opaque(tcx: TyCtxt<'_>, id: hir::ItemId) { - let item = tcx.hir().item(id); +fn check_opaque(tcx: TyCtxt<'_>, def_id: LocalDefId) { + let item = tcx.hir().expect_item(def_id); let hir::ItemKind::OpaqueTy(hir::OpaqueTy { origin, .. }) = item.kind else { tcx.dcx().span_delayed_bug(item.span, "expected opaque item"); return; @@ -440,40 +440,31 @@ fn check_static_linkage(tcx: TyCtxt<'_>, def_id: LocalDefId) { } } -fn check_item_type(tcx: TyCtxt<'_>, id: hir::ItemId) { - debug!( - "check_item_type(it.def_id={:?}, it.name={})", - id.owner_id, - tcx.def_path_str(id.owner_id) - ); +pub(crate) fn check_item_type(tcx: TyCtxt<'_>, def_id: LocalDefId) { let _indenter = indenter(); - match tcx.def_kind(id.owner_id) { + match tcx.def_kind(def_id) { DefKind::Static(..) => { - tcx.ensure().typeck(id.owner_id.def_id); - maybe_check_static_with_link_section(tcx, id.owner_id.def_id); - check_static_inhabited(tcx, id.owner_id.def_id); - check_static_linkage(tcx, id.owner_id.def_id); + tcx.ensure().typeck(def_id); + maybe_check_static_with_link_section(tcx, def_id); + check_static_inhabited(tcx, def_id); + check_static_linkage(tcx, def_id); } DefKind::Const => { - tcx.ensure().typeck(id.owner_id.def_id); + tcx.ensure().typeck(def_id); } DefKind::Enum => { - check_enum(tcx, id.owner_id.def_id); + check_enum(tcx, def_id); } DefKind::Fn => {} // entirely within check_item_body DefKind::Impl { of_trait } => { - if of_trait && let Some(impl_trait_ref) = tcx.impl_trait_ref(id.owner_id) { - check_impl_items_against_trait( - tcx, - id.owner_id.def_id, - impl_trait_ref.instantiate_identity(), - ); - check_on_unimplemented(tcx, id); + if of_trait && let Some(impl_trait_ref) = tcx.impl_trait_ref(def_id) { + check_impl_items_against_trait(tcx, def_id, impl_trait_ref.instantiate_identity()); + check_on_unimplemented(tcx, def_id); } } DefKind::Trait => { - let assoc_items = tcx.associated_items(id.owner_id); - check_on_unimplemented(tcx, id); + let assoc_items = tcx.associated_items(def_id); + check_on_unimplemented(tcx, def_id); for &assoc_item in assoc_items.in_definition_order() { match assoc_item.kind { @@ -482,12 +473,12 @@ fn check_item_type(tcx: TyCtxt<'_>, id: hir::ItemId) { forbid_intrinsic_abi(tcx, assoc_item.ident(tcx).span, abi); } ty::AssocKind::Type if assoc_item.defaultness(tcx).has_value() => { - let trait_args = GenericArgs::identity_for_item(tcx, id.owner_id); + let trait_args = GenericArgs::identity_for_item(tcx, def_id); let _: Result<_, rustc_errors::ErrorGuaranteed> = check_type_bounds( tcx, assoc_item, assoc_item, - ty::TraitRef::new(tcx, id.owner_id.to_def_id(), trait_args), + ty::TraitRef::new(tcx, def_id.to_def_id(), trait_args), ); } _ => {} @@ -495,13 +486,13 @@ fn check_item_type(tcx: TyCtxt<'_>, id: hir::ItemId) { } } DefKind::Struct => { - check_struct(tcx, id.owner_id.def_id); + check_struct(tcx, def_id); } DefKind::Union => { - check_union(tcx, id.owner_id.def_id); + check_union(tcx, def_id); } DefKind::OpaqueTy => { - let origin = tcx.opaque_type_origin(id.owner_id.def_id); + let origin = 
tcx.opaque_type_origin(def_id); if let hir::OpaqueTyOrigin::FnReturn(fn_def_id) | hir::OpaqueTyOrigin::AsyncFn(fn_def_id) = origin && let hir::Node::TraitItem(trait_item) = tcx.hir_node_by_def_id(fn_def_id) @@ -509,16 +500,16 @@ fn check_item_type(tcx: TyCtxt<'_>, id: hir::ItemId) { { // Skip opaques from RPIT in traits with no default body. } else { - check_opaque(tcx, id); + check_opaque(tcx, def_id); } } DefKind::TyAlias => { - let pty_ty = tcx.type_of(id.owner_id).instantiate_identity(); - let generics = tcx.generics_of(id.owner_id); + let pty_ty = tcx.type_of(def_id).instantiate_identity(); + let generics = tcx.generics_of(def_id); check_type_params_are_used(tcx, generics, pty_ty); } DefKind::ForeignMod => { - let it = tcx.hir().item(id); + let it = tcx.hir().expect_item(def_id); let hir::ItemKind::ForeignMod { abi, items } = it.kind else { return; }; @@ -589,19 +580,19 @@ fn check_item_type(tcx: TyCtxt<'_>, id: hir::ItemId) { } } DefKind::GlobalAsm => { - let it = tcx.hir().item(id); + let it = tcx.hir().expect_item(def_id); let hir::ItemKind::GlobalAsm(asm) = it.kind else { span_bug!(it.span, "DefKind::GlobalAsm but got {:#?}", it) }; - InlineAsmCtxt::new_global_asm(tcx).check_asm(asm, id.owner_id.def_id); + InlineAsmCtxt::new_global_asm(tcx).check_asm(asm, def_id); } _ => {} } } -pub(super) fn check_on_unimplemented(tcx: TyCtxt<'_>, item: hir::ItemId) { +pub(super) fn check_on_unimplemented(tcx: TyCtxt<'_>, def_id: LocalDefId) { // an error would be reported if this fails. - let _ = OnUnimplementedDirective::of_item(tcx, item.owner_id.to_def_id()); + let _ = OnUnimplementedDirective::of_item(tcx, def_id.to_def_id()); } pub(super) fn check_specialization_validity<'tcx>( @@ -1309,16 +1300,6 @@ pub(super) fn check_type_params_are_used<'tcx>( } } -pub(super) fn check_mod_item_types(tcx: TyCtxt<'_>, module_def_id: LocalModDefId) { - let module = tcx.hir_module_items(module_def_id); - for id in module.items() { - check_item_type(tcx, id); - } - if module_def_id == LocalModDefId::CRATE_DEF_ID { - super::entry::check_for_entry_fn(tcx); - } -} - fn async_opaque_type_cycle_error(tcx: TyCtxt<'_>, span: Span) -> ErrorGuaranteed { struct_span_err!(tcx.dcx(), span, E0733, "recursion in an `async fn` requires boxing") .span_label(span, "recursive `async fn`") diff --git a/compiler/rustc_hir_analysis/src/check/compare_impl_item/refine.rs b/compiler/rustc_hir_analysis/src/check/compare_impl_item/refine.rs index 71fbd983b6a6f..fd1571426c86a 100644 --- a/compiler/rustc_hir_analysis/src/check/compare_impl_item/refine.rs +++ b/compiler/rustc_hir_analysis/src/check/compare_impl_item/refine.rs @@ -32,7 +32,7 @@ pub(super) fn check_refining_return_position_impl_trait_in_trait<'tcx>( return; } - // If a type in the trait ref is private, then there's also no reason to to do this check. + // If a type in the trait ref is private, then there's also no reason to do this check. 
let impl_def_id = impl_m.container_id(tcx); for arg in impl_trait_ref.args { if let Some(ty) = arg.as_type() diff --git a/compiler/rustc_hir_analysis/src/check/mod.rs b/compiler/rustc_hir_analysis/src/check/mod.rs index faec72cfeb644..f60d6950670d9 100644 --- a/compiler/rustc_hir_analysis/src/check/mod.rs +++ b/compiler/rustc_hir_analysis/src/check/mod.rs @@ -75,7 +75,6 @@ pub use check::check_abi; use std::num::NonZeroU32; -use check::check_mod_item_types; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_errors::ErrorGuaranteed; use rustc_errors::{pluralize, struct_span_err, Diagnostic, DiagnosticBuilder}; @@ -110,7 +109,6 @@ pub fn provide(providers: &mut Providers) { wfcheck::provide(providers); *providers = Providers { adt_destructor, - check_mod_item_types, region_scope_tree, collect_return_position_impl_trait_in_trait_tys, compare_impl_const: compare_impl_item::compare_impl_const_raw, diff --git a/compiler/rustc_hir_analysis/src/check/region.rs b/compiler/rustc_hir_analysis/src/check/region.rs index 34d3f20d0cfa9..eab83c7a25467 100644 --- a/compiler/rustc_hir_analysis/src/check/region.rs +++ b/compiler/rustc_hir_analysis/src/check/region.rs @@ -824,10 +824,6 @@ impl<'tcx> Visitor<'tcx> for RegionResolutionVisitor<'tcx> { resolve_local(self, None, Some(body.value)); } - if body.coroutine_kind.is_some() { - self.scope_tree.body_expr_count.insert(body_id, self.expr_and_pat_count); - } - // Restore context we had at the start. self.expr_and_pat_count = outer_ec; self.cx = outer_cx; diff --git a/compiler/rustc_hir_analysis/src/check/wfcheck.rs b/compiler/rustc_hir_analysis/src/check/wfcheck.rs index 580d4bd5b0209..5f26da9c87f4d 100644 --- a/compiler/rustc_hir_analysis/src/check/wfcheck.rs +++ b/compiler/rustc_hir_analysis/src/check/wfcheck.rs @@ -172,7 +172,7 @@ fn check_item<'tcx>(tcx: TyCtxt<'tcx>, item: &'tcx hir::Item<'tcx>) -> Result<() item.name = ? tcx.def_path_str(def_id) ); - match item.kind { + let res = match item.kind { // Right now we check that every default trait implementation // has an implementation of itself. 
Basically, a case like: // @@ -271,7 +271,11 @@ fn check_item<'tcx>(tcx: TyCtxt<'tcx>, item: &'tcx hir::Item<'tcx>) -> Result<() } } _ => Ok(()), - } + }; + + crate::check::check::check_item_type(tcx, def_id); + + res } fn check_foreign_item(tcx: TyCtxt<'_>, item: &hir::ForeignItem<'_>) -> Result<(), ErrorGuaranteed> { @@ -1909,7 +1913,11 @@ fn check_mod_type_wf(tcx: TyCtxt<'_>, module: LocalModDefId) -> Result<(), Error let mut res = items.par_items(|item| tcx.ensure().check_well_formed(item.owner_id)); res = res.and(items.par_impl_items(|item| tcx.ensure().check_well_formed(item.owner_id))); res = res.and(items.par_trait_items(|item| tcx.ensure().check_well_formed(item.owner_id))); - res.and(items.par_foreign_items(|item| tcx.ensure().check_well_formed(item.owner_id))) + res = res.and(items.par_foreign_items(|item| tcx.ensure().check_well_formed(item.owner_id))); + if module == LocalModDefId::CRATE_DEF_ID { + super::entry::check_for_entry_fn(tcx); + } + res } fn error_392(tcx: TyCtxt<'_>, span: Span, param_name: Symbol) -> DiagnosticBuilder<'_> { diff --git a/compiler/rustc_hir_analysis/src/collect.rs b/compiler/rustc_hir_analysis/src/collect.rs index 4513653b64481..8aeab2ca67e39 100644 --- a/compiler/rustc_hir_analysis/src/collect.rs +++ b/compiler/rustc_hir_analysis/src/collect.rs @@ -1551,10 +1551,14 @@ fn compute_sig_of_foreign_fn_decl<'tcx>( fn coroutine_kind(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Option { match tcx.hir_node_by_def_id(def_id) { - Node::Expr(&rustc_hir::Expr { - kind: rustc_hir::ExprKind::Closure(&rustc_hir::Closure { body, .. }), + Node::Expr(&hir::Expr { + kind: + hir::ExprKind::Closure(&rustc_hir::Closure { + kind: hir::ClosureKind::Coroutine(kind), + .. + }), .. - }) => tcx.hir().body(body).coroutine_kind(), + }) => Some(kind), _ => None, } } diff --git a/compiler/rustc_hir_analysis/src/collect/generics_of.rs b/compiler/rustc_hir_analysis/src/collect/generics_of.rs index 4abebb4596674..b44b2eefabbc6 100644 --- a/compiler/rustc_hir_analysis/src/collect/generics_of.rs +++ b/compiler/rustc_hir_analysis/src/collect/generics_of.rs @@ -315,7 +315,10 @@ pub(super) fn generics_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Generics { if is_host_effect { if let Some(idx) = host_effect_index { - bug!("parent also has host effect param? index: {idx}, def: {def_id:?}"); + tcx.dcx().span_delayed_bug( + param.span, + format!("parent also has host effect param? index: {idx}, def: {def_id:?}"), + ); } host_effect_index = Some(index as usize); @@ -338,14 +341,14 @@ pub(super) fn generics_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Generics { // cares about anything but the length is instantiation, // and we don't do that for closures. if let Node::Expr(&hir::Expr { - kind: hir::ExprKind::Closure(hir::Closure { movability: gen, .. }), - .. + kind: hir::ExprKind::Closure(hir::Closure { kind, .. }), .. }) = node { - let dummy_args = if gen.is_some() { - &["", "", "", "", ""][..] - } else { - &["", "", ""][..] + let dummy_args = match kind { + ClosureKind::Closure => &["", "", ""][..], + ClosureKind::Coroutine(_) => { + &["", "", "", "", ""][..] 
+ } }; params.extend(dummy_args.iter().map(|&arg| ty::GenericParamDef { diff --git a/compiler/rustc_hir_analysis/src/errors.rs b/compiler/rustc_hir_analysis/src/errors.rs index 41f30057902b6..75e7a5524a744 100644 --- a/compiler/rustc_hir_analysis/src/errors.rs +++ b/compiler/rustc_hir_analysis/src/errors.rs @@ -408,6 +408,7 @@ pub struct ConstImplForNonConstTrait { pub struct ConstBoundForNonConstTrait { #[primary_span] pub span: Span, + pub modifier: &'static str, } #[derive(Diagnostic)] diff --git a/compiler/rustc_hir_analysis/src/lib.rs b/compiler/rustc_hir_analysis/src/lib.rs index 81d8982eb1574..b9e7500c89420 100644 --- a/compiler/rustc_hir_analysis/src/lib.rs +++ b/compiler/rustc_hir_analysis/src/lib.rs @@ -200,18 +200,9 @@ pub fn check_crate(tcx: TyCtxt<'_>) -> Result<(), ErrorGuaranteed> { })?; } - let errs = tcx.sess.time("wf_checking", || { + tcx.sess.time("wf_checking", || { tcx.hir().try_par_for_each_module(|module| tcx.ensure().check_mod_type_wf(module)) - }); - - // NOTE: This is copy/pasted in librustdoc/core.rs and should be kept in sync. - tcx.sess.time("item_types_checking", || { - tcx.hir().for_each_module(|module| tcx.ensure().check_mod_item_types(module)) - }); - - // HACK: `check_mod_type_wf` may spuriously emit errors due to `span_delayed_bug`, even if - // those errors only actually get emitted in `check_mod_item_types`. - errs?; + })?; if tcx.features().rustc_attrs { tcx.sess.track_errors(|| collect::test_opaque_hidden_types(tcx))?; diff --git a/compiler/rustc_hir_pretty/src/lib.rs b/compiler/rustc_hir_pretty/src/lib.rs index feaec5ac620ef..d6eea07cfbc38 100644 --- a/compiler/rustc_hir_pretty/src/lib.rs +++ b/compiler/rustc_hir_pretty/src/lib.rs @@ -1407,7 +1407,7 @@ impl<'a> State<'a> { body, fn_decl_span: _, fn_arg_span: _, - movability: _, + kind: _, def_id: _, }) => { self.print_closure_binder(binder, bound_generic_params); diff --git a/compiler/rustc_hir_typeck/src/callee.rs b/compiler/rustc_hir_typeck/src/callee.rs index 2f8ad96deb430..de2cb5a6d5c7c 100644 --- a/compiler/rustc_hir_typeck/src/callee.rs +++ b/compiler/rustc_hir_typeck/src/callee.rs @@ -298,17 +298,19 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let parent_node = self.tcx.hir_node(parent_hir_id); if let ( hir::Node::Expr(hir::Expr { - kind: hir::ExprKind::Closure(&hir::Closure { fn_decl_span, body, .. }), + kind: hir::ExprKind::Closure(&hir::Closure { fn_decl_span, kind, .. }), .. }), hir::ExprKind::Block(..), ) = (parent_node, callee_node) { - let fn_decl_span = if hir.body(body).coroutine_kind - == Some(hir::CoroutineKind::Desugared( + let fn_decl_span = if matches!( + kind, + hir::ClosureKind::Coroutine(hir::CoroutineKind::Desugared( hir::CoroutineDesugaring::Async, - hir::CoroutineSource::Closure, - )) { + hir::CoroutineSource::Closure + ),) + ) { // Actually need to unwrap one more layer of HIR to get to // the _real_ closure... 
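Editor's note: the callee check below recovers the coroutine flavour from `hir::ClosureKind` rather than from the body, matching the `ClosureKind`/`CoroutineKind` definitions introduced earlier in this patch. The following standalone sketch uses simplified mirrors of those enums (not the real HIR types) only to show the mapping the new `CoroutineKind::movability` helper encodes:

// Simplified, illustrative mirrors of the HIR enums added above.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Movability {
    Static,  // the coroutine may be self-referential and must stay pinned
    Movable, // the coroutine can be moved freely between resumptions
}

#[derive(Clone, Copy)]
enum CoroutineDesugaring {
    Async,
    Gen,
    AsyncGen,
}

#[derive(Clone, Copy)]
enum CoroutineKind {
    // Coroutines produced by desugaring `async`/`gen`/`async gen` bodies.
    Desugared(CoroutineDesugaring),
    // A coroutine written directly as a closure containing `yield`.
    Coroutine(Movability),
}

impl CoroutineKind {
    fn movability(self) -> Movability {
        match self {
            // `async` and `async gen` bodies can be self-referential across
            // await points, so they are always immovable.
            CoroutineKind::Desugared(CoroutineDesugaring::Async)
            | CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen) => Movability::Static,
            // Plain `gen` bodies stay movable.
            CoroutineKind::Desugared(CoroutineDesugaring::Gen) => Movability::Movable,
            // Explicit coroutines keep whatever movability the user wrote.
            CoroutineKind::Coroutine(movability) => movability,
        }
    }
}

fn main() {
    assert_eq!(
        CoroutineKind::Desugared(CoroutineDesugaring::Async).movability(),
        Movability::Static
    );
    assert_eq!(
        CoroutineKind::Coroutine(Movability::Movable).movability(),
        Movability::Movable
    );
}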
let async_closure = hir.parent_id(parent_hir_id); diff --git a/compiler/rustc_hir_typeck/src/cast.rs b/compiler/rustc_hir_typeck/src/cast.rs index 0d21c013d6753..631854444793b 100644 --- a/compiler/rustc_hir_typeck/src/cast.rs +++ b/compiler/rustc_hir_typeck/src/cast.rs @@ -539,25 +539,19 @@ impl<'a, 'tcx> CastCheck<'tcx> { match self.expr_ty.kind() { ty::Ref(_, _, mt) => { let mtstr = mt.prefix_str(); - if self.cast_ty.is_trait() { - match fcx.tcx.sess.source_map().span_to_snippet(self.cast_span) { - Ok(s) => { - err.span_suggestion( - self.cast_span, - "try casting to a reference instead", - format!("&{mtstr}{s}"), - Applicability::MachineApplicable, - ); - } - Err(_) => { - let msg = format!("did you mean `&{mtstr}{tstr}`?"); - err.span_help(self.cast_span, msg); - } + match fcx.tcx.sess.source_map().span_to_snippet(self.cast_span) { + Ok(s) => { + err.span_suggestion( + self.cast_span, + "try casting to a reference instead", + format!("&{mtstr}{s}"), + Applicability::MachineApplicable, + ); + } + Err(_) => { + let msg = format!("did you mean `&{mtstr}{tstr}`?"); + err.span_help(self.cast_span, msg); } - } else { - let msg = - format!("consider using an implicit coercion to `&{mtstr}{tstr}` instead"); - err.span_help(self.span, msg); } } ty::Adt(def, ..) if def.is_box() => { diff --git a/compiler/rustc_hir_typeck/src/check.rs b/compiler/rustc_hir_typeck/src/check.rs index f4bcee384a740..a852c3d2be375 100644 --- a/compiler/rustc_hir_typeck/src/check.rs +++ b/compiler/rustc_hir_typeck/src/check.rs @@ -28,10 +28,10 @@ use rustc_trait_selection::traits::{ObligationCause, ObligationCauseCode}; pub(super) fn check_fn<'a, 'tcx>( fcx: &mut FnCtxt<'a, 'tcx>, fn_sig: ty::FnSig<'tcx>, + coroutine_types: Option>, decl: &'tcx hir::FnDecl<'tcx>, fn_def_id: LocalDefId, body: &'tcx hir::Body<'tcx>, - can_be_coroutine: Option, params_can_be_unsized: bool, ) -> Option> { let fn_id = fcx.tcx.local_def_id_to_hir_id(fn_def_id); @@ -49,56 +49,13 @@ pub(super) fn check_fn<'a, 'tcx>( fcx.param_env, )); + fcx.coroutine_types = coroutine_types; fcx.ret_coercion = Some(RefCell::new(CoerceMany::new(ret_ty))); let span = body.value.span; forbid_intrinsic_abi(tcx, span, fn_sig.abi); - if let Some(kind) = body.coroutine_kind - && can_be_coroutine.is_some() - { - let yield_ty = match kind { - hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _) - | hir::CoroutineKind::Coroutine => { - let yield_ty = fcx.next_ty_var(TypeVariableOrigin { - kind: TypeVariableOriginKind::TypeInference, - span, - }); - fcx.require_type_is_sized(yield_ty, span, traits::SizedYieldType); - yield_ty - } - // HACK(-Ztrait-solver=next): In the *old* trait solver, we must eagerly - // guide inference on the yield type so that we can handle `AsyncIterator` - // in this block in projection correctly. In the new trait solver, it is - // not a problem. - hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _) => { - let yield_ty = fcx.next_ty_var(TypeVariableOrigin { - kind: TypeVariableOriginKind::TypeInference, - span, - }); - fcx.require_type_is_sized(yield_ty, span, traits::SizedYieldType); - - Ty::new_adt( - tcx, - tcx.adt_def(tcx.require_lang_item(hir::LangItem::Poll, Some(span))), - tcx.mk_args(&[Ty::new_adt( - tcx, - tcx.adt_def(tcx.require_lang_item(hir::LangItem::Option, Some(span))), - tcx.mk_args(&[yield_ty.into()]), - ) - .into()]), - ) - } - hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _) => Ty::new_unit(tcx), - }; - - // Resume type defaults to `()` if the coroutine has no argument. 
- let resume_ty = fn_sig.inputs().get(0).copied().unwrap_or_else(|| Ty::new_unit(tcx)); - - fcx.resume_yield_tys = Some((resume_ty, yield_ty)); - } - GatherLocalsVisitor::new(fcx).visit_body(body); // C-variadic fns also have a `VaList` input that's not listed in `fn_sig` @@ -127,7 +84,8 @@ pub(super) fn check_fn<'a, 'tcx>( // ty_span == binding_span iff this is a closure parameter with no type ascription, // or if it's an implicit `self` parameter traits::SizedArgumentType( - if ty_span == Some(param.span) && tcx.is_closure(fn_def_id.into()) { + if ty_span == Some(param.span) && tcx.is_closure_or_coroutine(fn_def_id.into()) + { None } else { ty_span @@ -148,32 +106,6 @@ pub(super) fn check_fn<'a, 'tcx>( fcx.require_type_is_sized(declared_ret_ty, return_or_body_span, traits::SizedReturnType); fcx.check_return_expr(body.value, false); - // We insert the deferred_coroutine_interiors entry after visiting the body. - // This ensures that all nested coroutines appear before the entry of this coroutine. - // resolve_coroutine_interiors relies on this property. - let coroutine_ty = if let (Some(_), Some(coroutine_kind)) = - (can_be_coroutine, body.coroutine_kind) - { - let interior = fcx - .next_ty_var(TypeVariableOrigin { kind: TypeVariableOriginKind::MiscVariable, span }); - fcx.deferred_coroutine_interiors.borrow_mut().push(( - fn_def_id, - body.id(), - interior, - coroutine_kind, - )); - - let (resume_ty, yield_ty) = fcx.resume_yield_tys.unwrap(); - Some(CoroutineTypes { - resume_ty, - yield_ty, - interior, - movability: can_be_coroutine.unwrap(), - }) - } else { - None - }; - // Finalize the return check by taking the LUB of the return types // we saw and assigning it to the expected return type. This isn't // really expected to fail, since the coercions would have failed @@ -209,7 +141,7 @@ pub(super) fn check_fn<'a, 'tcx>( check_lang_start_fn(tcx, fn_sig, fn_def_id); } - coroutine_ty + fcx.coroutine_types } fn check_panic_info_fn(tcx: TyCtxt<'_>, fn_id: LocalDefId, fn_sig: ty::FnSig<'_>) { diff --git a/compiler/rustc_hir_typeck/src/closure.rs b/compiler/rustc_hir_typeck/src/closure.rs index cd42be28e6f04..7edb5912dd530 100644 --- a/compiler/rustc_hir_typeck/src/closure.rs +++ b/compiler/rustc_hir_typeck/src/closure.rs @@ -60,109 +60,175 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } None => (None, None), }; - let body = self.tcx.hir().body(closure.body); - self.check_closure(closure, expr_span, expected_kind, body, expected_sig) + + self.check_closure(closure, expr_span, expected_kind, expected_sig) } - #[instrument(skip(self, closure, body), level = "debug", ret)] + #[instrument(skip(self, closure), level = "debug", ret)] fn check_closure( &self, closure: &hir::Closure<'tcx>, expr_span: Span, opt_kind: Option, - body: &'tcx hir::Body<'tcx>, expected_sig: Option>, ) -> Ty<'tcx> { + let tcx = self.tcx; + let body = tcx.hir().body(closure.body); + trace!("decl = {:#?}", closure.fn_decl); let expr_def_id = closure.def_id; debug!(?expr_def_id); let ClosureSignatures { bound_sig, liberated_sig } = - self.sig_of_closure(expr_def_id, closure.fn_decl, body, expected_sig); + self.sig_of_closure(expr_def_id, closure.fn_decl, closure.kind, expected_sig); debug!(?bound_sig, ?liberated_sig); + // FIXME: We could probably actually just unify this further -- + // instead of having a `FnSig` and a `Option`, + // we can have a `ClosureSignature { Coroutine { .. }, Closure { .. } }`, + // similar to how `ty::GenSig` is a distinct data structure. 
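// Editor's note: a rough, hypothetical sketch of the unification the FIXME above
// gestures at -- not part of this patch. The names are invented, and `Sig`/`Ty`
// stand in for the compiler's `ty::PolyFnSig<'tcx>` and `Ty<'tcx>`.
enum ClosureSignature<Sig, Ty> {
    Closure { sig: Sig },
    Coroutine { sig: Sig, resume_ty: Ty, yield_ty: Ty },
}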
+ let coroutine_types = match closure.kind { + hir::ClosureKind::Closure => None, + hir::ClosureKind::Coroutine(kind) => { + let yield_ty = match kind { + hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _) + | hir::CoroutineKind::Coroutine(_) => { + let yield_ty = self.next_ty_var(TypeVariableOrigin { + kind: TypeVariableOriginKind::TypeInference, + span: expr_span, + }); + self.require_type_is_sized(yield_ty, expr_span, traits::SizedYieldType); + yield_ty + } + // HACK(-Ztrait-solver=next): In the *old* trait solver, we must eagerly + // guide inference on the yield type so that we can handle `AsyncIterator` + // in this block in projection correctly. In the new trait solver, it is + // not a problem. + hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _) => { + let yield_ty = self.next_ty_var(TypeVariableOrigin { + kind: TypeVariableOriginKind::TypeInference, + span: expr_span, + }); + self.require_type_is_sized(yield_ty, expr_span, traits::SizedYieldType); + + Ty::new_adt( + tcx, + tcx.adt_def( + tcx.require_lang_item(hir::LangItem::Poll, Some(expr_span)), + ), + tcx.mk_args(&[Ty::new_adt( + tcx, + tcx.adt_def( + tcx.require_lang_item(hir::LangItem::Option, Some(expr_span)), + ), + tcx.mk_args(&[yield_ty.into()]), + ) + .into()]), + ) + } + hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _) => { + tcx.types.unit + } + }; + + // Resume type defaults to `()` if the coroutine has no argument. + let resume_ty = liberated_sig.inputs().get(0).copied().unwrap_or(tcx.types.unit); + + Some(CoroutineTypes { resume_ty, yield_ty }) + } + }; + let mut fcx = FnCtxt::new(self, self.param_env, closure.def_id); - let coroutine_types = check_fn( + check_fn( &mut fcx, liberated_sig, + coroutine_types, closure.fn_decl, expr_def_id, body, - closure.movability, // Closure "rust-call" ABI doesn't support unsized params false, ); - let parent_args = GenericArgs::identity_for_item( - self.tcx, - self.tcx.typeck_root_def_id(expr_def_id.to_def_id()), - ); + let parent_args = + GenericArgs::identity_for_item(tcx, tcx.typeck_root_def_id(expr_def_id.to_def_id())); let tupled_upvars_ty = self.next_root_ty_var(TypeVariableOrigin { kind: TypeVariableOriginKind::ClosureSynthetic, - span: self.tcx.def_span(expr_def_id), + span: expr_span, }); - if let Some(CoroutineTypes { resume_ty, yield_ty, interior, movability }) = coroutine_types - { - let coroutine_args = ty::CoroutineArgs::new( - self.tcx, - ty::CoroutineArgsParts { - parent_args, - resume_ty, - yield_ty, - return_ty: liberated_sig.output(), - witness: interior, - tupled_upvars_ty, - }, - ); - - return Ty::new_coroutine( - self.tcx, - expr_def_id.to_def_id(), - coroutine_args.args, - movability, - ); - } - - // Tuple up the arguments and insert the resulting function type into - // the `closures` table. - let sig = bound_sig.map_bound(|sig| { - self.tcx.mk_fn_sig( - [Ty::new_tup(self.tcx, sig.inputs())], - sig.output(), - sig.c_variadic, - sig.unsafety, - sig.abi, - ) - }); - - debug!(?sig, ?opt_kind); - - let closure_kind_ty = match opt_kind { - Some(kind) => Ty::from_closure_kind(self.tcx, kind), + match closure.kind { + hir::ClosureKind::Closure => { + assert_eq!(coroutine_types, None); + // Tuple up the arguments and insert the resulting function type into + // the `closures` table. + let sig = bound_sig.map_bound(|sig| { + tcx.mk_fn_sig( + [Ty::new_tup(tcx, sig.inputs())], + sig.output(), + sig.c_variadic, + sig.unsafety, + sig.abi, + ) + }); - // Create a type variable (for now) to represent the closure kind. 
- // It will be unified during the upvar inference phase (`upvar.rs`) - None => self.next_root_ty_var(TypeVariableOrigin { - // FIXME(eddyb) distinguish closure kind inference variables from the rest. - kind: TypeVariableOriginKind::ClosureSynthetic, - span: expr_span, - }), - }; + debug!(?sig, ?opt_kind); + + let closure_kind_ty = match opt_kind { + Some(kind) => Ty::from_closure_kind(tcx, kind), + + // Create a type variable (for now) to represent the closure kind. + // It will be unified during the upvar inference phase (`upvar.rs`) + None => self.next_root_ty_var(TypeVariableOrigin { + // FIXME(eddyb) distinguish closure kind inference variables from the rest. + kind: TypeVariableOriginKind::ClosureSynthetic, + span: expr_span, + }), + }; + + let closure_args = ty::ClosureArgs::new( + tcx, + ty::ClosureArgsParts { + parent_args, + closure_kind_ty, + closure_sig_as_fn_ptr_ty: Ty::new_fn_ptr(tcx, sig), + tupled_upvars_ty, + }, + ); - let closure_args = ty::ClosureArgs::new( - self.tcx, - ty::ClosureArgsParts { - parent_args, - closure_kind_ty, - closure_sig_as_fn_ptr_ty: Ty::new_fn_ptr(self.tcx, sig), - tupled_upvars_ty, - }, - ); + Ty::new_closure(tcx, expr_def_id.to_def_id(), closure_args.args) + } + hir::ClosureKind::Coroutine(_) => { + let Some(CoroutineTypes { resume_ty, yield_ty }) = coroutine_types else { + bug!("expected coroutine to have yield/resume types"); + }; + let interior = fcx.next_ty_var(TypeVariableOrigin { + kind: TypeVariableOriginKind::MiscVariable, + span: body.value.span, + }); + fcx.deferred_coroutine_interiors.borrow_mut().push(( + expr_def_id, + body.id(), + interior, + )); + + let coroutine_args = ty::CoroutineArgs::new( + tcx, + ty::CoroutineArgsParts { + parent_args, + resume_ty, + yield_ty, + return_ty: liberated_sig.output(), + witness: interior, + tupled_upvars_ty, + }, + ); - Ty::new_closure(self.tcx, expr_def_id.to_def_id(), closure_args.args) + Ty::new_coroutine(tcx, expr_def_id.to_def_id(), coroutine_args.args) + } + } } /// Given the expected type, figures out what it can about this closure we @@ -351,28 +417,28 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { &self, expr_def_id: LocalDefId, decl: &hir::FnDecl<'_>, - body: &hir::Body<'_>, + closure_kind: hir::ClosureKind, expected_sig: Option>, ) -> ClosureSignatures<'tcx> { if let Some(e) = expected_sig { - self.sig_of_closure_with_expectation(expr_def_id, decl, body, e) + self.sig_of_closure_with_expectation(expr_def_id, decl, closure_kind, e) } else { - self.sig_of_closure_no_expectation(expr_def_id, decl, body) + self.sig_of_closure_no_expectation(expr_def_id, decl, closure_kind) } } /// If there is no expected signature, then we will convert the /// types that the user gave into a signature. - #[instrument(skip(self, expr_def_id, decl, body), level = "debug")] + #[instrument(skip(self, expr_def_id, decl), level = "debug")] fn sig_of_closure_no_expectation( &self, expr_def_id: LocalDefId, decl: &hir::FnDecl<'_>, - body: &hir::Body<'_>, + closure_kind: hir::ClosureKind, ) -> ClosureSignatures<'tcx> { - let bound_sig = self.supplied_sig_of_closure(expr_def_id, decl, body); + let bound_sig = self.supplied_sig_of_closure(expr_def_id, decl, closure_kind); - self.closure_sigs(expr_def_id, body, bound_sig) + self.closure_sigs(expr_def_id, bound_sig) } /// Invoked to compute the signature of a closure expression. This @@ -422,24 +488,23 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// - `expected_sig`: the expected signature (if any). 
Note that /// this is missing a binder: that is, there may be late-bound /// regions with depth 1, which are bound then by the closure. - #[instrument(skip(self, expr_def_id, decl, body), level = "debug")] + #[instrument(skip(self, expr_def_id, decl), level = "debug")] fn sig_of_closure_with_expectation( &self, expr_def_id: LocalDefId, decl: &hir::FnDecl<'_>, - body: &hir::Body<'_>, + closure_kind: hir::ClosureKind, expected_sig: ExpectedSig<'tcx>, ) -> ClosureSignatures<'tcx> { // Watch out for some surprises and just ignore the // expectation if things don't see to match up with what we // expect. if expected_sig.sig.c_variadic() != decl.c_variadic { - return self.sig_of_closure_no_expectation(expr_def_id, decl, body); + return self.sig_of_closure_no_expectation(expr_def_id, decl, closure_kind); } else if expected_sig.sig.skip_binder().inputs_and_output.len() != decl.inputs.len() + 1 { return self.sig_of_closure_with_mismatched_number_of_arguments( expr_def_id, decl, - body, expected_sig, ); } @@ -463,16 +528,21 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // anonymize away, so as not to confuse the user. let bound_sig = self.tcx.anonymize_bound_vars(bound_sig); - let closure_sigs = self.closure_sigs(expr_def_id, body, bound_sig); + let closure_sigs = self.closure_sigs(expr_def_id, bound_sig); // Up till this point, we have ignored the annotations that the user // gave. This function will check that they unify successfully. // Along the way, it also writes out entries for types that the user // wrote into our typeck results, which are then later used by the privacy // check. - match self.merge_supplied_sig_with_expectation(expr_def_id, decl, body, closure_sigs) { + match self.merge_supplied_sig_with_expectation( + expr_def_id, + decl, + closure_kind, + closure_sigs, + ) { Ok(infer_ok) => self.register_infer_ok_obligations(infer_ok), - Err(_) => self.sig_of_closure_no_expectation(expr_def_id, decl, body), + Err(_) => self.sig_of_closure_no_expectation(expr_def_id, decl, closure_kind), } } @@ -480,7 +550,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { &self, expr_def_id: LocalDefId, decl: &hir::FnDecl<'_>, - body: &hir::Body<'_>, expected_sig: ExpectedSig<'tcx>, ) -> ClosureSignatures<'tcx> { let expr_map_node = self.tcx.hir_node_by_def_id(expr_def_id); @@ -511,25 +580,25 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let error_sig = self.error_sig_of_closure(decl, guar); - self.closure_sigs(expr_def_id, body, error_sig) + self.closure_sigs(expr_def_id, error_sig) } /// Enforce the user's types against the expectation. See /// `sig_of_closure_with_expectation` for details on the overall /// strategy. - #[instrument(level = "debug", skip(self, expr_def_id, decl, body, expected_sigs))] + #[instrument(level = "debug", skip(self, expr_def_id, decl, expected_sigs))] fn merge_supplied_sig_with_expectation( &self, expr_def_id: LocalDefId, decl: &hir::FnDecl<'_>, - body: &hir::Body<'_>, + closure_kind: hir::ClosureKind, mut expected_sigs: ClosureSignatures<'tcx>, ) -> InferResult<'tcx, ClosureSignatures<'tcx>> { // Get the signature S that the user gave. // // (See comment on `sig_of_closure_with_expectation` for the // meaning of these letters.) - let supplied_sig = self.supplied_sig_of_closure(expr_def_id, decl, body); + let supplied_sig = self.supplied_sig_of_closure(expr_def_id, decl, closure_kind); debug!(?supplied_sig); @@ -611,17 +680,17 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// types that the user gave into a signature. /// /// Also, record this closure signature for later. 
- #[instrument(skip(self, decl, body), level = "debug", ret)] + #[instrument(skip(self, decl), level = "debug", ret)] fn supplied_sig_of_closure( &self, expr_def_id: LocalDefId, decl: &hir::FnDecl<'_>, - body: &hir::Body<'_>, + closure_kind: hir::ClosureKind, ) -> ty::PolyFnSig<'tcx> { let astconv: &dyn AstConv<'_> = self; trace!("decl = {:#?}", decl); - debug!(?body.coroutine_kind); + debug!(?closure_kind); let hir_id = self.tcx.local_def_id_to_hir_id(expr_def_id); let bound_vars = self.tcx.late_bound_vars(hir_id); @@ -630,36 +699,41 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let supplied_arguments = decl.inputs.iter().map(|a| astconv.ast_ty_to_ty(a)); let supplied_return = match decl.output { hir::FnRetTy::Return(ref output) => astconv.ast_ty_to_ty(output), - hir::FnRetTy::DefaultReturn(_) => match body.coroutine_kind { + hir::FnRetTy::DefaultReturn(_) => match closure_kind { // In the case of the async block that we create for a function body, // we expect the return type of the block to match that of the enclosing // function. - Some(hir::CoroutineKind::Desugared( + hir::ClosureKind::Coroutine(hir::CoroutineKind::Desugared( hir::CoroutineDesugaring::Async, hir::CoroutineSource::Fn, )) => { debug!("closure is async fn body"); - let def_id = self.tcx.hir().body_owner_def_id(body.id()); - self.deduce_future_output_from_obligations(expr_def_id, def_id).unwrap_or_else( - || { - // AFAIK, deducing the future output - // always succeeds *except* in error cases - // like #65159. I'd like to return Error - // here, but I can't because I can't - // easily (and locally) prove that we - // *have* reported an - // error. --nikomatsakis - astconv.ty_infer(None, decl.output.span()) - }, - ) + self.deduce_future_output_from_obligations(expr_def_id).unwrap_or_else(|| { + // AFAIK, deducing the future output + // always succeeds *except* in error cases + // like #65159. I'd like to return Error + // here, but I can't because I can't + // easily (and locally) prove that we + // *have* reported an + // error. --nikomatsakis + astconv.ty_infer(None, decl.output.span()) + }) } // All `gen {}` and `async gen {}` must return unit. - Some( + hir::ClosureKind::Coroutine( hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _) | hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _), ) => self.tcx.types.unit, - _ => astconv.ty_infer(None, decl.output.span()), + // For async blocks, we just fall back to `_` here. + // For closures/coroutines, we know nothing about the return + // type unless it was supplied. + hir::ClosureKind::Coroutine(hir::CoroutineKind::Desugared( + hir::CoroutineDesugaring::Async, + _, + )) + | hir::ClosureKind::Coroutine(hir::CoroutineKind::Coroutine(_)) + | hir::ClosureKind::Closure => astconv.ty_infer(None, decl.output.span()), }, }; @@ -688,16 +762,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// Future`, so we do this by searching through the /// obligations to extract the `T`. 
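// Editor's note: an illustrative example (not part of this patch) of the deduction
// the doc comment above describes. The async block created for an `async fn` body
// has no written return type; its output is recovered from the
// `impl Future<Output = u32>` obligation implied by the enclosing signature.
async fn add_one(x: u32) -> u32 {
    x + 1 // the desugared body block is therefore inferred to produce `u32`
}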
#[instrument(skip(self), level = "debug", ret)] - fn deduce_future_output_from_obligations( - &self, - expr_def_id: LocalDefId, - body_def_id: LocalDefId, - ) -> Option> { + fn deduce_future_output_from_obligations(&self, body_def_id: LocalDefId) -> Option> { let ret_coercion = self.ret_coercion.as_ref().unwrap_or_else(|| { - span_bug!(self.tcx.def_span(expr_def_id), "async fn coroutine outside of a fn") + span_bug!(self.tcx.def_span(body_def_id), "async fn coroutine outside of a fn") }); - let closure_span = self.tcx.def_span(expr_def_id); + let closure_span = self.tcx.def_span(body_def_id); let ret_ty = ret_coercion.borrow().expected_ty(); let ret_ty = self.try_structurally_resolve_type(closure_span, ret_ty); @@ -842,12 +912,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { fn closure_sigs( &self, expr_def_id: LocalDefId, - body: &hir::Body<'_>, bound_sig: ty::PolyFnSig<'tcx>, ) -> ClosureSignatures<'tcx> { let liberated_sig = self.tcx().liberate_late_bound_regions(expr_def_id.to_def_id(), bound_sig); - let liberated_sig = self.normalize(body.value.span, liberated_sig); + let liberated_sig = self.normalize(self.tcx.def_span(expr_def_id), liberated_sig); ClosureSignatures { bound_sig, liberated_sig } } } diff --git a/compiler/rustc_hir_typeck/src/expr.rs b/compiler/rustc_hir_typeck/src/expr.rs index 267fd00da84e8..44d9f1ed81846 100644 --- a/compiler/rustc_hir_typeck/src/expr.rs +++ b/compiler/rustc_hir_typeck/src/expr.rs @@ -15,6 +15,7 @@ use crate::errors::{ use crate::fatally_break_rust; use crate::method::SelfSource; use crate::type_error_struct; +use crate::CoroutineTypes; use crate::Expectation::{self, ExpectCastableToType, ExpectHasType, NoExpectation}; use crate::{ report_unexpected_variant_res, BreakableCtxt, Diverges, FnCtxt, Needs, @@ -187,8 +188,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { expected: Expectation<'tcx>, args: &'tcx [hir::Expr<'tcx>], ) -> Ty<'tcx> { - if self.tcx().sess.verbose() { - // make this code only run with -Zverbose because it is probably slow + if self.tcx().sess.verbose_internals() { + // make this code only run with -Zverbose-internals because it is probably slow if let Ok(lint_str) = self.tcx.sess.source_map().span_to_snippet(expr.span) { if !lint_str.contains('\n') { debug!("expr text: {lint_str}"); @@ -349,7 +350,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ExprKind::Index(base, idx, brackets_span) => { self.check_expr_index(base, idx, expr, brackets_span) } - ExprKind::Yield(value, ref src) => self.check_expr_yield(value, expr, src), + ExprKind::Yield(value, _) => self.check_expr_yield(value, expr), hir::ExprKind::Err(guar) => Ty::new_error(tcx, guar), } } @@ -1131,8 +1132,17 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let mut err = self.demand_suptype_diag(expr.span, expected_ty, actual_ty).unwrap(); let lhs_ty = self.check_expr(lhs); let rhs_ty = self.check_expr(rhs); + let refs_can_coerce = |lhs: Ty<'tcx>, rhs: Ty<'tcx>| { + let lhs = Ty::new_imm_ref(self.tcx, self.tcx.lifetimes.re_erased, lhs.peel_refs()); + let rhs = Ty::new_imm_ref(self.tcx, self.tcx.lifetimes.re_erased, rhs.peel_refs()); + self.can_coerce(rhs, lhs) + }; let (applicability, eq) = if self.can_coerce(rhs_ty, lhs_ty) { (Applicability::MachineApplicable, true) + } else if refs_can_coerce(rhs_ty, lhs_ty) { + // The lhs and rhs are likely missing some references in either side. Subsequent + // suggestions will show up. + (Applicability::MaybeIncorrect, true) } else if let ExprKind::Binary( Spanned { node: hir::BinOpKind::And | hir::BinOpKind::Or, .. 
}, _, @@ -1142,7 +1152,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // if x == 1 && y == 2 { .. } // + let actual_lhs_ty = self.check_expr(rhs_expr); - (Applicability::MaybeIncorrect, self.can_coerce(rhs_ty, actual_lhs_ty)) + ( + Applicability::MaybeIncorrect, + self.can_coerce(rhs_ty, actual_lhs_ty) + || refs_can_coerce(rhs_ty, actual_lhs_ty), + ) } else if let ExprKind::Binary( Spanned { node: hir::BinOpKind::And | hir::BinOpKind::Or, .. }, lhs_expr, @@ -1152,7 +1166,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // if x == 1 && y == 2 { .. } // + let actual_rhs_ty = self.check_expr(lhs_expr); - (Applicability::MaybeIncorrect, self.can_coerce(actual_rhs_ty, lhs_ty)) + ( + Applicability::MaybeIncorrect, + self.can_coerce(actual_rhs_ty, lhs_ty) + || refs_can_coerce(actual_rhs_ty, lhs_ty), + ) } else { (Applicability::MaybeIncorrect, false) }; @@ -3145,22 +3163,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { &self, value: &'tcx hir::Expr<'tcx>, expr: &'tcx hir::Expr<'tcx>, - src: &'tcx hir::YieldSource, ) -> Ty<'tcx> { - match self.resume_yield_tys { - Some((resume_ty, yield_ty)) => { + match self.coroutine_types { + Some(CoroutineTypes { resume_ty, yield_ty }) => { self.check_expr_coercible_to_type(value, yield_ty, None); resume_ty } - // Given that this `yield` expression was generated as a result of lowering a `.await`, - // we know that the yield type must be `()`; however, the context won't contain this - // information. Hence, we check the source of the yield expression here and check its - // value's type against `()` (this check should always hold). - None if src.is_await() => { - self.check_expr_coercible_to_type(value, Ty::new_unit(self.tcx), None); - Ty::new_unit(self.tcx) - } _ => { self.dcx().emit_err(YieldExprOutsideOfCoroutine { span: expr.span }); // Avoid expressions without types during writeback (#78653). diff --git a/compiler/rustc_hir_typeck/src/fallback.rs b/compiler/rustc_hir_typeck/src/fallback.rs index 023bd70be174e..aa8bbad1d1246 100644 --- a/compiler/rustc_hir_typeck/src/fallback.rs +++ b/compiler/rustc_hir_typeck/src/fallback.rs @@ -85,7 +85,7 @@ impl<'tcx> FnCtxt<'_, 'tcx> { return false; } - // not setting the `fallback_has_occured` field here because + // not setting the `fallback_has_occurred` field here because // that field is only used for type fallback diagnostics. for effect in unsolved_effects { let expected = self.tcx.consts.true_; diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs index 994f11b57d195..cb109a2e02428 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs @@ -534,7 +534,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let coroutines = std::mem::take(&mut *self.deferred_coroutine_interiors.borrow_mut()); debug!(?coroutines); - for &(expr_def_id, body_id, interior, _) in coroutines.iter() { + for &(expr_def_id, body_id, interior) in coroutines.iter() { debug!(?expr_def_id); // Create the `CoroutineWitness` type that we will unify with `interior`. 
diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs index 635284c5f7323..fde3d41faecf0 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs @@ -5,7 +5,7 @@ mod checks; mod suggestions; use crate::coercion::DynamicCoerceMany; -use crate::{Diverges, EnclosingBreakables, Inherited}; +use crate::{CoroutineTypes, Diverges, EnclosingBreakables, Inherited}; use rustc_errors::{DiagCtxt, ErrorGuaranteed}; use rustc_hir as hir; use rustc_hir::def_id::{DefId, LocalDefId}; @@ -68,7 +68,7 @@ pub struct FnCtxt<'a, 'tcx> { /// First span of a return site that we find. Used in error messages. pub(super) ret_coercion_span: Cell>, - pub(super) resume_yield_tys: Option<(Ty<'tcx>, Ty<'tcx>)>, + pub(super) coroutine_types: Option>, /// Whether the last checked node generates a divergence (e.g., /// `return` will set this to `Always`). In general, when entering @@ -122,7 +122,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { err_count_on_creation: inh.tcx.dcx().err_count(), ret_coercion: None, ret_coercion_span: Cell::new(None), - resume_yield_tys: None, + coroutine_types: None, diverges: Cell::new(Diverges::Maybe), enclosing_breakables: RefCell::new(EnclosingBreakables { stack: Vec::new(), diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs index d2917b25c54bd..b542132d71ce5 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs @@ -2443,7 +2443,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } }; - // Suggest dereferencing the lhs for expressions such as `&T == T` + // Suggest dereferencing the lhs for expressions such as `&T <= T` if let Some(hir::Node::Expr(hir::Expr { kind: hir::ExprKind::Binary(_, lhs, ..), .. diff --git a/compiler/rustc_hir_typeck/src/gather_locals.rs b/compiler/rustc_hir_typeck/src/gather_locals.rs index 0cca779b1560e..e169b45d725f4 100644 --- a/compiler/rustc_hir_typeck/src/gather_locals.rs +++ b/compiler/rustc_hir_typeck/src/gather_locals.rs @@ -150,7 +150,7 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherLocalsVisitor<'a, 'tcx> { // ascription, or if it's an implicit `self` parameter traits::SizedArgumentType( if ty_span == ident.span - && self.fcx.tcx.is_closure(self.fcx.body_id.into()) + && self.fcx.tcx.is_closure_or_coroutine(self.fcx.body_id.into()) { None } else { diff --git a/compiler/rustc_hir_typeck/src/inherited.rs b/compiler/rustc_hir_typeck/src/inherited.rs index 7a6a2b2a01029..4ad46845f0ba8 100644 --- a/compiler/rustc_hir_typeck/src/inherited.rs +++ b/compiler/rustc_hir_typeck/src/inherited.rs @@ -55,8 +55,7 @@ pub struct Inherited<'tcx> { pub(super) deferred_asm_checks: RefCell, hir::HirId)>>, - pub(super) deferred_coroutine_interiors: - RefCell, hir::CoroutineKind)>>, + pub(super) deferred_coroutine_interiors: RefCell)>>, /// Whenever we introduce an adjustment from `!` into a type variable, /// we record that type variable here. 
This is later used to inform diff --git a/compiler/rustc_hir_typeck/src/lib.rs b/compiler/rustc_hir_typeck/src/lib.rs index d910643942003..6044b1fdd40af 100644 --- a/compiler/rustc_hir_typeck/src/lib.rs +++ b/compiler/rustc_hir_typeck/src/lib.rs @@ -193,7 +193,7 @@ fn typeck_with_fallback<'tcx>( let fn_sig = tcx.liberate_late_bound_regions(def_id.to_def_id(), fn_sig); let fn_sig = fcx.normalize(body.value.span, fn_sig); - check_fn(&mut fcx, fn_sig, decl, def_id, body, None, tcx.features().unsized_fn_params); + check_fn(&mut fcx, fn_sig, None, decl, def_id, body, tcx.features().unsized_fn_params); } else { let expected_type = if let Some(&hir::Ty { kind: hir::TyKind::Infer, span, .. }) = body_ty { Some(fcx.next_ty_var(TypeVariableOrigin { @@ -295,18 +295,13 @@ fn typeck_with_fallback<'tcx>( /// When `check_fn` is invoked on a coroutine (i.e., a body that /// includes yield), it returns back some information about the yield /// points. +#[derive(Debug, PartialEq, Copy, Clone)] struct CoroutineTypes<'tcx> { /// Type of coroutine argument / values returned by `yield`. resume_ty: Ty<'tcx>, /// Type of value that is yielded. yield_ty: Ty<'tcx>, - - /// Types that are captured (see `CoroutineInterior` for more). - interior: Ty<'tcx>, - - /// Indicates if the coroutine is movable or static (immovable). - movability: hir::Movability, } #[derive(Copy, Clone, Debug, PartialEq, Eq)] diff --git a/compiler/rustc_hir_typeck/src/method/suggest.rs b/compiler/rustc_hir_typeck/src/method/suggest.rs index 6530d828b3be2..8a179c5a440e8 100644 --- a/compiler/rustc_hir_typeck/src/method/suggest.rs +++ b/compiler/rustc_hir_typeck/src/method/suggest.rs @@ -2252,6 +2252,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { &self, err: &mut Diagnostic, errors: Vec>, + suggest_derive: bool, ) { let all_local_types_needing_impls = errors.iter().all(|e| match e.obligation.predicate.kind().skip_binder() { @@ -2322,10 +2323,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { .iter() .map(|e| (e.obligation.predicate, None, Some(e.obligation.cause.clone()))) .collect(); - self.suggest_derive(err, &preds); + if suggest_derive { + self.suggest_derive(err, &preds); + } else { + // The predicate comes from a binop where the lhs and rhs have different types. 
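// Editor's note: an illustrative, hypothetical example (not part of this patch) of
// the case handled above. The binop operands have different types, so a
// `#[derive(..)]` suggestion on the left-hand type would not make the operation
// compile; only the origin of the unsatisfied predicate is noted instead.
struct Meters(f64);
struct Feet(f64);
fn mismatch(a: Meters, b: Feet) -> bool {
    // a == b  // error: binary operation `==` cannot be applied to type `Meters`;
    //         // deriving `PartialEq` for `Meters` alone would not fix this.
    let _ = (a, b);
    false
}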
+ let _ = self.note_predicate_source_and_get_derives(err, &preds); + } } - pub fn suggest_derive( + fn note_predicate_source_and_get_derives( &self, err: &mut Diagnostic, unsatisfied_predicates: &[( @@ -2333,7 +2339,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Option>, Option>, )], - ) { + ) -> Vec<(String, Span, Symbol)> { let mut derives = Vec::<(String, Span, Symbol)>::new(); let mut traits = Vec::new(); for (pred, _, _) in unsatisfied_predicates { @@ -2382,21 +2388,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { traits.sort(); traits.dedup(); - derives.sort(); - derives.dedup(); - - let mut derives_grouped = Vec::<(String, Span, String)>::new(); - for (self_name, self_span, trait_name) in derives.into_iter() { - if let Some((last_self_name, _, ref mut last_trait_names)) = derives_grouped.last_mut() - { - if last_self_name == &self_name { - last_trait_names.push_str(format!(", {trait_name}").as_str()); - continue; - } - } - derives_grouped.push((self_name, self_span, trait_name.to_string())); - } - let len = traits.len(); if len > 0 { let span = @@ -2419,6 +2410,34 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ); } + derives + } + + pub(crate) fn suggest_derive( + &self, + err: &mut Diagnostic, + unsatisfied_predicates: &[( + ty::Predicate<'tcx>, + Option>, + Option>, + )], + ) { + let mut derives = self.note_predicate_source_and_get_derives(err, unsatisfied_predicates); + derives.sort(); + derives.dedup(); + + let mut derives_grouped = Vec::<(String, Span, String)>::new(); + for (self_name, self_span, trait_name) in derives.into_iter() { + if let Some((last_self_name, _, ref mut last_trait_names)) = derives_grouped.last_mut() + { + if last_self_name == &self_name { + last_trait_names.push_str(format!(", {trait_name}").as_str()); + continue; + } + } + derives_grouped.push((self_name, self_span, trait_name.to_string())); + } + for (self_name, self_span, traits) in &derives_grouped { err.span_suggestion_verbose( self_span.shrink_to_lo(), @@ -3306,6 +3325,7 @@ fn print_disambiguation_help<'tcx>( span: Span, item: ty::AssocItem, ) -> Option { + let trait_impl_type = trait_ref.self_ty().peel_refs(); let trait_ref = if item.fn_has_self_parameter { trait_ref.print_only_trait_name().to_string() } else { @@ -3318,27 +3338,34 @@ fn print_disambiguation_help<'tcx>( { let def_kind_descr = tcx.def_kind_descr(item.kind.as_def_kind(), item.def_id); let item_name = item.ident(tcx); - let rcvr_ref = tcx - .fn_sig(item.def_id) - .skip_binder() - .skip_binder() - .inputs() - .get(0) - .and_then(|ty| ty.ref_mutability()) - .map_or("", |mutbl| mutbl.ref_prefix_str()); - let args = format!( - "({}{})", - rcvr_ref, - std::iter::once(receiver) - .chain(args.iter()) - .map(|arg| tcx - .sess - .source_map() - .span_to_snippet(arg.span) - .unwrap_or_else(|_| { "_".to_owned() })) - .collect::>() - .join(", "), + let first_input = + tcx.fn_sig(item.def_id).instantiate_identity().skip_binder().inputs().get(0); + let (first_arg_type, rcvr_ref) = ( + first_input.map(|first| first.peel_refs()), + first_input + .and_then(|ty| ty.ref_mutability()) + .map_or("", |mutbl| mutbl.ref_prefix_str()), ); + + // If the type of first arg of this assoc function is `Self` or current trait impl type or `arbitrary_self_types`, we need to take the receiver as args. Otherwise, we don't. 
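// Editor's note: an illustrative example (not part of this patch) of the shape of
// suggestion the receiver logic above feeds into. When the ambiguous method takes
// `&self`, the fully qualified form names the receiver as the first argument:
trait Pow { fn pow(&self, exp: u32) -> u32; }
struct N(u32);
impl Pow for N { fn pow(&self, exp: u32) -> u32 { self.0.pow(exp) } }
fn disambiguated(n: N) -> u32 {
    Pow::pow(&n, 2) // receiver passed explicitly, as the suggestion would print it
}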
+ let args = if let Some(first_arg_type) = first_arg_type + && (first_arg_type == tcx.types.self_param + || first_arg_type == trait_impl_type + || item.fn_has_self_parameter) + { + Some(receiver) + } else { + None + } + .into_iter() + .chain(args) + .map(|arg| { + tcx.sess.source_map().span_to_snippet(arg.span).unwrap_or_else(|_| "_".to_owned()) + }) + .collect::>() + .join(", "); + + let args = format!("({}{})", rcvr_ref, args); err.span_suggestion_verbose( span, format!( diff --git a/compiler/rustc_hir_typeck/src/op.rs b/compiler/rustc_hir_typeck/src/op.rs index 7fe5e409aa1c8..7b49a7cc009db 100644 --- a/compiler/rustc_hir_typeck/src/op.rs +++ b/compiler/rustc_hir_typeck/src/op.rs @@ -47,7 +47,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { if let Some(lhs_deref_ty) = self.deref_once_mutably_for_diagnostic(lhs_ty) { if self .lookup_op_method( - lhs_deref_ty, + (lhs, lhs_deref_ty), Some((rhs, rhs_ty)), Op::Binary(op, IsAssign::Yes), expected, @@ -58,7 +58,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // emitted a better suggestion during error handling in check_overloaded_binop. if self .lookup_op_method( - lhs_ty, + (lhs, lhs_ty), Some((rhs, rhs_ty)), Op::Binary(op, IsAssign::Yes), expected, @@ -246,7 +246,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { }); let result = self.lookup_op_method( - lhs_ty, + (lhs_expr, lhs_ty), Some((rhs_expr, rhs_ty_var)), Op::Binary(op, is_assign), expected, @@ -318,7 +318,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { lhs_expr.span, format!("cannot use `{}=` on type `{}`", op.node.as_str(), lhs_ty), ); - self.note_unmet_impls_on_type(&mut err, errors); + self.note_unmet_impls_on_type(&mut err, errors, false); (err, None) } IsAssign::No => { @@ -375,7 +375,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { err.span_label(lhs_expr.span, lhs_ty.to_string()); err.span_label(rhs_expr.span, rhs_ty.to_string()); } - self.note_unmet_impls_on_type(&mut err, errors); + let suggest_derive = self.can_eq(self.param_env, lhs_ty, rhs_ty); + self.note_unmet_impls_on_type(&mut err, errors, suggest_derive); (err, output_def_id) } }; @@ -391,7 +392,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { |err: &mut DiagnosticBuilder<'_, _>, lhs_deref_ty: Ty<'tcx>| { if self .lookup_op_method( - lhs_deref_ty, + (lhs_expr, lhs_deref_ty), Some((rhs_expr, rhs_ty)), Op::Binary(op, is_assign), expected, @@ -424,7 +425,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { rhs_new_mutbl: Option| { if self .lookup_op_method( - lhs_adjusted_ty, + (lhs_expr, lhs_adjusted_ty), Some((rhs_expr, rhs_adjusted_ty)), Op::Binary(op, is_assign), expected, @@ -479,7 +480,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let is_compatible_after_call = |lhs_ty, rhs_ty| { self.lookup_op_method( - lhs_ty, + (lhs_expr, lhs_ty), Some((rhs_expr, rhs_ty)), Op::Binary(op, is_assign), expected, @@ -578,7 +579,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // suggestion for the user. 
let errors = self .lookup_op_method( - lhs_ty, + (lhs_expr, lhs_ty), Some((rhs_expr, rhs_ty)), Op::Binary(op, is_assign), expected, @@ -779,7 +780,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { expected: Expectation<'tcx>, ) -> Ty<'tcx> { assert!(op.is_by_value()); - match self.lookup_op_method(operand_ty, None, Op::Unary(op, ex.span), expected) { + match self.lookup_op_method((ex, operand_ty), None, Op::Unary(op, ex.span), expected) { Ok(method) => { self.write_method_call_and_enforce_effects(ex.hir_id, ex.span, method); method.sig.output() @@ -852,7 +853,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Str | Never | Char | Tuple(_) | Array(_, _) => {} Ref(_, lty, _) if *lty.kind() == Str => {} _ => { - self.note_unmet_impls_on_type(&mut err, errors); + self.note_unmet_impls_on_type(&mut err, errors, true); } } } @@ -865,7 +866,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { fn lookup_op_method( &self, - lhs_ty: Ty<'tcx>, + (lhs_expr, lhs_ty): (&'tcx hir::Expr<'tcx>, Ty<'tcx>), opt_rhs: Option<(&'tcx hir::Expr<'tcx>, Ty<'tcx>)>, op: Op, expected: Expectation<'tcx>, @@ -909,8 +910,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let cause = self.cause( span, traits::BinOp { + lhs_hir_id: lhs_expr.hir_id, + rhs_hir_id: opt_rhs_expr.map(|expr| expr.hir_id), rhs_span: opt_rhs_expr.map(|expr| expr.span), - is_lit: opt_rhs_expr.is_some_and(|expr| matches!(expr.kind, hir::ExprKind::Lit(_))), + rhs_is_lit: opt_rhs_expr + .is_some_and(|expr| matches!(expr.kind, hir::ExprKind::Lit(_))), output_ty: expected.only_has_type(self), }, ); diff --git a/compiler/rustc_hir_typeck/src/pat.rs b/compiler/rustc_hir_typeck/src/pat.rs index 6deeb8bee5d2a..02a35110716b5 100644 --- a/compiler/rustc_hir_typeck/src/pat.rs +++ b/compiler/rustc_hir_typeck/src/pat.rs @@ -2080,7 +2080,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// ``` /// /// If we're in an irrefutable pattern we prefer the array impl candidate given that - /// the slice impl candidate would be be rejected anyway (if no ambiguity existed). + /// the slice impl candidate would be rejected anyway (if no ambiguity existed). 
fn pat_is_irrefutable(&self, decl_origin: Option>) -> bool { match decl_origin { Some(DeclOrigin::LocalDecl { els: None }) => true, diff --git a/compiler/rustc_hir_typeck/src/upvar.rs b/compiler/rustc_hir_typeck/src/upvar.rs index fc525a0fd4efd..47b9d5f650326 100644 --- a/compiler/rustc_hir_typeck/src/upvar.rs +++ b/compiler/rustc_hir_typeck/src/upvar.rs @@ -172,7 +172,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let ty = self.node_ty(closure_hir_id); let (closure_def_id, args) = match *ty.kind() { ty::Closure(def_id, args) => (def_id, UpvarArgs::Closure(args)), - ty::Coroutine(def_id, args, _) => (def_id, UpvarArgs::Coroutine(args)), + ty::Coroutine(def_id, args) => (def_id, UpvarArgs::Coroutine(args)), ty::Error(_) => { // #51714: skip analysis when we have already encountered type errors return; diff --git a/compiler/rustc_incremental/src/persist/file_format.rs b/compiler/rustc_incremental/src/persist/file_format.rs index e68195acee097..b459f82f23e32 100644 --- a/compiler/rustc_incremental/src/persist/file_format.rs +++ b/compiler/rustc_incremental/src/persist/file_format.rs @@ -55,18 +55,12 @@ where debug!("save: remove old file"); } Err(err) if err.kind() == io::ErrorKind::NotFound => (), - Err(err) => { - sess.dcx().emit_err(errors::DeleteOld { name, path: path_buf, err }); - return; - } + Err(err) => sess.dcx().emit_fatal(errors::DeleteOld { name, path: path_buf, err }), } let mut encoder = match FileEncoder::new(&path_buf) { Ok(encoder) => encoder, - Err(err) => { - sess.dcx().emit_err(errors::CreateNew { name, path: path_buf, err }); - return; - } + Err(err) => sess.dcx().emit_fatal(errors::CreateNew { name, path: path_buf, err }), }; write_file_header(&mut encoder, sess); @@ -80,9 +74,7 @@ where ); debug!("save: data written to disk successfully"); } - Err((path, err)) => { - sess.dcx().emit_err(errors::WriteNew { name, path, err }); - } + Err((path, err)) => sess.dcx().emit_fatal(errors::WriteNew { name, path, err }), } } diff --git a/compiler/rustc_infer/src/infer/error_reporting/mod.rs b/compiler/rustc_infer/src/infer/error_reporting/mod.rs index 0c0292f329ef6..b5a6374ec4b45 100644 --- a/compiler/rustc_infer/src/infer/error_reporting/mod.rs +++ b/compiler/rustc_infer/src/infer/error_reporting/mod.rs @@ -1212,6 +1212,23 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { s.push_highlighted(mutbl.prefix_str()); } + fn maybe_highlight( + t1: T, + t2: T, + (buf1, buf2): &mut (DiagnosticStyledString, DiagnosticStyledString), + tcx: TyCtxt<'_>, + ) { + let highlight = t1 != t2; + let (t1, t2) = if highlight || tcx.sess.opts.verbose { + (t1.to_string(), t2.to_string()) + } else { + // The two types are the same, elide and don't highlight. 
+ ("_".into(), "_".into()) + }; + buf1.push(t1, highlight); + buf2.push(t2, highlight); + } + fn cmp_ty_refs<'tcx>( r1: ty::Region<'tcx>, mut1: hir::Mutability, @@ -1308,7 +1325,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { if lifetimes.0 != lifetimes.1 { values.0.push_highlighted(l1); values.1.push_highlighted(l2); - } else if lifetimes.0.is_bound() { + } else if lifetimes.0.is_bound() || self.tcx.sess.opts.verbose { values.0.push_normal(l1); values.1.push_normal(l2); } else { @@ -1329,7 +1346,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { let num_display_types = consts_offset - regions_len; for (i, (ta1, ta2)) in type_arguments.take(num_display_types).enumerate() { let i = i + regions_len; - if ta1 == ta2 && !self.tcx.sess.verbose() { + if ta1 == ta2 && !self.tcx.sess.opts.verbose { values.0.push_normal("_"); values.1.push_normal("_"); } else { @@ -1343,13 +1360,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { let const_arguments = sub1.consts().zip(sub2.consts()); for (i, (ca1, ca2)) in const_arguments.enumerate() { let i = i + consts_offset; - if ca1 == ca2 && !self.tcx.sess.verbose() { - values.0.push_normal("_"); - values.1.push_normal("_"); - } else { - values.0.push_highlighted(ca1.to_string()); - values.1.push_highlighted(ca2.to_string()); - } + maybe_highlight(ca1, ca2, &mut values, self.tcx); self.push_comma(&mut values.0, &mut values.1, len, i); } @@ -1513,16 +1524,9 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { (ty::FnPtr(sig1), ty::FnPtr(sig2)) => self.cmp_fn_sig(sig1, sig2), _ => { - if t1 == t2 && !self.tcx.sess.verbose() { - // The two types are the same, elide and don't highlight. - (DiagnosticStyledString::normal("_"), DiagnosticStyledString::normal("_")) - } else { - // We couldn't find anything in common, highlight everything. - ( - DiagnosticStyledString::highlighted(t1.to_string()), - DiagnosticStyledString::highlighted(t2.to_string()), - ) - } + let mut strs = (DiagnosticStyledString::new(), DiagnosticStyledString::new()); + maybe_highlight(t1, t2, &mut strs, self.tcx); + strs } } } diff --git a/compiler/rustc_infer/src/infer/freshen.rs b/compiler/rustc_infer/src/infer/freshen.rs index 11ab86277c1b1..c7cab048db1ba 100644 --- a/compiler/rustc_infer/src/infer/freshen.rs +++ b/compiler/rustc_infer/src/infer/freshen.rs @@ -5,7 +5,7 @@ //! Freshening is used primarily to get a good type for inserting into a cache. The result //! summarizes what the type inferencer knows "so far". The primary place it is used right now is //! in the trait matching algorithm, which needs to be able to cache whether an `impl` self type -//! matches some other type X -- *without* affecting `X`. That means if that if the type `X` is in +//! matches some other type X -- *without* affecting `X`. That means that if the type `X` is in //! fact an unbound type variable, we want the match to be regarded as ambiguous, because depending //! on what type that type variable is ultimately assigned, the match may or may not succeed. //! @@ -21,7 +21,7 @@ //! Because of the manipulation required to handle closures, doing arbitrary operations on //! freshened types is not recommended. However, in addition to doing equality/hash //! comparisons (for caching), it is possible to do a `ty::_match` operation between -//! 2 freshened types - this works even with the closure encoding. +//! two freshened types - this works even with the closure encoding. //! //! __An important detail concerning regions.__ The freshener also replaces *all* free regions with //! 'erased. 
The reason behind this is that, in general, we do not take region relationships into diff --git a/compiler/rustc_infer/src/infer/opaque_types.rs b/compiler/rustc_infer/src/infer/opaque_types.rs index 2656fd529cde6..11b5b437eff92 100644 --- a/compiler/rustc_infer/src/infer/opaque_types.rs +++ b/compiler/rustc_infer/src/infer/opaque_types.rs @@ -456,7 +456,7 @@ where args.as_closure().sig_as_fn_ptr_ty().visit_with(self); } - ty::Coroutine(_, args, _) => { + ty::Coroutine(_, args) => { // Skip lifetime parameters of the enclosing item(s) // Also skip the witness type, because that has no free regions. diff --git a/compiler/rustc_infer/src/infer/outlives/components.rs b/compiler/rustc_infer/src/infer/outlives/components.rs index 47038cfd46878..fc3d8375873b0 100644 --- a/compiler/rustc_infer/src/infer/outlives/components.rs +++ b/compiler/rustc_infer/src/infer/outlives/components.rs @@ -103,7 +103,7 @@ fn compute_components<'tcx>( compute_components(tcx, tupled_ty, out, visited); } - ty::Coroutine(_, args, _) => { + ty::Coroutine(_, args) => { // Same as the closure case let tupled_ty = args.as_coroutine().tupled_upvars_ty(); compute_components(tcx, tupled_ty, out, visited); diff --git a/compiler/rustc_infer/src/infer/relate/combine.rs b/compiler/rustc_infer/src/infer/relate/combine.rs index 6d8384e7a4791..8b31a1118cb75 100644 --- a/compiler/rustc_infer/src/infer/relate/combine.rs +++ b/compiler/rustc_infer/src/infer/relate/combine.rs @@ -335,7 +335,7 @@ impl<'tcx> InferCtxt<'tcx> { // constants and generic expressions are not yet handled correctly. let Generalization { value_may_be_infer: value, needs_wf: _ } = generalize::generalize( self, - &mut CombineDelegate { infcx: self, span, param_env }, + &mut CombineDelegate { infcx: self, span }, ct, target_vid, ty::Variance::Invariant, @@ -454,11 +454,7 @@ impl<'infcx, 'tcx> CombineFields<'infcx, 'tcx> { // adding constraints like `'x: '?2` and `?1 <: ?3`.) let Generalization { value_may_be_infer: b_ty, needs_wf } = generalize::generalize( self.infcx, - &mut CombineDelegate { - infcx: self.infcx, - param_env: self.param_env, - span: self.trace.span(), - }, + &mut CombineDelegate { infcx: self.infcx, span: self.trace.span() }, a_ty, b_vid, ambient_variance, diff --git a/compiler/rustc_infer/src/infer/relate/generalize.rs b/compiler/rustc_infer/src/infer/relate/generalize.rs index 665af7381dc75..27d37fd93697e 100644 --- a/compiler/rustc_infer/src/infer/relate/generalize.rs +++ b/compiler/rustc_infer/src/infer/relate/generalize.rs @@ -1,6 +1,7 @@ use std::mem; use rustc_data_structures::sso::SsoHashMap; +use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_hir::def_id::DefId; use rustc_middle::infer::unify_key::{ConstVarValue, ConstVariableValue}; use rustc_middle::ty::error::TypeError; @@ -55,8 +56,6 @@ pub fn generalize<'tcx, D: GeneralizerDelegate<'tcx>, T: Into> + Rela /// Abstracts the handling of region vars between HIR and MIR/NLL typechecking /// in the generalizer code. 
pub trait GeneralizerDelegate<'tcx> { - fn param_env(&self) -> ty::ParamEnv<'tcx>; - fn forbid_inference_vars() -> bool; fn span(&self) -> Span; @@ -66,15 +65,10 @@ pub trait GeneralizerDelegate<'tcx> { pub struct CombineDelegate<'cx, 'tcx> { pub infcx: &'cx InferCtxt<'tcx>, - pub param_env: ty::ParamEnv<'tcx>, pub span: Span, } impl<'tcx> GeneralizerDelegate<'tcx> for CombineDelegate<'_, 'tcx> { - fn param_env(&self) -> ty::ParamEnv<'tcx> { - self.param_env - } - fn forbid_inference_vars() -> bool { false } @@ -95,10 +89,6 @@ impl<'tcx, T> GeneralizerDelegate<'tcx> for T where T: TypeRelatingDelegate<'tcx>, { - fn param_env(&self) -> ty::ParamEnv<'tcx> { - >::param_env(self) - } - fn forbid_inference_vars() -> bool { >::forbid_inference_vars() } @@ -226,7 +216,9 @@ where let old_ambient_variance = self.ambient_variance; self.ambient_variance = self.ambient_variance.xform(variance); debug!(?self.ambient_variance, "new ambient variance"); - let r = self.relate(a, b)?; + // Recursive calls to `relate` can overflow the stack. For example a deeper version of + // `ui/associated-consts/issue-93775.rs`. + let r = ensure_sufficient_stack(|| self.relate(a, b))?; self.ambient_variance = old_ambient_variance; Ok(r) } diff --git a/compiler/rustc_infer/src/traits/structural_impls.rs b/compiler/rustc_infer/src/traits/structural_impls.rs index 8a7c59da09ebb..51c06c8970b16 100644 --- a/compiler/rustc_infer/src/traits/structural_impls.rs +++ b/compiler/rustc_infer/src/traits/structural_impls.rs @@ -17,7 +17,7 @@ impl<'tcx, T: fmt::Debug> fmt::Debug for Normalized<'tcx, T> { impl<'tcx, O: fmt::Debug> fmt::Debug for traits::Obligation<'tcx, O> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if ty::tls::with(|tcx| tcx.sess.verbose()) { + if ty::tls::with(|tcx| tcx.sess.verbose_internals()) { write!( f, "Obligation(predicate={:?}, cause={:?}, param_env={:?}, depth={})", diff --git a/compiler/rustc_interface/src/queries.rs b/compiler/rustc_interface/src/queries.rs index 1ea3db26e212b..07bbe78dc2d45 100644 --- a/compiler/rustc_interface/src/queries.rs +++ b/compiler/rustc_interface/src/queries.rs @@ -332,7 +332,7 @@ impl Compiler { // the global context. _timer = Some(self.sess.timer("free_global_ctxt")); if let Err((path, error)) = queries.finish() { - self.sess.dcx().emit_err(errors::FailedWritingFile { path: &path, error }); + self.sess.dcx().emit_fatal(errors::FailedWritingFile { path: &path, error }); } ret diff --git a/compiler/rustc_interface/src/tests.rs b/compiler/rustc_interface/src/tests.rs index 04a7714d4137e..c4a1f3a0e510b 100644 --- a/compiler/rustc_interface/src/tests.rs +++ b/compiler/rustc_interface/src/tests.rs @@ -714,7 +714,7 @@ fn test_unstable_options_tracking_hash() { untracked!(unpretty, Some("expanded".to_string())); untracked!(unstable_options, true); untracked!(validate_mir, true); - untracked!(verbose, true); + untracked!(verbose_internals, true); untracked!(write_long_types_to_disk, false); // tidy-alphabetical-end diff --git a/compiler/rustc_lint/src/builtin.rs b/compiler/rustc_lint/src/builtin.rs index 8afb5f2d32ebf..8d2f2aaca5574 100644 --- a/compiler/rustc_lint/src/builtin.rs +++ b/compiler/rustc_lint/src/builtin.rs @@ -2734,10 +2734,13 @@ impl<'tcx> LateLintPass<'tcx> for NamedAsmLabels { #[allow(rustc::diagnostic_outside_of_impl)] fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'tcx>) { if let hir::Expr { - kind: hir::ExprKind::InlineAsm(hir::InlineAsm { template_strs, .. 
}), + kind: hir::ExprKind::InlineAsm(hir::InlineAsm { template_strs, options, .. }), .. } = expr { + // asm with `options(raw)` does not do replacement with `{` and `}`. + let raw = options.contains(InlineAsmOptions::RAW); + for (template_sym, template_snippet, template_span) in template_strs.iter() { let template_str = template_sym.as_str(); let find_label_span = |needle: &str| -> Option<Span> { @@ -2763,24 +2766,57 @@ impl<'tcx> LateLintPass<'tcx> for NamedAsmLabels { for statement in statements { // If there's a comment, trim it from the statement let statement = statement.find("//").map_or(statement, |idx| &statement[..idx]); + + // In this loop, if there is ever a non-label, no labels can come after it. let mut start_idx = 0; - for (idx, _) in statement.match_indices(':') { + 'label_loop: for (idx, _) in statement.match_indices(':') { let possible_label = statement[start_idx..idx].trim(); let mut chars = possible_label.chars(); - let Some(c) = chars.next() else { - // Empty string means a leading ':' in this section, which is not a label - break; + + let Some(start) = chars.next() else { - // Empty string means a leading ':' in this section, which is not a label. + break 'label_loop; }; - // A label starts with an alphabetic character or . or _ and continues with alphanumeric characters, _, or $ - if (c.is_alphabetic() || matches!(c, '.' | '_')) - && chars.all(|c| c.is_alphanumeric() || matches!(c, '_' | '$')) - { - found_labels.push(possible_label); - } else { - // If we encounter a non-label, there cannot be any further labels, so stop checking - break; + + // Whether a { bracket has been seen and its } hasn't been found yet. + let mut in_bracket = false; + + // A label starts with an ASCII alphabetic character or . or _ + // A label can also start with a format arg, if it's not a raw asm block. + if !raw && start == '{' { + in_bracket = true; + } else if !(start.is_ascii_alphabetic() || matches!(start, '.' | '_')) { + break 'label_loop; + } + + // Labels continue with ASCII alphanumeric characters, _, or $ + for c in chars { + // Inside a template format arg, any character is permitted for the purposes of label detection + // because we assume that it can be replaced with some other valid label string later. + // `options(raw)` asm blocks cannot have format args, so they are excluded from this special case. + if !raw && in_bracket { + if c == '{' { + // Nested brackets are not allowed in format args, this cannot be a label. + break 'label_loop; + } + + if c == '}' { + // The end of the format arg. + in_bracket = false; + } + } else if !raw && c == '{' { + // Start of a format arg. + in_bracket = true; + } else { + if !(c.is_ascii_alphanumeric() || matches!(c, '_' | '$')) { + // The potential label had an invalid character inside it, it cannot be a label. + break 'label_loop; + } + } } + // If all characters passed the label checks, this is likely a label.
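// Editor's note: an illustrative, hypothetical example (not part of this patch) of
// what the label scan above is looking for. Named GNU-style labels in `asm!`
// templates are rejected by the deny-by-default `named_asm_labels` lint; numeric
// local labels pass, and after this change a non-raw `{...}` template argument is
// conservatively treated as possible label text, while `options(raw)` templates
// take `{` literally.
#[cfg(target_arch = "x86_64")]
unsafe fn label_demo() {
    use std::arch::asm;
    // asm!("my_label: nop"); // error: avoid using named labels in inline assembly
    asm!("2:", "nop"); // a numeric local label is not flagged
}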
+ found_labels.push(possible_label); start_idx = idx + 1; } } diff --git a/compiler/rustc_lint/src/context.rs b/compiler/rustc_lint/src/context.rs index 40b70ba4e0429..d0fd019a8b122 100644 --- a/compiler/rustc_lint/src/context.rs +++ b/compiler/rustc_lint/src/context.rs @@ -18,34 +18,32 @@ use self::TargetLint::*; use crate::levels::LintLevelsBuilder; use crate::passes::{EarlyLintPassObject, LateLintPassObject}; -use rustc_ast::util::unicode::TEXT_FLOW_CONTROL_CHARS; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::sync; -use rustc_errors::{add_elided_lifetime_in_path_suggestion, DiagnosticBuilder, DiagnosticMessage}; -use rustc_errors::{Applicability, DecorateLint, MultiSpan, SuggestionStyle}; +use rustc_errors::{DecorateLint, DiagnosticBuilder, DiagnosticMessage, MultiSpan}; use rustc_feature::Features; use rustc_hir as hir; use rustc_hir::def::Res; use rustc_hir::def_id::{CrateNum, DefId}; use rustc_hir::definitions::{DefPathData, DisambiguatedDefPathData}; use rustc_middle::middle::privacy::EffectiveVisibilities; -use rustc_middle::middle::stability; use rustc_middle::ty::layout::{LayoutError, LayoutOfHelpers, TyAndLayout}; use rustc_middle::ty::print::{with_no_trimmed_paths, PrintError}; use rustc_middle::ty::{self, print::Printer, GenericArg, RegisteredTools, Ty, TyCtxt}; -use rustc_session::config::ExpectedValues; use rustc_session::lint::{BuiltinLintDiagnostics, LintExpectationId}; use rustc_session::lint::{FutureIncompatibleInfo, Level, Lint, LintBuffer, LintId}; use rustc_session::{LintStoreMarker, Session}; use rustc_span::edit_distance::find_best_match_for_name; use rustc_span::symbol::{sym, Ident, Symbol}; -use rustc_span::{BytePos, Span}; +use rustc_span::Span; use rustc_target::abi; use std::cell::Cell; use std::iter; use std::slice; +mod diagnostics; + type EarlyLintPassFactory = dyn Fn() -> EarlyLintPassObject + sync::DynSend + sync::DynSync; type LateLintPassFactory = dyn for<'tcx> Fn(TyCtxt<'tcx>) -> LateLintPassObject<'tcx> + sync::DynSend + sync::DynSync; @@ -432,6 +430,8 @@ impl LintStore { // Note: find_best_match_for_name depends on the sort order of its input vector. // To ensure deterministic output, sort elements of the lint_groups hash map. // Also, never suggest deprecated lint groups. + // We will soon sort, so the initial order does not matter. + #[allow(rustc::potential_query_instability)] let mut groups: Vec<_> = self .lint_groups .iter() @@ -531,447 +531,9 @@ pub trait LintContext { diagnostic: BuiltinLintDiagnostics, ) { // We first generate a blank diagnostic. - self.lookup(lint, span, msg,|db| { + self.lookup(lint, span, msg, |db| { // Now, set up surrounding context. 
- let sess = self.sess(); - match diagnostic { - BuiltinLintDiagnostics::UnicodeTextFlow(span, content) => { - let spans: Vec<_> = content - .char_indices() - .filter_map(|(i, c)| { - TEXT_FLOW_CONTROL_CHARS.contains(&c).then(|| { - let lo = span.lo() + BytePos(2 + i as u32); - (c, span.with_lo(lo).with_hi(lo + BytePos(c.len_utf8() as u32))) - }) - }) - .collect(); - let (an, s) = match spans.len() { - 1 => ("an ", ""), - _ => ("", "s"), - }; - db.span_label(span, format!( - "this comment contains {an}invisible unicode text flow control codepoint{s}", - )); - for (c, span) in &spans { - db.span_label(*span, format!("{c:?}")); - } - db.note( - "these kind of unicode codepoints change the way text flows on \ - applications that support them, but can cause confusion because they \ - change the order of characters on the screen", - ); - if !spans.is_empty() { - db.multipart_suggestion_with_style( - "if their presence wasn't intentional, you can remove them", - spans.into_iter().map(|(_, span)| (span, "".to_string())).collect(), - Applicability::MachineApplicable, - SuggestionStyle::HideCodeAlways, - ); - } - }, - BuiltinLintDiagnostics::Normal => (), - BuiltinLintDiagnostics::AbsPathWithModule(span) => { - let (sugg, app) = match sess.source_map().span_to_snippet(span) { - Ok(ref s) => { - // FIXME(Manishearth) ideally the emitting code - // can tell us whether or not this is global - let opt_colon = - if s.trim_start().starts_with("::") { "" } else { "::" }; - - (format!("crate{opt_colon}{s}"), Applicability::MachineApplicable) - } - Err(_) => ("crate::".to_string(), Applicability::HasPlaceholders), - }; - db.span_suggestion(span, "use `crate`", sugg, app); - } - BuiltinLintDiagnostics::ProcMacroDeriveResolutionFallback(span) => { - db.span_label( - span, - "names from parent modules are not accessible without an explicit import", - ); - } - BuiltinLintDiagnostics::MacroExpandedMacroExportsAccessedByAbsolutePaths( - span_def, - ) => { - db.span_note(span_def, "the macro is defined here"); - } - BuiltinLintDiagnostics::ElidedLifetimesInPaths( - n, - path_span, - incl_angl_brckt, - insertion_span, - ) => { - add_elided_lifetime_in_path_suggestion( - sess.source_map(), - db, - n, - path_span, - incl_angl_brckt, - insertion_span, - ); - } - BuiltinLintDiagnostics::UnknownCrateTypes(span, note, sugg) => { - db.span_suggestion(span, note, sugg, Applicability::MaybeIncorrect); - } - BuiltinLintDiagnostics::UnusedImports(message, replaces, in_test_module) => { - if !replaces.is_empty() { - db.tool_only_multipart_suggestion( - message, - replaces, - Applicability::MachineApplicable, - ); - } - - if let Some(span) = in_test_module { - db.span_help( - self.sess().source_map().guess_head_span(span), - "consider adding a `#[cfg(test)]` to the containing module", - ); - } - } - BuiltinLintDiagnostics::RedundantImport(spans, ident) => { - for (span, is_imported) in spans { - let introduced = if is_imported { "imported" } else { "defined" }; - db.span_label( - span, - format!("the item `{ident}` is already {introduced} here"), - ); - } - } - BuiltinLintDiagnostics::DeprecatedMacro(suggestion, span) => { - stability::deprecation_suggestion(db, "macro", suggestion, span) - } - BuiltinLintDiagnostics::UnusedDocComment(span) => { - db.span_label(span, "rustdoc does not generate documentation for macro invocations"); - db.help("to document an item produced by a macro, \ - the macro must produce the documentation as part of its expansion"); - } - BuiltinLintDiagnostics::PatternsInFnsWithoutBody(span, ident) => { - 
db.span_suggestion(span, "remove `mut` from the parameter", ident, Applicability::MachineApplicable); - } - BuiltinLintDiagnostics::MissingAbi(span, default_abi) => { - db.span_label(span, "ABI should be specified here"); - db.help(format!("the default ABI is {}", default_abi.name())); - } - BuiltinLintDiagnostics::LegacyDeriveHelpers(span) => { - db.span_label(span, "the attribute is introduced here"); - } - BuiltinLintDiagnostics::ProcMacroBackCompat(note) => { - db.note(note); - } - BuiltinLintDiagnostics::OrPatternsBackCompat(span,suggestion) => { - db.span_suggestion(span, "use pat_param to preserve semantics", suggestion, Applicability::MachineApplicable); - } - BuiltinLintDiagnostics::ReservedPrefix(span) => { - db.span_label(span, "unknown prefix"); - db.span_suggestion_verbose( - span.shrink_to_hi(), - "insert whitespace here to avoid this being parsed as a prefix in Rust 2021", - " ", - Applicability::MachineApplicable, - ); - } - BuiltinLintDiagnostics::UnusedBuiltinAttribute { - attr_name, - macro_name, - invoc_span - } => { - db.span_note( - invoc_span, - format!("the built-in attribute `{attr_name}` will be ignored, since it's applied to the macro invocation `{macro_name}`") - ); - } - BuiltinLintDiagnostics::TrailingMacro(is_trailing, name) => { - if is_trailing { - db.note("macro invocations at the end of a block are treated as expressions"); - db.note(format!("to ignore the value produced by the macro, add a semicolon after the invocation of `{name}`")); - } - } - BuiltinLintDiagnostics::BreakWithLabelAndLoop(span) => { - db.multipart_suggestion( - "wrap this expression in parentheses", - vec![(span.shrink_to_lo(), "(".to_string()), - (span.shrink_to_hi(), ")".to_string())], - Applicability::MachineApplicable - ); - } - BuiltinLintDiagnostics::NamedAsmLabel(help) => { - db.help(help); - db.note("see the asm section of Rust By Example for more information"); - }, - BuiltinLintDiagnostics::UnexpectedCfgName((name, name_span), value) => { - let possibilities: Vec = sess.parse_sess.check_config.expecteds.keys().copied().collect(); - let is_from_cargo = std::env::var_os("CARGO").is_some(); - let mut is_feature_cfg = name == sym::feature; - - if is_feature_cfg && is_from_cargo { - db.help("consider defining some features in `Cargo.toml`"); - // Suggest the most probable if we found one - } else if let Some(best_match) = find_best_match_for_name(&possibilities, name, None) { - if let Some(ExpectedValues::Some(best_match_values)) = - sess.parse_sess.check_config.expecteds.get(&best_match) { - let mut possibilities = best_match_values.iter() - .flatten() - .map(Symbol::as_str) - .collect::>(); - possibilities.sort(); - - let mut should_print_possibilities = true; - if let Some((value, value_span)) = value { - if best_match_values.contains(&Some(value)) { - db.span_suggestion(name_span, "there is a config with a similar name and value", best_match, Applicability::MaybeIncorrect); - should_print_possibilities = false; - } else if best_match_values.contains(&None) { - db.span_suggestion(name_span.to(value_span), "there is a config with a similar name and no value", best_match, Applicability::MaybeIncorrect); - should_print_possibilities = false; - } else if let Some(first_value) = possibilities.first() { - db.span_suggestion(name_span.to(value_span), "there is a config with a similar name and different values", format!("{best_match} = \"{first_value}\""), Applicability::MaybeIncorrect); - } else { - db.span_suggestion(name_span.to(value_span), "there is a config with a similar name 
and different values", best_match, Applicability::MaybeIncorrect); - }; - } else { - db.span_suggestion(name_span, "there is a config with a similar name", best_match, Applicability::MaybeIncorrect); - } - - if !possibilities.is_empty() && should_print_possibilities { - let possibilities = possibilities.join("`, `"); - db.help(format!("expected values for `{best_match}` are: `{possibilities}`")); - } - } else { - db.span_suggestion(name_span, "there is a config with a similar name", best_match, Applicability::MaybeIncorrect); - } - - is_feature_cfg |= best_match == sym::feature; - } else if !possibilities.is_empty() { - let mut possibilities = possibilities.iter() - .map(Symbol::as_str) - .collect::>(); - possibilities.sort(); - let possibilities = possibilities.join("`, `"); - - // The list of expected names can be long (even by default) and - // so the diagnostic produced can take a lot of space. To avoid - // cloging the user output we only want to print that diagnostic - // once. - db.help_once(format!("expected names are: `{possibilities}`")); - } - - let inst = if let Some((value, _value_span)) = value { - let pre = if is_from_cargo { "\\" } else { "" }; - format!("cfg({name}, values({pre}\"{value}{pre}\"))") - } else { - format!("cfg({name})") - }; - - if is_from_cargo { - if !is_feature_cfg { - db.help(format!("consider using a Cargo feature instead or adding `println!(\"cargo:rustc-check-cfg={inst}\");` to the top of a `build.rs`")); - } - db.note("see for more information about checking conditional configuration"); - } else { - db.help(format!("to expect this configuration use `--check-cfg={inst}`")); - db.note("see for more information about checking conditional configuration"); - } - }, - BuiltinLintDiagnostics::UnexpectedCfgValue((name, name_span), value) => { - let Some(ExpectedValues::Some(values)) = &sess.parse_sess.check_config.expecteds.get(&name) else { - bug!("it shouldn't be possible to have a diagnostic on a value whose name is not in values"); - }; - let mut have_none_possibility = false; - let possibilities: Vec = values.iter() - .inspect(|a| have_none_possibility |= a.is_none()) - .copied() - .flatten() - .collect(); - let is_from_cargo = std::env::var_os("CARGO").is_some(); - - // Show the full list if all possible values for a given name, but don't do it - // for names as the possibilities could be very long - if !possibilities.is_empty() { - { - let mut possibilities = possibilities.iter().map(Symbol::as_str).collect::>(); - possibilities.sort(); - - let possibilities = possibilities.join("`, `"); - let none = if have_none_possibility { "(none), " } else { "" }; - - db.note(format!("expected values for `{name}` are: {none}`{possibilities}`")); - } - - if let Some((value, value_span)) = value { - // Suggest the most probable if we found one - if let Some(best_match) = find_best_match_for_name(&possibilities, value, None) { - db.span_suggestion(value_span, "there is a expected value with a similar name", format!("\"{best_match}\""), Applicability::MaybeIncorrect); - - } - } else if name == sym::feature && is_from_cargo { - db.help(format!("consider defining `{name}` as feature in `Cargo.toml`")); - } else if let &[first_possibility] = &possibilities[..] 
{ - db.span_suggestion(name_span.shrink_to_hi(), "specify a config value", format!(" = \"{first_possibility}\""), Applicability::MaybeIncorrect); - } - } else if have_none_possibility { - db.note(format!("no expected value for `{name}`")); - if let Some((_value, value_span)) = value { - db.span_suggestion(name_span.shrink_to_hi().to(value_span), "remove the value", "", Applicability::MaybeIncorrect); - } - } - - let inst = if let Some((value, _value_span)) = value { - let pre = if is_from_cargo { "\\" } else { "" }; - format!("cfg({name}, values({pre}\"{value}{pre}\"))") - } else { - format!("cfg({name})") - }; - - if is_from_cargo { - if name == sym::feature { - if let Some((value, _value_span)) = value { - db.help(format!("consider adding `{value}` as a feature in `Cargo.toml`")); - } - } else { - db.help(format!("consider using a Cargo feature instead or adding `println!(\"cargo:rustc-check-cfg={inst}\");` to the top of a `build.rs`")); - } - db.note("see for more information about checking conditional configuration"); - } else { - db.help(format!("to expect this configuration use `--check-cfg={inst}`")); - db.note("see for more information about checking conditional configuration"); - } - }, - BuiltinLintDiagnostics::DeprecatedWhereclauseLocation(new_span, suggestion) => { - db.multipart_suggestion( - "move it to the end of the type declaration", - vec![(db.span.primary_span().unwrap(), "".to_string()), (new_span, suggestion)], - Applicability::MachineApplicable, - ); - db.note( - "see issue #89122 for more information", - ); - }, - BuiltinLintDiagnostics::SingleUseLifetime { - param_span, - use_span: Some((use_span, elide)), - deletion_span, - } => { - debug!(?param_span, ?use_span, ?deletion_span); - db.span_label(param_span, "this lifetime..."); - db.span_label(use_span, "...is used only here"); - if let Some(deletion_span) = deletion_span { - let msg = "elide the single-use lifetime"; - let (use_span, replace_lt) = if elide { - let use_span = sess.source_map().span_extend_while( - use_span, - char::is_whitespace, - ).unwrap_or(use_span); - (use_span, String::new()) - } else { - (use_span, "'_".to_owned()) - }; - debug!(?deletion_span, ?use_span); - - // issue 107998 for the case such as a wrong function pointer type - // `deletion_span` is empty and there is no need to report lifetime uses here - let suggestions = if deletion_span.is_empty() { - vec![(use_span, replace_lt)] - } else { - vec![(deletion_span, String::new()), (use_span, replace_lt)] - }; - db.multipart_suggestion( - msg, - suggestions, - Applicability::MachineApplicable, - ); - } - }, - BuiltinLintDiagnostics::SingleUseLifetime { - param_span: _, - use_span: None, - deletion_span, - } => { - debug!(?deletion_span); - if let Some(deletion_span) = deletion_span { - db.span_suggestion( - deletion_span, - "elide the unused lifetime", - "", - Applicability::MachineApplicable, - ); - } - }, - BuiltinLintDiagnostics::NamedArgumentUsedPositionally{ position_sp_to_replace, position_sp_for_msg, named_arg_sp, named_arg_name, is_formatting_arg} => { - db.span_label(named_arg_sp, "this named argument is referred to by position in formatting string"); - if let Some(positional_arg_for_msg) = position_sp_for_msg { - let msg = format!("this formatting argument uses named argument `{named_arg_name}` by position"); - db.span_label(positional_arg_for_msg, msg); - } - - if let Some(positional_arg_to_replace) = position_sp_to_replace { - let name = if is_formatting_arg { named_arg_name + "$" } else { named_arg_name }; - let span_to_replace 
= if let Ok(positional_arg_content) = - self.sess().source_map().span_to_snippet(positional_arg_to_replace) && positional_arg_content.starts_with(':') { - positional_arg_to_replace.shrink_to_lo() - } else { - positional_arg_to_replace - }; - db.span_suggestion_verbose( - span_to_replace, - "use the named argument by name to avoid ambiguity", - name, - Applicability::MaybeIncorrect, - ); - } - } - BuiltinLintDiagnostics::ByteSliceInPackedStructWithDerive => { - db.help("consider implementing the trait by hand, or remove the `packed` attribute"); - } - BuiltinLintDiagnostics::UnusedExternCrate { removal_span }=> { - db.span_suggestion( - removal_span, - "remove it", - "", - Applicability::MachineApplicable, - ); - } - BuiltinLintDiagnostics::ExternCrateNotIdiomatic { vis_span, ident_span }=> { - let suggestion_span = vis_span.between(ident_span); - db.span_suggestion_verbose( - suggestion_span, - "convert it to a `use`", - if vis_span.is_empty() { "use " } else { " use " }, - Applicability::MachineApplicable, - ); - } - BuiltinLintDiagnostics::AmbiguousGlobImports { diag } => { - rustc_errors::report_ambiguity_error(db, diag); - } - BuiltinLintDiagnostics::AmbiguousGlobReexports { name, namespace, first_reexport_span, duplicate_reexport_span } => { - db.span_label(first_reexport_span, format!("the name `{name}` in the {namespace} namespace is first re-exported here")); - db.span_label(duplicate_reexport_span, format!("but the name `{name}` in the {namespace} namespace is also re-exported here")); - } - BuiltinLintDiagnostics::HiddenGlobReexports { name, namespace, glob_reexport_span, private_item_span } => { - db.span_note(glob_reexport_span, format!("the name `{name}` in the {namespace} namespace is supposed to be publicly re-exported here")); - db.span_note(private_item_span, "but the private item here shadows it".to_owned()); - } - BuiltinLintDiagnostics::UnusedQualifications { removal_span } => { - db.span_suggestion_verbose( - removal_span, - "remove the unnecessary path segments", - "", - Applicability::MachineApplicable - ); - } - BuiltinLintDiagnostics::AssociatedConstElidedLifetime { elided, span } => { - db.span_suggestion_verbose( - if elided { span.shrink_to_hi() } else { span }, - "use the `'static` lifetime", - if elided { "'static " } else { "'static" }, - Applicability::MachineApplicable - ); - }, - BuiltinLintDiagnostics::RedundantImportVisibility { max_vis, span } => { - db.span_note(span, format!("the most public imported item is `{max_vis}`")); - db.help("reduce the glob import's visibility or increase visibility of imported items"); - } - } + diagnostics::builtin(self.sess(), diagnostic, db); // Rewrap `db`, and pass control to the user. 
decorate(db) }); @@ -1069,7 +631,7 @@ impl<'a> EarlyContext<'a> { pub(crate) fn new( sess: &'a Session, features: &'a Features, - warn_about_weird_lints: bool, + lint_added_lints: bool, lint_store: &'a LintStore, registered_tools: &'a RegisteredTools, buffered: LintBuffer, @@ -1078,7 +640,7 @@ impl<'a> EarlyContext<'a> { builder: LintLevelsBuilder::new( sess, features, - warn_about_weird_lints, + lint_added_lints, lint_store, registered_tools, ), diff --git a/compiler/rustc_lint/src/context/diagnostics.rs b/compiler/rustc_lint/src/context/diagnostics.rs new file mode 100644 index 0000000000000..75756c6946a6e --- /dev/null +++ b/compiler/rustc_lint/src/context/diagnostics.rs @@ -0,0 +1,537 @@ +use rustc_ast::util::unicode::TEXT_FLOW_CONTROL_CHARS; +use rustc_errors::{add_elided_lifetime_in_path_suggestion, DiagnosticBuilder}; +use rustc_errors::{Applicability, SuggestionStyle}; +use rustc_middle::middle::stability; +use rustc_session::config::ExpectedValues; +use rustc_session::lint::BuiltinLintDiagnostics; +use rustc_session::Session; +use rustc_span::edit_distance::find_best_match_for_name; +use rustc_span::symbol::{sym, Symbol}; +use rustc_span::BytePos; + +pub(super) fn builtin( + sess: &Session, + diagnostic: BuiltinLintDiagnostics, + db: &mut DiagnosticBuilder<'_, ()>, +) { + match diagnostic { + BuiltinLintDiagnostics::UnicodeTextFlow(span, content) => { + let spans: Vec<_> = content + .char_indices() + .filter_map(|(i, c)| { + TEXT_FLOW_CONTROL_CHARS.contains(&c).then(|| { + let lo = span.lo() + BytePos(2 + i as u32); + (c, span.with_lo(lo).with_hi(lo + BytePos(c.len_utf8() as u32))) + }) + }) + .collect(); + let (an, s) = match spans.len() { + 1 => ("an ", ""), + _ => ("", "s"), + }; + db.span_label( + span, + format!( + "this comment contains {an}invisible unicode text flow control codepoint{s}", + ), + ); + for (c, span) in &spans { + db.span_label(*span, format!("{c:?}")); + } + db.note( + "these kind of unicode codepoints change the way text flows on \ + applications that support them, but can cause confusion because they \ + change the order of characters on the screen", + ); + if !spans.is_empty() { + db.multipart_suggestion_with_style( + "if their presence wasn't intentional, you can remove them", + spans.into_iter().map(|(_, span)| (span, "".to_string())).collect(), + Applicability::MachineApplicable, + SuggestionStyle::HideCodeAlways, + ); + } + } + BuiltinLintDiagnostics::Normal => (), + BuiltinLintDiagnostics::AbsPathWithModule(span) => { + let (sugg, app) = match sess.source_map().span_to_snippet(span) { + Ok(ref s) => { + // FIXME(Manishearth) ideally the emitting code + // can tell us whether or not this is global + let opt_colon = if s.trim_start().starts_with("::") { "" } else { "::" }; + + (format!("crate{opt_colon}{s}"), Applicability::MachineApplicable) + } + Err(_) => ("crate::".to_string(), Applicability::HasPlaceholders), + }; + db.span_suggestion(span, "use `crate`", sugg, app); + } + BuiltinLintDiagnostics::ProcMacroDeriveResolutionFallback(span) => { + db.span_label( + span, + "names from parent modules are not accessible without an explicit import", + ); + } + BuiltinLintDiagnostics::MacroExpandedMacroExportsAccessedByAbsolutePaths(span_def) => { + db.span_note(span_def, "the macro is defined here"); + } + BuiltinLintDiagnostics::ElidedLifetimesInPaths( + n, + path_span, + incl_angl_brckt, + insertion_span, + ) => { + add_elided_lifetime_in_path_suggestion( + sess.source_map(), + db, + n, + path_span, + incl_angl_brckt, + insertion_span, + ); + } + 
BuiltinLintDiagnostics::UnknownCrateTypes(span, note, sugg) => { + db.span_suggestion(span, note, sugg, Applicability::MaybeIncorrect); + } + BuiltinLintDiagnostics::UnusedImports(message, replaces, in_test_module) => { + if !replaces.is_empty() { + db.tool_only_multipart_suggestion( + message, + replaces, + Applicability::MachineApplicable, + ); + } + + if let Some(span) = in_test_module { + db.span_help( + sess.source_map().guess_head_span(span), + "consider adding a `#[cfg(test)]` to the containing module", + ); + } + } + BuiltinLintDiagnostics::RedundantImport(spans, ident) => { + for (span, is_imported) in spans { + let introduced = if is_imported { "imported" } else { "defined" }; + db.span_label(span, format!("the item `{ident}` is already {introduced} here")); + } + } + BuiltinLintDiagnostics::DeprecatedMacro(suggestion, span) => { + stability::deprecation_suggestion(db, "macro", suggestion, span) + } + BuiltinLintDiagnostics::UnusedDocComment(span) => { + db.span_label(span, "rustdoc does not generate documentation for macro invocations"); + db.help("to document an item produced by a macro, \ + the macro must produce the documentation as part of its expansion"); + } + BuiltinLintDiagnostics::PatternsInFnsWithoutBody(span, ident) => { + db.span_suggestion( + span, + "remove `mut` from the parameter", + ident, + Applicability::MachineApplicable, + ); + } + BuiltinLintDiagnostics::MissingAbi(span, default_abi) => { + db.span_label(span, "ABI should be specified here"); + db.help(format!("the default ABI is {}", default_abi.name())); + } + BuiltinLintDiagnostics::LegacyDeriveHelpers(span) => { + db.span_label(span, "the attribute is introduced here"); + } + BuiltinLintDiagnostics::ProcMacroBackCompat(note) => { + db.note(note); + } + BuiltinLintDiagnostics::OrPatternsBackCompat(span, suggestion) => { + db.span_suggestion( + span, + "use pat_param to preserve semantics", + suggestion, + Applicability::MachineApplicable, + ); + } + BuiltinLintDiagnostics::ReservedPrefix(span) => { + db.span_label(span, "unknown prefix"); + db.span_suggestion_verbose( + span.shrink_to_hi(), + "insert whitespace here to avoid this being parsed as a prefix in Rust 2021", + " ", + Applicability::MachineApplicable, + ); + } + BuiltinLintDiagnostics::UnusedBuiltinAttribute { attr_name, macro_name, invoc_span } => { + db.span_note( + invoc_span, + format!("the built-in attribute `{attr_name}` will be ignored, since it's applied to the macro invocation `{macro_name}`") + ); + } + BuiltinLintDiagnostics::TrailingMacro(is_trailing, name) => { + if is_trailing { + db.note("macro invocations at the end of a block are treated as expressions"); + db.note(format!("to ignore the value produced by the macro, add a semicolon after the invocation of `{name}`")); + } + } + BuiltinLintDiagnostics::BreakWithLabelAndLoop(span) => { + db.multipart_suggestion( + "wrap this expression in parentheses", + vec![ + (span.shrink_to_lo(), "(".to_string()), + (span.shrink_to_hi(), ")".to_string()), + ], + Applicability::MachineApplicable, + ); + } + BuiltinLintDiagnostics::NamedAsmLabel(help) => { + db.help(help); + db.note("see the asm section of Rust By Example for more information"); + } + BuiltinLintDiagnostics::UnexpectedCfgName((name, name_span), value) => { + let possibilities: Vec = + sess.parse_sess.check_config.expecteds.keys().copied().collect(); + let is_from_cargo = std::env::var_os("CARGO").is_some(); + let mut is_feature_cfg = name == sym::feature; + + if is_feature_cfg && is_from_cargo { + db.help("consider defining 
some features in `Cargo.toml`"); + // Suggest the most probable if we found one + } else if let Some(best_match) = find_best_match_for_name(&possibilities, name, None) { + if let Some(ExpectedValues::Some(best_match_values)) = + sess.parse_sess.check_config.expecteds.get(&best_match) + { + // We will soon sort, so the initial order does not matter. + #[allow(rustc::potential_query_instability)] + let mut possibilities = + best_match_values.iter().flatten().map(Symbol::as_str).collect::<Vec<_>>(); + possibilities.sort(); + + let mut should_print_possibilities = true; + if let Some((value, value_span)) = value { + if best_match_values.contains(&Some(value)) { + db.span_suggestion( + name_span, + "there is a config with a similar name and value", + best_match, + Applicability::MaybeIncorrect, + ); + should_print_possibilities = false; + } else if best_match_values.contains(&None) { + db.span_suggestion( + name_span.to(value_span), + "there is a config with a similar name and no value", + best_match, + Applicability::MaybeIncorrect, + ); + should_print_possibilities = false; + } else if let Some(first_value) = possibilities.first() { + db.span_suggestion( + name_span.to(value_span), + "there is a config with a similar name and different values", + format!("{best_match} = \"{first_value}\""), + Applicability::MaybeIncorrect, + ); + } else { + db.span_suggestion( + name_span.to(value_span), + "there is a config with a similar name and different values", + best_match, + Applicability::MaybeIncorrect, + ); + }; + } else { + db.span_suggestion( + name_span, + "there is a config with a similar name", + best_match, + Applicability::MaybeIncorrect, + ); + } + + if !possibilities.is_empty() && should_print_possibilities { + let possibilities = possibilities.join("`, `"); + db.help(format!( + "expected values for `{best_match}` are: `{possibilities}`" + )); + } + } else { + db.span_suggestion( + name_span, + "there is a config with a similar name", + best_match, + Applicability::MaybeIncorrect, + ); + } + + is_feature_cfg |= best_match == sym::feature; + } else if !possibilities.is_empty() { + let mut possibilities = + possibilities.iter().map(Symbol::as_str).collect::<Vec<_>>(); + possibilities.sort(); + let possibilities = possibilities.join("`, `"); + + // The list of expected names can be long (even by default) and + // so the diagnostic produced can take a lot of space. To avoid + // clogging the user output we only want to print that diagnostic + // once.
+ db.help_once(format!("expected names are: `{possibilities}`")); + } + + let inst = if let Some((value, _value_span)) = value { + let pre = if is_from_cargo { "\\" } else { "" }; + format!("cfg({name}, values({pre}\"{value}{pre}\"))") + } else { + format!("cfg({name})") + }; + + if is_from_cargo { + if !is_feature_cfg { + db.help(format!("consider using a Cargo feature instead or adding `println!(\"cargo:rustc-check-cfg={inst}\");` to the top of a `build.rs`")); + } + db.note("see for more information about checking conditional configuration"); + } else { + db.help(format!("to expect this configuration use `--check-cfg={inst}`")); + db.note("see for more information about checking conditional configuration"); + } + } + BuiltinLintDiagnostics::UnexpectedCfgValue((name, name_span), value) => { + let Some(ExpectedValues::Some(values)) = + &sess.parse_sess.check_config.expecteds.get(&name) + else { + bug!( + "it shouldn't be possible to have a diagnostic on a value whose name is not in values" + ); + }; + let mut have_none_possibility = false; + // We later sort possibilities if it is not empty, so the + // order here does not matter. + #[allow(rustc::potential_query_instability)] + let possibilities: Vec<Symbol> = values + .iter() + .inspect(|a| have_none_possibility |= a.is_none()) + .copied() + .flatten() + .collect(); + let is_from_cargo = std::env::var_os("CARGO").is_some(); + + // Show the full list of all possible values for a given name, but don't do it + // for names, as the possibilities could be very long + if !possibilities.is_empty() { + { + let mut possibilities = + possibilities.iter().map(Symbol::as_str).collect::<Vec<_>>(); + possibilities.sort(); + + let possibilities = possibilities.join("`, `"); + let none = if have_none_possibility { "(none), " } else { "" }; + + db.note(format!("expected values for `{name}` are: {none}`{possibilities}`")); + } + + if let Some((value, value_span)) = value { + // Suggest the most probable if we found one + if let Some(best_match) = find_best_match_for_name(&possibilities, value, None) + { + db.span_suggestion( + value_span, + "there is an expected value with a similar name", + format!("\"{best_match}\""), + Applicability::MaybeIncorrect, + ); + } + } else if name == sym::feature && is_from_cargo { + db.help(format!("consider defining `{name}` as a feature in `Cargo.toml`")); + } else if let &[first_possibility] = &possibilities[..]
{ + db.span_suggestion( + name_span.shrink_to_hi(), + "specify a config value", + format!(" = \"{first_possibility}\""), + Applicability::MaybeIncorrect, + ); + } + } else if have_none_possibility { + db.note(format!("no expected value for `{name}`")); + if let Some((_value, value_span)) = value { + db.span_suggestion( + name_span.shrink_to_hi().to(value_span), + "remove the value", + "", + Applicability::MaybeIncorrect, + ); + } + } + + let inst = if let Some((value, _value_span)) = value { + let pre = if is_from_cargo { "\\" } else { "" }; + format!("cfg({name}, values({pre}\"{value}{pre}\"))") + } else { + format!("cfg({name})") + }; + + if is_from_cargo { + if name == sym::feature { + if let Some((value, _value_span)) = value { + db.help(format!("consider adding `{value}` as a feature in `Cargo.toml`")); + } + } else { + db.help(format!("consider using a Cargo feature instead or adding `println!(\"cargo:rustc-check-cfg={inst}\");` to the top of a `build.rs`")); + } + db.note("see for more information about checking conditional configuration"); + } else { + db.help(format!("to expect this configuration use `--check-cfg={inst}`")); + db.note("see for more information about checking conditional configuration"); + } + } + BuiltinLintDiagnostics::DeprecatedWhereclauseLocation(new_span, suggestion) => { + db.multipart_suggestion( + "move it to the end of the type declaration", + vec![(db.span.primary_span().unwrap(), "".to_string()), (new_span, suggestion)], + Applicability::MachineApplicable, + ); + db.note( + "see issue #89122 for more information", + ); + } + BuiltinLintDiagnostics::SingleUseLifetime { + param_span, + use_span: Some((use_span, elide)), + deletion_span, + } => { + debug!(?param_span, ?use_span, ?deletion_span); + db.span_label(param_span, "this lifetime..."); + db.span_label(use_span, "...is used only here"); + if let Some(deletion_span) = deletion_span { + let msg = "elide the single-use lifetime"; + let (use_span, replace_lt) = if elide { + let use_span = sess + .source_map() + .span_extend_while(use_span, char::is_whitespace) + .unwrap_or(use_span); + (use_span, String::new()) + } else { + (use_span, "'_".to_owned()) + }; + debug!(?deletion_span, ?use_span); + + // issue 107998 for the case such as a wrong function pointer type + // `deletion_span` is empty and there is no need to report lifetime uses here + let suggestions = if deletion_span.is_empty() { + vec![(use_span, replace_lt)] + } else { + vec![(deletion_span, String::new()), (use_span, replace_lt)] + }; + db.multipart_suggestion(msg, suggestions, Applicability::MachineApplicable); + } + } + BuiltinLintDiagnostics::SingleUseLifetime { + param_span: _, + use_span: None, + deletion_span, + } => { + debug!(?deletion_span); + if let Some(deletion_span) = deletion_span { + db.span_suggestion( + deletion_span, + "elide the unused lifetime", + "", + Applicability::MachineApplicable, + ); + } + } + BuiltinLintDiagnostics::NamedArgumentUsedPositionally { + position_sp_to_replace, + position_sp_for_msg, + named_arg_sp, + named_arg_name, + is_formatting_arg, + } => { + db.span_label( + named_arg_sp, + "this named argument is referred to by position in formatting string", + ); + if let Some(positional_arg_for_msg) = position_sp_for_msg { + let msg = format!( + "this formatting argument uses named argument `{named_arg_name}` by position" + ); + db.span_label(positional_arg_for_msg, msg); + } + + if let Some(positional_arg_to_replace) = position_sp_to_replace { + let name = if is_formatting_arg { named_arg_name + "$" } else 
{ named_arg_name }; + let span_to_replace = if let Ok(positional_arg_content) = + sess.source_map().span_to_snippet(positional_arg_to_replace) + && positional_arg_content.starts_with(':') + { + positional_arg_to_replace.shrink_to_lo() + } else { + positional_arg_to_replace + }; + db.span_suggestion_verbose( + span_to_replace, + "use the named argument by name to avoid ambiguity", + name, + Applicability::MaybeIncorrect, + ); + } + } + BuiltinLintDiagnostics::ByteSliceInPackedStructWithDerive => { + db.help("consider implementing the trait by hand, or remove the `packed` attribute"); + } + BuiltinLintDiagnostics::UnusedExternCrate { removal_span } => { + db.span_suggestion(removal_span, "remove it", "", Applicability::MachineApplicable); + } + BuiltinLintDiagnostics::ExternCrateNotIdiomatic { vis_span, ident_span } => { + let suggestion_span = vis_span.between(ident_span); + db.span_suggestion_verbose( + suggestion_span, + "convert it to a `use`", + if vis_span.is_empty() { "use " } else { " use " }, + Applicability::MachineApplicable, + ); + } + BuiltinLintDiagnostics::AmbiguousGlobImports { diag } => { + rustc_errors::report_ambiguity_error(db, diag); + } + BuiltinLintDiagnostics::AmbiguousGlobReexports { + name, + namespace, + first_reexport_span, + duplicate_reexport_span, + } => { + db.span_label( + first_reexport_span, + format!("the name `{name}` in the {namespace} namespace is first re-exported here"), + ); + db.span_label( + duplicate_reexport_span, + format!( + "but the name `{name}` in the {namespace} namespace is also re-exported here" + ), + ); + } + BuiltinLintDiagnostics::HiddenGlobReexports { + name, + namespace, + glob_reexport_span, + private_item_span, + } => { + db.span_note(glob_reexport_span, format!("the name `{name}` in the {namespace} namespace is supposed to be publicly re-exported here")); + db.span_note(private_item_span, "but the private item here shadows it".to_owned()); + } + BuiltinLintDiagnostics::UnusedQualifications { removal_span } => { + db.span_suggestion_verbose( + removal_span, + "remove the unnecessary path segments", + "", + Applicability::MachineApplicable, + ); + } + BuiltinLintDiagnostics::AssociatedConstElidedLifetime { elided, span } => { + db.span_suggestion_verbose( + if elided { span.shrink_to_hi() } else { span }, + "use the `'static` lifetime", + if elided { "'static " } else { "'static" }, + Applicability::MachineApplicable, + ); + } + BuiltinLintDiagnostics::RedundantImportVisibility { max_vis, span } => { + db.span_note(span, format!("the most public imported item is `{max_vis}`")); + db.help("reduce the glob import's visibility or increase visibility of imported items"); + } + } +} diff --git a/compiler/rustc_lint/src/levels.rs b/compiler/rustc_lint/src/levels.rs index 17c56f1ca582e..5950bc76adeb7 100644 --- a/compiler/rustc_lint/src/levels.rs +++ b/compiler/rustc_lint/src/levels.rs @@ -15,7 +15,7 @@ use crate::{ }; use rustc_ast as ast; use rustc_ast_pretty::pprust; -use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::fx::FxIndexMap; use rustc_errors::{DecorateLint, DiagnosticBuilder, DiagnosticMessage, MultiSpan}; use rustc_feature::{Features, GateIssue}; use rustc_hir as hir; @@ -73,7 +73,7 @@ rustc_index::newtype_index! { struct LintSet { // -A,-W,-D flags, a `Symbol` for the flag itself and `Level` for which // flag. 
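    // Editor's note (illustration, not part of the diff): `FxHashMap` iterates in an unspecified
    // order, which the internal `rustc::potential_query_instability` lint flags; `FxIndexMap`
    // preserves insertion order, so iterating `specs` (e.g. when the builder walks
    // `current_specs()` later in this file) is deterministic across runs. A minimal sketch,
    // with illustrative lints and sources:
    //
    //     let mut specs = FxIndexMap::default();
    //     specs.insert(LintId::of(UNUSED_VARIABLES), (Level::Warn, LintLevelSource::Default));
    //     specs.insert(LintId::of(DEAD_CODE), (Level::Allow, LintLevelSource::Default));
    //     // `specs.iter()` always yields UNUSED_VARIABLES before DEAD_CODE here,
    //     // whereas FxHashMap gives no such guarantee.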
- specs: FxHashMap, + specs: FxIndexMap, parent: LintStackIndex, } @@ -86,7 +86,7 @@ impl LintLevelSets { &self, lint: &'static Lint, idx: LintStackIndex, - aux: Option<&FxHashMap>, + aux: Option<&FxIndexMap>, sess: &Session, ) -> LevelAndSource { let lint = LintId::of(lint); @@ -101,7 +101,7 @@ impl LintLevelSets { &self, id: LintId, mut idx: LintStackIndex, - aux: Option<&FxHashMap>, + aux: Option<&FxIndexMap>, ) -> (Option, LintLevelSource) { if let Some(specs) = aux { if let Some(&(level, src)) = specs.get(&id) { @@ -132,10 +132,10 @@ fn lint_expectations(tcx: TyCtxt<'_>, (): ()) -> Vec<(LintExpectationId, LintExp cur: hir::CRATE_HIR_ID, specs: ShallowLintLevelMap::default(), expectations: Vec::new(), - unstable_to_stable_ids: FxHashMap::default(), - empty: FxHashMap::default(), + unstable_to_stable_ids: FxIndexMap::default(), + empty: FxIndexMap::default(), }, - warn_about_weird_lints: false, + lint_added_lints: false, store, registered_tools: tcx.registered_tools(()), }; @@ -161,10 +161,10 @@ fn shallow_lint_levels_on(tcx: TyCtxt<'_>, owner: hir::OwnerId) -> ShallowLintLe tcx, cur: owner.into(), specs: ShallowLintLevelMap::default(), - empty: FxHashMap::default(), + empty: FxIndexMap::default(), attrs, }, - warn_about_weird_lints: false, + lint_added_lints: false, store, registered_tools: tcx.registered_tools(()), }; @@ -209,14 +209,14 @@ pub struct TopDown { } pub trait LintLevelsProvider { - fn current_specs(&self) -> &FxHashMap; + fn current_specs(&self) -> &FxIndexMap; fn insert(&mut self, id: LintId, lvl: LevelAndSource); fn get_lint_level(&self, lint: &'static Lint, sess: &Session) -> LevelAndSource; fn push_expectation(&mut self, _id: LintExpectationId, _expectation: LintExpectation) {} } impl LintLevelsProvider for TopDown { - fn current_specs(&self) -> &FxHashMap { + fn current_specs(&self) -> &FxIndexMap { &self.sets.list[self.cur].specs } @@ -234,12 +234,12 @@ struct LintLevelQueryMap<'tcx> { cur: HirId, specs: ShallowLintLevelMap, /// Empty hash map to simplify code. - empty: FxHashMap, + empty: FxIndexMap, attrs: &'tcx hir::AttributeMap<'tcx>, } impl LintLevelsProvider for LintLevelQueryMap<'_> { - fn current_specs(&self) -> &FxHashMap { + fn current_specs(&self) -> &FxIndexMap { self.specs.specs.get(&self.cur.local_id).unwrap_or(&self.empty) } fn insert(&mut self, id: LintId, lvl: LevelAndSource) { @@ -257,13 +257,13 @@ struct QueryMapExpectationsWrapper<'tcx> { /// Level map for `cur`. specs: ShallowLintLevelMap, expectations: Vec<(LintExpectationId, LintExpectation)>, - unstable_to_stable_ids: FxHashMap, + unstable_to_stable_ids: FxIndexMap, /// Empty hash map to simplify code. 
- empty: FxHashMap, + empty: FxIndexMap, } impl LintLevelsProvider for QueryMapExpectationsWrapper<'_> { - fn current_specs(&self) -> &FxHashMap { + fn current_specs(&self) -> &FxIndexMap { self.specs.specs.get(&self.cur.local_id).unwrap_or(&self.empty) } fn insert(&mut self, id: LintId, lvl: LevelAndSource) { @@ -451,7 +451,7 @@ pub struct LintLevelsBuilder<'s, P> { sess: &'s Session, features: &'s Features, provider: P, - warn_about_weird_lints: bool, + lint_added_lints: bool, store: &'s LintStore, registered_tools: &'s RegisteredTools, } @@ -464,7 +464,7 @@ impl<'s> LintLevelsBuilder<'s, TopDown> { pub(crate) fn new( sess: &'s Session, features: &'s Features, - warn_about_weird_lints: bool, + lint_added_lints: bool, store: &'s LintStore, registered_tools: &'s RegisteredTools, ) -> Self { @@ -472,7 +472,7 @@ impl<'s> LintLevelsBuilder<'s, TopDown> { sess, features, provider: TopDown { sets: LintLevelSets::new(), cur: COMMAND_LINE }, - warn_about_weird_lints, + lint_added_lints, store, registered_tools, }; @@ -486,7 +486,7 @@ impl<'s> LintLevelsBuilder<'s, TopDown> { .provider .sets .list - .push(LintSet { specs: FxHashMap::default(), parent: COMMAND_LINE }); + .push(LintSet { specs: FxIndexMap::default(), parent: COMMAND_LINE }); self.add_command_line(); } @@ -512,7 +512,7 @@ impl<'s> LintLevelsBuilder<'s, TopDown> { ) -> BuilderPush { let prev = self.provider.cur; self.provider.cur = - self.provider.sets.list.push(LintSet { specs: FxHashMap::default(), parent: prev }); + self.provider.sets.list.push(LintSet { specs: FxIndexMap::default(), parent: prev }); self.add(attrs, is_crate_node, source_hir_id); @@ -547,7 +547,7 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { self.features } - fn current_specs(&self) -> &FxHashMap { + fn current_specs(&self) -> &FxIndexMap { self.provider.current_specs() } @@ -642,7 +642,7 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { // // This means that this only errors if we're truly lowering the lint // level from forbid. - if level != Level::Forbid { + if self.lint_added_lints && level != Level::Forbid { if let Level::Forbid = old_level { // Backwards compatibility check: // @@ -968,7 +968,7 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { continue; } - _ if !self.warn_about_weird_lints => {} + _ if !self.lint_added_lints => {} CheckLintNameResult::Renamed(ref replace) => { let suggestion = @@ -1029,7 +1029,7 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { } } - if !is_crate_node { + if self.lint_added_lints && !is_crate_node { for (id, &(level, ref src)) in self.current_specs().iter() { if !id.lint.crate_level_only { continue; @@ -1054,33 +1054,33 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { /// Checks if the lint is gated on a feature that is not enabled. /// /// Returns `true` if the lint's feature is enabled. - // FIXME only emit this once for each attribute, instead of repeating it 4 times for - // pre-expansion lints, post-expansion lints, `shallow_lint_levels_on` and `lint_expectations`. 
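    // Editor's note (illustration, not part of the diff): the removed FIXME above appears to be
    // addressed by the `lint_added_lints` flag (the renamed `warn_about_weird_lints`): the builders
    // constructed in `lint_expectations` and `shallow_lint_levels_on` pass `lint_added_lints: false`,
    // so in the rewritten body below the `UNKNOWN_LINTS` diagnostic for a gated lint is only emitted
    // by the pass that sets the flag to true, while the function still returns `false` either way:
    //
    //     if !self.features.active(feature) {
    //         if self.lint_added_lints { /* emit the unknown-gated-lint diagnostic once */ }
    //         return false;
    //     }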
#[track_caller] fn check_gated_lint(&self, lint_id: LintId, span: Span, lint_from_cli: bool) -> bool { if let Some(feature) = lint_id.lint.feature_gate { if !self.features.active(feature) { - let lint = builtin::UNKNOWN_LINTS; - let (level, src) = self.lint_level(builtin::UNKNOWN_LINTS); - struct_lint_level( - self.sess, - lint, - level, - src, - Some(span.into()), - fluent::lint_unknown_gated_lint, - |lint| { - lint.set_arg("name", lint_id.lint.name_lower()); - lint.note(fluent::lint_note); - rustc_session::parse::add_feature_diagnostics_for_issue( - lint, - &self.sess.parse_sess, - feature, - GateIssue::Language, - lint_from_cli, - ); - }, - ); + if self.lint_added_lints { + let lint = builtin::UNKNOWN_LINTS; + let (level, src) = self.lint_level(builtin::UNKNOWN_LINTS); + struct_lint_level( + self.sess, + lint, + level, + src, + Some(span.into()), + fluent::lint_unknown_gated_lint, + |lint| { + lint.set_arg("name", lint_id.lint.name_lower()); + lint.note(fluent::lint_note); + rustc_session::parse::add_feature_diagnostics_for_issue( + lint, + &self.sess.parse_sess, + feature, + GateIssue::Language, + lint_from_cli, + ); + }, + ); + } return false; } } diff --git a/compiler/rustc_lint/src/lib.rs b/compiler/rustc_lint/src/lib.rs index 0fc24e88b3b21..93904fb5c5658 100644 --- a/compiler/rustc_lint/src/lib.rs +++ b/compiler/rustc_lint/src/lib.rs @@ -25,7 +25,6 @@ //! //! This API is completely unstable and subject to change. -#![allow(rustc::potential_query_instability)] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] #![feature(rustdoc_internals)] diff --git a/compiler/rustc_lint/src/non_ascii_idents.rs b/compiler/rustc_lint/src/non_ascii_idents.rs index 4f92fcd71c63d..08b2bf6af3731 100644 --- a/compiler/rustc_lint/src/non_ascii_idents.rs +++ b/compiler/rustc_lint/src/non_ascii_idents.rs @@ -174,6 +174,8 @@ impl EarlyLintPass for NonAsciiIdents { // Sort by `Span` so that error messages make sense with respect to the // order of identifier locations in the code. + // We will soon sort, so the initial order does not matter. + #[allow(rustc::potential_query_instability)] let mut symbols: Vec<_> = symbols.iter().collect(); symbols.sort_by_key(|k| k.1); @@ -287,6 +289,8 @@ impl EarlyLintPass for NonAsciiIdents { } if has_suspicious { + // The end result is put in `lint_reports` which is sorted. + #[allow(rustc::potential_query_instability)] let verified_augmented_script_sets = script_states .iter() .flat_map(|(k, v)| match v { @@ -299,6 +303,8 @@ impl EarlyLintPass for NonAsciiIdents { let mut lint_reports: BTreeMap<(Span, Vec), AugmentedScriptSet> = BTreeMap::new(); + // The end result is put in `lint_reports` which is sorted. + #[allow(rustc::potential_query_instability)] 'outerloop: for (augment_script_set, usage) in script_states { let ScriptSetUsage::Suspicious(mut ch_list, sp) = usage else { continue }; diff --git a/compiler/rustc_lint_defs/src/builtin.rs b/compiler/rustc_lint_defs/src/builtin.rs index 399e6968fae32..e35d1ee0461e3 100644 --- a/compiler/rustc_lint_defs/src/builtin.rs +++ b/compiler/rustc_lint_defs/src/builtin.rs @@ -2817,8 +2817,8 @@ declare_lint! { /// [`ptr::from_exposed_addr`]. 
/// /// [issue #95228]: https://github.com/rust-lang/rust/issues/95228 - /// [`ptr::with_addr`]: https://doc.rust-lang.org/core/ptr/fn.with_addr - /// [`ptr::from_exposed_addr`]: https://doc.rust-lang.org/core/ptr/fn.from_exposed_addr + /// [`ptr::with_addr`]: https://doc.rust-lang.org/core/primitive.pointer.html#method.with_addr + /// [`ptr::from_exposed_addr`]: https://doc.rust-lang.org/core/ptr/fn.from_exposed_addr.html pub FUZZY_PROVENANCE_CASTS, Allow, "a fuzzy integer to pointer cast is used", @@ -2863,8 +2863,8 @@ declare_lint! { /// about the semantics. /// /// [issue #95228]: https://github.com/rust-lang/rust/issues/95228 - /// [`ptr::addr`]: https://doc.rust-lang.org/core/ptr/fn.addr - /// [`ptr::expose_addr`]: https://doc.rust-lang.org/core/ptr/fn.expose_addr + /// [`ptr::addr`]: https://doc.rust-lang.org/core/primitive.pointer.html#method.addr + /// [`ptr::expose_addr`]: https://doc.rust-lang.org/core/primitive.pointer.html#method.expose_addr pub LOSSY_PROVENANCE_CASTS, Allow, "a lossy pointer to integer cast is used", diff --git a/compiler/rustc_metadata/Cargo.toml b/compiler/rustc_metadata/Cargo.toml index a4012592c099e..08cc8173eb049 100644 --- a/compiler/rustc_metadata/Cargo.toml +++ b/compiler/rustc_metadata/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" [dependencies] # tidy-alphabetical-start -bitflags = "1.2.1" +bitflags = "2.4.1" libloading = "0.7.1" odht = { version = "0.3.1", features = ["nightly"] } rustc_ast = { path = "../rustc_ast" } diff --git a/compiler/rustc_metadata/src/rmeta/decoder.rs b/compiler/rustc_metadata/src/rmeta/decoder.rs index 55daf441a751d..2de29db9e5c84 100644 --- a/compiler/rustc_metadata/src/rmeta/decoder.rs +++ b/compiler/rustc_metadata/src/rmeta/decoder.rs @@ -507,14 +507,20 @@ impl<'a, 'tcx> Decodable> for ExpnId { impl<'a, 'tcx> Decodable> for Span { fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> Span { - let mode = SpanEncodingMode::decode(decoder); - let data = match mode { - SpanEncodingMode::Direct => SpanData::decode(decoder), - SpanEncodingMode::Shorthand(position) => decoder.with_position(position, |decoder| { - let mode = SpanEncodingMode::decode(decoder); - debug_assert!(matches!(mode, SpanEncodingMode::Direct)); - SpanData::decode(decoder) - }), + let start = decoder.position(); + let tag = SpanTag(decoder.peek_byte()); + let data = if tag.kind() == SpanKind::Indirect { + // Skip past the tag we just peek'd. 
+ decoder.read_u8(); + let offset_or_position = decoder.read_usize(); + let position = if tag.is_relative_offset() { + start - offset_or_position + } else { + offset_or_position + }; + decoder.with_position(position, SpanData::decode) + } else { + SpanData::decode(decoder) }; Span::new(data.lo, data.hi, data.ctxt, data.parent) } @@ -522,17 +528,17 @@ impl<'a, 'tcx> Decodable> for Span { impl<'a, 'tcx> Decodable> for SpanData { fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> SpanData { - let ctxt = SyntaxContext::decode(decoder); - let tag = u8::decode(decoder); + let tag = SpanTag::decode(decoder); + let ctxt = tag.context().unwrap_or_else(|| SyntaxContext::decode(decoder)); - if tag == TAG_PARTIAL_SPAN { + if tag.kind() == SpanKind::Partial { return DUMMY_SP.with_ctxt(ctxt).data(); } - debug_assert!(tag == TAG_VALID_SPAN_LOCAL || tag == TAG_VALID_SPAN_FOREIGN); + debug_assert!(tag.kind() == SpanKind::Local || tag.kind() == SpanKind::Foreign); let lo = BytePos::decode(decoder); - let len = BytePos::decode(decoder); + let len = tag.length().unwrap_or_else(|| BytePos::decode(decoder)); let hi = lo + len; let Some(sess) = decoder.sess else { @@ -573,7 +579,7 @@ impl<'a, 'tcx> Decodable> for SpanData { // treat the 'local' and 'foreign' cases almost identically during deserialization: // we can call `imported_source_file` for the proper crate, and binary search // through the returned slice using our span. - let source_file = if tag == TAG_VALID_SPAN_LOCAL { + let source_file = if tag.kind() == SpanKind::Local { decoder.cdata().imported_source_file(metadata_index, sess) } else { // When we encode a proc-macro crate, all `Span`s should be encoded diff --git a/compiler/rustc_metadata/src/rmeta/def_path_hash_map.rs b/compiler/rustc_metadata/src/rmeta/def_path_hash_map.rs index 4f280bb9d80e2..9950bc1c31f70 100644 --- a/compiler/rustc_metadata/src/rmeta/def_path_hash_map.rs +++ b/compiler/rustc_metadata/src/rmeta/def_path_hash_map.rs @@ -19,7 +19,9 @@ impl DefPathHashMapRef<'_> { #[inline] pub fn def_path_hash_to_def_index(&self, def_path_hash: &DefPathHash) -> DefIndex { match *self { - DefPathHashMapRef::OwnedFromMetadata(ref map) => map.get(def_path_hash).unwrap(), + DefPathHashMapRef::OwnedFromMetadata(ref map) => { + map.get(&def_path_hash.local_hash()).unwrap() + } DefPathHashMapRef::BorrowedFromTcx(_) => { panic!("DefPathHashMap::BorrowedFromTcx variant only exists for serialization") } diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs index e968f48a60c3a..5b296c098bcec 100644 --- a/compiler/rustc_metadata/src/rmeta/encoder.rs +++ b/compiler/rustc_metadata/src/rmeta/encoder.rs @@ -169,11 +169,25 @@ impl<'a, 'tcx> Encodable> for ExpnId { impl<'a, 'tcx> Encodable> for Span { fn encode(&self, s: &mut EncodeContext<'a, 'tcx>) { match s.span_shorthands.entry(*self) { - Entry::Occupied(o) => SpanEncodingMode::Shorthand(*o.get()).encode(s), + Entry::Occupied(o) => { + // If an offset is smaller than the absolute position, we encode with the offset. + // This saves space since smaller numbers encode in less bits. + let last_location = *o.get(); + // This cannot underflow. Metadata is written with increasing position(), so any + // previously saved offset must be smaller than the current position. 
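+                // Editor's worked example (illustrative numbers, not part of the diff): if this
+                // span was first written at absolute position 100_000 and the encoder is now at
+                // position 100_250, then offset = 100_250 - 100_000 = 250 < 100_000, so
+                // `SpanTag::indirect(true)` is written followed by 250, a much smaller varint
+                // than the absolute 100_000. Only once the current position has at least doubled
+                // (offset >= last_location) is the absolute form in the `else` branch used.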
+ let offset = s.opaque.position() - last_location; + if offset < last_location { + SpanTag::indirect(true).encode(s); + offset.encode(s); + } else { + SpanTag::indirect(false).encode(s); + last_location.encode(s); + } + } Entry::Vacant(v) => { let position = s.opaque.position(); v.insert(position); - SpanEncodingMode::Direct.encode(s); + // Data is encoded with a SpanTag prefix (see below). self.data().encode(s); } } @@ -213,14 +227,15 @@ impl<'a, 'tcx> Encodable> for SpanData { // IMPORTANT: If this is ever changed, be sure to update // `rustc_span::hygiene::raw_encode_expn_id` to handle // encoding `ExpnData` for proc-macro crates. - if s.is_proc_macro { - SyntaxContext::root().encode(s); - } else { - self.ctxt.encode(s); - } + let ctxt = if s.is_proc_macro { SyntaxContext::root() } else { self.ctxt }; if self.is_dummy() { - return TAG_PARTIAL_SPAN.encode(s); + let tag = SpanTag::new(SpanKind::Partial, ctxt, 0); + tag.encode(s); + if tag.context().is_none() { + ctxt.encode(s); + } + return; } // The Span infrastructure should make sure that this invariant holds: @@ -238,7 +253,12 @@ impl<'a, 'tcx> Encodable> for SpanData { if !source_file.contains(self.hi) { // Unfortunately, macro expansion still sometimes generates Spans // that malformed in this way. - return TAG_PARTIAL_SPAN.encode(s); + let tag = SpanTag::new(SpanKind::Partial, ctxt, 0); + tag.encode(s); + if tag.context().is_none() { + ctxt.encode(s); + } + return; } // There are two possible cases here: @@ -257,7 +277,7 @@ impl<'a, 'tcx> Encodable> for SpanData { // if we're a proc-macro crate. // This allows us to avoid loading the dependencies of proc-macro crates: all of // the information we need to decode `Span`s is stored in the proc-macro crate. - let (tag, metadata_index) = if source_file.is_imported() && !s.is_proc_macro { + let (kind, metadata_index) = if source_file.is_imported() && !s.is_proc_macro { // To simplify deserialization, we 'rebase' this span onto the crate it originally came // from (the crate that 'owns' the file it references. These rebased 'lo' and 'hi' // values are relative to the source map information for the 'foreign' crate whose @@ -275,7 +295,7 @@ impl<'a, 'tcx> Encodable> for SpanData { } }; - (TAG_VALID_SPAN_FOREIGN, metadata_index) + (SpanKind::Foreign, metadata_index) } else { // Record the fact that we need to encode the data for this `SourceFile` let source_files = @@ -284,7 +304,7 @@ impl<'a, 'tcx> Encodable> for SpanData { let metadata_index: u32 = metadata_index.try_into().expect("cannot export more than U32_MAX files"); - (TAG_VALID_SPAN_LOCAL, metadata_index) + (SpanKind::Local, metadata_index) }; // Encode the start position relative to the file start, so we profit more from the @@ -295,14 +315,20 @@ impl<'a, 'tcx> Encodable> for SpanData { // from the variable-length integer encoding that we use. let len = self.hi - self.lo; + let tag = SpanTag::new(kind, ctxt, len.0 as usize); tag.encode(s); + if tag.context().is_none() { + ctxt.encode(s); + } lo.encode(s); - len.encode(s); + if tag.length().is_none() { + len.encode(s); + } // Encode the index of the `SourceFile` for the span, in order to make decoding faster. 
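+        // Editor's worked example (illustrative, not part of the diff): for a local span with
+        // root syntax context, a file-relative lo of 40 and hi - lo = 7, `SpanTag::new` packs
+        // kind Local (0b00), the "context is root" bit (0b100) and the length (7 << 3) into the
+        // single byte 0b0011_1100; the encoder then writes only that tag, the lo value, and the
+        // metadata index, because `tag.context()` and `tag.length()` are both `Some` and the
+        // separate ctxt/len varints are skipped.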
metadata_index.encode(s); - if tag == TAG_VALID_SPAN_FOREIGN { + if kind == SpanKind::Foreign { // This needs to be two lines to avoid holding the `s.source_file_cache` // while calling `cnum.encode(s)` let cnum = s.source_file_cache.0.cnum; @@ -1432,7 +1458,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { if def_kind == DefKind::Closure && let Some(coroutine_kind) = self.tcx.coroutine_kind(def_id) { - self.tables.coroutine_kind.set(def_id.index, Some(coroutine_kind)); + self.tables.coroutine_kind.set(def_id.index, Some(coroutine_kind)) } if let DefKind::Enum | DefKind::Struct | DefKind::Union = def_kind { self.encode_info_for_adt(local_id); @@ -2229,12 +2255,12 @@ pub fn encode_metadata(tcx: TyCtxt<'_>, path: &Path) { // If we forget this, compilation can succeed with an incomplete rmeta file, // causing an ICE when the rmeta file is read by another compilation. if let Err((path, err)) = ecx.opaque.finish() { - tcx.dcx().emit_err(FailWriteFile { path: &path, err }); + tcx.dcx().emit_fatal(FailWriteFile { path: &path, err }); } let file = ecx.opaque.file(); if let Err(err) = encode_root_position(file, root.position.get()) { - tcx.dcx().emit_err(FailWriteFile { path: ecx.opaque.path(), err }); + tcx.dcx().emit_fatal(FailWriteFile { path: ecx.opaque.path(), err }); } // Record metadata size for self-profiling diff --git a/compiler/rustc_metadata/src/rmeta/mod.rs b/compiler/rustc_metadata/src/rmeta/mod.rs index d496e7494e7f8..54ee50c235860 100644 --- a/compiler/rustc_metadata/src/rmeta/mod.rs +++ b/compiler/rustc_metadata/src/rmeta/mod.rs @@ -66,12 +66,6 @@ const METADATA_VERSION: u8 = 9; /// unsigned integer, and further followed by the rustc version string. pub const METADATA_HEADER: &[u8] = &[b'r', b'u', b's', b't', 0, 0, 0, METADATA_VERSION]; -#[derive(Encodable, Decodable)] -enum SpanEncodingMode { - Shorthand(usize), - Direct, -} - /// A value of type T referred to by its absolute position /// in the metadata, and which can be decoded lazily. /// @@ -487,10 +481,88 @@ bitflags::bitflags! { } } -// Tags used for encoding Spans: -const TAG_VALID_SPAN_LOCAL: u8 = 0; -const TAG_VALID_SPAN_FOREIGN: u8 = 1; -const TAG_PARTIAL_SPAN: u8 = 2; +/// A span tag byte encodes a bunch of data, so that we can cut out a few extra bytes from span +/// encodings (which are very common, for example, libcore has ~650,000 unique spans and over 1.1 +/// million references to prior-written spans). +/// +/// The byte format is split into several parts: +/// +/// [ a a a a a c d d ] +/// +/// `a` bits represent the span length. We have 5 bits, so we can store lengths up to 30 inline, with +/// an all-1s pattern representing that the length is stored separately. +/// +/// `c` represents whether the span context is zero (and then it is not stored as a separate varint) +/// for direct span encodings, and whether the offset is absolute or relative otherwise (zero for +/// absolute). +/// +/// d bits represent the kind of span we are storing (local, foreign, partial, indirect). +#[derive(Encodable, Decodable, Copy, Clone)] +struct SpanTag(u8); + +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +enum SpanKind { + Local = 0b00, + Foreign = 0b01, + Partial = 0b10, + // Indicates the actual span contents are elsewhere. + // If this is the kind, then the span context bit represents whether it is a relative or + // absolute offset. 
+ Indirect = 0b11, +} + +impl SpanTag { + fn new(kind: SpanKind, context: rustc_span::SyntaxContext, length: usize) -> SpanTag { + let mut data = 0u8; + data |= kind as u8; + if context.is_root() { + data |= 0b100; + } + let all_1s_len = (0xffu8 << 3) >> 3; + // strictly less than - all 1s pattern is a sentinel for storage being out of band. + if length < all_1s_len as usize { + data |= (length as u8) << 3; + } else { + data |= all_1s_len << 3; + } + + SpanTag(data) + } + + fn indirect(relative: bool) -> SpanTag { + let mut tag = SpanTag(SpanKind::Indirect as u8); + if relative { + tag.0 |= 0b100; + } + tag + } + + fn kind(self) -> SpanKind { + let masked = self.0 & 0b11; + match masked { + 0b00 => SpanKind::Local, + 0b01 => SpanKind::Foreign, + 0b10 => SpanKind::Partial, + 0b11 => SpanKind::Indirect, + _ => unreachable!(), + } + } + + fn is_relative_offset(self) -> bool { + debug_assert_eq!(self.kind(), SpanKind::Indirect); + self.0 & 0b100 != 0 + } + + fn context(self) -> Option { + if self.0 & 0b100 != 0 { Some(rustc_span::SyntaxContext::root()) } else { None } + } + + fn length(self) -> Option { + let all_1s_len = (0xffu8 << 3) >> 3; + let len = self.0 >> 3; + if len != all_1s_len { Some(rustc_span::BytePos(u32::from(len))) } else { None } + } +} // Tags for encoding Symbol's const SYMBOL_STR: u8 = 0; diff --git a/compiler/rustc_metadata/src/rmeta/table.rs b/compiler/rustc_metadata/src/rmeta/table.rs index 916ff469e099a..306bf07a97608 100644 --- a/compiler/rustc_metadata/src/rmeta/table.rs +++ b/compiler/rustc_metadata/src/rmeta/table.rs @@ -199,7 +199,8 @@ fixed_size_enum! { fixed_size_enum! { hir::CoroutineKind { - ( Coroutine ) + ( Coroutine(hir::Movability::Movable) ) + ( Coroutine(hir::Movability::Static) ) ( Desugared(hir::CoroutineDesugaring::Gen, hir::CoroutineSource::Block) ) ( Desugared(hir::CoroutineDesugaring::Gen, hir::CoroutineSource::Fn) ) ( Desugared(hir::CoroutineDesugaring::Gen, hir::CoroutineSource::Closure) ) diff --git a/compiler/rustc_middle/Cargo.toml b/compiler/rustc_middle/Cargo.toml index d356984c1e9de..8e6ca645f80cb 100644 --- a/compiler/rustc_middle/Cargo.toml +++ b/compiler/rustc_middle/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" [dependencies] # tidy-alphabetical-start -bitflags = "1.2.1" +bitflags = "2.4.1" derive_more = "0.99.17" either = "1.5.0" field-offset = "0.3.5" diff --git a/compiler/rustc_middle/src/lint.rs b/compiler/rustc_middle/src/lint.rs index d34d9160d55bf..ae432a0406567 100644 --- a/compiler/rustc_middle/src/lint.rs +++ b/compiler/rustc_middle/src/lint.rs @@ -1,6 +1,6 @@ use std::cmp; -use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::fx::FxIndexMap; use rustc_data_structures::sorted_map::SortedMap; use rustc_errors::{Diagnostic, DiagnosticBuilder, DiagnosticId, DiagnosticMessage, MultiSpan}; use rustc_hir::{HirId, ItemLocalId}; @@ -61,7 +61,7 @@ pub type LevelAndSource = (Level, LintLevelSource); /// by the attributes for *a single HirId*. 
#[derive(Default, Debug, HashStable)] pub struct ShallowLintLevelMap { - pub specs: SortedMap>, + pub specs: SortedMap>, } /// From an initial level and source, verify the effect of special annotations: diff --git a/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs b/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs index f758c1d5e6f50..e11c9371118cb 100644 --- a/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs +++ b/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs @@ -45,9 +45,10 @@ pub struct CodegenFnAttrs { pub alignment: Option, } +#[derive(Clone, Copy, PartialEq, Eq, TyEncodable, TyDecodable, HashStable)] +pub struct CodegenFnAttrFlags(u32); bitflags! { - #[derive(TyEncodable, TyDecodable, HashStable)] - pub struct CodegenFnAttrFlags: u32 { + impl CodegenFnAttrFlags: u32 { /// `#[cold]`: a hint to LLVM that this function, when called, is never on /// the hot path. const COLD = 1 << 0; @@ -104,6 +105,7 @@ bitflags! { const NO_BUILTINS = 1 << 20; } } +rustc_data_structures::external_bitflags_debug! { CodegenFnAttrFlags } impl CodegenFnAttrs { pub const EMPTY: &'static Self = &Self::new(); diff --git a/compiler/rustc_middle/src/middle/region.rs b/compiler/rustc_middle/src/middle/region.rs index 3f6dc2b9f1261..b4dd8f6f4a781 100644 --- a/compiler/rustc_middle/src/middle/region.rs +++ b/compiler/rustc_middle/src/middle/region.rs @@ -307,11 +307,6 @@ pub struct ScopeTree { /// the values are still owned by their containing expressions. So /// we'll see that `&x`. pub yield_in_scope: FxHashMap>, - - /// The number of visit_expr and visit_pat calls done in the body. - /// Used to sanity check visit_expr/visit_pat call count when - /// calculating coroutine interiors. - pub body_expr_count: FxHashMap, } /// Identifies the reason that a given expression is an rvalue candidate @@ -408,20 +403,12 @@ impl ScopeTree { pub fn yield_in_scope(&self, scope: Scope) -> Option<&[YieldData]> { self.yield_in_scope.get(&scope).map(Deref::deref) } - - /// Gives the number of expressions visited in a body. - /// Used to sanity check visit_expr call count when - /// calculating coroutine interiors. - pub fn body_expr_count(&self, body_id: hir::BodyId) -> Option { - self.body_expr_count.get(&body_id).copied() - } } impl<'a> HashStable> for ScopeTree { fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { let ScopeTree { root_body, - ref body_expr_count, ref parent_map, ref var_map, ref destruction_scopes, @@ -430,7 +417,6 @@ impl<'a> HashStable> for ScopeTree { } = *self; root_body.hash_stable(hcx, hasher); - body_expr_count.hash_stable(hcx, hasher); parent_map.hash_stable(hcx, hasher); var_map.hash_stable(hcx, hasher); destruction_scopes.hash_stable(hcx, hasher); diff --git a/compiler/rustc_middle/src/mir/pretty.rs b/compiler/rustc_middle/src/mir/pretty.rs index 071c6a75583d4..0b487eae36dbd 100644 --- a/compiler/rustc_middle/src/mir/pretty.rs +++ b/compiler/rustc_middle/src/mir/pretty.rs @@ -520,7 +520,7 @@ fn write_mir_sig(tcx: TyCtxt<'_>, body: &Body<'_>, w: &mut dyn io::Write) -> io: let kind = tcx.def_kind(def_id); let is_function = match kind { DefKind::Fn | DefKind::AssocFn | DefKind::Ctor(..) 
=> true, - _ => tcx.is_closure(def_id), + _ => tcx.is_closure_or_coroutine(def_id), }; match (kind, body.source.promoted) { (_, Some(i)) => write!(w, "{i:?} in ")?, @@ -627,7 +627,11 @@ where w, "{:A$} // {}{}", indented_body, - if tcx.sess.verbose() { format!("{current_location:?}: ") } else { String::new() }, + if tcx.sess.verbose_internals() { + format!("{current_location:?}: ") + } else { + String::new() + }, comment(tcx, statement.source_info), A = ALIGN, )?; @@ -652,7 +656,11 @@ where w, "{:A$} // {}{}", indented_terminator, - if tcx.sess.verbose() { format!("{current_location:?}: ") } else { String::new() }, + if tcx.sess.verbose_internals() { + format!("{current_location:?}: ") + } else { + String::new() + }, comment(tcx, data.terminator().source_info), A = ALIGN, )?; @@ -943,7 +951,7 @@ impl<'tcx> Debug for Rvalue<'tcx> { // When printing regions, add trailing space if necessary. let print_region = ty::tls::with(|tcx| { - tcx.sess.verbose() || tcx.sess.opts.unstable_opts.identify_regions + tcx.sess.verbose_internals() || tcx.sess.opts.unstable_opts.identify_regions }); let region = if print_region { let mut region = region.to_string(); @@ -1041,7 +1049,7 @@ impl<'tcx> Debug for Rvalue<'tcx> { struct_fmt.finish() }), - AggregateKind::Coroutine(def_id, _, _) => ty::tls::with(|tcx| { + AggregateKind::Coroutine(def_id, _) => ty::tls::with(|tcx| { let name = format!("{{coroutine@{:?}}}", tcx.def_span(def_id)); let mut struct_fmt = fmt.debug_struct(&name); @@ -1296,11 +1304,11 @@ impl<'tcx> Visitor<'tcx> for ExtraComments<'tcx> { self.push(&format!("+ args: {args:#?}")); } - AggregateKind::Coroutine(def_id, args, movability) => { + AggregateKind::Coroutine(def_id, args) => { self.push("coroutine"); self.push(&format!("+ def_id: {def_id:?}")); self.push(&format!("+ args: {args:#?}")); - self.push(&format!("+ movability: {movability:?}")); + self.push(&format!("+ kind: {:?}", self.tcx.coroutine_kind(def_id))); } AggregateKind::Adt(_, _, _, Some(user_ty), _) => { @@ -1668,7 +1676,7 @@ fn pretty_print_const_value_tcx<'tcx>( ) -> fmt::Result { use crate::ty::print::PrettyPrinter; - if tcx.sess.verbose() { + if tcx.sess.verbose_internals() { fmt.write_str(&format!("ConstValue({ct:?}: {ty})"))?; return Ok(()); } diff --git a/compiler/rustc_middle/src/mir/syntax.rs b/compiler/rustc_middle/src/mir/syntax.rs index 8cf9e55f0b603..462076d750f27 100644 --- a/compiler/rustc_middle/src/mir/syntax.rs +++ b/compiler/rustc_middle/src/mir/syntax.rs @@ -14,7 +14,6 @@ use crate::ty::{Region, UserTypeAnnotationIndex}; use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece}; use rustc_hir::def_id::DefId; -use rustc_hir::{self as hir}; use rustc_hir::{self, CoroutineKind}; use rustc_index::IndexVec; use rustc_target::abi::{FieldIdx, VariantIdx}; @@ -1344,7 +1343,7 @@ pub enum AggregateKind<'tcx> { Adt(DefId, VariantIdx, GenericArgsRef<'tcx>, Option, Option), Closure(DefId, GenericArgsRef<'tcx>), - Coroutine(DefId, GenericArgsRef<'tcx>, hir::Movability), + Coroutine(DefId, GenericArgsRef<'tcx>), } #[derive(Copy, Clone, Debug, PartialEq, Eq, TyEncodable, TyDecodable, Hash, HashStable)] diff --git a/compiler/rustc_middle/src/mir/tcx.rs b/compiler/rustc_middle/src/mir/tcx.rs index f9b2a6ee8aacb..234ccee55468d 100644 --- a/compiler/rustc_middle/src/mir/tcx.rs +++ b/compiler/rustc_middle/src/mir/tcx.rs @@ -201,9 +201,7 @@ impl<'tcx> Rvalue<'tcx> { } AggregateKind::Adt(did, _, args, _, _) => tcx.type_of(did).instantiate(tcx, args), AggregateKind::Closure(did, args) => Ty::new_closure(tcx, did, args), - 
AggregateKind::Coroutine(did, args, movability) => { - Ty::new_coroutine(tcx, did, args, movability) - } + AggregateKind::Coroutine(did, args) => Ty::new_coroutine(tcx, did, args), }, Rvalue::ShallowInitBox(_, ty) => Ty::new_box(tcx, ty), Rvalue::CopyForDeref(ref place) => place.ty(local_decls, tcx).ty, diff --git a/compiler/rustc_middle/src/mir/terminator.rs b/compiler/rustc_middle/src/mir/terminator.rs index e0c9def037948..7be6deb614193 100644 --- a/compiler/rustc_middle/src/mir/terminator.rs +++ b/compiler/rustc_middle/src/mir/terminator.rs @@ -147,7 +147,7 @@ impl AssertKind { Overflow(op, _, _) => bug!("{:?} cannot overflow", op), DivisionByZero(_) => "attempt to divide by zero", RemainderByZero(_) => "attempt to calculate the remainder with a divisor of zero", - ResumedAfterReturn(CoroutineKind::Coroutine) => "coroutine resumed after completion", + ResumedAfterReturn(CoroutineKind::Coroutine(_)) => "coroutine resumed after completion", ResumedAfterReturn(CoroutineKind::Desugared(CoroutineDesugaring::Async, _)) => { "`async fn` resumed after completion" } @@ -157,7 +157,7 @@ impl AssertKind { ResumedAfterReturn(CoroutineKind::Desugared(CoroutineDesugaring::Gen, _)) => { "`gen fn` should just keep returning `None` after completion" } - ResumedAfterPanic(CoroutineKind::Coroutine) => "coroutine resumed after panicking", + ResumedAfterPanic(CoroutineKind::Coroutine(_)) => "coroutine resumed after panicking", ResumedAfterPanic(CoroutineKind::Desugared(CoroutineDesugaring::Async, _)) => { "`async fn` resumed after panicking" } @@ -262,7 +262,7 @@ impl AssertKind { ResumedAfterReturn(CoroutineKind::Desugared(CoroutineDesugaring::Gen, _)) => { bug!("gen blocks can be resumed after they return and will keep returning `None`") } - ResumedAfterReturn(CoroutineKind::Coroutine) => { + ResumedAfterReturn(CoroutineKind::Coroutine(_)) => { middle_assert_coroutine_resume_after_return } ResumedAfterPanic(CoroutineKind::Desugared(CoroutineDesugaring::Async, _)) => { @@ -274,7 +274,7 @@ impl AssertKind { ResumedAfterPanic(CoroutineKind::Desugared(CoroutineDesugaring::Gen, _)) => { middle_assert_gen_resume_after_panic } - ResumedAfterPanic(CoroutineKind::Coroutine) => { + ResumedAfterPanic(CoroutineKind::Coroutine(_)) => { middle_assert_coroutine_resume_after_panic } diff --git a/compiler/rustc_middle/src/mir/visit.rs b/compiler/rustc_middle/src/mir/visit.rs index 9059936f49554..132ecf91af187 100644 --- a/compiler/rustc_middle/src/mir/visit.rs +++ b/compiler/rustc_middle/src/mir/visit.rs @@ -736,7 +736,6 @@ macro_rules! make_mir_visitor { AggregateKind::Coroutine( _, coroutine_args, - _movability, ) => { self.visit_args(coroutine_args, location); } diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs index 3a54f5f6b3d01..2810182c0a0dc 100644 --- a/compiler/rustc_middle/src/query/mod.rs +++ b/compiler/rustc_middle/src/query/mod.rs @@ -938,10 +938,6 @@ rustc_queries! 
{ desc { |tcx| "checking naked functions in {}", describe_as_module(key, tcx) } } - query check_mod_item_types(key: LocalModDefId) -> () { - desc { |tcx| "checking item types in {}", describe_as_module(key, tcx) } - } - query check_mod_privacy(key: LocalModDefId) -> () { desc { |tcx| "checking privacy in {}", describe_as_module(key.to_local_def_id(), tcx) } } diff --git a/compiler/rustc_middle/src/thir/visit.rs b/compiler/rustc_middle/src/thir/visit.rs index 4943c11848b8a..ade3ea289cc59 100644 --- a/compiler/rustc_middle/src/thir/visit.rs +++ b/compiler/rustc_middle/src/thir/visit.rs @@ -3,26 +3,26 @@ use super::{ PatKind, Stmt, StmtKind, Thir, }; -pub trait Visitor<'a, 'tcx: 'a>: Sized { - fn thir(&self) -> &'a Thir<'tcx>; +pub trait Visitor<'thir, 'tcx: 'thir>: Sized { + fn thir(&self) -> &'thir Thir<'tcx>; - fn visit_expr(&mut self, expr: &Expr<'tcx>) { + fn visit_expr(&mut self, expr: &'thir Expr<'tcx>) { walk_expr(self, expr); } - fn visit_stmt(&mut self, stmt: &Stmt<'tcx>) { + fn visit_stmt(&mut self, stmt: &'thir Stmt<'tcx>) { walk_stmt(self, stmt); } - fn visit_block(&mut self, block: &Block) { + fn visit_block(&mut self, block: &'thir Block) { walk_block(self, block); } - fn visit_arm(&mut self, arm: &Arm<'tcx>) { + fn visit_arm(&mut self, arm: &'thir Arm<'tcx>) { walk_arm(self, arm); } - fn visit_pat(&mut self, pat: &Pat<'tcx>) { + fn visit_pat(&mut self, pat: &'thir Pat<'tcx>) { walk_pat(self, pat); } @@ -36,7 +36,10 @@ pub trait Visitor<'a, 'tcx: 'a>: Sized { // other `visit*` functions. } -pub fn walk_expr<'a, 'tcx: 'a, V: Visitor<'a, 'tcx>>(visitor: &mut V, expr: &Expr<'tcx>) { +pub fn walk_expr<'thir, 'tcx: 'thir, V: Visitor<'thir, 'tcx>>( + visitor: &mut V, + expr: &'thir Expr<'tcx>, +) { use ExprKind::*; match expr.kind { Scope { value, region_scope: _, lint_level: _ } => { @@ -168,7 +171,10 @@ pub fn walk_expr<'a, 'tcx: 'a, V: Visitor<'a, 'tcx>>(visitor: &mut V, expr: &Exp } } -pub fn walk_stmt<'a, 'tcx: 'a, V: Visitor<'a, 'tcx>>(visitor: &mut V, stmt: &Stmt<'tcx>) { +pub fn walk_stmt<'thir, 'tcx: 'thir, V: Visitor<'thir, 'tcx>>( + visitor: &mut V, + stmt: &'thir Stmt<'tcx>, +) { match &stmt.kind { StmtKind::Expr { expr, scope: _ } => visitor.visit_expr(&visitor.thir()[*expr]), StmtKind::Let { @@ -191,7 +197,10 @@ pub fn walk_stmt<'a, 'tcx: 'a, V: Visitor<'a, 'tcx>>(visitor: &mut V, stmt: &Stm } } -pub fn walk_block<'a, 'tcx: 'a, V: Visitor<'a, 'tcx>>(visitor: &mut V, block: &Block) { +pub fn walk_block<'thir, 'tcx: 'thir, V: Visitor<'thir, 'tcx>>( + visitor: &mut V, + block: &'thir Block, +) { for &stmt in &*block.stmts { visitor.visit_stmt(&visitor.thir()[stmt]); } @@ -200,7 +209,10 @@ pub fn walk_block<'a, 'tcx: 'a, V: Visitor<'a, 'tcx>>(visitor: &mut V, block: &B } } -pub fn walk_arm<'a, 'tcx: 'a, V: Visitor<'a, 'tcx>>(visitor: &mut V, arm: &Arm<'tcx>) { +pub fn walk_arm<'thir, 'tcx: 'thir, V: Visitor<'thir, 'tcx>>( + visitor: &mut V, + arm: &'thir Arm<'tcx>, +) { match arm.guard { Some(Guard::If(expr)) => visitor.visit_expr(&visitor.thir()[expr]), Some(Guard::IfLet(ref pat, expr)) => { @@ -213,7 +225,10 @@ pub fn walk_arm<'a, 'tcx: 'a, V: Visitor<'a, 'tcx>>(visitor: &mut V, arm: &Arm<' visitor.visit_expr(&visitor.thir()[arm.body]); } -pub fn walk_pat<'a, 'tcx: 'a, V: Visitor<'a, 'tcx>>(visitor: &mut V, pat: &Pat<'tcx>) { +pub fn walk_pat<'thir, 'tcx: 'thir, V: Visitor<'thir, 'tcx>>( + visitor: &mut V, + pat: &'thir Pat<'tcx>, +) { use PatKind::*; match &pat.kind { AscribeUserType { subpattern, ascription: _ } diff --git a/compiler/rustc_middle/src/traits/mod.rs 
b/compiler/rustc_middle/src/traits/mod.rs index 5d0187a859833..09b0a0dfbf38c 100644 --- a/compiler/rustc_middle/src/traits/mod.rs +++ b/compiler/rustc_middle/src/traits/mod.rs @@ -177,7 +177,7 @@ impl<'tcx> ObligationCause<'tcx> { // NOTE(flaper87): As of now, it keeps track of the whole error // chain. Ideally, we should have a way to configure this either - // by using -Z verbose or just a CLI argument. + // by using -Z verbose-internals or just a CLI argument. self.code = variant(DerivedObligationCause { parent_trait_pred, parent_code: self.code }).into(); self @@ -429,8 +429,10 @@ pub enum ObligationCauseCode<'tcx> { MatchImpl(ObligationCause<'tcx>, DefId), BinOp { + lhs_hir_id: hir::HirId, + rhs_hir_id: Option, rhs_span: Option, - is_lit: bool, + rhs_is_lit: bool, output_ty: Option>, }, @@ -510,6 +512,21 @@ impl<'tcx> ObligationCauseCode<'tcx> { base_cause } + /// Returns the base obligation and the base trait predicate, if any, ignoring + /// derived obligations. + pub fn peel_derives_with_predicate(&self) -> (&Self, Option>) { + let mut base_cause = self; + let mut base_trait_pred = None; + while let Some((parent_code, parent_pred)) = base_cause.parent() { + base_cause = parent_code; + if let Some(parent_pred) = parent_pred { + base_trait_pred = Some(parent_pred); + } + } + + (base_cause, base_trait_pred) + } + pub fn parent(&self) -> Option<(&Self, Option>)> { match self { FunctionArgumentObligation { parent_code, .. } => Some((parent_code, None)), diff --git a/compiler/rustc_middle/src/ty/adt.rs b/compiler/rustc_middle/src/ty/adt.rs index b95ae5881e24f..685c3e87dac1f 100644 --- a/compiler/rustc_middle/src/ty/adt.rs +++ b/compiler/rustc_middle/src/ty/adt.rs @@ -24,9 +24,10 @@ use std::str; use super::{Destructor, FieldDef, GenericPredicates, Ty, TyCtxt, VariantDef, VariantDiscr}; +#[derive(Clone, Copy, PartialEq, Eq, Hash, HashStable, TyEncodable, TyDecodable)] +pub struct AdtFlags(u16); bitflags! { - #[derive(HashStable, TyEncodable, TyDecodable)] - pub struct AdtFlags: u16 { + impl AdtFlags: u16 { const NO_ADT_FLAGS = 0; /// Indicates whether the ADT is an enum. const IS_ENUM = 1 << 0; @@ -51,6 +52,7 @@ bitflags! { const IS_UNSAFE_CELL = 1 << 9; } } +rustc_data_structures::external_bitflags_debug! { AdtFlags } /// The definition of a user-defined type, e.g., a `struct`, `enum`, or `union`. 
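The `peel_derives_with_predicate` helper added above walks the chain of derived obligation causes and keeps the parent trait predicate closest to the base cause. A rough standalone illustration of that loop shape, using simplified stand-in types rather than the compiler's `ObligationCauseCode`:

// Each cause optionally links to its parent, and the link may carry a predicate.
struct Cause<'a> {
    parent: Option<(&'a Cause<'a>, Option<&'static str>)>,
}

fn peel_with_predicate<'a>(mut cause: &'a Cause<'a>) -> (&'a Cause<'a>, Option<&'static str>) {
    let mut last_pred = None;
    while let Some((parent, pred)) = cause.parent {
        cause = parent;
        if pred.is_some() {
            last_pred = pred;
        }
    }
    (cause, last_pred)
}

fn main() {
    let base = Cause { parent: None };
    let mid = Cause { parent: Some((&base, Some("Base: Trait"))) };
    let top = Cause { parent: Some((&mid, None)) };
    // The predicate nearest the base cause wins, as in the real helper.
    assert_eq!(peel_with_predicate(&top).1, Some("Base: Trait"));
}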
/// diff --git a/compiler/rustc_middle/src/ty/closure.rs b/compiler/rustc_middle/src/ty/closure.rs index 8c29bc5a42865..8ff5b135acae8 100644 --- a/compiler/rustc_middle/src/ty/closure.rs +++ b/compiler/rustc_middle/src/ty/closure.rs @@ -197,7 +197,7 @@ pub struct ClosureTypeInfo<'tcx> { } fn closure_typeinfo<'tcx>(tcx: TyCtxt<'tcx>, def: LocalDefId) -> ClosureTypeInfo<'tcx> { - debug_assert!(tcx.is_closure(def.to_def_id())); + debug_assert!(tcx.is_closure_or_coroutine(def.to_def_id())); let typeck_results = tcx.typeck(def); let user_provided_sig = typeck_results.user_provided_sigs[&def]; let captures = typeck_results.closure_min_captures_flattened(def); @@ -217,7 +217,7 @@ impl<'tcx> TyCtxt<'tcx> { } pub fn closure_captures(self, def_id: LocalDefId) -> &'tcx [&'tcx ty::CapturedPlace<'tcx>] { - if !self.is_closure(def_id.to_def_id()) { + if !self.is_closure_or_coroutine(def_id.to_def_id()) { return &[]; }; self.closure_typeinfo(def_id).captures diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index ac675a70b546e..6807eacb7f177 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -847,6 +847,12 @@ impl<'tcx> TyCtxt<'tcx> { self.coroutine_kind(def_id).is_some() } + /// Returns the movability of the coroutine of `def_id`, or panics + /// if given a `def_id` that is not a coroutine. + pub fn coroutine_movability(self, def_id: DefId) -> hir::Movability { + self.coroutine_kind(def_id).expect("expected a coroutine").movability() + } + /// Returns `true` if the node pointed to by `def_id` is a coroutine for an async construct. pub fn coroutine_is_async(self, def_id: DefId) -> bool { matches!( @@ -858,7 +864,7 @@ impl<'tcx> TyCtxt<'tcx> { /// Returns `true` if the node pointed to by `def_id` is a general coroutine that implements `Coroutine`. /// This means it is neither an `async` or `gen` construct. pub fn is_general_coroutine(self, def_id: DefId) -> bool { - matches!(self.coroutine_kind(def_id), Some(hir::CoroutineKind::Coroutine)) + matches!(self.coroutine_kind(def_id), Some(hir::CoroutineKind::Coroutine(_))) } /// Returns `true` if the node pointed to by `def_id` is a coroutine for a `gen` construct. 
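Several of the hunks above and below follow from one representational change: a coroutine's movability is no longer stored on `ty::Coroutine` or `AggregateKind::Coroutine`; it is recovered from the coroutine's `CoroutineKind`, e.g. via the new `coroutine_movability` helper. A reduced standalone model of that shape (illustrative types only, not compiler code):

use std::collections::HashMap;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Movability { Movable, Static }

// Movability rides on the kind; the desugared async/gen kinds are elided here.
#[derive(Debug, Clone, Copy)]
enum CoroutineKind {
    Coroutine(Movability),
}

impl CoroutineKind {
    fn movability(self) -> Movability {
        match self {
            CoroutineKind::Coroutine(m) => m,
        }
    }
}

// Stand-in for tcx.coroutine_movability(def_id): look up the kind, then ask it.
fn coroutine_movability(kinds: &HashMap<u32, CoroutineKind>, def_id: u32) -> Movability {
    kinds.get(&def_id).expect("expected a coroutine").movability()
}

fn main() {
    let mut kinds = HashMap::new();
    kinds.insert(7, CoroutineKind::Coroutine(Movability::Static));
    kinds.insert(3, CoroutineKind::Coroutine(Movability::Movable));
    assert_eq!(coroutine_movability(&kinds, 7), Movability::Static);
    assert_eq!(coroutine_movability(&kinds, 3), Movability::Movable);
}

Keeping movability on the kind means any caller that already consults `coroutine_kind` gets it for free, and the `Coroutine` type and aggregate constructors each drop a field.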
diff --git a/compiler/rustc_middle/src/ty/fast_reject.rs b/compiler/rustc_middle/src/ty/fast_reject.rs index 38a9cabca972e..6ed68f90eb38e 100644 --- a/compiler/rustc_middle/src/ty/fast_reject.rs +++ b/compiler/rustc_middle/src/ty/fast_reject.rs @@ -128,7 +128,7 @@ pub fn simplify_type<'tcx>( }, ty::Ref(_, _, mutbl) => Some(SimplifiedType::Ref(mutbl)), ty::FnDef(def_id, _) | ty::Closure(def_id, _) => Some(SimplifiedType::Closure(def_id)), - ty::Coroutine(def_id, _, _) => Some(SimplifiedType::Coroutine(def_id)), + ty::Coroutine(def_id, _) => Some(SimplifiedType::Coroutine(def_id)), ty::CoroutineWitness(def_id, _) => Some(SimplifiedType::CoroutineWitness(def_id)), ty::Never => Some(SimplifiedType::Never), ty::Tuple(tys) => Some(SimplifiedType::Tuple(tys.len())), diff --git a/compiler/rustc_middle/src/ty/flags.rs b/compiler/rustc_middle/src/ty/flags.rs index f9a2385b10005..0c1d10914146f 100644 --- a/compiler/rustc_middle/src/ty/flags.rs +++ b/compiler/rustc_middle/src/ty/flags.rs @@ -96,7 +96,7 @@ impl FlagComputation { self.add_flags(TypeFlags::STILL_FURTHER_SPECIALIZABLE); } - ty::Coroutine(_, args, _) => { + ty::Coroutine(_, args) => { let args = args.as_coroutine(); let should_remove_further_specializable = !self.flags.contains(TypeFlags::STILL_FURTHER_SPECIALIZABLE); diff --git a/compiler/rustc_middle/src/ty/generics.rs b/compiler/rustc_middle/src/ty/generics.rs index c3699b114c411..4821ae3f08735 100644 --- a/compiler/rustc_middle/src/ty/generics.rs +++ b/compiler/rustc_middle/src/ty/generics.rs @@ -326,7 +326,7 @@ impl<'tcx> Generics { own_params.start = 1; } - let verbose = tcx.sess.verbose(); + let verbose = tcx.sess.verbose_internals(); // Filter the default arguments. // @@ -342,7 +342,7 @@ impl<'tcx> Generics { param.default_value(tcx).is_some_and(|default| { default.instantiate(tcx, args) == args[param.index as usize] }) - // filter out trailing effect params, if we're not in `-Zverbose`. + // filter out trailing effect params, if we're not in `-Zverbose-internals`. || (!verbose && matches!(param.kind, GenericParamDefKind::Const { is_host_effect: true, .. })) }) .count(); diff --git a/compiler/rustc_middle/src/ty/instance.rs b/compiler/rustc_middle/src/ty/instance.rs index 1c7a7545e2b0e..2ac3cddfa15ad 100644 --- a/compiler/rustc_middle/src/ty/instance.rs +++ b/compiler/rustc_middle/src/ty/instance.rs @@ -426,7 +426,10 @@ impl<'tcx> Instance<'tcx> { ) -> Option> { debug!("resolve(def_id={:?}, args={:?})", def_id, args); // Use either `resolve_closure` or `resolve_for_vtable` - assert!(!tcx.is_closure(def_id), "Called `resolve_for_fn_ptr` on closure: {def_id:?}"); + assert!( + !tcx.is_closure_or_coroutine(def_id), + "Called `resolve_for_fn_ptr` on closure: {def_id:?}" + ); Instance::resolve(tcx, param_env, def_id, args).ok().flatten().map(|mut resolved| { match resolved.def { InstanceDef::Item(def) if resolved.def.requires_caller_location(tcx) => { @@ -488,7 +491,7 @@ impl<'tcx> Instance<'tcx> { }) ) { - if tcx.is_closure(def) { + if tcx.is_closure_or_coroutine(def) { debug!(" => vtable fn pointer created for closure with #[track_caller]: {:?} for method {:?} {:?}", def, def_id, args); @@ -658,12 +661,10 @@ fn polymorphize<'tcx>( // the unpolymorphized upvar closure would result in a polymorphized closure producing // multiple mono items (and eventually symbol clashes). 
let def_id = instance.def_id(); - let upvars_ty = if tcx.is_closure(def_id) { - Some(args.as_closure().tupled_upvars_ty()) - } else if tcx.type_of(def_id).skip_binder().is_coroutine() { - Some(args.as_coroutine().tupled_upvars_ty()) - } else { - None + let upvars_ty = match tcx.type_of(def_id).skip_binder().kind() { + ty::Closure(..) => Some(args.as_closure().tupled_upvars_ty()), + ty::Coroutine(..) => Some(args.as_coroutine().tupled_upvars_ty()), + _ => None, }; let has_upvars = upvars_ty.is_some_and(|ty| !ty.tuple_fields().is_empty()); debug!("polymorphize: upvars_ty={:?} has_upvars={:?}", upvars_ty, has_upvars); @@ -689,13 +690,13 @@ fn polymorphize<'tcx>( Ty::new_closure(self.tcx, def_id, polymorphized_args) } } - ty::Coroutine(def_id, args, movability) => { + ty::Coroutine(def_id, args) => { let polymorphized_args = polymorphize(self.tcx, ty::InstanceDef::Item(def_id), args); if args == polymorphized_args { ty } else { - Ty::new_coroutine(self.tcx, def_id, polymorphized_args, movability) + Ty::new_coroutine(self.tcx, def_id, polymorphized_args) } } _ => ty.super_fold_with(self), diff --git a/compiler/rustc_middle/src/ty/layout.rs b/compiler/rustc_middle/src/ty/layout.rs index 8a02914b4359c..5cc0ce87c9bd7 100644 --- a/compiler/rustc_middle/src/ty/layout.rs +++ b/compiler/rustc_middle/src/ty/layout.rs @@ -906,7 +906,7 @@ where i, ), - ty::Coroutine(def_id, args, _) => match this.variants { + ty::Coroutine(def_id, args) => match this.variants { Variants::Single { index } => TyMaybeWithLayout::Ty( args.as_coroutine() .state_tys(def_id, tcx) diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs index 9f1ff4538aa24..757d3337afc4c 100644 --- a/compiler/rustc_middle/src/ty/mod.rs +++ b/compiler/rustc_middle/src/ty/mod.rs @@ -309,23 +309,22 @@ impl Visibility { #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, HashStable, TyEncodable, TyDecodable)] pub enum BoundConstness { - /// `T: Trait` + /// `Type: Trait` NotConst, - /// `T: ~const Trait` + /// `Type: const Trait` + Const, + /// `Type: ~const Trait` /// /// Requires resolving to const only when we are in a const context. ConstIfConst, } impl BoundConstness { - /// Reduce `self` and `constness` to two possible combined states instead of four. - pub fn and(&mut self, constness: hir::Constness) -> hir::Constness { - match (constness, self) { - (hir::Constness::Const, BoundConstness::ConstIfConst) => hir::Constness::Const, - (_, this) => { - *this = BoundConstness::NotConst; - hir::Constness::NotConst - } + pub fn as_str(self) -> &'static str { + match self { + Self::NotConst => "", + Self::Const => "const", + Self::ConstIfConst => "~const", } } } @@ -334,7 +333,8 @@ impl fmt::Display for BoundConstness { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::NotConst => f.write_str("normal"), - Self::ConstIfConst => f.write_str("`~const`"), + Self::Const => f.write_str("const"), + Self::ConstIfConst => f.write_str("~const"), } } } @@ -1772,9 +1772,10 @@ pub struct Destructor { pub constness: hir::Constness, } +#[derive(Clone, Copy, PartialEq, Eq, HashStable, TyEncodable, TyDecodable)] +pub struct VariantFlags(u8); bitflags! { - #[derive(HashStable, TyEncodable, TyDecodable)] - pub struct VariantFlags: u8 { + impl VariantFlags: u8 { const NO_VARIANT_FLAGS = 0; /// Indicates whether the field list of this variant is `#[non_exhaustive]`. const IS_FIELD_LIST_NON_EXHAUSTIVE = 1 << 0; @@ -1783,6 +1784,7 @@ bitflags! 
{ const IS_RECOVERED = 1 << 1; } } +rustc_data_structures::external_bitflags_debug! { VariantFlags } /// Definition of a variant -- a struct's fields or an enum variant. #[derive(Debug, HashStable, TyEncodable, TyDecodable)] diff --git a/compiler/rustc_middle/src/ty/opaque_types.rs b/compiler/rustc_middle/src/ty/opaque_types.rs index 71fe7d15a6cf7..fc4d4c9a3d260 100644 --- a/compiler/rustc_middle/src/ty/opaque_types.rs +++ b/compiler/rustc_middle/src/ty/opaque_types.rs @@ -153,9 +153,9 @@ impl<'tcx> TypeFolder> for ReverseMapper<'tcx> { Ty::new_closure(self.tcx, def_id, args) } - ty::Coroutine(def_id, args, movability) => { + ty::Coroutine(def_id, args) => { let args = self.fold_closure_args(def_id, args); - Ty::new_coroutine(self.tcx, def_id, args, movability) + Ty::new_coroutine(self.tcx, def_id, args) } ty::CoroutineWitness(def_id, args) => { diff --git a/compiler/rustc_middle/src/ty/print/mod.rs b/compiler/rustc_middle/src/ty/print/mod.rs index 5e09154789a79..f32b7b0852abc 100644 --- a/compiler/rustc_middle/src/ty/print/mod.rs +++ b/compiler/rustc_middle/src/ty/print/mod.rs @@ -259,7 +259,7 @@ fn characteristic_def_id_of_type_cached<'a>( ty::FnDef(def_id, _) | ty::Closure(def_id, _) - | ty::Coroutine(def_id, _, _) + | ty::Coroutine(def_id, _) | ty::CoroutineWitness(def_id, _) | ty::Foreign(def_id) => Some(def_id), diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index 8e045397b0ffa..ebbd02e01bf33 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ -744,7 +744,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { // only affect certain debug messages (e.g. messages printed // from `rustc_middle::ty` during the computation of `tcx.predicates_of`), // and should have no effect on any compiler output. - // [Unless `-Zverbose` is used, e.g. in the output of + // [Unless `-Zverbose-internals` is used, e.g. in the output of // `tests/ui/nll/ty-outlives/impl-trait-captures.rs`, for // example.] if self.should_print_verbose() { @@ -783,14 +783,14 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { } } ty::Str => p!("str"), - ty::Coroutine(did, args, movability) => { + ty::Coroutine(did, args) => { p!(write("{{")); let coroutine_kind = self.tcx().coroutine_kind(did).unwrap(); - let should_print_movability = - self.should_print_verbose() || coroutine_kind == hir::CoroutineKind::Coroutine; + let should_print_movability = self.should_print_verbose() + || matches!(coroutine_kind, hir::CoroutineKind::Coroutine(_)); if should_print_movability { - match movability { + match coroutine_kind.movability() { hir::Movability::Movable => {} hir::Movability::Static => p!("static "), } @@ -829,7 +829,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { } ty::CoroutineWitness(did, args) => { p!(write("{{")); - if !self.tcx().sess.verbose() { + if !self.tcx().sess.verbose_internals() { p!("coroutine witness"); // FIXME(eddyb) should use `def_span`. 
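The `CodegenFnAttrFlags`, `AdtFlags`, and `VariantFlags` hunks all switch to the same bitflags 2.x idiom: the newtype struct is written out by hand, so derives like `TyEncodable` and `HashStable` can be attached to an ordinary struct, and `bitflags!` only supplies the flag constants and operations through its `impl` form, with `external_bitflags_debug!` filling in the `Debug` impl the macro does not generate for externally defined types. A minimal standalone sketch of the idiom (depends on the `bitflags` 2.x crate; the flag names are illustrative):

use bitflags::bitflags;

// An ordinary newtype, so arbitrary derives can be attached to it directly.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct ExampleFlags(u8);

bitflags! {
    // The `impl` form only generates the flags API for the existing struct.
    impl ExampleFlags: u8 {
        const COLD = 1 << 0;
        const NAKED = 1 << 1;
    }
}

fn main() {
    let flags = ExampleFlags::COLD | ExampleFlags::NAKED;
    assert!(flags.contains(ExampleFlags::COLD));
    assert_eq!(flags.bits(), 0b11);
}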
if let Some(did) = did.as_local() { @@ -1055,7 +1055,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { && assoc.trait_container(tcx) == tcx.lang_items().coroutine_trait() && assoc.name == rustc_span::sym::Return { - if let ty::Coroutine(_, args, _) = args.type_at(0).kind() { + if let ty::Coroutine(_, args) = args.type_at(0).kind() { let return_ty = args.as_coroutine().return_ty(); if !return_ty.is_ty_var() { return_ty.into() @@ -1697,8 +1697,27 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { }) } + fn pretty_print_bound_constness( + &mut self, + trait_ref: ty::TraitRef<'tcx>, + ) -> Result<(), PrintError> { + define_scoped_cx!(self); + + let Some(idx) = self.tcx().generics_of(trait_ref.def_id).host_effect_index else { + return Ok(()); + }; + let arg = trait_ref.args.const_at(idx); + + if arg == self.tcx().consts.false_ { + p!("const "); + } else if arg != self.tcx().consts.true_ && !arg.has_infer() { + p!("~const "); + } + Ok(()) + } + fn should_print_verbose(&self) -> bool { - self.tcx().sess.verbose() + self.tcx().sess.verbose_internals() } } @@ -2866,13 +2885,7 @@ define_print_and_forward_display! { } TraitPredPrintModifiersAndPath<'tcx> { - if let Some(idx) = cx.tcx().generics_of(self.0.trait_ref.def_id).host_effect_index - { - let arg = self.0.trait_ref.args.const_at(idx); - if arg != cx.tcx().consts.true_ && !arg.has_infer() { - p!("~const "); - } - } + p!(pretty_print_bound_constness(self.0.trait_ref)); if let ty::ImplPolarity::Negative = self.0.polarity { p!("!") } @@ -2905,11 +2918,7 @@ define_print_and_forward_display! { ty::TraitPredicate<'tcx> { p!(print(self.trait_ref.self_ty()), ": "); - if let Some(idx) = cx.tcx().generics_of(self.trait_ref.def_id).host_effect_index { - if self.trait_ref.args.const_at(idx) != cx.tcx().consts.true_ { - p!("~const "); - } - } + p!(pretty_print_bound_constness(self.trait_ref)); if let ty::ImplPolarity::Negative = self.polarity { p!("!"); } diff --git a/compiler/rustc_middle/src/ty/relate.rs b/compiler/rustc_middle/src/ty/relate.rs index 9d92f81db0bb0..8543bd0bbdd96 100644 --- a/compiler/rustc_middle/src/ty/relate.rs +++ b/compiler/rustc_middle/src/ty/relate.rs @@ -455,14 +455,12 @@ pub fn structurally_relate_tys<'tcx, R: TypeRelation<'tcx>>( Ok(Ty::new_dynamic(tcx, relation.relate(a_obj, b_obj)?, region_bound, a_repr)) } - (&ty::Coroutine(a_id, a_args, movability), &ty::Coroutine(b_id, b_args, _)) - if a_id == b_id => - { + (&ty::Coroutine(a_id, a_args), &ty::Coroutine(b_id, b_args)) if a_id == b_id => { // All Coroutine types with the same id represent // the (anonymous) type of the same coroutine expression. So // all of their regions should be equated. 
let args = relate_args_invariantly(relation, a_args, b_args)?; - Ok(Ty::new_coroutine(tcx, a_id, args, movability)) + Ok(Ty::new_coroutine(tcx, a_id, args)) } (&ty::CoroutineWitness(a_id, a_args), &ty::CoroutineWitness(b_id, b_args)) diff --git a/compiler/rustc_middle/src/ty/structural_impls.rs b/compiler/rustc_middle/src/ty/structural_impls.rs index 1c75d73e5528a..7c869adbd8329 100644 --- a/compiler/rustc_middle/src/ty/structural_impls.rs +++ b/compiler/rustc_middle/src/ty/structural_impls.rs @@ -578,9 +578,7 @@ impl<'tcx> TypeSuperFoldable> for Ty<'tcx> { ty::Ref(r, ty, mutbl) => { ty::Ref(r.try_fold_with(folder)?, ty.try_fold_with(folder)?, mutbl) } - ty::Coroutine(did, args, movability) => { - ty::Coroutine(did, args.try_fold_with(folder)?, movability) - } + ty::Coroutine(did, args) => ty::Coroutine(did, args.try_fold_with(folder)?), ty::CoroutineWitness(did, args) => { ty::CoroutineWitness(did, args.try_fold_with(folder)?) } @@ -630,7 +628,7 @@ impl<'tcx> TypeSuperVisitable> for Ty<'tcx> { r.visit_with(visitor)?; ty.visit_with(visitor) } - ty::Coroutine(_did, ref args, _) => args.visit_with(visitor), + ty::Coroutine(_did, ref args) => args.visit_with(visitor), ty::CoroutineWitness(_did, ref args) => args.visit_with(visitor), ty::Closure(_did, ref args) => args.visit_with(visitor), ty::Alias(_, ref data) => data.visit_with(visitor), diff --git a/compiler/rustc_middle/src/ty/sty.rs b/compiler/rustc_middle/src/ty/sty.rs index 5b9dff8e3f29d..38bf39bff908d 100644 --- a/compiler/rustc_middle/src/ty/sty.rs +++ b/compiler/rustc_middle/src/ty/sty.rs @@ -241,38 +241,36 @@ pub struct ClosureArgs<'tcx> { } /// Struct returned by `split()`. -pub struct ClosureArgsParts<'tcx, T> { +pub struct ClosureArgsParts<'tcx> { pub parent_args: &'tcx [GenericArg<'tcx>], - pub closure_kind_ty: T, - pub closure_sig_as_fn_ptr_ty: T, - pub tupled_upvars_ty: T, + pub closure_kind_ty: Ty<'tcx>, + pub closure_sig_as_fn_ptr_ty: Ty<'tcx>, + pub tupled_upvars_ty: Ty<'tcx>, } impl<'tcx> ClosureArgs<'tcx> { /// Construct `ClosureArgs` from `ClosureArgsParts`, containing `Args` /// for the closure parent, alongside additional closure-specific components. - pub fn new(tcx: TyCtxt<'tcx>, parts: ClosureArgsParts<'tcx, Ty<'tcx>>) -> ClosureArgs<'tcx> { + pub fn new(tcx: TyCtxt<'tcx>, parts: ClosureArgsParts<'tcx>) -> ClosureArgs<'tcx> { ClosureArgs { - args: tcx.mk_args_from_iter( - parts.parent_args.iter().copied().chain( - [parts.closure_kind_ty, parts.closure_sig_as_fn_ptr_ty, parts.tupled_upvars_ty] - .iter() - .map(|&ty| ty.into()), - ), - ), + args: tcx.mk_args_from_iter(parts.parent_args.iter().copied().chain([ + parts.closure_kind_ty.into(), + parts.closure_sig_as_fn_ptr_ty.into(), + parts.tupled_upvars_ty.into(), + ])), } } /// Divides the closure args into their respective components. /// The ordering assumed here must match that used by `ClosureArgs::new` above. - fn split(self) -> ClosureArgsParts<'tcx, GenericArg<'tcx>> { + fn split(self) -> ClosureArgsParts<'tcx> { match self.args[..] 
{ [ref parent_args @ .., closure_kind_ty, closure_sig_as_fn_ptr_ty, tupled_upvars_ty] => { ClosureArgsParts { parent_args, - closure_kind_ty, - closure_sig_as_fn_ptr_ty, - tupled_upvars_ty, + closure_kind_ty: closure_kind_ty.expect_ty(), + closure_sig_as_fn_ptr_ty: closure_sig_as_fn_ptr_ty.expect_ty(), + tupled_upvars_ty: tupled_upvars_ty.expect_ty(), } } _ => bug!("closure args missing synthetics"), @@ -285,7 +283,7 @@ impl<'tcx> ClosureArgs<'tcx> { /// Used primarily by `ty::print::pretty` to be able to handle closure /// types that haven't had their synthetic types substituted in. pub fn is_valid(self) -> bool { - self.args.len() >= 3 && matches!(self.split().tupled_upvars_ty.expect_ty().kind(), Tuple(_)) + self.args.len() >= 3 && matches!(self.split().tupled_upvars_ty.kind(), Tuple(_)) } /// Returns the substitutions of the closure's parent. @@ -309,14 +307,14 @@ impl<'tcx> ClosureArgs<'tcx> { /// Returns the tuple type representing the upvars for this closure. #[inline] pub fn tupled_upvars_ty(self) -> Ty<'tcx> { - self.split().tupled_upvars_ty.expect_ty() + self.split().tupled_upvars_ty } /// Returns the closure kind for this closure; may return a type /// variable during inference. To get the closure kind during /// inference, use `infcx.closure_kind(args)`. pub fn kind_ty(self) -> Ty<'tcx> { - self.split().closure_kind_ty.expect_ty() + self.split().closure_kind_ty } /// Returns the `fn` pointer type representing the closure signature for this @@ -325,7 +323,7 @@ impl<'tcx> ClosureArgs<'tcx> { // type is known at the time of the creation of `ClosureArgs`, // see `rustc_hir_analysis::check::closure`. pub fn sig_as_fn_ptr_ty(self) -> Ty<'tcx> { - self.split().closure_sig_as_fn_ptr_ty.expect_ty() + self.split().closure_sig_as_fn_ptr_ty } /// Returns the closure kind for this closure; only usable outside @@ -357,51 +355,42 @@ pub struct CoroutineArgs<'tcx> { pub args: GenericArgsRef<'tcx>, } -pub struct CoroutineArgsParts<'tcx, T> { +pub struct CoroutineArgsParts<'tcx> { pub parent_args: &'tcx [GenericArg<'tcx>], - pub resume_ty: T, - pub yield_ty: T, - pub return_ty: T, - pub witness: T, - pub tupled_upvars_ty: T, + pub resume_ty: Ty<'tcx>, + pub yield_ty: Ty<'tcx>, + pub return_ty: Ty<'tcx>, + pub witness: Ty<'tcx>, + pub tupled_upvars_ty: Ty<'tcx>, } impl<'tcx> CoroutineArgs<'tcx> { /// Construct `CoroutineArgs` from `CoroutineArgsParts`, containing `Args` /// for the coroutine parent, alongside additional coroutine-specific components. - pub fn new( - tcx: TyCtxt<'tcx>, - parts: CoroutineArgsParts<'tcx, Ty<'tcx>>, - ) -> CoroutineArgs<'tcx> { + pub fn new(tcx: TyCtxt<'tcx>, parts: CoroutineArgsParts<'tcx>) -> CoroutineArgs<'tcx> { CoroutineArgs { - args: tcx.mk_args_from_iter( - parts.parent_args.iter().copied().chain( - [ - parts.resume_ty, - parts.yield_ty, - parts.return_ty, - parts.witness, - parts.tupled_upvars_ty, - ] - .iter() - .map(|&ty| ty.into()), - ), - ), + args: tcx.mk_args_from_iter(parts.parent_args.iter().copied().chain([ + parts.resume_ty.into(), + parts.yield_ty.into(), + parts.return_ty.into(), + parts.witness.into(), + parts.tupled_upvars_ty.into(), + ])), } } /// Divides the coroutine args into their respective components. /// The ordering assumed here must match that used by `CoroutineArgs::new` above. - fn split(self) -> CoroutineArgsParts<'tcx, GenericArg<'tcx>> { + fn split(self) -> CoroutineArgsParts<'tcx> { match self.args[..] 
{ [ref parent_args @ .., resume_ty, yield_ty, return_ty, witness, tupled_upvars_ty] => { CoroutineArgsParts { parent_args, - resume_ty, - yield_ty, - return_ty, - witness, - tupled_upvars_ty, + resume_ty: resume_ty.expect_ty(), + yield_ty: yield_ty.expect_ty(), + return_ty: return_ty.expect_ty(), + witness: witness.expect_ty(), + tupled_upvars_ty: tupled_upvars_ty.expect_ty(), } } _ => bug!("coroutine args missing synthetics"), @@ -414,7 +403,7 @@ impl<'tcx> CoroutineArgs<'tcx> { /// Used primarily by `ty::print::pretty` to be able to handle coroutine /// types that haven't had their synthetic types substituted in. pub fn is_valid(self) -> bool { - self.args.len() >= 5 && matches!(self.split().tupled_upvars_ty.expect_ty().kind(), Tuple(_)) + self.args.len() >= 5 && matches!(self.split().tupled_upvars_ty.kind(), Tuple(_)) } /// Returns the substitutions of the coroutine's parent. @@ -428,7 +417,7 @@ impl<'tcx> CoroutineArgs<'tcx> { /// The state transformation MIR pass may only produce layouts which mention types /// in this tuple. Upvars are not counted here. pub fn witness(self) -> Ty<'tcx> { - self.split().witness.expect_ty() + self.split().witness } /// Returns an iterator over the list of types of captured paths by the coroutine. @@ -447,31 +436,32 @@ impl<'tcx> CoroutineArgs<'tcx> { /// Returns the tuple type representing the upvars for this coroutine. #[inline] pub fn tupled_upvars_ty(self) -> Ty<'tcx> { - self.split().tupled_upvars_ty.expect_ty() + self.split().tupled_upvars_ty } /// Returns the type representing the resume type of the coroutine. pub fn resume_ty(self) -> Ty<'tcx> { - self.split().resume_ty.expect_ty() + self.split().resume_ty } /// Returns the type representing the yield type of the coroutine. pub fn yield_ty(self) -> Ty<'tcx> { - self.split().yield_ty.expect_ty() + self.split().yield_ty } /// Returns the type representing the return type of the coroutine. pub fn return_ty(self) -> Ty<'tcx> { - self.split().return_ty.expect_ty() + self.split().return_ty } /// Returns the "coroutine signature", which consists of its resume, yield /// and return types. 
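The `new`/`split` pairs above rely on a fixed layout: the parent's generic args come first and the closure- or coroutine-specific synthetic types are appended in a fixed order, then pattern-matched back out (now as concrete `Ty`s rather than a generic `T`). A toy model of that pack-and-split layout, with stand-in types instead of `GenericArg`:

#[derive(Debug, Clone, Copy, PartialEq)]
enum Arg { Lifetime, Ty(&'static str) }

impl Arg {
    fn expect_ty(self) -> &'static str {
        match self { Arg::Ty(t) => t, Arg::Lifetime => panic!("expected a type") }
    }
}

struct Parts<'a> {
    parent_args: &'a [Arg],
    resume_ty: &'static str,
    yield_ty: &'static str,
    return_ty: &'static str,
}

// The ordering assumed here must match the order in which synthetics were appended.
fn split(args: &[Arg]) -> Parts<'_> {
    match args {
        [parent_args @ .., resume, yield_, ret] => Parts {
            parent_args,
            resume_ty: resume.expect_ty(),
            yield_ty: yield_.expect_ty(),
            return_ty: ret.expect_ty(),
        },
        _ => panic!("args missing synthetics"),
    }
}

fn main() {
    let args = [Arg::Lifetime, Arg::Ty("R"), Arg::Ty("Y"), Arg::Ty("Ret")];
    let parts = split(&args);
    assert_eq!(parts.parent_args.len(), 1);
    assert_eq!(parts.resume_ty, "R");
    assert_eq!(parts.yield_ty, "Y");
    assert_eq!(parts.return_ty, "Ret");
}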
pub fn sig(self) -> GenSig<'tcx> { + let parts = self.split(); ty::GenSig { - resume_ty: self.resume_ty(), - yield_ty: self.yield_ty(), - return_ty: self.return_ty(), + resume_ty: parts.resume_ty, + yield_ty: parts.yield_ty, + return_ty: parts.return_ty, } } } @@ -2168,14 +2158,13 @@ impl<'tcx> Ty<'tcx> { tcx: TyCtxt<'tcx>, def_id: DefId, coroutine_args: GenericArgsRef<'tcx>, - movability: hir::Movability, ) -> Ty<'tcx> { debug_assert_eq!( coroutine_args.len(), tcx.generics_of(tcx.typeck_root_def_id(def_id)).count() + 5, "coroutine constructed with incorrect number of substitutions" ); - Ty::new(tcx, Coroutine(def_id, coroutine_args, movability)) + Ty::new(tcx, Coroutine(def_id, coroutine_args)) } #[inline] @@ -2656,7 +2645,7 @@ impl<'tcx> Ty<'tcx> { pub fn variant_range(self, tcx: TyCtxt<'tcx>) -> Option> { match self.kind() { TyKind::Adt(adt, _) => Some(adt.variant_range()), - TyKind::Coroutine(def_id, args, _) => { + TyKind::Coroutine(def_id, args) => { Some(args.as_coroutine().variant_range(*def_id, tcx)) } _ => None, @@ -2677,7 +2666,7 @@ impl<'tcx> Ty<'tcx> { TyKind::Adt(adt, _) if adt.is_enum() => { Some(adt.discriminant_for_variant(tcx, variant_index)) } - TyKind::Coroutine(def_id, args, _) => { + TyKind::Coroutine(def_id, args) => { Some(args.as_coroutine().discriminant_for_variant(*def_id, tcx, variant_index)) } _ => None, @@ -2688,7 +2677,7 @@ impl<'tcx> Ty<'tcx> { pub fn discriminant_ty(self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { match self.kind() { ty::Adt(adt, _) if adt.is_enum() => adt.repr().discr_type().to_ty(tcx), - ty::Coroutine(_, args, _) => args.as_coroutine().discr_ty(tcx), + ty::Coroutine(_, args) => args.as_coroutine().discr_ty(tcx), ty::Param(_) | ty::Alias(..) | ty::Infer(ty::TyVar(_)) => { let assoc_items = tcx.associated_item_def_ids( @@ -2983,7 +2972,7 @@ impl<'tcx> Ty<'tcx> { | FnPtr(_) | Dynamic(_, _, _) | Closure(_, _) - | Coroutine(_, _, _) + | Coroutine(_, _) | CoroutineWitness(..) | Never | Tuple(_) => true, diff --git a/compiler/rustc_middle/src/ty/util.rs b/compiler/rustc_middle/src/ty/util.rs index 55dc72b19d3f4..ad2442a7963f0 100644 --- a/compiler/rustc_middle/src/ty/util.rs +++ b/compiler/rustc_middle/src/ty/util.rs @@ -547,7 +547,7 @@ impl<'tcx> TyCtxt<'tcx> { /// closure appears (and, sadly, a corresponding `NodeId`, since /// those are not yet phased out). The parent of the closure's /// `DefId` will also be the context where it appears. - pub fn is_closure(self, def_id: DefId) -> bool { + pub fn is_closure_or_coroutine(self, def_id: DefId) -> bool { matches!(self.def_kind(def_id), DefKind::Closure) } @@ -735,7 +735,7 @@ impl<'tcx> TyCtxt<'tcx> { hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _) => { "async gen closure" } - hir::CoroutineKind::Coroutine => "coroutine", + hir::CoroutineKind::Coroutine(_) => "coroutine", hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _) => { "gen closure" } @@ -759,7 +759,7 @@ impl<'tcx> TyCtxt<'tcx> { hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, ..) => "an", hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, ..) => "an", hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, ..) 
=> "a", - hir::CoroutineKind::Coroutine => "a", + hir::CoroutineKind::Coroutine(_) => "a", } } _ => def_kind.article(), diff --git a/compiler/rustc_middle/src/ty/walk.rs b/compiler/rustc_middle/src/ty/walk.rs index 20bdbcb5b7bb4..9050716db9d38 100644 --- a/compiler/rustc_middle/src/ty/walk.rs +++ b/compiler/rustc_middle/src/ty/walk.rs @@ -189,7 +189,7 @@ fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>) } ty::Adt(_, args) | ty::Closure(_, args) - | ty::Coroutine(_, args, _) + | ty::Coroutine(_, args) | ty::CoroutineWitness(_, args) | ty::FnDef(_, args) => { stack.extend(args.iter().rev()); diff --git a/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs b/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs index 4ce7f831c8711..e3dea2212df0d 100644 --- a/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs +++ b/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs @@ -61,7 +61,7 @@ impl<'tcx, 'body> ParseCtxt<'tcx, 'body> { @call(mir_drop, args) => { Ok(TerminatorKind::Drop { place: self.parse_place(args[0])?, - target: self.parse_block(args[1])?, + target: self.parse_return_to(args[1])?, unwind: self.parse_unwind_action(args[2])?, replace: false, }) @@ -104,6 +104,14 @@ impl<'tcx, 'body> ParseCtxt<'tcx, 'body> { ) } + fn parse_return_to(&self, expr_id: ExprId) -> PResult { + parse_by_kind!(self, expr_id, _, "return block", + @call(mir_return_to, args) => { + self.parse_block(args[0]) + }, + ) + } + fn parse_match(&self, arms: &[ArmId], span: Span) -> PResult { let Some((otherwise, rest)) = arms.split_last() else { return Err(ParseError { @@ -146,7 +154,7 @@ impl<'tcx, 'body> ParseCtxt<'tcx, 'body> { ExprKind::Assign { lhs, rhs } => (*lhs, *rhs), ); let destination = self.parse_place(destination)?; - let target = self.parse_block(args[1])?; + let target = self.parse_return_to(args[1])?; let unwind = self.parse_unwind_action(args[2])?; parse_by_kind!(self, call, _, "function call", diff --git a/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs b/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs index 04dcc6854c72f..f799be165ecde 100644 --- a/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs +++ b/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs @@ -387,8 +387,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { closure_id, args, ref upvars, - movability, ref fake_reads, + movability: _, }) => { // Convert the closure fake reads, if any, from `ExprRef` to mir `Place` // and push the fake reads. @@ -474,10 +474,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let result = match args { UpvarArgs::Coroutine(args) => { - // We implicitly set the discriminant to 0. See - // librustc_mir/transform/deaggregator.rs for details. 
- let movability = movability.unwrap(); - Box::new(AggregateKind::Coroutine(closure_id.to_def_id(), args, movability)) + Box::new(AggregateKind::Coroutine(closure_id.to_def_id(), args)) } UpvarArgs::Closure(args) => { Box::new(AggregateKind::Closure(closure_id.to_def_id(), args)) diff --git a/compiler/rustc_mir_build/src/build/mod.rs b/compiler/rustc_mir_build/src/build/mod.rs index a6336ec63b215..e0199fb876717 100644 --- a/compiler/rustc_mir_build/src/build/mod.rs +++ b/compiler/rustc_mir_build/src/build/mod.rs @@ -646,7 +646,7 @@ fn construct_error(tcx: TyCtxt<'_>, def_id: LocalDefId, guar: ErrorGuaranteed) - } DefKind::Closure if coroutine_kind.is_some() => { let coroutine_ty = tcx.type_of(def_id).instantiate_identity(); - let ty::Coroutine(_, args, _) = coroutine_ty.kind() else { + let ty::Coroutine(_, args) = coroutine_ty.kind() else { bug!("expected type of coroutine-like closure to be a coroutine") }; let args = args.as_coroutine(); @@ -813,7 +813,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let upvar_args = match closure_ty.kind() { ty::Closure(_, args) => ty::UpvarArgs::Closure(args), - ty::Coroutine(_, args, _) => ty::UpvarArgs::Coroutine(args), + ty::Coroutine(_, args) => ty::UpvarArgs::Coroutine(args), _ => return, }; diff --git a/compiler/rustc_mir_build/src/check_unsafety.rs b/compiler/rustc_mir_build/src/check_unsafety.rs index 133b924c33730..2e8b6c19ec784 100644 --- a/compiler/rustc_mir_build/src/check_unsafety.rs +++ b/compiler/rustc_mir_build/src/check_unsafety.rs @@ -175,7 +175,7 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for LayoutConstrainedPlaceVisitor<'a, 'tcx> { self.thir } - fn visit_expr(&mut self, expr: &Expr<'tcx>) { + fn visit_expr(&mut self, expr: &'a Expr<'tcx>) { match expr.kind { ExprKind::Field { lhs, .. } => { if let ty::Adt(adt_def, _) = self.thir[lhs].ty.kind() { @@ -206,7 +206,7 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> { self.thir } - fn visit_block(&mut self, block: &Block) { + fn visit_block(&mut self, block: &'a Block) { match block.safety_mode { // compiler-generated unsafe code should not count towards the usefulness of // an outer unsafe block @@ -234,7 +234,7 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> { } } - fn visit_pat(&mut self, pat: &Pat<'tcx>) { + fn visit_pat(&mut self, pat: &'a Pat<'tcx>) { if self.in_union_destructure { match pat.kind { // binding to a variable allows getting stuff out of variable @@ -319,7 +319,7 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> { } } - fn visit_expr(&mut self, expr: &Expr<'tcx>) { + fn visit_expr(&mut self, expr: &'a Expr<'tcx>) { // could we be in the LHS of an assignment to a field? match expr.kind { ExprKind::Field { .. 
} diff --git a/compiler/rustc_mir_build/src/thir/cx/expr.rs b/compiler/rustc_mir_build/src/thir/cx/expr.rs index 01f4678fa0963..8ec70c58c4618 100644 --- a/compiler/rustc_mir_build/src/thir/cx/expr.rs +++ b/compiler/rustc_mir_build/src/thir/cx/expr.rs @@ -552,8 +552,8 @@ impl<'tcx> Cx<'tcx> { let closure_ty = self.typeck_results().expr_ty(expr); let (def_id, args, movability) = match *closure_ty.kind() { ty::Closure(def_id, args) => (def_id, UpvarArgs::Closure(args), None), - ty::Coroutine(def_id, args, movability) => { - (def_id, UpvarArgs::Coroutine(args), Some(movability)) + ty::Coroutine(def_id, args) => { + (def_id, UpvarArgs::Coroutine(args), Some(tcx.coroutine_movability(def_id))) } _ => { span_bug!(expr.span, "closure expr w/o closure type: {:?}", closure_ty); diff --git a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs index 666b535e20963..0bcc2a315ff73 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs @@ -75,11 +75,11 @@ enum LetSource { WhileLet, } -struct MatchVisitor<'thir, 'p, 'tcx> { +struct MatchVisitor<'p, 'tcx> { tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, typeck_results: &'tcx ty::TypeckResults<'tcx>, - thir: &'thir Thir<'tcx>, + thir: &'p Thir<'tcx>, lint_level: HirId, let_source: LetSource, pattern_arena: &'p TypedArena>, @@ -92,13 +92,13 @@ struct MatchVisitor<'thir, 'p, 'tcx> { // Visitor for a thir body. This calls `check_match`, `check_let` and `check_let_chain` as // appropriate. -impl<'thir, 'tcx> Visitor<'thir, 'tcx> for MatchVisitor<'thir, '_, 'tcx> { - fn thir(&self) -> &'thir Thir<'tcx> { +impl<'p, 'tcx> Visitor<'p, 'tcx> for MatchVisitor<'p, 'tcx> { + fn thir(&self) -> &'p Thir<'tcx> { self.thir } #[instrument(level = "trace", skip(self))] - fn visit_arm(&mut self, arm: &Arm<'tcx>) { + fn visit_arm(&mut self, arm: &'p Arm<'tcx>) { self.with_lint_level(arm.lint_level, |this| { match arm.guard { Some(Guard::If(expr)) => { @@ -121,7 +121,7 @@ impl<'thir, 'tcx> Visitor<'thir, 'tcx> for MatchVisitor<'thir, '_, 'tcx> { } #[instrument(level = "trace", skip(self))] - fn visit_expr(&mut self, ex: &Expr<'tcx>) { + fn visit_expr(&mut self, ex: &'p Expr<'tcx>) { match ex.kind { ExprKind::Scope { value, lint_level, .. } => { self.with_lint_level(lint_level, |this| { @@ -174,7 +174,7 @@ impl<'thir, 'tcx> Visitor<'thir, 'tcx> for MatchVisitor<'thir, '_, 'tcx> { self.with_let_source(LetSource::None, |this| visit::walk_expr(this, ex)); } - fn visit_stmt(&mut self, stmt: &Stmt<'tcx>) { + fn visit_stmt(&mut self, stmt: &'p Stmt<'tcx>) { match stmt.kind { StmtKind::Let { box ref pattern, initializer, else_block, lint_level, span, .. @@ -195,7 +195,7 @@ impl<'thir, 'tcx> Visitor<'thir, 'tcx> for MatchVisitor<'thir, '_, 'tcx> { } } -impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> { +impl<'p, 'tcx> MatchVisitor<'p, 'tcx> { #[instrument(level = "trace", skip(self, f))] fn with_let_source(&mut self, let_source: LetSource, f: impl FnOnce(&mut Self)) { let old_let_source = self.let_source; @@ -224,7 +224,7 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> { /// subexpressions we are not handling ourselves. fn visit_land( &mut self, - ex: &Expr<'tcx>, + ex: &'p Expr<'tcx>, accumulator: &mut Vec>, ) -> Result<(), ErrorGuaranteed> { match ex.kind { @@ -251,7 +251,7 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> { /// expression. This must call `visit_expr` on the subexpressions we are not handling ourselves. 
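The THIR visitor signature changes above thread the body's lifetime through the visit methods: taking `&'thir Expr<'tcx>` (or `&'p ...` in the match checker) lets an implementation keep the visited node around after the call returns, which a fresh anonymous lifetime would not allow. A toy illustration of the difference, using plain data in place of THIR nodes:

// With `&'data` in the signature the implementer may retain the reference
// beyond the call; with `&i64` (a fresh lifetime per call) it could not.
trait Visitor<'data>: Sized {
    fn data(&self) -> &'data [i64];
    fn visit_item(&mut self, item: &'data i64) {
        walk_item(self, item);
    }
}

fn walk_item<'data, V: Visitor<'data>>(_visitor: &mut V, _item: &'data i64) {
    // A real walker would recurse into child nodes here.
}

struct Collector<'data> {
    data: &'data [i64],
    seen: Vec<&'data i64>, // references that outlive each `visit_item` call
}

impl<'data> Visitor<'data> for Collector<'data> {
    fn data(&self) -> &'data [i64] {
        self.data
    }
    fn visit_item(&mut self, item: &'data i64) {
        self.seen.push(item);
        walk_item(self, item);
    }
}

fn main() {
    let body = vec![1, 2, 3];
    let mut collector = Collector { data: &body, seen: Vec::new() };
    for item in collector.data() {
        collector.visit_item(item);
    }
    assert_eq!(collector.seen.len(), 3);
}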
fn visit_land_rhs( &mut self, - ex: &Expr<'tcx>, + ex: &'p Expr<'tcx>, ) -> Result, ErrorGuaranteed> { match ex.kind { ExprKind::Scope { value, lint_level, .. } => { @@ -276,7 +276,7 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> { fn lower_pattern( &mut self, cx: &MatchCheckCtxt<'p, 'tcx>, - pat: &Pat<'tcx>, + pat: &'p Pat<'tcx>, ) -> Result<&'p DeconstructedPat<'p, 'tcx>, ErrorGuaranteed> { if let Err(err) = pat.pat_error_reported() { self.error = Err(err); @@ -395,7 +395,7 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> { } #[instrument(level = "trace", skip(self))] - fn check_let(&mut self, pat: &Pat<'tcx>, scrutinee: Option, span: Span) { + fn check_let(&mut self, pat: &'p Pat<'tcx>, scrutinee: Option, span: Span) { assert!(self.let_source != LetSource::None); let scrut = scrutinee.map(|id| &self.thir[id]); if let LetSource::PlainLet = self.let_source { @@ -547,7 +547,7 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> { fn analyze_binding( &mut self, - pat: &Pat<'tcx>, + pat: &'p Pat<'tcx>, refutability: RefutableFlag, scrut: Option<&Expr<'tcx>>, ) -> Result<(MatchCheckCtxt<'p, 'tcx>, UsefulnessReport<'p, 'tcx>), ErrorGuaranteed> { @@ -560,7 +560,7 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> { fn is_let_irrefutable( &mut self, - pat: &Pat<'tcx>, + pat: &'p Pat<'tcx>, scrut: Option<&Expr<'tcx>>, ) -> Result { let (cx, report) = self.analyze_binding(pat, Refutable, scrut)?; @@ -575,7 +575,7 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> { #[instrument(level = "trace", skip(self))] fn check_binding_is_irrefutable( &mut self, - pat: &Pat<'tcx>, + pat: &'p Pat<'tcx>, origin: &str, scrut: Option<&Expr<'tcx>>, sp: Option, @@ -677,7 +677,7 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> { /// - `x @ Some(ref mut? y)`. /// /// This analysis is *not* subsumed by NLL. -fn check_borrow_conflicts_in_at_patterns<'tcx>(cx: &MatchVisitor<'_, '_, 'tcx>, pat: &Pat<'tcx>) { +fn check_borrow_conflicts_in_at_patterns<'tcx>(cx: &MatchVisitor<'_, 'tcx>, pat: &Pat<'tcx>) { // Extract `sub` in `binding @ sub`. let PatKind::Binding { name, mode, ty, subpattern: Some(box ref sub), .. } = pat.kind else { return; @@ -772,7 +772,7 @@ fn check_borrow_conflicts_in_at_patterns<'tcx>(cx: &MatchVisitor<'_, '_, 'tcx>, } fn check_for_bindings_named_same_as_variants( - cx: &MatchVisitor<'_, '_, '_>, + cx: &MatchVisitor<'_, '_>, pat: &Pat<'_>, rf: RefutableFlag, ) { @@ -856,21 +856,21 @@ fn report_arm_reachability<'p, 'tcx>( for (arm, is_useful) in report.arm_usefulness.iter() { match is_useful { Usefulness::Redundant => { - report_unreachable_pattern(*arm.pat.data().unwrap(), arm.arm_data, catchall) + report_unreachable_pattern(arm.pat.data().unwrap().span, arm.arm_data, catchall) } Usefulness::Useful(redundant_subpats) if redundant_subpats.is_empty() => {} // The arm is reachable, but contains redundant subpatterns (from or-patterns). Usefulness::Useful(redundant_subpats) => { let mut redundant_subpats = redundant_subpats.clone(); // Emit lints in the order in which they occur in the file. 
- redundant_subpats.sort_unstable_by_key(|pat| pat.data()); + redundant_subpats.sort_unstable_by_key(|pat| pat.data().unwrap().span); for pat in redundant_subpats { - report_unreachable_pattern(*pat.data().unwrap(), arm.arm_data, None); + report_unreachable_pattern(pat.data().unwrap().span, arm.arm_data, None); } } } if !arm.has_guard && catchall.is_none() && pat_is_catchall(arm.pat) { - catchall = Some(*arm.pat.data().unwrap()); + catchall = Some(arm.pat.data().unwrap().span); } } } diff --git a/compiler/rustc_mir_dataflow/src/elaborate_drops.rs b/compiler/rustc_mir_dataflow/src/elaborate_drops.rs index 958fa0d17cd39..c9930565186f5 100644 --- a/compiler/rustc_mir_dataflow/src/elaborate_drops.rs +++ b/compiler/rustc_mir_dataflow/src/elaborate_drops.rs @@ -862,7 +862,7 @@ where // This should only happen for the self argument on the resume function. // It effectively only contains upvars until the coroutine transformation runs. // See librustc_body/transform/coroutine.rs for more details. - ty::Coroutine(_, args, _) => self.open_drop_for_tuple(args.as_coroutine().upvar_tys()), + ty::Coroutine(_, args) => self.open_drop_for_tuple(args.as_coroutine().upvar_tys()), ty::Tuple(fields) => self.open_drop_for_tuple(fields), ty::Adt(def, args) => self.open_drop_for_adt(*def, args), ty::Dynamic(..) => self.complete_drop(self.succ, self.unwind), diff --git a/compiler/rustc_mir_dataflow/src/move_paths/builder.rs b/compiler/rustc_mir_dataflow/src/move_paths/builder.rs index ccf3dc7941fed..cae357653087f 100644 --- a/compiler/rustc_mir_dataflow/src/move_paths/builder.rs +++ b/compiler/rustc_mir_dataflow/src/move_paths/builder.rs @@ -155,7 +155,7 @@ impl<'b, 'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> Gatherer<'b, 'a, 'tcx, F> { | ty::FnPtr(_) | ty::Dynamic(_, _, _) | ty::Closure(_, _) - | ty::Coroutine(_, _, _) + | ty::Coroutine(_, _) | ty::CoroutineWitness(..) | ty::Never | ty::Tuple(_) @@ -177,7 +177,7 @@ impl<'b, 'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> Gatherer<'b, 'a, 'tcx, F> { union_path.get_or_insert(base); } } - ty::Closure(_, _) | ty::Coroutine(_, _, _) | ty::Tuple(_) => (), + ty::Closure(_, _) | ty::Coroutine(_, _) | ty::Tuple(_) => (), ty::Bool | ty::Char | ty::Int(_) diff --git a/compiler/rustc_mir_transform/src/check_unsafety.rs b/compiler/rustc_mir_transform/src/check_unsafety.rs index 23726e49f4dd6..d94d96c1115ca 100644 --- a/compiler/rustc_mir_transform/src/check_unsafety.rs +++ b/compiler/rustc_mir_transform/src/check_unsafety.rs @@ -128,7 +128,7 @@ impl<'tcx> Visitor<'tcx> for UnsafetyChecker<'_, 'tcx> { ), } } - &AggregateKind::Closure(def_id, _) | &AggregateKind::Coroutine(def_id, _, _) => { + &AggregateKind::Closure(def_id, _) | &AggregateKind::Coroutine(def_id, _) => { let def_id = def_id.expect_local(); let UnsafetyCheckResult { violations, used_unsafe_blocks, .. } = self.tcx.unsafety_check_result(def_id); diff --git a/compiler/rustc_mir_transform/src/const_prop.rs b/compiler/rustc_mir_transform/src/const_prop.rs index e66d5e0a9f99e..c5824c3077028 100644 --- a/compiler/rustc_mir_transform/src/const_prop.rs +++ b/compiler/rustc_mir_transform/src/const_prop.rs @@ -1,29 +1,22 @@ //! Propagates constants for early reporting of statically known //! 
assertion failures -use either::Right; -use rustc_const_eval::ReportErrorExt; +use rustc_const_eval::interpret::{ + self, compile_time_machine, AllocId, ConstAllocation, FnArg, Frame, ImmTy, InterpCx, + InterpResult, OpTy, PlaceTy, Pointer, +}; use rustc_data_structures::fx::FxHashSet; -use rustc_hir::def::DefKind; use rustc_index::bit_set::BitSet; -use rustc_index::{IndexSlice, IndexVec}; -use rustc_middle::mir::visit::{ - MutVisitor, MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor, -}; +use rustc_index::IndexVec; +use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor}; use rustc_middle::mir::*; use rustc_middle::query::TyCtxtAt; -use rustc_middle::ty::layout::{LayoutError, LayoutOf, LayoutOfHelpers, TyAndLayout}; -use rustc_middle::ty::{self, GenericArgs, Instance, ParamEnv, Ty, TyCtxt, TypeVisitableExt}; -use rustc_span::{def_id::DefId, Span}; -use rustc_target::abi::{self, HasDataLayout, Size, TargetDataLayout}; +use rustc_middle::ty::layout::TyAndLayout; +use rustc_middle::ty::{self, ParamEnv, TyCtxt}; +use rustc_span::def_id::DefId; +use rustc_target::abi::Size; use rustc_target::spec::abi::Abi as CallAbi; -use crate::dataflow_const_prop::Patch; -use rustc_const_eval::interpret::{ - self, compile_time_machine, AllocId, ConstAllocation, FnArg, Frame, ImmTy, Immediate, InterpCx, - InterpResult, MemoryKind, OpTy, PlaceTy, Pointer, Scalar, StackPopCleanup, -}; - /// The maximum number of bytes that we'll allocate space for a local or the return value. /// Needed for #66397, because otherwise we eval into large places and that can cause OOM or just /// Severely regress performance. @@ -56,62 +49,7 @@ pub(crate) macro throw_machine_stop_str($($tt:tt)*) {{ throw_machine_stop!(Zst) }} -pub struct ConstProp; - -impl<'tcx> MirPass<'tcx> for ConstProp { - fn is_enabled(&self, sess: &rustc_session::Session) -> bool { - sess.mir_opt_level() >= 2 - } - - #[instrument(skip(self, tcx), level = "debug")] - fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { - // will be evaluated by miri and produce its errors there - if body.source.promoted.is_some() { - return; - } - - let def_id = body.source.def_id().expect_local(); - let def_kind = tcx.def_kind(def_id); - let is_fn_like = def_kind.is_fn_like(); - let is_assoc_const = def_kind == DefKind::AssocConst; - - // Only run const prop on functions, methods, closures and associated constants - if !is_fn_like && !is_assoc_const { - // skip anon_const/statics/consts because they'll be evaluated by miri anyway - trace!("ConstProp skipped for {:?}", def_id); - return; - } - - // FIXME(welseywiser) const prop doesn't work on coroutines because of query cycles - // computing their layout. - if tcx.is_coroutine(def_id.to_def_id()) { - trace!("ConstProp skipped for coroutine {:?}", def_id); - return; - } - - trace!("ConstProp starting for {:?}", def_id); - - // FIXME(oli-obk, eddyb) Optimize locals (or even local paths) to hold - // constants, instead of just checking for const-folding succeeding. - // That would require a uniform one-def no-mutation analysis - // and RPO (or recursing when needing the value of a local). - let mut optimization_finder = ConstPropagator::new(body, tcx); - - // Traverse the body in reverse post-order, to ensure that `FullConstProp` locals are - // assigned before being read. 
- for &bb in body.basic_blocks.reverse_postorder() { - let data = &body.basic_blocks[bb]; - optimization_finder.visit_basic_block_data(bb, data); - } - - let mut patch = optimization_finder.patch; - patch.visit_body_preserves_cfg(body); - - trace!("ConstProp done for {:?}", def_id); - } -} - -pub struct ConstPropMachine<'mir, 'tcx> { +pub(crate) struct ConstPropMachine<'mir, 'tcx> { /// The virtual call stack. stack: Vec>, pub written_only_inside_own_block_locals: FxHashSet, @@ -267,297 +205,6 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx> } } -/// Finds optimization opportunities on the MIR. -struct ConstPropagator<'mir, 'tcx> { - ecx: InterpCx<'mir, 'tcx, ConstPropMachine<'mir, 'tcx>>, - tcx: TyCtxt<'tcx>, - param_env: ParamEnv<'tcx>, - local_decls: &'mir IndexSlice>, - patch: Patch<'tcx>, -} - -impl<'tcx> LayoutOfHelpers<'tcx> for ConstPropagator<'_, 'tcx> { - type LayoutOfResult = Result, LayoutError<'tcx>>; - - #[inline] - fn handle_layout_err(&self, err: LayoutError<'tcx>, _: Span, _: Ty<'tcx>) -> LayoutError<'tcx> { - err - } -} - -impl HasDataLayout for ConstPropagator<'_, '_> { - #[inline] - fn data_layout(&self) -> &TargetDataLayout { - &self.tcx.data_layout - } -} - -impl<'tcx> ty::layout::HasTyCtxt<'tcx> for ConstPropagator<'_, 'tcx> { - #[inline] - fn tcx(&self) -> TyCtxt<'tcx> { - self.tcx - } -} - -impl<'tcx> ty::layout::HasParamEnv<'tcx> for ConstPropagator<'_, 'tcx> { - #[inline] - fn param_env(&self) -> ty::ParamEnv<'tcx> { - self.param_env - } -} - -impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { - fn new(body: &'mir Body<'tcx>, tcx: TyCtxt<'tcx>) -> ConstPropagator<'mir, 'tcx> { - let def_id = body.source.def_id(); - let args = &GenericArgs::identity_for_item(tcx, def_id); - let param_env = tcx.param_env_reveal_all_normalized(def_id); - - let can_const_prop = CanConstProp::check(tcx, param_env, body); - let mut ecx = InterpCx::new( - tcx, - tcx.def_span(def_id), - param_env, - ConstPropMachine::new(can_const_prop), - ); - - let ret_layout = ecx - .layout_of(body.bound_return_ty().instantiate(tcx, args)) - .ok() - // Don't bother allocating memory for large values. - // I don't know how return types can seem to be unsized but this happens in the - // `type/type-unsatisfiable.rs` test. - .filter(|ret_layout| { - ret_layout.is_sized() && ret_layout.size < Size::from_bytes(MAX_ALLOC_LIMIT) - }) - .unwrap_or_else(|| ecx.layout_of(tcx.types.unit).unwrap()); - - let ret = ecx - .allocate(ret_layout, MemoryKind::Stack) - .expect("couldn't perform small allocation") - .into(); - - ecx.push_stack_frame( - Instance::new(def_id, args), - body, - &ret, - StackPopCleanup::Root { cleanup: false }, - ) - .expect("failed to push initial stack frame"); - - for local in body.local_decls.indices() { - // Mark everything initially live. - // This is somewhat dicey since some of them might be unsized and it is incoherent to - // mark those as live... We rely on `local_to_place`/`local_to_op` in the interpreter - // stopping us before those unsized immediates can cause issues deeper in the - // interpreter. - ecx.frame_mut().locals[local].make_live_uninit(); - } - - let patch = Patch::new(tcx); - ConstPropagator { ecx, tcx, param_env, local_decls: &body.local_decls, patch } - } - - fn get_const(&self, place: Place<'tcx>) -> Option> { - let op = match self.ecx.eval_place_to_op(place, None) { - Ok(op) => { - if op - .as_mplace_or_imm() - .right() - .is_some_and(|imm| matches!(*imm, Immediate::Uninit)) - { - // Make sure nobody accidentally uses this value. 
- return None; - } - op - } - Err(e) => { - trace!("get_const failed: {:?}", e.into_kind().debug()); - return None; - } - }; - - // Try to read the local as an immediate so that if it is representable as a scalar, we can - // handle it as such, but otherwise, just return the value as is. - Some(match self.ecx.read_immediate_raw(&op) { - Ok(Right(imm)) => imm.into(), - _ => op, - }) - } - - /// Remove `local` from the pool of `Locals`. Allows writing to them, - /// but not reading from them anymore. - fn remove_const(ecx: &mut InterpCx<'mir, 'tcx, ConstPropMachine<'mir, 'tcx>>, local: Local) { - ecx.frame_mut().locals[local].make_live_uninit(); - ecx.machine.written_only_inside_own_block_locals.remove(&local); - } - - fn check_rvalue(&mut self, rvalue: &Rvalue<'tcx>) -> Option<()> { - // Perform any special handling for specific Rvalue types. - // Generally, checks here fall into one of two categories: - // 1. Additional checking to provide useful lints to the user - // - In this case, we will do some validation and then fall through to the - // end of the function which evals the assignment. - // 2. Working around bugs in other parts of the compiler - // - In this case, we'll return `None` from this function to stop evaluation. - match rvalue { - // Do not try creating references (#67862) - Rvalue::AddressOf(_, place) | Rvalue::Ref(_, _, place) => { - trace!("skipping AddressOf | Ref for {:?}", place); - - // This may be creating mutable references or immutable references to cells. - // If that happens, the pointed to value could be mutated via that reference. - // Since we aren't tracking references, the const propagator loses track of what - // value the local has right now. - // Thus, all locals that have their reference taken - // must not take part in propagation. - Self::remove_const(&mut self.ecx, place.local); - - return None; - } - Rvalue::ThreadLocalRef(def_id) => { - trace!("skipping ThreadLocalRef({:?})", def_id); - - return None; - } - // There's no other checking to do at this time. - Rvalue::Aggregate(..) - | Rvalue::Use(..) - | Rvalue::CopyForDeref(..) - | Rvalue::Repeat(..) - | Rvalue::Len(..) - | Rvalue::Cast(..) - | Rvalue::ShallowInitBox(..) - | Rvalue::Discriminant(..) - | Rvalue::NullaryOp(..) - | Rvalue::UnaryOp(..) - | Rvalue::BinaryOp(..) - | Rvalue::CheckedBinaryOp(..) 
=> {} - } - - // FIXME we need to revisit this for #67176 - if rvalue.has_param() { - trace!("skipping, has param"); - return None; - } - if !rvalue - .ty(&self.ecx.frame().body.local_decls, *self.ecx.tcx) - .is_sized(*self.ecx.tcx, self.param_env) - { - // the interpreter doesn't support unsized locals (only unsized arguments), - // but rustc does (in a kinda broken way), so we have to skip them here - return None; - } - - Some(()) - } - - // Attempt to use algebraic identities to eliminate constant expressions - fn eval_rvalue_with_identities( - &mut self, - rvalue: &Rvalue<'tcx>, - place: Place<'tcx>, - ) -> Option<()> { - match rvalue { - Rvalue::BinaryOp(op, box (left, right)) - | Rvalue::CheckedBinaryOp(op, box (left, right)) => { - let l = self.ecx.eval_operand(left, None).and_then(|x| self.ecx.read_immediate(&x)); - let r = - self.ecx.eval_operand(right, None).and_then(|x| self.ecx.read_immediate(&x)); - - let const_arg = match (l, r) { - (Ok(x), Err(_)) | (Err(_), Ok(x)) => x, // exactly one side is known - (Err(_), Err(_)) => return None, // neither side is known - (Ok(_), Ok(_)) => return self.ecx.eval_rvalue_into_place(rvalue, place).ok(), // both sides are known - }; - - if !matches!(const_arg.layout.abi, abi::Abi::Scalar(..)) { - // We cannot handle Scalar Pair stuff. - // No point in calling `eval_rvalue_into_place`, since only one side is known - return None; - } - - let arg_value = const_arg.to_scalar().to_bits(const_arg.layout.size).ok()?; - let dest = self.ecx.eval_place(place).ok()?; - - match op { - BinOp::BitAnd if arg_value == 0 => { - self.ecx.write_immediate(*const_arg, &dest).ok() - } - BinOp::BitOr - if arg_value == const_arg.layout.size.truncate(u128::MAX) - || (const_arg.layout.ty.is_bool() && arg_value == 1) => - { - self.ecx.write_immediate(*const_arg, &dest).ok() - } - BinOp::Mul if const_arg.layout.ty.is_integral() && arg_value == 0 => { - if let Rvalue::CheckedBinaryOp(_, _) = rvalue { - let val = Immediate::ScalarPair( - const_arg.to_scalar(), - Scalar::from_bool(false), - ); - self.ecx.write_immediate(val, &dest).ok() - } else { - self.ecx.write_immediate(*const_arg, &dest).ok() - } - } - _ => None, - } - } - _ => self.ecx.eval_rvalue_into_place(rvalue, place).ok(), - } - } - - fn replace_with_const(&mut self, place: Place<'tcx>) -> Option> { - // This will return None if the above `const_prop` invocation only "wrote" a - // type whose creation requires no write. E.g. a coroutine whose initial state - // consists solely of uninitialized memory (so it doesn't capture any locals). - let value = self.get_const(place)?; - if !self.tcx.consider_optimizing(|| format!("ConstantPropagation - {value:?}")) { - return None; - } - trace!("replacing {:?} with {:?}", place, value); - - // FIXME: figure out what to do when read_immediate_raw fails - let imm = self.ecx.read_immediate_raw(&value).ok()?; - - let Right(imm) = imm else { return None }; - match *imm { - Immediate::Scalar(scalar) if scalar.try_to_int().is_ok() => { - Some(Const::from_scalar(self.tcx, scalar, value.layout.ty)) - } - Immediate::ScalarPair(l, r) if l.try_to_int().is_ok() && r.try_to_int().is_ok() => { - let alloc_id = self - .ecx - .intern_with_temp_alloc(value.layout, |ecx, dest| { - ecx.write_immediate(*imm, dest) - }) - .ok()?; - - Some(Const::Val( - ConstValue::Indirect { alloc_id, offset: Size::ZERO }, - value.layout.ty, - )) - } - // Scalars or scalar pairs that contain undef values are assumed to not have - // successfully evaluated and are thus not propagated. 
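
For reference, the `eval_rvalue_with_identities` helper removed above folded a binary operation even when only one operand was known, using the operand-independent identities `x & 0 == 0`, `x | !0 == !0`, and `x * 0 == 0`. The following standalone sketch (not part of this patch, and not rustc's implementation) shows the same idea on plain `u64` values instead of interpreter immediates:

```rust
#[derive(Clone, Copy)]
enum Op {
    BitAnd,
    BitOr,
    Mul,
}

/// Try to fold `known <op> unknown` (in either operand position) when the
/// result does not depend on the unknown side.
fn fold_with_identity(op: Op, known: u64, bits: u32) -> Option<u64> {
    // All-ones value for the operand's bit width (e.g. 0xFFFF_FFFF for 32 bits).
    let all_ones = if bits >= 64 { u64::MAX } else { (1u64 << bits) - 1 };
    match op {
        // x & 0 == 0, whatever x is.
        Op::BitAnd if known == 0 => Some(0),
        // x | !0 == !0 (for this bit width), whatever x is.
        Op::BitOr if known == all_ones => Some(all_ones),
        // x * 0 == 0, whatever x is (overflow flags are ignored in this sketch).
        Op::Mul if known == 0 => Some(0),
        // Otherwise the result genuinely depends on the unknown operand.
        _ => None,
    }
}

fn main() {
    assert_eq!(fold_with_identity(Op::BitAnd, 0, 32), Some(0));
    assert_eq!(fold_with_identity(Op::BitOr, u64::from(u32::MAX), 32), Some(u64::from(u32::MAX)));
    assert_eq!(fold_with_identity(Op::Mul, 0, 8), Some(0));
    assert_eq!(fold_with_identity(Op::BitOr, 1, 32), None);
}
```
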
- _ => None, - } - } - - fn ensure_not_propagated(&self, local: Local) { - if cfg!(debug_assertions) { - assert!( - self.get_const(local.into()).is_none() - || self - .layout_of(self.local_decls[local].ty) - .map_or(true, |layout| layout.is_zst()), - "failed to remove values for `{local:?}`, value={:?}", - self.get_const(local.into()), - ) - } - } -} - /// The mode that `ConstProp` is allowed to run in for a given `Local`. #[derive(Clone, Copy, Debug, PartialEq)] pub enum ConstPropMode { @@ -677,154 +324,3 @@ impl<'tcx> Visitor<'tcx> for CanConstProp { } } } - -impl<'tcx> Visitor<'tcx> for ConstPropagator<'_, 'tcx> { - fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) { - self.super_operand(operand, location); - if let Some(place) = operand.place() - && let Some(value) = self.replace_with_const(place) - { - self.patch.before_effect.insert((location, place), value); - } - } - - fn visit_projection_elem( - &mut self, - _: PlaceRef<'tcx>, - elem: PlaceElem<'tcx>, - _: PlaceContext, - location: Location, - ) { - if let PlaceElem::Index(local) = elem - && let Some(value) = self.replace_with_const(local.into()) - { - self.patch.before_effect.insert((location, local.into()), value); - } - } - - fn visit_assign(&mut self, place: &Place<'tcx>, rvalue: &Rvalue<'tcx>, location: Location) { - self.super_assign(place, rvalue, location); - - let Some(()) = self.check_rvalue(rvalue) else { - trace!("rvalue check failed, removing const"); - Self::remove_const(&mut self.ecx, place.local); - return; - }; - - match self.ecx.machine.can_const_prop[place.local] { - // Do nothing if the place is indirect. - _ if place.is_indirect() => {} - ConstPropMode::NoPropagation => self.ensure_not_propagated(place.local), - ConstPropMode::OnlyInsideOwnBlock | ConstPropMode::FullConstProp => { - if let Some(()) = self.eval_rvalue_with_identities(rvalue, *place) { - // If this was already an evaluated constant, keep it. - if let Rvalue::Use(Operand::Constant(c)) = rvalue - && let Const::Val(..) = c.const_ - { - trace!( - "skipping replace of Rvalue::Use({:?} because it is already a const", - c - ); - } else if let Some(operand) = self.replace_with_const(*place) { - self.patch.assignments.insert(location, operand); - } - } else { - // Const prop failed, so erase the destination, ensuring that whatever happens - // from here on, does not know about the previous value. - // This is important in case we have - // ```rust - // let mut x = 42; - // x = SOME_MUTABLE_STATIC; - // // x must now be uninit - // ``` - // FIXME: we overzealously erase the entire local, because that's easier to - // implement. - trace!( - "propagation into {:?} failed. - Nuking the entire site from orbit, it's the only way to be sure", - place, - ); - Self::remove_const(&mut self.ecx, place.local); - } - } - } - } - - fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) { - trace!("visit_statement: {:?}", statement); - - // We want to evaluate operands before any change to the assigned-to value, - // so we recurse first. - self.super_statement(statement, location); - - match statement.kind { - StatementKind::SetDiscriminant { ref place, .. } => { - match self.ecx.machine.can_const_prop[place.local] { - // Do nothing if the place is indirect. 
- _ if place.is_indirect() => {} - ConstPropMode::NoPropagation => self.ensure_not_propagated(place.local), - ConstPropMode::FullConstProp | ConstPropMode::OnlyInsideOwnBlock => { - if self.ecx.statement(statement).is_ok() { - trace!("propped discriminant into {:?}", place); - } else { - Self::remove_const(&mut self.ecx, place.local); - } - } - } - } - StatementKind::StorageLive(local) => { - Self::remove_const(&mut self.ecx, local); - } - // We do not need to mark dead locals as such. For `FullConstProp` locals, - // this allows to propagate the single assigned value in this case: - // ``` - // let x = SOME_CONST; - // if a { - // f(copy x); - // StorageDead(x); - // } else { - // g(copy x); - // StorageDead(x); - // } - // ``` - // - // This may propagate a constant where the local would be uninit or dead. - // In both cases, this does not matter, as those reads would be UB anyway. - _ => {} - } - } - - fn visit_basic_block_data(&mut self, block: BasicBlock, data: &BasicBlockData<'tcx>) { - self.super_basic_block_data(block, data); - - // We remove all Locals which are restricted in propagation to their containing blocks and - // which were modified in the current block. - // Take it out of the ecx so we can get a mutable reference to the ecx for `remove_const`. - let mut written_only_inside_own_block_locals = - std::mem::take(&mut self.ecx.machine.written_only_inside_own_block_locals); - - // This loop can get very hot for some bodies: it check each local in each bb. - // To avoid this quadratic behaviour, we only clear the locals that were modified inside - // the current block. - for local in written_only_inside_own_block_locals.drain() { - debug_assert_eq!( - self.ecx.machine.can_const_prop[local], - ConstPropMode::OnlyInsideOwnBlock - ); - Self::remove_const(&mut self.ecx, local); - } - self.ecx.machine.written_only_inside_own_block_locals = - written_only_inside_own_block_locals; - - if cfg!(debug_assertions) { - for (local, &mode) in self.ecx.machine.can_const_prop.iter_enumerated() { - match mode { - ConstPropMode::FullConstProp => {} - ConstPropMode::NoPropagation | ConstPropMode::OnlyInsideOwnBlock => { - self.ensure_not_propagated(local); - } - } - } - } - } -} diff --git a/compiler/rustc_mir_transform/src/coroutine.rs b/compiler/rustc_mir_transform/src/coroutine.rs index d1d5b72af7063..ce1a36cf67021 100644 --- a/compiler/rustc_mir_transform/src/coroutine.rs +++ b/compiler/rustc_mir_transform/src/coroutine.rs @@ -257,7 +257,7 @@ impl<'tcx> TransformVisitor<'tcx> { CoroutineKind::Desugared(CoroutineDesugaring::Async, _) => { span_bug!(body.span, "`Future`s are not fused inherently") } - CoroutineKind::Coroutine => span_bug!(body.span, "`Coroutine`s cannot be fused"), + CoroutineKind::Coroutine(_) => span_bug!(body.span, "`Coroutine`s cannot be fused"), // `gen` continues return `None` CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => { let option_def_id = self.tcx.require_lang_item(LangItem::Option, None); @@ -396,7 +396,7 @@ impl<'tcx> TransformVisitor<'tcx> { Rvalue::Use(val) } } - CoroutineKind::Coroutine => { + CoroutineKind::Coroutine(_) => { let coroutine_state_def_id = self.tcx.require_lang_item(LangItem::CoroutineState, None); let args = self.tcx.mk_args(&[self.old_yield_ty.into(), self.old_ret_ty.into()]); @@ -1417,19 +1417,18 @@ fn create_coroutine_resume_function<'tcx>( cases.insert(0, (UNRESUMED, START_BLOCK)); // Panic when resumed on the returned or poisoned state - let coroutine_kind = body.coroutine_kind().unwrap(); - if can_unwind { cases.insert( 1, - 
(POISONED, insert_panic_block(tcx, body, ResumedAfterPanic(coroutine_kind))), + (POISONED, insert_panic_block(tcx, body, ResumedAfterPanic(transform.coroutine_kind))), ); } if can_return { - let block = match coroutine_kind { - CoroutineKind::Desugared(CoroutineDesugaring::Async, _) | CoroutineKind::Coroutine => { - insert_panic_block(tcx, body, ResumedAfterReturn(coroutine_kind)) + let block = match transform.coroutine_kind { + CoroutineKind::Desugared(CoroutineDesugaring::Async, _) + | CoroutineKind::Coroutine(_) => { + insert_panic_block(tcx, body, ResumedAfterReturn(transform.coroutine_kind)) } CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _) | CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => { @@ -1443,7 +1442,7 @@ fn create_coroutine_resume_function<'tcx>( make_coroutine_state_argument_indirect(tcx, body); - match coroutine_kind { + match transform.coroutine_kind { // Iterator::next doesn't accept a pinned argument, // unlike for all other coroutine kinds. CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {} @@ -1564,7 +1563,7 @@ pub(crate) fn mir_coroutine_witnesses<'tcx>( let coroutine_ty = body.local_decls[ty::CAPTURE_STRUCT_LOCAL].ty; let movable = match *coroutine_ty.kind() { - ty::Coroutine(_, _, movability) => movability == hir::Movability::Movable, + ty::Coroutine(def_id, _) => tcx.coroutine_movability(def_id) == hir::Movability::Movable, ty::Error(_) => return None, _ => span_bug!(body.span, "unexpected coroutine type {}", coroutine_ty), }; @@ -1596,12 +1595,13 @@ impl<'tcx> MirPass<'tcx> for StateTransform { // The first argument is the coroutine type passed by value let coroutine_ty = body.local_decls.raw[1].ty; + let coroutine_kind = body.coroutine_kind().unwrap(); // Get the discriminant type and args which typeck computed let (discr_ty, movable) = match *coroutine_ty.kind() { - ty::Coroutine(_, args, movability) => { + ty::Coroutine(_, args) => { let args = args.as_coroutine(); - (args.discr_ty(tcx), movability == hir::Movability::Movable) + (args.discr_ty(tcx), coroutine_kind.movability() == hir::Movability::Movable) } _ => { tcx.dcx().span_delayed_bug( @@ -1612,19 +1612,7 @@ impl<'tcx> MirPass<'tcx> for StateTransform { } }; - let is_async_kind = matches!( - body.coroutine_kind(), - Some(CoroutineKind::Desugared(CoroutineDesugaring::Async, _)) - ); - let is_async_gen_kind = matches!( - body.coroutine_kind(), - Some(CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _)) - ); - let is_gen_kind = matches!( - body.coroutine_kind(), - Some(CoroutineKind::Desugared(CoroutineDesugaring::Gen, _)) - ); - let new_ret_ty = match body.coroutine_kind().unwrap() { + let new_ret_ty = match coroutine_kind { CoroutineKind::Desugared(CoroutineDesugaring::Async, _) => { // Compute Poll let poll_did = tcx.require_lang_item(LangItem::Poll, None); @@ -1643,7 +1631,7 @@ impl<'tcx> MirPass<'tcx> for StateTransform { // The yield ty is already `Poll>` old_yield_ty } - CoroutineKind::Coroutine => { + CoroutineKind::Coroutine(_) => { // Compute CoroutineState let state_did = tcx.require_lang_item(LangItem::CoroutineState, None); let state_adt_ref = tcx.adt_def(state_did); @@ -1657,7 +1645,10 @@ impl<'tcx> MirPass<'tcx> for StateTransform { let old_ret_local = replace_local(RETURN_PLACE, new_ret_ty, body, tcx); // Replace all occurrences of `ResumeTy` with `&mut Context<'_>` within async bodies. 
- if is_async_kind || is_async_gen_kind { + if matches!( + coroutine_kind, + CoroutineKind::Desugared(CoroutineDesugaring::Async | CoroutineDesugaring::AsyncGen, _) + ) { transform_async_context(tcx, body); } @@ -1666,11 +1657,7 @@ impl<'tcx> MirPass<'tcx> for StateTransform { // case there is no `Assign` to it that the transform can turn into a store to the coroutine // state. After the yield the slot in the coroutine state would then be uninitialized. let resume_local = Local::new(2); - let resume_ty = if is_async_kind { - Ty::new_task_context(tcx) - } else { - body.local_decls[resume_local].ty - }; + let resume_ty = body.local_decls[resume_local].ty; let old_resume_local = replace_local(resume_local, resume_ty, body, tcx); // When first entering the coroutine, move the resume argument into its old local @@ -1713,11 +1700,11 @@ impl<'tcx> MirPass<'tcx> for StateTransform { // Run the transformation which converts Places from Local to coroutine struct // accesses for locals in `remap`. // It also rewrites `return x` and `yield y` as writing a new coroutine state and returning - // either CoroutineState::Complete(x) and CoroutineState::Yielded(y), - // or Poll::Ready(x) and Poll::Pending respectively depending on `is_async_kind`. + // either `CoroutineState::Complete(x)` and `CoroutineState::Yielded(y)`, + // or `Poll::Ready(x)` and `Poll::Pending` respectively depending on the coroutine kind. let mut transform = TransformVisitor { tcx, - coroutine_kind: body.coroutine_kind().unwrap(), + coroutine_kind, remap, storage_liveness, always_live_locals, @@ -1734,7 +1721,7 @@ impl<'tcx> MirPass<'tcx> for StateTransform { body.spread_arg = None; // Remove the context argument within generator bodies. - if is_gen_kind { + if matches!(coroutine_kind, CoroutineKind::Desugared(CoroutineDesugaring::Gen, _)) { transform_gen_context(tcx, body); } diff --git a/compiler/rustc_mir_transform/src/coverage/counters.rs b/compiler/rustc_mir_transform/src/coverage/counters.rs index 604589e5b96ba..8c11dea5d4ee6 100644 --- a/compiler/rustc_mir_transform/src/coverage/counters.rs +++ b/compiler/rustc_mir_transform/src/coverage/counters.rs @@ -1,4 +1,4 @@ -use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::fx::FxIndexMap; use rustc_data_structures::graph::WithNumNodes; use rustc_index::bit_set::BitSet; use rustc_index::IndexVec; @@ -47,7 +47,10 @@ pub(super) struct CoverageCounters { bcb_counters: IndexVec>, /// Coverage counters/expressions that are associated with the control-flow /// edge between two BCBs. - bcb_edge_counters: FxHashMap<(BasicCoverageBlock, BasicCoverageBlock), BcbCounter>, + /// + /// The iteration order of this map can affect the precise contents of MIR, + /// so we use `FxIndexMap` to avoid query stability hazards. + bcb_edge_counters: FxIndexMap<(BasicCoverageBlock, BasicCoverageBlock), BcbCounter>, /// Tracks which BCBs have a counter associated with some incoming edge. /// Only used by assertions, to verify that BCBs with incoming edge /// counters do not have their own physical counters (expressions are allowed). @@ -58,27 +61,27 @@ pub(super) struct CoverageCounters { } impl CoverageCounters { - pub(super) fn new(basic_coverage_blocks: &CoverageGraph) -> Self { + /// Makes [`BcbCounter`] `Counter`s and `Expressions` for the `BasicCoverageBlock`s directly or + /// indirectly associated with coverage spans, and accumulates additional `Expression`s + /// representing intermediate values. 
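
The `FxHashMap` → `FxIndexMap` switch above is about determinism: `FxIndexMap` is rustc's insertion-order-preserving map (an `indexmap::IndexMap` with the Fx hasher), so iterating it cannot perturb the emitted MIR between runs. A small sketch of the difference, not part of this patch and assuming the external `indexmap` crate as a stand-in:

```rust
use std::collections::HashMap;

use indexmap::IndexMap; // stand-in for rustc's FxIndexMap (IndexMap + Fx hasher)

fn main() {
    let mut ordered = IndexMap::new();
    let mut unordered = HashMap::new();
    for (key, value) in [("edge_c", 3), ("edge_a", 1), ("edge_b", 2)] {
        ordered.insert(key, value);
        unordered.insert(key, value);
    }

    // IndexMap iterates in insertion order, so anything derived from this
    // iteration (e.g. injected MIR statements) is deterministic across runs.
    assert_eq!(ordered.keys().copied().collect::<Vec<_>>(), ["edge_c", "edge_a", "edge_b"]);

    // HashMap makes no ordering guarantee; relying on its iteration order is
    // exactly the kind of query stability hazard the comment above describes.
    println!("{:?}", unordered.keys().collect::<Vec<_>>());
}
```
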
+ pub(super) fn make_bcb_counters( + basic_coverage_blocks: &CoverageGraph, + bcb_has_coverage_spans: impl Fn(BasicCoverageBlock) -> bool, + ) -> Self { let num_bcbs = basic_coverage_blocks.num_nodes(); - Self { + let mut this = Self { next_counter_id: CounterId::START, bcb_counters: IndexVec::from_elem_n(None, num_bcbs), - bcb_edge_counters: FxHashMap::default(), + bcb_edge_counters: FxIndexMap::default(), bcb_has_incoming_edge_counters: BitSet::new_empty(num_bcbs), expressions: IndexVec::new(), - } - } + }; - /// Makes [`BcbCounter`] `Counter`s and `Expressions` for the `BasicCoverageBlock`s directly or - /// indirectly associated with coverage spans, and accumulates additional `Expression`s - /// representing intermediate values. - pub fn make_bcb_counters( - &mut self, - basic_coverage_blocks: &CoverageGraph, - bcb_has_coverage_spans: impl Fn(BasicCoverageBlock) -> bool, - ) { - MakeBcbCounters::new(self, basic_coverage_blocks).make_bcb_counters(bcb_has_coverage_spans) + MakeBcbCounters::new(&mut this, basic_coverage_blocks) + .make_bcb_counters(bcb_has_coverage_spans); + + this } fn make_counter(&mut self) -> BcbCounter { @@ -186,8 +189,8 @@ impl CoverageCounters { .map(|(&(from_bcb, to_bcb), counter_kind)| (from_bcb, to_bcb, counter_kind)) } - pub(super) fn take_expressions(&mut self) -> IndexVec { - std::mem::take(&mut self.expressions) + pub(super) fn into_expressions(self) -> IndexVec { + self.expressions } } diff --git a/compiler/rustc_mir_transform/src/coverage/mod.rs b/compiler/rustc_mir_transform/src/coverage/mod.rs index c5a3391286acf..aa7b6b02f74e1 100644 --- a/compiler/rustc_mir_transform/src/coverage/mod.rs +++ b/compiler/rustc_mir_transform/src/coverage/mod.rs @@ -8,7 +8,7 @@ mod spans; mod tests; use self::counters::{BcbCounter, CoverageCounters}; -use self::graph::CoverageGraph; +use self::graph::{BasicCoverageBlock, CoverageGraph}; use self::spans::CoverageSpans; use crate::MirPass; @@ -23,7 +23,7 @@ use rustc_middle::mir::{ use rustc_middle::ty::TyCtxt; use rustc_span::def_id::LocalDefId; use rustc_span::source_map::SourceMap; -use rustc_span::{ExpnKind, Span, Symbol}; +use rustc_span::{Span, Symbol}; /// Inserts `StatementKind::Coverage` statements that either instrument the binary with injected /// counters, via intrinsic `llvm.instrprof.increment`, and/or inject metadata used during codegen @@ -70,7 +70,6 @@ struct Instrumentor<'a, 'tcx> { mir_body: &'a mut mir::Body<'tcx>, hir_info: ExtractedHirInfo, basic_coverage_blocks: CoverageGraph, - coverage_counters: CoverageCounters, } impl<'a, 'tcx> Instrumentor<'a, 'tcx> { @@ -80,9 +79,8 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> { debug!(?hir_info, "instrumenting {:?}", mir_body.source.def_id()); let basic_coverage_blocks = CoverageGraph::from_mir(mir_body); - let coverage_counters = CoverageCounters::new(&basic_coverage_blocks); - Self { tcx, mir_body, hir_info, basic_coverage_blocks, coverage_counters } + Self { tcx, mir_body, hir_info, basic_coverage_blocks } } fn inject_counters(&'a mut self) { @@ -103,25 +101,31 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> { // and all `Expression` dependencies (operands) are also generated, for any other // `BasicCoverageBlock`s not already associated with a coverage span. 
let bcb_has_coverage_spans = |bcb| coverage_spans.bcb_has_coverage_spans(bcb); - self.coverage_counters - .make_bcb_counters(&self.basic_coverage_blocks, bcb_has_coverage_spans); + let coverage_counters = CoverageCounters::make_bcb_counters( + &self.basic_coverage_blocks, + bcb_has_coverage_spans, + ); - let mappings = self.create_mappings_and_inject_coverage_statements(&coverage_spans); + let mappings = self.create_mappings(&coverage_spans, &coverage_counters); + self.inject_coverage_statements(bcb_has_coverage_spans, &coverage_counters); self.mir_body.function_coverage_info = Some(Box::new(FunctionCoverageInfo { function_source_hash: self.hir_info.function_source_hash, - num_counters: self.coverage_counters.num_counters(), - expressions: self.coverage_counters.take_expressions(), + num_counters: coverage_counters.num_counters(), + expressions: coverage_counters.into_expressions(), mappings, })); } - /// For each [`BcbCounter`] associated with a BCB node or BCB edge, create - /// any corresponding mappings (for BCB nodes only), and inject any necessary - /// coverage statements into MIR. - fn create_mappings_and_inject_coverage_statements( - &mut self, + /// For each coverage span extracted from MIR, create a corresponding + /// mapping. + /// + /// Precondition: All BCBs corresponding to those spans have been given + /// coverage counters. + fn create_mappings( + &self, coverage_spans: &CoverageSpans, + coverage_counters: &CoverageCounters, ) -> Vec { let source_map = self.tcx.sess.source_map(); let body_span = self.hir_info.body_span; @@ -131,30 +135,42 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> { let file_name = Symbol::intern(&source_file.name.for_codegen(self.tcx.sess).to_string_lossy()); - let mut mappings = Vec::new(); - - // Process the counters and spans associated with BCB nodes. - for (bcb, counter_kind) in self.coverage_counters.bcb_node_counters() { - let spans = coverage_spans.spans_for_bcb(bcb); - let has_mappings = !spans.is_empty(); - - // If this BCB has any coverage spans, add corresponding mappings to - // the mappings table. - if has_mappings { - let term = counter_kind.as_term(); - mappings.extend(spans.iter().map(|&span| { - let code_region = make_code_region(source_map, file_name, span, body_span); - Mapping { code_region, term } - })); - } + coverage_spans + .bcbs_with_coverage_spans() + // For each BCB with spans, get a coverage term for its counter. + .map(|(bcb, spans)| { + let term = coverage_counters + .bcb_counter(bcb) + .expect("all BCBs with spans were given counters") + .as_term(); + (term, spans) + }) + // Flatten the spans into individual term/span pairs. + .flat_map(|(term, spans)| spans.iter().map(move |&span| (term, span))) + // Convert each span to a code region, and create the final mapping. + .map(|(term, span)| { + let code_region = make_code_region(source_map, file_name, span, body_span); + Mapping { term, code_region } + }) + .collect::>() + } + /// For each BCB node or BCB edge that has an associated coverage counter, + /// inject any necessary coverage statements into MIR. + fn inject_coverage_statements( + &mut self, + bcb_has_coverage_spans: impl Fn(BasicCoverageBlock) -> bool, + coverage_counters: &CoverageCounters, + ) { + // Process the counters associated with BCB nodes. + for (bcb, counter_kind) in coverage_counters.bcb_node_counters() { let do_inject = match counter_kind { // Counter-increment statements always need to be injected. BcbCounter::Counter { .. 
} => true, // The only purpose of expression-used statements is to detect // when a mapping is unreachable, so we only inject them for // expressions with one or more mappings. - BcbCounter::Expression { .. } => has_mappings, + BcbCounter::Expression { .. } => bcb_has_coverage_spans(bcb), }; if do_inject { inject_statement( @@ -166,7 +182,7 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> { } // Process the counters associated with BCB edges. - for (from_bcb, to_bcb, counter_kind) in self.coverage_counters.bcb_edge_counters() { + for (from_bcb, to_bcb, counter_kind) in coverage_counters.bcb_edge_counters() { let do_inject = match counter_kind { // Counter-increment statements always need to be injected. BcbCounter::Counter { .. } => true, @@ -192,8 +208,6 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> { // Inject a counter into the newly-created BB. inject_statement(self.mir_body, self.make_mir_coverage_kind(counter_kind), new_bb); } - - mappings } fn make_mir_coverage_kind(&self, counter_kind: &BcbCounter) -> CoverageKind { @@ -345,22 +359,11 @@ fn get_body_span<'tcx>( ) -> Span { let mut body_span = hir_body.value.span; - if tcx.is_closure(def_id.to_def_id()) { - // If the MIR function is a closure, and if the closure body span - // starts from a macro, but it's content is not in that macro, try - // to find a non-macro callsite, and instrument the spans there - // instead. - loop { - let expn_data = body_span.ctxt().outer_expn_data(); - if expn_data.is_root() { - break; - } - if let ExpnKind::Macro { .. } = expn_data.kind { - body_span = expn_data.call_site; - } else { - break; - } - } + if tcx.is_closure_or_coroutine(def_id.to_def_id()) { + // If the current function is a closure, and its "body" span was created + // by macro expansion or compiler desugaring, try to walk backwards to + // the pre-expansion call site or body. + body_span = body_span.source_callsite(); } body_span diff --git a/compiler/rustc_mir_transform/src/coverage/spans.rs b/compiler/rustc_mir_transform/src/coverage/spans.rs index ae43a18ad4e4f..ed091752187ef 100644 --- a/compiler/rustc_mir_transform/src/coverage/spans.rs +++ b/compiler/rustc_mir_transform/src/coverage/spans.rs @@ -48,8 +48,13 @@ impl CoverageSpans { !self.bcb_to_spans[bcb].is_empty() } - pub(super) fn spans_for_bcb(&self, bcb: BasicCoverageBlock) -> &[Span] { - &self.bcb_to_spans[bcb] + pub(super) fn bcbs_with_coverage_spans( + &self, + ) -> impl Iterator { + self.bcb_to_spans.iter_enumerated().filter_map(|(bcb, spans)| { + // Only yield BCBs that have at least one coverage span. 
+ (!spans.is_empty()).then_some((bcb, spans.as_slice())) + }) } } diff --git a/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs b/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs index a9c4ea33d0e82..8f6592afe85cb 100644 --- a/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs +++ b/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs @@ -74,7 +74,7 @@ fn bcb_to_initial_coverage_spans<'a, 'tcx>( let expn_span = filtered_statement_span(statement)?; let span = unexpand_into_body_span(expn_span, body_span)?; - Some(CoverageSpan::new(span, expn_span, bcb, is_closure(statement))) + Some(CoverageSpan::new(span, expn_span, bcb, is_closure_or_coroutine(statement))) }); let terminator_span = Some(data.terminator()).into_iter().filter_map(move |terminator| { @@ -88,10 +88,10 @@ fn bcb_to_initial_coverage_spans<'a, 'tcx>( }) } -fn is_closure(statement: &Statement<'_>) -> bool { +fn is_closure_or_coroutine(statement: &Statement<'_>) -> bool { match statement.kind { StatementKind::Assign(box (_, Rvalue::Aggregate(box ref agg_kind, _))) => match agg_kind { - AggregateKind::Closure(_, _) | AggregateKind::Coroutine(_, _, _) => true, + AggregateKind::Closure(_, _) | AggregateKind::Coroutine(_, _) => true, _ => false, }, _ => false, @@ -204,10 +204,5 @@ fn filtered_terminator_span(terminator: &Terminator<'_>) -> Option { /// etc.). #[inline] fn unexpand_into_body_span(span: Span, body_span: Span) -> Option { - use rustc_span::source_map::original_sp; - - // FIXME(#118525): Consider switching from `original_sp` to `Span::find_ancestor_inside`, - // which is similar but gives slightly different results in some edge cases. - let original_span = original_sp(span, body_span).with_ctxt(body_span.ctxt()); - body_span.contains(original_span).then_some(original_span) + span.find_ancestor_inside_same_ctxt(body_span) } diff --git a/compiler/rustc_mir_transform/src/coverage/tests.rs b/compiler/rustc_mir_transform/src/coverage/tests.rs index 931bc8e58ffc1..d9a3c0cb162f3 100644 --- a/compiler/rustc_mir_transform/src/coverage/tests.rs +++ b/compiler/rustc_mir_transform/src/coverage/tests.rs @@ -630,8 +630,10 @@ fn test_make_bcb_counters() { // coverage spans for BCBs 1 and 2. Now we skip that step and just tell // BCB counter construction that those BCBs have spans. 
let bcb_has_coverage_spans = |bcb: BasicCoverageBlock| (1..=2).contains(&bcb.as_usize()); - let mut coverage_counters = counters::CoverageCounters::new(&basic_coverage_blocks); - coverage_counters.make_bcb_counters(&basic_coverage_blocks, bcb_has_coverage_spans); + let coverage_counters = counters::CoverageCounters::make_bcb_counters( + &basic_coverage_blocks, + bcb_has_coverage_spans, + ); assert_eq!(coverage_counters.num_expressions(), 0); assert_eq!( diff --git a/compiler/rustc_mir_transform/src/gvn.rs b/compiler/rustc_mir_transform/src/gvn.rs index 3b8adf7e86b71..2551c8aca8836 100644 --- a/compiler/rustc_mir_transform/src/gvn.rs +++ b/compiler/rustc_mir_transform/src/gvn.rs @@ -109,7 +109,7 @@ pub struct GVN; impl<'tcx> MirPass<'tcx> for GVN { fn is_enabled(&self, sess: &rustc_session::Session) -> bool { - sess.mir_opt_level() >= 4 + sess.mir_opt_level() >= 2 } #[instrument(level = "trace", skip(self, tcx, body))] @@ -850,7 +850,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> { assert!(!fields.is_empty()); (AggregateTy::Tuple, FIRST_VARIANT) } - AggregateKind::Closure(did, substs) | AggregateKind::Coroutine(did, substs, _) => { + AggregateKind::Closure(did, substs) | AggregateKind::Coroutine(did, substs) => { (AggregateTy::Def(did, substs), FIRST_VARIANT) } AggregateKind::Adt(did, variant_index, substs, _, None) => { diff --git a/compiler/rustc_mir_transform/src/large_enums.rs b/compiler/rustc_mir_transform/src/large_enums.rs index 1d788a55ff82f..8be96b6ba8f28 100644 --- a/compiler/rustc_mir_transform/src/large_enums.rs +++ b/compiler/rustc_mir_transform/src/large_enums.rs @@ -9,7 +9,7 @@ use rustc_target::abi::{HasDataLayout, Size, TagEncoding, Variants}; /// A pass that seeks to optimize unnecessary moves of large enum types, if there is a large /// enough discrepancy between them. /// -/// i.e. If there is are two variants: +/// i.e. If there are two variants: /// ``` /// enum Example { /// Small, diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs index 9d03bab48448d..5562ae7f3bdef 100644 --- a/compiler/rustc_mir_transform/src/lib.rs +++ b/compiler/rustc_mir_transform/src/lib.rs @@ -588,7 +588,6 @@ fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { // destroy the SSA property. It should still happen before const-propagation, so the // latter pass will leverage the created opportunities. 
&separate_const_switch::SeparateConstSwitch, - &const_prop::ConstProp, &gvn::GVN, &simplify::SimplifyLocals::AfterGVN, &dataflow_const_prop::DataflowConstProp, diff --git a/compiler/rustc_mir_transform/src/pass_manager.rs b/compiler/rustc_mir_transform/src/pass_manager.rs index 1da1c1920b249..82074f1960d25 100644 --- a/compiler/rustc_mir_transform/src/pass_manager.rs +++ b/compiler/rustc_mir_transform/src/pass_manager.rs @@ -129,12 +129,6 @@ fn run_passes_inner<'tcx>( if dump_enabled { dump_mir_for_pass(tcx, body, name, false); } - if validate { - validate_body(tcx, body, format!("before pass {name}")); - } - if lint { - lint_body(tcx, body, format!("before pass {name}")); - } if let Some(prof_arg) = &prof_arg { tcx.sess diff --git a/compiler/rustc_mir_transform/src/shim.rs b/compiler/rustc_mir_transform/src/shim.rs index fba73d5195b76..f6b820bfcd01f 100644 --- a/compiler/rustc_mir_transform/src/shim.rs +++ b/compiler/rustc_mir_transform/src/shim.rs @@ -69,7 +69,7 @@ fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceDef<'tcx>) -> Body<' ty::InstanceDef::DropGlue(def_id, ty) => { // FIXME(#91576): Drop shims for coroutines aren't subject to the MIR passes at the end // of this function. Is this intentional? - if let Some(ty::Coroutine(coroutine_def_id, args, _)) = ty.map(Ty::kind) { + if let Some(ty::Coroutine(coroutine_def_id, args)) = ty.map(Ty::kind) { let body = tcx.optimized_mir(*coroutine_def_id).coroutine_drop().unwrap(); let mut body = EarlyBinder::bind(body.clone()).instantiate(tcx, args); debug!("make_shim({:?}) = {:?}", instance, body); @@ -394,7 +394,8 @@ fn build_clone_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, self_ty: Ty<'tcx>) - _ if is_copy => builder.copy_shim(), ty::Closure(_, args) => builder.tuple_like_shim(dest, src, args.as_closure().upvar_tys()), ty::Tuple(..) => builder.tuple_like_shim(dest, src, self_ty.tuple_fields()), - ty::Coroutine(coroutine_def_id, args, hir::Movability::Movable) => { + ty::Coroutine(coroutine_def_id, args) => { + assert_eq!(tcx.coroutine_movability(*coroutine_def_id), hir::Movability::Movable); builder.coroutine_shim(dest, src, *coroutine_def_id, args.as_coroutine()) } _ => bug!("clone shim for `{:?}` which is not `Copy` and is not an aggregate", self_ty), diff --git a/compiler/rustc_monomorphize/src/collector.rs b/compiler/rustc_monomorphize/src/collector.rs index 54464600d99a8..44beafa08736e 100644 --- a/compiler/rustc_monomorphize/src/collector.rs +++ b/compiler/rustc_monomorphize/src/collector.rs @@ -1119,7 +1119,10 @@ fn create_fn_mono_item<'tcx>( source: Span, ) -> Spanned> { let def_id = instance.def_id(); - if tcx.sess.opts.unstable_opts.profile_closures && def_id.is_local() && tcx.is_closure(def_id) { + if tcx.sess.opts.unstable_opts.profile_closures + && def_id.is_local() + && tcx.is_closure_or_coroutine(def_id) + { crate::util::dump_closure_profile(tcx, instance); } diff --git a/compiler/rustc_next_trait_solver/src/canonicalizer.rs b/compiler/rustc_next_trait_solver/src/canonicalizer.rs index ac2e8960b069b..db1aee1190359 100644 --- a/compiler/rustc_next_trait_solver/src/canonicalizer.rs +++ b/compiler/rustc_next_trait_solver/src/canonicalizer.rs @@ -333,7 +333,7 @@ impl, I: Interner> TypeFolder | ty::FnPtr(_) | ty::Dynamic(_, _, _) | ty::Closure(_, _) - | ty::Coroutine(_, _, _) + | ty::Coroutine(_, _) | ty::CoroutineWitness(..) 
| ty::Never | ty::Tuple(_) diff --git a/compiler/rustc_parse/Cargo.toml b/compiler/rustc_parse/Cargo.toml index 02f9f35f0f567..f562332316874 100644 --- a/compiler/rustc_parse/Cargo.toml +++ b/compiler/rustc_parse/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" [dependencies] # tidy-alphabetical-start -bitflags = "1.0" +bitflags = "2.4.1" rustc_ast = { path = "../rustc_ast" } rustc_ast_pretty = { path = "../rustc_ast_pretty" } rustc_data_structures = { path = "../rustc_data_structures" } diff --git a/compiler/rustc_parse/messages.ftl b/compiler/rustc_parse/messages.ftl index 363b8f4bfb9cc..c11a6fab7e5d2 100644 --- a/compiler/rustc_parse/messages.ftl +++ b/compiler/rustc_parse/messages.ftl @@ -10,6 +10,8 @@ parse_ambiguous_range_pattern = the range pattern here has ambiguous interpretat parse_array_brackets_instead_of_braces = this is a block expression, not an array .suggestion = to make an array, use square brackets instead of curly braces +parse_array_index_offset_of = array indexing not supported in offset_of + parse_assignment_else_not_allowed = ... else {"{"} ... {"}"} is not allowed parse_assoc_lifetime = associated lifetimes are not supported @@ -95,9 +97,6 @@ parse_compound_assignment_expression_in_let = can't reassign to an uninitialized .suggestion = initialize the variable .help = if you meant to overwrite, remove the `let` binding -parse_const_bounds_missing_tilde = const bounds must start with `~` - .suggestion = add `~` - parse_const_generic_without_braces = expressions must be enclosed in braces to be used as const generic arguments .suggestion = enclose the `const` expression in braces @@ -408,6 +407,8 @@ parse_invalid_logical_operator = `{$incorrect}` is not a logical operator parse_invalid_meta_item = expected unsuffixed literal or identifier, found `{$token}` +parse_invalid_offset_of = offset_of expects dot-separated field and variant names + parse_invalid_unicode_escape = invalid unicode character escape .label = invalid escape .help = unicode escape must {$surrogate -> @@ -555,8 +556,8 @@ parse_missing_trait_in_trait_impl = missing trait in a trait impl .suggestion_add_trait = add a trait here .suggestion_remove_for = for an inherent impl, drop this `for` -parse_modifier_lifetime = `{$sigil}` may only modify trait bounds, not lifetime bounds - .suggestion = remove the `{$sigil}` +parse_modifier_lifetime = `{$modifier}` may only modify trait bounds, not lifetime bounds + .suggestion = remove the `{$modifier}` parse_more_than_one_char = character literal may only contain one codepoint .followed_by = this `{$chr}` is followed by the combining {$len -> @@ -729,8 +730,6 @@ parse_switch_ref_box_order = switch the order of `ref` and `box` parse_ternary_operator = Rust has no ternary operator .help = use an `if-else` expression instead -parse_tilde_const_lifetime = `~const` may only modify trait bounds, not lifetime bounds - parse_tilde_is_not_unary_operator = `~` cannot be used as a unary operator .suggestion = use `!` to perform bitwise not @@ -772,6 +771,9 @@ parse_unexpected_if_with_if = unexpected `if` in the condition expression parse_unexpected_lifetime_in_pattern = unexpected lifetime `{$symbol}` in pattern .suggestion = remove the lifetime +parse_unexpected_paren_in_range_pat = range pattern bounds cannot have parentheses +parse_unexpected_paren_in_range_pat_sugg = remove these parentheses + parse_unexpected_parentheses_in_for_head = unexpected parentheses surrounding `for` loop head .suggestion = remove parentheses in `for` loop diff --git 
a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs index 53cce9e2883a9..936c5de2ae88a 100644 --- a/compiler/rustc_parse/src/errors.rs +++ b/compiler/rustc_parse/src/errors.rs @@ -2378,6 +2378,27 @@ pub(crate) struct ExpectedCommaAfterPatternField { pub span: Span, } +#[derive(Diagnostic)] +#[diag(parse_unexpected_paren_in_range_pat)] +pub(crate) struct UnexpectedParenInRangePat { + #[primary_span] + pub span: Vec, + #[subdiagnostic] + pub sugg: UnexpectedParenInRangePatSugg, +} + +#[derive(Subdiagnostic)] +#[multipart_suggestion( + parse_unexpected_paren_in_range_pat_sugg, + applicability = "machine-applicable" +)] +pub(crate) struct UnexpectedParenInRangePatSugg { + #[suggestion_part(code = "")] + pub start_span: Span, + #[suggestion_part(code = "")] + pub end_span: Span, +} + #[derive(Diagnostic)] #[diag(parse_return_types_use_thin_arrow)] pub(crate) struct ReturnTypesUseThinArrow { @@ -2555,20 +2576,13 @@ pub(crate) struct AssocLifetime { pub lifetime: Span, } -#[derive(Diagnostic)] -#[diag(parse_tilde_const_lifetime)] -pub(crate) struct TildeConstLifetime { - #[primary_span] - pub span: Span, -} - #[derive(Diagnostic)] #[diag(parse_modifier_lifetime)] pub(crate) struct ModifierLifetime { #[primary_span] #[suggestion(style = "tool-only", applicability = "maybe-incorrect", code = "")] pub span: Span, - pub sigil: &'static str, + pub modifier: &'static str, } #[derive(Diagnostic)] @@ -2581,15 +2595,6 @@ pub(crate) struct ParenthesizedLifetime { pub snippet: String, } -#[derive(Diagnostic)] -#[diag(parse_const_bounds_missing_tilde)] -pub(crate) struct ConstMissingTilde { - #[primary_span] - pub span: Span, - #[suggestion(code = "~", applicability = "machine-applicable")] - pub start: Span, -} - #[derive(Diagnostic)] #[diag(parse_underscore_literal_suffix)] pub(crate) struct UnderscoreLiteralSuffix { @@ -2903,3 +2908,11 @@ pub(crate) struct TransposeDynOrImplSugg<'a> { pub insertion_span: Span, pub kw: &'a str, } + +#[derive(Diagnostic)] +#[diag(parse_array_index_offset_of)] +pub(crate) struct ArrayIndexInOffsetOf(#[primary_span] pub Span); + +#[derive(Diagnostic)] +#[diag(parse_invalid_offset_of)] +pub(crate) struct InvalidOffsetOf(#[primary_span] pub Span); diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index 226bf60c45e46..77bca2f138a9d 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -450,37 +450,39 @@ impl<'a> Parser<'a> { let mut expected = edible .iter() - .map(|x| TokenType::Token(x.clone())) - .chain(inedible.iter().map(|x| TokenType::Token(x.clone()))) + .chain(inedible) + .cloned() + .map(TokenType::Token) .chain(self.expected_tokens.iter().cloned()) - .filter_map(|token| { - // filter out suggestions which suggest the same token which was found and deemed incorrect + .filter(|token| { + // Filter out suggestions that suggest the same token which was found and deemed incorrect. 
fn is_ident_eq_keyword(found: &TokenKind, expected: &TokenType) -> bool { - if let TokenKind::Ident(current_sym, _) = found { - if let TokenType::Keyword(suggested_sym) = expected { - return current_sym == suggested_sym; - } + if let TokenKind::Ident(current_sym, _) = found + && let TokenType::Keyword(suggested_sym) = expected + { + return current_sym == suggested_sym; } false } - if token != parser::TokenType::Token(self.token.kind.clone()) { + + if *token != parser::TokenType::Token(self.token.kind.clone()) { let eq = is_ident_eq_keyword(&self.token.kind, &token); - // if the suggestion is a keyword and the found token is an ident, + // If the suggestion is a keyword and the found token is an ident, // the content of which are equal to the suggestion's content, - // we can remove that suggestion (see the return None statement below) + // we can remove that suggestion (see the `return false` below). - // if this isn't the case however, and the suggestion is a token the - // content of which is the same as the found token's, we remove it as well + // If this isn't the case however, and the suggestion is a token the + // content of which is the same as the found token's, we remove it as well. if !eq { if let TokenType::Token(kind) = &token { if kind == &self.token.kind { - return None; + return false; } } - return Some(token); + return true; } } - return None; + false }) .collect::>(); expected.sort_by_cached_key(|x| x.to_string()); @@ -488,10 +490,10 @@ impl<'a> Parser<'a> { let sm = self.sess.source_map(); - // Special-case "expected `;`" errors + // Special-case "expected `;`" errors. if expected.contains(&TokenType::Token(token::Semi)) { // If the user is trying to write a ternary expression, recover it and - // return an Err to prevent a cascade of irrelevant diagnostics + // return an Err to prevent a cascade of irrelevant diagnostics. if self.prev_token == token::Question && let Err(e) = self.maybe_recover_from_ternary_operator() { @@ -598,6 +600,26 @@ impl<'a> Parser<'a> { // FIXME: translation requires list formatting (for `expect`) let mut err = self.dcx().struct_span_err(self.token.span, msg_exp); + // Look for usages of '=>' where '>=' was probably intended + if self.token == token::FatArrow + && expected + .iter() + .any(|tok| matches!(tok, TokenType::Operator | TokenType::Token(TokenKind::Le))) + && !expected.iter().any(|tok| { + matches!( + tok, + TokenType::Token(TokenKind::FatArrow) | TokenType::Token(TokenKind::Comma) + ) + }) + { + err.span_suggestion( + self.token.span, + "you might have meant to write a \"greater than or equal to\" comparison", + ">=", + Applicability::MaybeIncorrect, + ); + } + if let TokenKind::Ident(symbol, _) = &self.prev_token.kind { if ["def", "fun", "func", "function"].contains(&symbol.as_str()) { err.span_suggestion_short( diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index b76d67cf71554..0b24e78412635 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -1023,7 +1023,7 @@ impl<'a> Parser<'a> { // we should break everything including floats into more basic proc-macro style // tokens in the lexer (probably preferable). // See also `TokenKind::break_two_token_op` which does similar splitting of `>>` into `>`. 
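
As the comment above notes, a token such as `1.2` reaches the parser as a single float literal, so field accesses like `x.1.2` (and the `offset_of!` field list handled further down in this hunk) have to split it back into its components via `break_up_float`. A standalone sketch of that splitting (not the parser's actual implementation, which works on interned symbols and spans and verifies the split against the source text):

```rust
#[derive(Debug, PartialEq)]
enum SplitFloat {
    /// `1e2` — a single ident-like component, no dot.
    Single(String),
    /// `1.` — one component followed by a trailing dot.
    TrailingDot(String),
    /// `1.2` or `1.2e3` — two components separated by one dot.
    MiddleDot(String, String),
    /// Anything else (e.g. `1.2.3`) is rejected.
    Error,
}

fn split_float_token(tok: &str) -> SplitFloat {
    let parts: Vec<&str> = tok.split('.').collect();
    match parts.as_slice() {
        [single] => SplitFloat::Single(single.to_string()),
        [first, second] if second.is_empty() => SplitFloat::TrailingDot(first.to_string()),
        [first, second] => SplitFloat::MiddleDot(first.to_string(), second.to_string()),
        _ => SplitFloat::Error,
    }
}

fn main() {
    // `foo.1.2` lexes the `1.2` as one float token; field access needs `1` and `2`.
    assert_eq!(
        split_float_token("1.2"),
        SplitFloat::MiddleDot("1".to_string(), "2".to_string())
    );
    assert_eq!(split_float_token("1e2"), SplitFloat::Single("1e2".to_string()));
    assert_eq!(split_float_token("1."), SplitFloat::TrailingDot("1".to_string()));
    assert_eq!(split_float_token("1.2.3"), SplitFloat::Error);
}
```
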
- fn break_up_float(&mut self, float: Symbol) -> DestructuredFloat { + fn break_up_float(&self, float: Symbol, span: Span) -> DestructuredFloat { #[derive(Debug)] enum FloatComponent { IdentLike(String), @@ -1053,7 +1053,6 @@ impl<'a> Parser<'a> { // With proc macros the span can refer to anything, the source may be too short, // or too long, or non-ASCII. It only makes sense to break our span into components // if its underlying text is identical to our float literal. - let span = self.token.span; let can_take_span_apart = || self.span_to_snippet(span).as_deref() == Ok(float_str).as_deref(); @@ -1115,7 +1114,7 @@ impl<'a> Parser<'a> { float: Symbol, suffix: Option, ) -> P { - match self.break_up_float(float) { + match self.break_up_float(float, self.token.span) { // 1e2 DestructuredFloat::Single(sym, _sp) => { self.parse_expr_tuple_field_access(lo, base, sym, suffix, None) @@ -1143,40 +1142,105 @@ impl<'a> Parser<'a> { } } - fn parse_field_name_maybe_tuple(&mut self) -> PResult<'a, ThinVec> { - let token::Literal(token::Lit { kind: token::Float, symbol, suffix }) = self.token.kind - else { - return Ok(thin_vec![self.parse_field_name()?]); - }; - Ok(match self.break_up_float(symbol) { - // 1e2 - DestructuredFloat::Single(sym, sp) => { - self.bump(); - thin_vec![Ident::new(sym, sp)] - } - // 1. - DestructuredFloat::TrailingDot(sym, sym_span, dot_span) => { - assert!(suffix.is_none()); - // Analogous to `Self::break_and_eat` - self.break_last_token = true; - // This might work, in cases like `1. 2`, and might not, - // in cases like `offset_of!(Ty, 1.)`. It depends on what comes - // after the float-like token, and therefore we have to make - // the other parts of the parser think that there is a dot literal. - self.token = Token::new(token::Ident(sym, false), sym_span); - self.bump_with((Token::new(token::Dot, dot_span), self.token_spacing)); - thin_vec![Ident::new(sym, sym_span)] - } - // 1.2 | 1.2e3 - DestructuredFloat::MiddleDot(symbol1, ident1_span, _dot_span, symbol2, ident2_span) => { - self.bump(); - thin_vec![Ident::new(symbol1, ident1_span), Ident::new(symbol2, ident2_span)] + /// Parse the field access used in offset_of, matched by `$(e:expr)+`. + /// Currently returns a list of idents. However, it should be possible in + /// future to also do array indices, which might be arbitrary expressions. + fn parse_floating_field_access(&mut self) -> PResult<'a, P<[Ident]>> { + let mut fields = Vec::new(); + let mut trailing_dot = None; + + loop { + // This is expected to use a metavariable $(args:expr)+, but the builtin syntax + // could be called directly. Calling `parse_expr` allows this function to only + // consider `Expr`s. + let expr = self.parse_expr()?; + let mut current = &expr; + let start_idx = fields.len(); + loop { + match current.kind { + ExprKind::Field(ref left, right) => { + // Field access is read right-to-left. + fields.insert(start_idx, right); + trailing_dot = None; + current = left; + } + // Parse this both to give helpful error messages and to + // verify it can be done with this parser setup. 
+ ExprKind::Index(ref left, ref _right, span) => { + self.dcx().emit_err(errors::ArrayIndexInOffsetOf(span)); + current = left; + } + ExprKind::Lit(token::Lit { + kind: token::Float | token::Integer, + symbol, + suffix, + }) => { + if let Some(suffix) = suffix { + self.expect_no_tuple_index_suffix(current.span, suffix); + } + match self.break_up_float(symbol, current.span) { + // 1e2 + DestructuredFloat::Single(sym, sp) => { + trailing_dot = None; + fields.insert(start_idx, Ident::new(sym, sp)); + } + // 1. + DestructuredFloat::TrailingDot(sym, sym_span, dot_span) => { + assert!(suffix.is_none()); + trailing_dot = Some(dot_span); + fields.insert(start_idx, Ident::new(sym, sym_span)); + } + // 1.2 | 1.2e3 + DestructuredFloat::MiddleDot( + symbol1, + span1, + _dot_span, + symbol2, + span2, + ) => { + trailing_dot = None; + fields.insert(start_idx, Ident::new(symbol2, span2)); + fields.insert(start_idx, Ident::new(symbol1, span1)); + } + DestructuredFloat::Error => { + trailing_dot = None; + fields.insert(start_idx, Ident::new(symbol, self.prev_token.span)); + } + } + break; + } + ExprKind::Path(None, Path { ref segments, .. }) => { + match &segments[..] { + [PathSegment { ident, args: None, .. }] => { + trailing_dot = None; + fields.insert(start_idx, *ident) + } + _ => { + self.dcx().emit_err(errors::InvalidOffsetOf(current.span)); + break; + } + } + break; + } + _ => { + self.dcx().emit_err(errors::InvalidOffsetOf(current.span)); + break; + } + } } - DestructuredFloat::Error => { - self.bump(); - thin_vec![Ident::new(symbol, self.prev_token.span)] + + if matches!(self.token.kind, token::CloseDelim(..) | token::Comma) { + break; + } else if trailing_dot.is_none() { + // This loop should only repeat if there is a trailing dot. + self.dcx().emit_err(errors::InvalidOffsetOf(self.token.span)); + break; } - }) + } + if let Some(dot) = trailing_dot { + self.dcx().emit_err(errors::InvalidOffsetOf(dot)); + } + Ok(fields.into_iter().collect()) } fn parse_expr_tuple_field_access( @@ -1907,15 +1971,29 @@ impl<'a> Parser<'a> { let container = self.parse_ty()?; self.expect(&TokenKind::Comma)?; - let seq_sep = SeqSep { sep: Some(token::Dot), trailing_sep_allowed: false }; - let (fields, _trailing, _recovered) = self.parse_seq_to_before_end( - &TokenKind::CloseDelim(Delimiter::Parenthesis), - seq_sep, - Parser::parse_field_name_maybe_tuple, - )?; - let fields = fields.into_iter().flatten().collect::>(); + let fields = self.parse_floating_field_access()?; + let trailing_comma = self.eat_noexpect(&TokenKind::Comma); + + if let Err(mut e) = + self.expect_one_of(&[], &[TokenKind::CloseDelim(Delimiter::Parenthesis)]) + { + if trailing_comma { + e.note("unexpected third argument to offset_of"); + } else { + e.note("offset_of expects dot-separated field and variant names"); + } + e.emit(); + } + + // Eat tokens until the macro call ends. + if self.may_recover() { + while !matches!(self.token.kind, token::CloseDelim(..) | token::Eof) { + self.bump(); + } + } + let span = lo.to(self.token.span); - Ok(self.mk_expr(span, ExprKind::OffsetOf(container, fields.into()))) + Ok(self.mk_expr(span, ExprKind::OffsetOf(container, fields))) } /// Returns a string literal if the next token is a string literal. @@ -2445,6 +2523,7 @@ impl<'a> Parser<'a> { } } else { let attrs = self.parse_outer_attributes()?; // For recovery. + let maybe_fatarrow = self.token.clone(); let block = if self.check(&token::OpenDelim(Delimiter::Brace)) { self.parse_block()? 
} else { @@ -2469,6 +2548,15 @@ impl<'a> Parser<'a> { "you likely meant to continue parsing the let-chain starting here", ); } else { + // Look for usages of '=>' where '>=' might be intended + if maybe_fatarrow.kind == token::FatArrow { + err.span_suggestion( + maybe_fatarrow.span, + "you might have meant to write a \"greater than or equal to\" comparison", + ">=", + Applicability::MaybeIncorrect, + ); + } err.span_note( cond_span, "the `if` expression is missing a block after this condition", @@ -2927,7 +3015,13 @@ impl<'a> Parser<'a> { let is_almost_fat_arrow = TokenKind::FatArrow .similar_tokens() .is_some_and(|similar_tokens| similar_tokens.contains(&this.token.kind)); - let mut result = if !is_fat_arrow && !is_almost_fat_arrow { + + // this avoids the compiler saying that a `,` or `}` was expected even though + // the pattern isn't a never pattern (and thus an arm body is required) + let armless = (!is_fat_arrow && !is_almost_fat_arrow && pat.could_be_never_pattern()) + || matches!(this.token.kind, token::Comma | token::CloseDelim(Delimiter::Brace)); + + let mut result = if armless { // A pattern without a body, allowed for never patterns. arm_body = None; this.expect_one_of(&[token::Comma], &[token::CloseDelim(Delimiter::Brace)]).map( diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index 1598fd19f6d3f..3932d32a6e1a4 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -46,6 +46,7 @@ use crate::errors::{ }; bitflags::bitflags! { + #[derive(Clone, Copy)] struct Restrictions: u8 { const STMT_EXPR = 1 << 0; const NO_STRUCT_LITERAL = 1 << 1; @@ -320,9 +321,15 @@ impl TokenType { } } +/// Used by [`Parser::expect_any_with_type`]. #[derive(Copy, Clone, Debug)] enum TokenExpectType { + /// Unencountered tokens are inserted into [`Parser::expected_tokens`]. + /// See [`Parser::check`]. Expect, + + /// Unencountered tokens are not inserted into [`Parser::expected_tokens`]. + /// See [`Parser::check_noexpect`]. NoExpect, } @@ -504,18 +511,10 @@ impl<'a> Parser<'a> { } fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, /* is_raw */ bool)> { - let result = self.token.ident().ok_or_else(|| self.expected_ident_found(recover)); - - let (ident, is_raw) = match result { - Ok(ident) => ident, - Err(err) => match err { - // we recovered! - Ok(ident) => ident, - Err(err) => return Err(err), - }, - }; - - Ok((ident, is_raw)) + match self.token.ident() { + Some(ident) => Ok(ident), + None => self.expected_ident_found(recover), + } } /// Checks if the next token is `tok`, and returns `true` if so. @@ -766,13 +765,17 @@ impl<'a> Parser<'a> { } } + /// Checks if the next token is contained within `kets`, and returns `true` if so. fn expect_any_with_type(&mut self, kets: &[&TokenKind], expect: TokenExpectType) -> bool { kets.iter().any(|k| match expect { TokenExpectType::Expect => self.check(k), - TokenExpectType::NoExpect => self.token == **k, + TokenExpectType::NoExpect => self.check_noexpect(k), }) } + /// Parses a sequence until the specified delimiters. The function + /// `f` must consume tokens until reaching the next separator or + /// closing bracket. 
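The fat-arrow recovery above targets typos like the commented-out line in this sketch (illustrative code, not from the patch): when `=>` appears in an `if` condition where a comparison was intended, the parser now suggests `>=` instead of only pointing at the missing block.

```rust
fn main() {
    let x = 3;
    // if x => 2 { ... }
    //      ^^ this typo now gets a suggestion to write `>=` instead
    if x >= 2 {
        println!("x is at least 2");
    }
}
```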
fn parse_seq_to_before_tokens( &mut self, kets: &[&TokenKind], @@ -791,13 +794,15 @@ impl<'a> Parser<'a> { } if let Some(t) = &sep.sep { if first { + // no separator for the first element first = false; } else { + // check for separator match self.expect(t) { - Ok(false) => { + Ok(false) /* not recovered */ => { self.current_closure.take(); } - Ok(true) => { + Ok(true) /* recovered */ => { self.current_closure.take(); recovered = true; break; @@ -965,7 +970,7 @@ impl<'a> Parser<'a> { Ok(()) } - /// Parses a sequence, not including the closing delimiter. The function + /// Parses a sequence, not including the delimiters. The function /// `f` must consume tokens until reaching the next separator or /// closing bracket. fn parse_seq_to_before_end( @@ -973,11 +978,11 @@ impl<'a> Parser<'a> { ket: &TokenKind, sep: SeqSep, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec, bool, bool)> { + ) -> PResult<'a, (ThinVec, bool /* trailing */, bool /* recovered */)> { self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f) } - /// Parses a sequence, including the closing delimiter. The function + /// Parses a sequence, including only the closing delimiter. The function /// `f` must consume tokens until reaching the next separator or /// closing bracket. fn parse_seq_to_end( @@ -993,7 +998,7 @@ impl<'a> Parser<'a> { Ok((val, trailing)) } - /// Parses a sequence, including the closing delimiter. The function + /// Parses a sequence, including both delimiters. The function /// `f` must consume tokens until reaching the next separator or /// closing bracket. fn parse_unspanned_seq( @@ -1002,16 +1007,19 @@ impl<'a> Parser<'a> { ket: &TokenKind, sep: SeqSep, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec, bool)> { + ) -> PResult<'a, (ThinVec, bool /* trailing */)> { self.expect(bra)?; self.parse_seq_to_end(ket, sep, f) } + /// Parses a comma-separated sequence, including both delimiters. + /// The function `f` must consume tokens until reaching the next separator or + /// closing bracket. fn parse_delim_comma_seq( &mut self, delim: Delimiter, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec, bool)> { + ) -> PResult<'a, (ThinVec, bool /* trailing */)> { self.parse_unspanned_seq( &token::OpenDelim(delim), &token::CloseDelim(delim), @@ -1020,10 +1028,13 @@ impl<'a> Parser<'a> { ) } + /// Parses a comma-separated sequence delimited by parentheses (e.g. `(x, y)`). + /// The function `f` must consume tokens until reaching the next separator or + /// closing bracket. 
fn parse_paren_comma_seq( &mut self, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec, bool)> { + ) -> PResult<'a, (ThinVec, bool /* trailing */)> { self.parse_delim_comma_seq(Delimiter::Parenthesis, f) } diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs index afbc253757816..7d17b1d4c4d67 100644 --- a/compiler/rustc_parse/src/parser/pat.rs +++ b/compiler/rustc_parse/src/parser/pat.rs @@ -6,7 +6,8 @@ use crate::errors::{ InclusiveRangeExtraEquals, InclusiveRangeMatchArrow, InclusiveRangeNoEnd, InvalidMutInPattern, PatternOnWrongSideOfAt, RefMutOrderIncorrect, RemoveLet, RepeatedMutInPattern, SwitchRefBoxOrder, TopLevelOrPatternNotAllowed, TopLevelOrPatternNotAllowedSugg, - TrailingVertNotAllowed, UnexpectedLifetimeInPattern, UnexpectedVertVertBeforeFunctionParam, + TrailingVertNotAllowed, UnexpectedLifetimeInPattern, UnexpectedParenInRangePat, + UnexpectedParenInRangePatSugg, UnexpectedVertVertBeforeFunctionParam, UnexpectedVertVertInPattern, }; use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole}; @@ -579,6 +580,8 @@ impl<'a> Parser<'a> { /// Parse a tuple or parenthesis pattern. fn parse_pat_tuple_or_parens(&mut self) -> PResult<'a, PatKind> { + let open_paren = self.token.span; + let (fields, trailing_comma) = self.parse_paren_comma_seq(|p| { p.parse_pat_allow_top_alt( None, @@ -591,7 +594,29 @@ impl<'a> Parser<'a> { // Here, `(pat,)` is a tuple pattern. // For backward compatibility, `(..)` is a tuple pattern as well. Ok(if fields.len() == 1 && !(trailing_comma || fields[0].is_rest()) { - PatKind::Paren(fields.into_iter().next().unwrap()) + let pat = fields.into_iter().next().unwrap(); + let close_paren = self.prev_token.span; + + match &pat.kind { + // recover ranges with parentheses around the `(start)..` + PatKind::Lit(begin) + if self.may_recover() + && let Some(form) = self.parse_range_end() => + { + self.dcx().emit_err(UnexpectedParenInRangePat { + span: vec![open_paren, close_paren], + sugg: UnexpectedParenInRangePatSugg { + start_span: open_paren, + end_span: close_paren, + }, + }); + + self.parse_pat_range_begin_with(begin.clone(), form)? + } + + // (pat) with optional parentheses + _ => PatKind::Paren(pat), + } } else { PatKind::Tuple(fields) }) @@ -794,11 +819,21 @@ impl<'a> Parser<'a> { || t.can_begin_literal_maybe_minus() // e.g. `42`. 
|| t.is_whole_expr() || t.is_lifetime() // recover `'a` instead of `'a'` + || (self.may_recover() // recover leading `(` + && t.kind == token::OpenDelim(Delimiter::Parenthesis) + && self.look_ahead(dist + 1, |t| t.kind != token::OpenDelim(Delimiter::Parenthesis)) + && self.is_pat_range_end_start(dist + 1)) }) } + /// Parse a range pattern end bound fn parse_pat_range_end(&mut self) -> PResult<'a, P> { - if self.check_inline_const(0) { + // recover leading `(` + let open_paren = (self.may_recover() + && self.eat_noexpect(&token::OpenDelim(Delimiter::Parenthesis))) + .then_some(self.prev_token.span); + + let bound = if self.check_inline_const(0) { self.parse_const_block(self.token.span, true) } else if self.check_path() { let lo = self.token.span; @@ -814,7 +849,22 @@ impl<'a> Parser<'a> { Ok(self.mk_expr(lo.to(hi), ExprKind::Path(qself, path))) } else { self.parse_literal_maybe_minus() + }?; + + // recover trailing `)` + if let Some(open_paren) = open_paren { + self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; + + self.dcx().emit_err(UnexpectedParenInRangePat { + span: vec![open_paren, self.prev_token.span], + sugg: UnexpectedParenInRangePatSugg { + start_span: open_paren, + end_span: self.prev_token.span, + }, + }); } + + Ok(bound) } /// Is this the start of a pattern beginning with a path? diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs index 42ab23d62923a..4be2c662d0351 100644 --- a/compiler/rustc_parse/src/parser/ty.rs +++ b/compiler/rustc_parse/src/parser/ty.rs @@ -86,6 +86,18 @@ fn can_continue_type_after_non_fn_ident(t: &Token) -> bool { t == &token::ModSep || t == &token::Lt || t == &token::BinOp(token::Shl) } +fn can_begin_dyn_bound_in_edition_2015(t: &Token) -> bool { + // `Not`, `Tilde` & `Const` are deliberately not part of this list to + // contain the number of potential regressions esp. in MBE code. + // `Const` would regress `rfc-2632-const-trait-impl/mbe-dyn-const-2015.rs`. + // `Not` would regress `dyn!(...)` macro calls in Rust 2015. + t.is_path_start() + || t.is_lifetime() + || t == &TokenKind::Question + || t.is_keyword(kw::For) + || t == &TokenKind::OpenDelim(Delimiter::Parenthesis) +} + impl<'a> Parser<'a> { /// Parses a type. pub fn parse_ty(&mut self) -> PResult<'a, P> { @@ -665,7 +677,8 @@ impl<'a> Parser<'a> { self.check_keyword(kw::Dyn) && (self.token.uninterpolated_span().at_least_rust_2018() || self.look_ahead(1, |t| { - (t.can_begin_bound() || t.kind == TokenKind::BinOp(token::Star)) + (can_begin_dyn_bound_in_edition_2015(t) + || t.kind == TokenKind::BinOp(token::Star)) && !can_continue_type_after_non_fn_ident(t) })) } @@ -758,12 +771,12 @@ impl<'a> Parser<'a> { /// Can the current token begin a bound? fn can_begin_bound(&mut self) -> bool { - // This needs to be synchronized with `TokenKind::can_begin_bound`. 
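The two recoveries above (in `parse_pat_tuple_or_parens` and `parse_pat_range_end`) target range patterns whose bounds are wrapped in parentheses. A user-level sketch of the accepted forms, with the recovered-but-rejected spellings in comments (illustrative, not from the patch):

```rust
fn classify(n: i32) -> &'static str {
    match n {
        // `(0)..=(9)` and `(10)..` now parse far enough to emit
        // `UnexpectedParenInRangePat` with a suggestion to drop the
        // parentheses; the accepted spellings are:
        0..=9 => "digit",
        10.. => "big",
        _ => "negative",
    }
}

fn main() {
    assert_eq!(classify(7), "digit");
    assert_eq!(classify(42), "big");
    assert_eq!(classify(-1), "negative");
}
```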
self.check_path() || self.check_lifetime() || self.check(&token::Not) || self.check(&token::Question) || self.check(&token::Tilde) + || self.check_keyword(kw::Const) || self.check_keyword(kw::For) || self.check(&token::OpenDelim(Delimiter::Parenthesis)) } @@ -812,8 +825,11 @@ impl<'a> Parser<'a> { fn error_lt_bound_with_modifiers(&self, modifiers: TraitBoundModifiers) { match modifiers.constness { BoundConstness::Never => {} - BoundConstness::Maybe(span) => { - self.dcx().emit_err(errors::TildeConstLifetime { span }); + BoundConstness::Always(span) | BoundConstness::Maybe(span) => { + self.dcx().emit_err(errors::ModifierLifetime { + span, + modifier: modifiers.constness.as_str(), + }); } } @@ -822,7 +838,7 @@ impl<'a> Parser<'a> { BoundPolarity::Negative(span) | BoundPolarity::Maybe(span) => { self.dcx().emit_err(errors::ModifierLifetime { span, - sigil: modifiers.polarity.as_str(), + modifier: modifiers.polarity.as_str(), }); } } @@ -848,7 +864,7 @@ impl<'a> Parser<'a> { /// If no modifiers are present, this does not consume any tokens. /// /// ```ebnf - /// TRAIT_BOUND_MODIFIERS = ["~const"] ["?" | "!"] + /// TRAIT_BOUND_MODIFIERS = [["~"] "const"] ["?" | "!"] /// ``` fn parse_trait_bound_modifiers(&mut self) -> PResult<'a, TraitBoundModifiers> { let constness = if self.eat(&token::Tilde) { @@ -858,11 +874,8 @@ impl<'a> Parser<'a> { self.sess.gated_spans.gate(sym::const_trait_impl, span); BoundConstness::Maybe(span) } else if self.eat_keyword(kw::Const) { - let span = self.prev_token.span; - self.sess.gated_spans.gate(sym::const_trait_impl, span); - self.dcx().emit_err(errors::ConstMissingTilde { span, start: span.shrink_to_lo() }); - - BoundConstness::Maybe(span) + self.sess.gated_spans.gate(sym::const_trait_impl, self.prev_token.span); + BoundConstness::Always(self.prev_token.span) } else { BoundConstness::Never }; diff --git a/compiler/rustc_passes/src/dead.rs b/compiler/rustc_passes/src/dead.rs index d270794978b27..22aac1e775e6f 100644 --- a/compiler/rustc_passes/src/dead.rs +++ b/compiler/rustc_passes/src/dead.rs @@ -4,7 +4,6 @@ // is dead. use hir::def_id::{LocalDefIdMap, LocalDefIdSet}; -use itertools::Itertools; use rustc_data_structures::unord::UnordSet; use rustc_errors::MultiSpan; use rustc_hir as hir; @@ -16,7 +15,8 @@ use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; use rustc_middle::middle::privacy::Level; use rustc_middle::query::Providers; use rustc_middle::ty::{self, TyCtxt}; -use rustc_session::lint; +use rustc_session::lint::builtin::{DEAD_CODE, UNUSED_TUPLE_STRUCT_FIELDS}; +use rustc_session::lint::{self, Lint, LintId}; use rustc_span::symbol::{sym, Symbol}; use rustc_target::abi::FieldIdx; use std::mem; @@ -762,7 +762,7 @@ struct DeadVisitor<'tcx> { } enum ShouldWarnAboutField { - Yes(bool), // positional? + Yes, No, } @@ -784,7 +784,12 @@ impl<'tcx> DeadVisitor<'tcx> { { return ShouldWarnAboutField::No; } - ShouldWarnAboutField::Yes(is_positional) + ShouldWarnAboutField::Yes + } + + fn def_lint_level(&self, lint: &'static Lint, id: LocalDefId) -> lint::Level { + let hir_id = self.tcx.local_def_id_to_hir_id(id); + self.tcx.lint_level_at_node(lint, hir_id).0 } // # Panics @@ -795,38 +800,33 @@ impl<'tcx> DeadVisitor<'tcx> { // since those methods group by lint level before calling this method. 
fn lint_at_single_level( &self, - dead_codes: &[LocalDefId], + dead_codes: &[&DeadItem], participle: &str, parent_item: Option, - is_positional: bool, + lint: &'static Lint, ) { - let Some(&first_id) = dead_codes.first() else { + let Some(&first_item) = dead_codes.first() else { return; }; let tcx = self.tcx; - let first_hir_id = tcx.local_def_id_to_hir_id(first_id); - let first_lint_level = tcx.lint_level_at_node(lint::builtin::DEAD_CODE, first_hir_id).0; - assert!(dead_codes.iter().skip(1).all(|id| { - let hir_id = tcx.local_def_id_to_hir_id(*id); - let level = tcx.lint_level_at_node(lint::builtin::DEAD_CODE, hir_id).0; - level == first_lint_level - })); + let first_lint_level = first_item.level; + assert!(dead_codes.iter().skip(1).all(|item| item.level == first_lint_level)); - let names: Vec<_> = - dead_codes.iter().map(|&def_id| tcx.item_name(def_id.to_def_id())).collect(); + let names: Vec<_> = dead_codes.iter().map(|item| item.name).collect(); let spans: Vec<_> = dead_codes .iter() - .map(|&def_id| match tcx.def_ident_span(def_id) { - Some(s) => s.with_ctxt(tcx.def_span(def_id).ctxt()), - None => tcx.def_span(def_id), + .map(|item| match tcx.def_ident_span(item.def_id) { + Some(s) => s.with_ctxt(tcx.def_span(item.def_id).ctxt()), + None => tcx.def_span(item.def_id), }) .collect(); - let descr = tcx.def_descr(first_id.to_def_id()); + let descr = tcx.def_descr(first_item.def_id.to_def_id()); // `impl` blocks are "batched" and (unlike other batching) might // contain different kinds of associated items. - let descr = if dead_codes.iter().any(|did| tcx.def_descr(did.to_def_id()) != descr) { + let descr = if dead_codes.iter().any(|item| tcx.def_descr(item.def_id.to_def_id()) != descr) + { "associated item" } else { descr @@ -835,12 +835,6 @@ impl<'tcx> DeadVisitor<'tcx> { let multiple = num > 6; let name_list = names.into(); - let lint = if is_positional { - lint::builtin::UNUSED_TUPLE_STRUCT_FIELDS - } else { - lint::builtin::DEAD_CODE - }; - let parent_info = if let Some(parent_item) = parent_item { let parent_descr = tcx.def_descr(parent_item.to_def_id()); let span = if let DefKind::Impl { .. 
} = tcx.def_kind(parent_item) { @@ -853,7 +847,7 @@ impl<'tcx> DeadVisitor<'tcx> { None }; - let encl_def_id = parent_item.unwrap_or(first_id); + let encl_def_id = parent_item.unwrap_or(first_item.def_id); let ignored_derived_impls = if let Some(ign_traits) = self.ignored_derived_traits.get(&encl_def_id) { let trait_list = ign_traits @@ -870,7 +864,7 @@ impl<'tcx> DeadVisitor<'tcx> { None }; - let diag = if is_positional { + let diag = if LintId::of(lint) == LintId::of(UNUSED_TUPLE_STRUCT_FIELDS) { MultipleDeadCodes::UnusedTupleStructFields { multiple, num, @@ -893,7 +887,8 @@ impl<'tcx> DeadVisitor<'tcx> { } }; - self.tcx.emit_spanned_lint(lint, first_hir_id, MultiSpan::from_spans(spans), diag); + let hir_id = tcx.local_def_id_to_hir_id(first_item.def_id); + self.tcx.emit_spanned_lint(lint, hir_id, MultiSpan::from_spans(spans), diag); } fn warn_multiple( @@ -901,7 +896,7 @@ impl<'tcx> DeadVisitor<'tcx> { def_id: LocalDefId, participle: &str, dead_codes: Vec, - is_positional: bool, + lint: &'static Lint, ) { let mut dead_codes = dead_codes .iter() @@ -911,18 +906,18 @@ impl<'tcx> DeadVisitor<'tcx> { return; } dead_codes.sort_by_key(|v| v.level); - for (_, group) in &dead_codes.into_iter().group_by(|v| v.level) { - self.lint_at_single_level( - &group.map(|v| v.def_id).collect::>(), - participle, - Some(def_id), - is_positional, - ); + for group in dead_codes[..].group_by(|a, b| a.level == b.level) { + self.lint_at_single_level(&group, participle, Some(def_id), lint); } } fn warn_dead_code(&mut self, id: LocalDefId, participle: &str) { - self.lint_at_single_level(&[id], participle, None, false); + let item = DeadItem { + def_id: id, + name: self.tcx.item_name(id.to_def_id()), + level: self.def_lint_level(DEAD_CODE, id), + }; + self.lint_at_single_level(&[&item], participle, None, DEAD_CODE); } fn check_definition(&mut self, def_id: LocalDefId) { @@ -969,13 +964,12 @@ fn check_mod_deathness(tcx: TyCtxt<'_>, module: LocalModDefId) { let def_id = item.id.owner_id.def_id; if !visitor.is_live_code(def_id) { let name = tcx.item_name(def_id.to_def_id()); - let hir_id = tcx.local_def_id_to_hir_id(def_id); - let level = tcx.lint_level_at_node(lint::builtin::DEAD_CODE, hir_id).0; + let level = visitor.def_lint_level(DEAD_CODE, def_id); dead_items.push(DeadItem { def_id, name, level }) } } - visitor.warn_multiple(item.owner_id.def_id, "used", dead_items, false); + visitor.warn_multiple(item.owner_id.def_id, "used", dead_items, DEAD_CODE); } if !live_symbols.contains(&item.owner_id.def_id) { @@ -997,43 +991,32 @@ fn check_mod_deathness(tcx: TyCtxt<'_>, module: LocalModDefId) { let def_id = variant.def_id.expect_local(); if !live_symbols.contains(&def_id) { // Record to group diagnostics. 
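The level-based grouping in `warn_multiple` above uses the slice `group_by` method, which is why the `slice_group_by` feature gate is added to `rustc_passes` below. A standalone sketch of that API, assuming a nightly toolchain from around the time of this change (the method was later stabilized under the name `chunk_by`):

```rust
#![feature(slice_group_by)]

fn main() {
    // Adjacent equal keys land in the same group, which is why the dead-code
    // items are sorted by lint level before being grouped.
    let levels = [1, 1, 2, 2, 2, 3];
    for group in levels.group_by(|a, b| a == b) {
        println!("{group:?}");
    }
}
```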
- let hir_id = tcx.local_def_id_to_hir_id(def_id); - let level = tcx.lint_level_at_node(lint::builtin::DEAD_CODE, hir_id).0; + let level = visitor.def_lint_level(DEAD_CODE, def_id); dead_variants.push(DeadItem { def_id, name: variant.name, level }); continue; } - let mut is_positional = false; + let is_positional = variant.fields.raw.first().map_or(false, |field| { + field.name.as_str().starts_with(|c: char| c.is_ascii_digit()) + }); + let lint = if is_positional { UNUSED_TUPLE_STRUCT_FIELDS } else { DEAD_CODE }; let dead_fields = variant .fields .iter() .filter_map(|field| { let def_id = field.did.expect_local(); - let hir_id = tcx.local_def_id_to_hir_id(def_id); - if let ShouldWarnAboutField::Yes(is_pos) = - visitor.should_warn_about_field(field) - { - let level = tcx - .lint_level_at_node( - if is_pos { - is_positional = true; - lint::builtin::UNUSED_TUPLE_STRUCT_FIELDS - } else { - lint::builtin::DEAD_CODE - }, - hir_id, - ) - .0; + if let ShouldWarnAboutField::Yes = visitor.should_warn_about_field(field) { + let level = visitor.def_lint_level(lint, def_id); Some(DeadItem { def_id, name: field.name, level }) } else { None } }) .collect(); - visitor.warn_multiple(def_id, "read", dead_fields, is_positional); + visitor.warn_multiple(def_id, "read", dead_fields, lint); } - visitor.warn_multiple(item.owner_id.def_id, "constructed", dead_variants, false); + visitor.warn_multiple(item.owner_id.def_id, "constructed", dead_variants, DEAD_CODE); } } diff --git a/compiler/rustc_passes/src/lib.rs b/compiler/rustc_passes/src/lib.rs index c969867e871a6..bb33a4feb0556 100644 --- a/compiler/rustc_passes/src/lib.rs +++ b/compiler/rustc_passes/src/lib.rs @@ -12,6 +12,7 @@ #![feature(let_chains)] #![feature(map_try_insert)] #![feature(min_specialization)] +#![feature(slice_group_by)] #![feature(try_blocks)] #![recursion_limit = "256"] #![deny(rustc::untranslatable_diagnostic)] diff --git a/compiler/rustc_passes/src/liveness.rs b/compiler/rustc_passes/src/liveness.rs index a8ba9838780a2..cfe829f170f7e 100644 --- a/compiler/rustc_passes/src/liveness.rs +++ b/compiler/rustc_passes/src/liveness.rs @@ -101,7 +101,6 @@ use rustc_span::symbol::{kw, sym, Symbol}; use rustc_span::DUMMY_SP; use rustc_span::{BytePos, Span}; -use std::collections::VecDeque; use std::io; use std::io::prelude::*; use std::rc::Rc; @@ -317,35 +316,13 @@ impl<'tcx> IrMaps<'tcx> { // For struct patterns, take note of which fields used shorthand // (`x` rather than `x: x`). 
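The dead-code changes above pick the lint per variant: positional (tuple-struct) fields that are never read go through `unused_tuple_struct_fields`, while everything else stays on `dead_code`. A user-level sketch of code that exercises it; the lint name and default level vary by toolchain (it was later folded into `dead_code`), so it is enabled explicitly here.

```rust
#[warn(unused_tuple_struct_fields)] // lint name as of this change
struct Sample(i32, i32);

fn main() {
    let s = Sample(1, 2);
    // Only field 0 is read, so field 1 should be flagged as never read.
    println!("{}", s.0);
}
```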
let mut shorthand_field_ids = HirIdSet::default(); - let mut pats = VecDeque::new(); - pats.push_back(pat); - - while let Some(pat) = pats.pop_front() { - use rustc_hir::PatKind::*; - match &pat.kind { - Binding(.., inner_pat) => { - pats.extend(inner_pat.iter()); - } - Struct(_, fields, _) => { - let (short, not_short): (Vec>, _) = - fields.iter().partition(|f| f.is_shorthand); - shorthand_field_ids.extend(short.iter().map(|f| f.pat.hir_id)); - pats.extend(not_short.iter().map(|f| f.pat)); - } - Ref(inner_pat, _) | Box(inner_pat) => { - pats.push_back(inner_pat); - } - TupleStruct(_, inner_pats, _) | Tuple(inner_pats, _) | Or(inner_pats) => { - pats.extend(inner_pats.iter()); - } - Slice(pre_pats, inner_pat, post_pats) => { - pats.extend(pre_pats.iter()); - pats.extend(inner_pat.iter()); - pats.extend(post_pats.iter()); - } - _ => {} + + pat.walk_always(|pat| { + if let hir::PatKind::Struct(_, fields, _) = pat.kind { + let short = fields.iter().filter(|f| f.is_shorthand); + shorthand_field_ids.extend(short.map(|f| f.pat.hir_id)); } - } + }); shorthand_field_ids } @@ -405,7 +382,6 @@ impl<'tcx> Visitor<'tcx> for IrMaps<'tcx> { if let Res::Local(_var_hir_id) = path.res { self.add_live_node_for_node(expr.hir_id, ExprNode(expr.span, expr.hir_id)); } - intravisit::walk_expr(self, expr); } hir::ExprKind::Closure(closure) => { // Interesting control flow (for loops can contain labeled @@ -425,12 +401,10 @@ impl<'tcx> Visitor<'tcx> for IrMaps<'tcx> { })); } self.set_captures(expr.hir_id, call_caps); - intravisit::walk_expr(self, expr); } hir::ExprKind::Let(let_expr) => { self.add_from_pat(let_expr.pat); - intravisit::walk_expr(self, expr); } // live nodes required for interesting control flow: @@ -439,11 +413,9 @@ impl<'tcx> Visitor<'tcx> for IrMaps<'tcx> { | hir::ExprKind::Loop(..) | hir::ExprKind::Yield(..) => { self.add_live_node_for_node(expr.hir_id, ExprNode(expr.span, expr.hir_id)); - intravisit::walk_expr(self, expr); } hir::ExprKind::Binary(op, ..) if op.node.is_lazy() => { self.add_live_node_for_node(expr.hir_id, ExprNode(expr.span, expr.hir_id)); - intravisit::walk_expr(self, expr); } // otherwise, live nodes are not required: @@ -474,10 +446,9 @@ impl<'tcx> Visitor<'tcx> for IrMaps<'tcx> { | hir::ExprKind::Type(..) 
| hir::ExprKind::Err(_) | hir::ExprKind::Path(hir::QPath::TypeRelative(..)) - | hir::ExprKind::Path(hir::QPath::LangItem(..)) => { - intravisit::walk_expr(self, expr); - } + | hir::ExprKind::Path(hir::QPath::LangItem(..)) => {} } + intravisit::walk_expr(self, expr); } } @@ -1357,6 +1328,9 @@ impl<'a, 'tcx> Visitor<'tcx> for Liveness<'a, 'tcx> { fn visit_arm(&mut self, arm: &'tcx hir::Arm<'tcx>) { self.check_unused_vars_in_pat(arm.pat, None, None, |_, _, _, _| {}); + if let Some(hir::Guard::IfLet(let_expr)) = arm.guard { + self.check_unused_vars_in_pat(let_expr.pat, None, None, |_, _, _, _| {}); + } intravisit::walk_arm(self, arm); } } diff --git a/compiler/rustc_passes/src/loops.rs b/compiler/rustc_passes/src/loops.rs index 24db708196b85..0f8cc583b03cf 100644 --- a/compiler/rustc_passes/src/loops.rs +++ b/compiler/rustc_passes/src/loops.rs @@ -3,7 +3,7 @@ use Context::*; use rustc_hir as hir; use rustc_hir::def_id::{LocalDefId, LocalModDefId}; use rustc_hir::intravisit::{self, Visitor}; -use rustc_hir::{Destination, Movability, Node}; +use rustc_hir::{Destination, Node}; use rustc_middle::hir::nested_filter; use rustc_middle::query::Providers; use rustc_middle::ty::TyCtxt; @@ -86,16 +86,15 @@ impl<'a, 'hir> Visitor<'hir> for CheckLoopVisitor<'a, 'hir> { self.with_context(Loop(source), |v| v.visit_block(b)); } hir::ExprKind::Closure(&hir::Closure { - ref fn_decl, - body, - fn_decl_span, - movability, - .. + ref fn_decl, body, fn_decl_span, kind, .. }) => { - let cx = if let Some(Movability::Static) = movability { - AsyncClosure(fn_decl_span) - } else { - Closure(fn_decl_span) + // FIXME(coroutines): This doesn't handle coroutines correctly + let cx = match kind { + hir::ClosureKind::Coroutine(hir::CoroutineKind::Desugared( + hir::CoroutineDesugaring::Async, + hir::CoroutineSource::Block, + )) => AsyncClosure(fn_decl_span), + _ => Closure(fn_decl_span), }; self.visit_fn_decl(fn_decl); self.with_context(cx, |v| v.visit_nested_body(body)); diff --git a/compiler/rustc_passes/src/upvars.rs b/compiler/rustc_passes/src/upvars.rs index d87df706cc84e..ded20c38543d4 100644 --- a/compiler/rustc_passes/src/upvars.rs +++ b/compiler/rustc_passes/src/upvars.rs @@ -11,7 +11,7 @@ use rustc_span::Span; pub fn provide(providers: &mut Providers) { providers.upvars_mentioned = |tcx, def_id| { - if !tcx.is_closure(def_id) { + if !tcx.is_closure_or_coroutine(def_id) { return None; } diff --git a/compiler/rustc_pattern_analysis/src/constructor.rs b/compiler/rustc_pattern_analysis/src/constructor.rs index b688051ca9ccb..15ff4ceb5b3a6 100644 --- a/compiler/rustc_pattern_analysis/src/constructor.rs +++ b/compiler/rustc_pattern_analysis/src/constructor.rs @@ -861,12 +861,9 @@ impl ConstructorSet { #[instrument(level = "debug", skip(self, pcx, ctors), ret)] pub(crate) fn split<'a>( &self, - pcx: &PlaceCtxt<'_, '_, Cx>, + pcx: &PlaceCtxt<'a, '_, Cx>, ctors: impl Iterator> + Clone, - ) -> SplitConstructorSet - where - Cx: 'a, - { + ) -> SplitConstructorSet { let mut present: SmallVec<[_; 1]> = SmallVec::new(); // Empty constructors found missing. let mut missing_empty = Vec::new(); diff --git a/compiler/rustc_pattern_analysis/src/lib.rs b/compiler/rustc_pattern_analysis/src/lib.rs index a1c9b15766676..e01b571ede101 100644 --- a/compiler/rustc_pattern_analysis/src/lib.rs +++ b/compiler/rustc_pattern_analysis/src/lib.rs @@ -91,7 +91,7 @@ pub struct MatchCtxt<'a, 'p, Cx: TypeCx> { /// The context for type information. pub tycx: &'a Cx, /// An arena to store the wildcards we produce during analysis. 
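The `visit_arm` change above makes bindings introduced by `if let` match guards participate in the unused-variable check. A sketch assuming a nightly toolchain with `#![feature(if_let_guard)]`; names are illustrative, not from the patch.

```rust
#![feature(if_let_guard)]

fn main() {
    let vals = [10, 20, 30];
    match vals.first() {
        // Bindings in the guard's pattern are now liveness-checked: an unused
        // `rest` here would warn, so it is prefixed with `_`.
        Some(&first) if let [_, _rest @ ..] = vals => println!("first = {first}"),
        _ => {}
    }
}
```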
- pub wildcard_arena: &'a TypedArena>, + pub wildcard_arena: &'p TypedArena>, } /// The arm of a match expression. diff --git a/compiler/rustc_pattern_analysis/src/lints.rs b/compiler/rustc_pattern_analysis/src/lints.rs index bba1f406056bb..cb712fe640c5e 100644 --- a/compiler/rustc_pattern_analysis/src/lints.rs +++ b/compiler/rustc_pattern_analysis/src/lints.rs @@ -28,11 +28,11 @@ use crate::TypeCx; /// /// This is not used in the main algorithm; only in lints. #[derive(Debug)] -pub(crate) struct PatternColumn<'a, 'p, 'tcx> { - patterns: Vec<&'a DeconstructedPat<'p, 'tcx>>, +pub(crate) struct PatternColumn<'p, 'tcx> { + patterns: Vec<&'p DeconstructedPat<'p, 'tcx>>, } -impl<'a, 'p, 'tcx> PatternColumn<'a, 'p, 'tcx> { +impl<'p, 'tcx> PatternColumn<'p, 'tcx> { pub(crate) fn new(arms: &[MatchArm<'p, 'tcx>]) -> Self { let mut patterns = Vec::with_capacity(arms.len()); for arm in arms { @@ -48,7 +48,7 @@ impl<'a, 'p, 'tcx> PatternColumn<'a, 'p, 'tcx> { fn is_empty(&self) -> bool { self.patterns.is_empty() } - fn head_ty(&self, cx: MatchCtxt<'a, 'p, 'tcx>) -> Option> { + fn head_ty(&self, cx: MatchCtxt<'_, 'p, 'tcx>) -> Option> { if self.patterns.len() == 0 { return None; } @@ -64,7 +64,7 @@ impl<'a, 'p, 'tcx> PatternColumn<'a, 'p, 'tcx> { pcx.ctors_for_ty().split(pcx, column_ctors) } - fn iter<'b>(&'b self) -> impl Iterator> + Captures<'b> { + fn iter(&self) -> impl Iterator> + Captures<'_> { self.patterns.iter().copied() } @@ -75,9 +75,9 @@ impl<'a, 'p, 'tcx> PatternColumn<'a, 'p, 'tcx> { /// which may change the lengths. fn specialize( &self, - pcx: &PlaceCtxt<'a, 'p, 'tcx>, + pcx: &PlaceCtxt<'_, 'p, 'tcx>, ctor: &Constructor<'p, 'tcx>, - ) -> Vec> { + ) -> Vec> { let arity = ctor.arity(pcx); if arity == 0 { return Vec::new(); @@ -115,7 +115,7 @@ impl<'a, 'p, 'tcx> PatternColumn<'a, 'p, 'tcx> { #[instrument(level = "debug", skip(cx), ret)] fn collect_nonexhaustive_missing_variants<'a, 'p, 'tcx>( cx: MatchCtxt<'a, 'p, 'tcx>, - column: &PatternColumn<'a, 'p, 'tcx>, + column: &PatternColumn<'p, 'tcx>, ) -> Vec> { let Some(ty) = column.head_ty(cx) else { return Vec::new(); @@ -163,7 +163,7 @@ fn collect_nonexhaustive_missing_variants<'a, 'p, 'tcx>( pub(crate) fn lint_nonexhaustive_missing_variants<'a, 'p, 'tcx>( cx: MatchCtxt<'a, 'p, 'tcx>, arms: &[MatchArm<'p, 'tcx>], - pat_column: &PatternColumn<'a, 'p, 'tcx>, + pat_column: &PatternColumn<'p, 'tcx>, scrut_ty: Ty<'tcx>, ) { let rcx: &RustcMatchCheckCtxt<'_, '_> = cx.tycx; @@ -203,7 +203,7 @@ pub(crate) fn lint_nonexhaustive_missing_variants<'a, 'p, 'tcx>( }; use rustc_errors::DecorateLint; - let mut err = rcx.tcx.dcx().struct_span_warn(*arm.pat.data().unwrap(), ""); + let mut err = rcx.tcx.dcx().struct_span_warn(arm.pat.data().unwrap().span, ""); err.set_primary_message(decorator.msg()); decorator.decorate_lint(&mut err); err.emit(); @@ -216,7 +216,7 @@ pub(crate) fn lint_nonexhaustive_missing_variants<'a, 'p, 'tcx>( #[instrument(level = "debug", skip(cx))] pub(crate) fn lint_overlapping_range_endpoints<'a, 'p, 'tcx>( cx: MatchCtxt<'a, 'p, 'tcx>, - column: &PatternColumn<'a, 'p, 'tcx>, + column: &PatternColumn<'p, 'tcx>, ) { let Some(ty) = column.head_ty(cx) else { return; @@ -254,7 +254,7 @@ pub(crate) fn lint_overlapping_range_endpoints<'a, 'p, 'tcx>( // Iterate on patterns that contained `overlap`. 
for pat in column.iter() { let Constructor::IntRange(this_range) = pat.ctor() else { continue }; - let this_span = *pat.data().unwrap(); + let this_span = pat.data().unwrap().span; if this_range.is_singleton() { // Don't lint when one of the ranges is a singleton. continue; diff --git a/compiler/rustc_pattern_analysis/src/pat.rs b/compiler/rustc_pattern_analysis/src/pat.rs index 9efd3e864dac0..db41d2824a1ed 100644 --- a/compiler/rustc_pattern_analysis/src/pat.rs +++ b/compiler/rustc_pattern_analysis/src/pat.rs @@ -71,19 +71,17 @@ impl<'p, Cx: TypeCx> DeconstructedPat<'p, Cx> { self.data.as_ref() } - pub fn iter_fields<'a>( - &'a self, - ) -> impl Iterator> + Captures<'a> { + pub fn iter_fields(&self) -> impl Iterator> + Captures<'_> { self.fields.iter() } /// Specialize this pattern with a constructor. /// `other_ctor` can be different from `self.ctor`, but must be covered by it. - pub(crate) fn specialize<'a>( + pub(crate) fn specialize( &self, - pcx: &PlaceCtxt<'a, 'p, Cx>, + pcx: &PlaceCtxt<'_, 'p, Cx>, other_ctor: &Constructor, - ) -> SmallVec<[&'a DeconstructedPat<'p, Cx>; 2]> { + ) -> SmallVec<[&'p DeconstructedPat<'p, Cx>; 2]> { let wildcard_sub_tys = || { let tys = pcx.ctor_sub_tys(other_ctor); tys.iter() @@ -196,7 +194,7 @@ impl WitnessPat { self.ty } - pub fn iter_fields<'a>(&'a self) -> impl Iterator> { + pub fn iter_fields(&self) -> impl Iterator> { self.fields.iter() } } diff --git a/compiler/rustc_pattern_analysis/src/rustc.rs b/compiler/rustc_pattern_analysis/src/rustc.rs index a5a47724f3f02..b09d565f0768c 100644 --- a/compiler/rustc_pattern_analysis/src/rustc.rs +++ b/compiler/rustc_pattern_analysis/src/rustc.rs @@ -128,11 +128,11 @@ impl<'p, 'tcx> RustcMatchCheckCtxt<'p, 'tcx> { // In the cases of either a `#[non_exhaustive]` field list or a non-public field, we hide // uninhabited fields in order not to reveal the uninhabitedness of the whole variant. // This lists the fields we keep along with their types. - pub(crate) fn list_variant_nonhidden_fields<'a>( - &'a self, + pub(crate) fn list_variant_nonhidden_fields( + &self, ty: Ty<'tcx>, - variant: &'a VariantDef, - ) -> impl Iterator)> + Captures<'p> + Captures<'a> { + variant: &'tcx VariantDef, + ) -> impl Iterator)> + Captures<'p> + Captures<'_> { let cx = self; let ty::Adt(adt, args) = ty.kind() else { bug!() }; // Whether we must not match the fields of this variant exhaustively. @@ -366,7 +366,7 @@ impl<'p, 'tcx> RustcMatchCheckCtxt<'p, 'tcx> { | ty::FnPtr(_) | ty::Dynamic(_, _, _) | ty::Closure(_, _) - | ty::Coroutine(_, _, _) + | ty::Coroutine(_, _) | ty::Alias(_, _) | ty::Param(_) | ty::Error(_) => ConstructorSet::Unlistable, @@ -399,7 +399,7 @@ impl<'p, 'tcx> RustcMatchCheckCtxt<'p, 'tcx> { /// Note: the input patterns must have been lowered through /// `rustc_mir_build::thir::pattern::check_match::MatchVisitor::lower_pattern`. - pub fn lower_pat(&self, pat: &Pat<'tcx>) -> DeconstructedPat<'p, 'tcx> { + pub fn lower_pat(&self, pat: &'p Pat<'tcx>) -> DeconstructedPat<'p, 'tcx> { let singleton = |pat| std::slice::from_ref(self.pattern_arena.alloc(pat)); let cx = self; let ctor; @@ -540,7 +540,7 @@ impl<'p, 'tcx> RustcMatchCheckCtxt<'p, 'tcx> { // `Ref`), and has one field. That field has constructor `Str(value)` and no // subfields. // Note: `t` is `str`, not `&str`. 
- let subpattern = DeconstructedPat::new(Str(*value), &[], *t, pat.span); + let subpattern = DeconstructedPat::new(Str(*value), &[], *t, pat); ctor = Ref; fields = singleton(subpattern) } @@ -624,7 +624,7 @@ impl<'p, 'tcx> RustcMatchCheckCtxt<'p, 'tcx> { fields = &[]; } } - DeconstructedPat::new(ctor, fields, pat.ty, pat.span) + DeconstructedPat::new(ctor, fields, pat.ty, pat) } /// Convert back to a `thir::PatRangeBoundary` for diagnostic purposes. @@ -894,7 +894,7 @@ impl<'p, 'tcx> TypeCx for RustcMatchCheckCtxt<'p, 'tcx> { type VariantIdx = VariantIdx; type StrLit = Const<'tcx>; type ArmData = HirId; - type PatData = Span; + type PatData = &'p Pat<'tcx>; fn is_exhaustive_patterns_feature_on(&self) -> bool { self.tcx.features().exhaustive_patterns diff --git a/compiler/rustc_pattern_analysis/src/usefulness.rs b/compiler/rustc_pattern_analysis/src/usefulness.rs index b51b1a1f72255..d2e621a6b98eb 100644 --- a/compiler/rustc_pattern_analysis/src/usefulness.rs +++ b/compiler/rustc_pattern_analysis/src/usefulness.rs @@ -821,22 +821,21 @@ impl fmt::Display for ValidityConstraint { /// Represents a pattern-tuple under investigation. // The three lifetimes are: -// - 'a allocated by us // - 'p coming from the input // - Cx global compilation context #[derive(derivative::Derivative)] #[derivative(Clone(bound = ""))] -struct PatStack<'a, 'p, Cx: TypeCx> { +struct PatStack<'p, Cx: TypeCx> { // Rows of len 1 are very common, which is why `SmallVec[_; 2]` works well. - pats: SmallVec<[&'a DeconstructedPat<'p, Cx>; 2]>, + pats: SmallVec<[&'p DeconstructedPat<'p, Cx>; 2]>, /// Sometimes we know that as far as this row is concerned, the current case is already handled /// by a different, more general, case. When the case is irrelevant for all rows this allows us /// to skip a case entirely. This is purely an optimization. See at the top for details. relevant: bool, } -impl<'a, 'p, Cx: TypeCx> PatStack<'a, 'p, Cx> { - fn from_pattern(pat: &'a DeconstructedPat<'p, Cx>) -> Self { +impl<'p, Cx: TypeCx> PatStack<'p, Cx> { + fn from_pattern(pat: &'p DeconstructedPat<'p, Cx>) -> Self { PatStack { pats: smallvec![pat], relevant: true } } @@ -848,17 +847,17 @@ impl<'a, 'p, Cx: TypeCx> PatStack<'a, 'p, Cx> { self.pats.len() } - fn head(&self) -> &'a DeconstructedPat<'p, Cx> { + fn head(&self) -> &'p DeconstructedPat<'p, Cx> { self.pats[0] } - fn iter<'b>(&'b self) -> impl Iterator> + Captures<'b> { + fn iter(&self) -> impl Iterator> + Captures<'_> { self.pats.iter().copied() } // Recursively expand the first or-pattern into its subpatterns. Only useful if the pattern is // an or-pattern. Panics if `self` is empty. - fn expand_or_pat<'b>(&'b self) -> impl Iterator> + Captures<'b> { + fn expand_or_pat(&self) -> impl Iterator> + Captures<'_> { self.head().flatten_or_pat().into_iter().map(move |pat| { let mut new = self.clone(); new.pats[0] = pat; @@ -870,10 +869,10 @@ impl<'a, 'p, Cx: TypeCx> PatStack<'a, 'p, Cx> { /// Only call if `ctor.is_covered_by(self.head().ctor())` is true. fn pop_head_constructor( &self, - pcx: &PlaceCtxt<'a, 'p, Cx>, + pcx: &PlaceCtxt<'_, 'p, Cx>, ctor: &Constructor, ctor_is_relevant: bool, - ) -> PatStack<'a, 'p, Cx> { + ) -> PatStack<'p, Cx> { // We pop the head pattern and push the new fields extracted from the arguments of // `self.head()`. 
let mut new_pats = self.head().specialize(pcx, ctor); @@ -886,7 +885,7 @@ impl<'a, 'p, Cx: TypeCx> PatStack<'a, 'p, Cx> { } } -impl<'a, 'p, Cx: TypeCx> fmt::Debug for PatStack<'a, 'p, Cx> { +impl<'p, Cx: TypeCx> fmt::Debug for PatStack<'p, Cx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { // We pretty-print similarly to the `Debug` impl of `Matrix`. write!(f, "+")?; @@ -899,9 +898,9 @@ impl<'a, 'p, Cx: TypeCx> fmt::Debug for PatStack<'a, 'p, Cx> { /// A row of the matrix. #[derive(Clone)] -struct MatrixRow<'a, 'p, Cx: TypeCx> { +struct MatrixRow<'p, Cx: TypeCx> { // The patterns in the row. - pats: PatStack<'a, 'p, Cx>, + pats: PatStack<'p, Cx>, /// Whether the original arm had a guard. This is inherited when specializing. is_under_guard: bool, /// When we specialize, we remember which row of the original matrix produced a given row of the @@ -914,7 +913,7 @@ struct MatrixRow<'a, 'p, Cx: TypeCx> { useful: bool, } -impl<'a, 'p, Cx: TypeCx> MatrixRow<'a, 'p, Cx> { +impl<'p, Cx: TypeCx> MatrixRow<'p, Cx> { fn is_empty(&self) -> bool { self.pats.is_empty() } @@ -923,17 +922,17 @@ impl<'a, 'p, Cx: TypeCx> MatrixRow<'a, 'p, Cx> { self.pats.len() } - fn head(&self) -> &'a DeconstructedPat<'p, Cx> { + fn head(&self) -> &'p DeconstructedPat<'p, Cx> { self.pats.head() } - fn iter<'b>(&'b self) -> impl Iterator> + Captures<'b> { + fn iter(&self) -> impl Iterator> + Captures<'_> { self.pats.iter() } // Recursively expand the first or-pattern into its subpatterns. Only useful if the pattern is // an or-pattern. Panics if `self` is empty. - fn expand_or_pat<'b>(&'b self) -> impl Iterator> + Captures<'b> { + fn expand_or_pat(&self) -> impl Iterator> + Captures<'_> { self.pats.expand_or_pat().map(|patstack| MatrixRow { pats: patstack, parent_row: self.parent_row, @@ -946,11 +945,11 @@ impl<'a, 'p, Cx: TypeCx> MatrixRow<'a, 'p, Cx> { /// Only call if `ctor.is_covered_by(self.head().ctor())` is true. fn pop_head_constructor( &self, - pcx: &PlaceCtxt<'a, 'p, Cx>, + pcx: &PlaceCtxt<'_, 'p, Cx>, ctor: &Constructor, ctor_is_relevant: bool, parent_row: usize, - ) -> MatrixRow<'a, 'p, Cx> { + ) -> MatrixRow<'p, Cx> { MatrixRow { pats: self.pats.pop_head_constructor(pcx, ctor, ctor_is_relevant), parent_row, @@ -960,7 +959,7 @@ impl<'a, 'p, Cx: TypeCx> MatrixRow<'a, 'p, Cx> { } } -impl<'a, 'p, Cx: TypeCx> fmt::Debug for MatrixRow<'a, 'p, Cx> { +impl<'p, Cx: TypeCx> fmt::Debug for MatrixRow<'p, Cx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.pats.fmt(f) } @@ -977,22 +976,22 @@ impl<'a, 'p, Cx: TypeCx> fmt::Debug for MatrixRow<'a, 'p, Cx> { /// specializing `(,)` and `Some` on a pattern of type `(Option, bool)`, the first column of /// the matrix will correspond to `scrutinee.0.Some.0` and the second column to `scrutinee.1`. #[derive(Clone)] -struct Matrix<'a, 'p, Cx: TypeCx> { +struct Matrix<'p, Cx: TypeCx> { /// Vector of rows. The rows must form a rectangular 2D array. Moreover, all the patterns of /// each column must have the same type. Each column corresponds to a place within the /// scrutinee. - rows: Vec>, + rows: Vec>, /// Stores an extra fictitious row full of wildcards. Mostly used to keep track of the type of /// each column. This must obey the same invariants as the real rows. - wildcard_row: PatStack<'a, 'p, Cx>, + wildcard_row: PatStack<'p, Cx>, /// Track for each column/place whether it contains a known valid value. 
place_validity: SmallVec<[ValidityConstraint; 2]>, } -impl<'a, 'p, Cx: TypeCx> Matrix<'a, 'p, Cx> { +impl<'p, Cx: TypeCx> Matrix<'p, Cx> { /// Pushes a new row to the matrix. If the row starts with an or-pattern, this recursively /// expands it. Internal method, prefer [`Matrix::new`]. - fn expand_and_push(&mut self, row: MatrixRow<'a, 'p, Cx>) { + fn expand_and_push(&mut self, row: MatrixRow<'p, Cx>) { if !row.is_empty() && row.head().is_or_pat() { // Expand nested or-patterns. for new_row in row.expand_or_pat() { @@ -1005,8 +1004,8 @@ impl<'a, 'p, Cx: TypeCx> Matrix<'a, 'p, Cx> { /// Build a new matrix from an iterator of `MatchArm`s. fn new( - wildcard_arena: &'a TypedArena>, - arms: &'a [MatchArm<'p, Cx>], + wildcard_arena: &'p TypedArena>, + arms: &[MatchArm<'p, Cx>], scrut_ty: Cx::Ty, scrut_validity: ValidityConstraint, ) -> Self { @@ -1029,7 +1028,7 @@ impl<'a, 'p, Cx: TypeCx> Matrix<'a, 'p, Cx> { matrix } - fn head_ty(&self, mcx: MatchCtxt<'a, 'p, Cx>) -> Option { + fn head_ty(&self, mcx: MatchCtxt<'_, 'p, Cx>) -> Option { if self.column_count() == 0 { return None; } @@ -1042,33 +1041,31 @@ impl<'a, 'p, Cx: TypeCx> Matrix<'a, 'p, Cx> { self.wildcard_row.len() } - fn rows<'b>( - &'b self, - ) -> impl Iterator> + Clone + DoubleEndedIterator + ExactSizeIterator + fn rows( + &self, + ) -> impl Iterator> + Clone + DoubleEndedIterator + ExactSizeIterator { self.rows.iter() } - fn rows_mut<'b>( - &'b mut self, - ) -> impl Iterator> + DoubleEndedIterator + ExactSizeIterator + fn rows_mut( + &mut self, + ) -> impl Iterator> + DoubleEndedIterator + ExactSizeIterator { self.rows.iter_mut() } /// Iterate over the first pattern of each row. - fn heads<'b>( - &'b self, - ) -> impl Iterator> + Clone + Captures<'a> { + fn heads(&self) -> impl Iterator> + Clone + Captures<'_> { self.rows().map(|r| r.head()) } /// This computes `specialize(ctor, self)`. See top of the file for explanations. fn specialize_constructor( &self, - pcx: &PlaceCtxt<'a, 'p, Cx>, + pcx: &PlaceCtxt<'_, 'p, Cx>, ctor: &Constructor, ctor_is_relevant: bool, - ) -> Matrix<'a, 'p, Cx> { + ) -> Matrix<'p, Cx> { let wildcard_row = self.wildcard_row.pop_head_constructor(pcx, ctor, ctor_is_relevant); let new_validity = self.place_validity[0].specialize(ctor); let new_place_validity = std::iter::repeat(new_validity) @@ -1097,7 +1094,7 @@ impl<'a, 'p, Cx: TypeCx> Matrix<'a, 'p, Cx> { /// + _ + [_, _, tail @ ..] + /// | ✓ | ? 
| // column validity /// ``` -impl<'a, 'p, Cx: TypeCx> fmt::Debug for Matrix<'a, 'p, Cx> { +impl<'p, Cx: TypeCx> fmt::Debug for Matrix<'p, Cx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "\n")?; @@ -1336,7 +1333,7 @@ impl WitnessMatrix { #[instrument(level = "debug", skip(mcx, is_top_level), ret)] fn compute_exhaustiveness_and_usefulness<'a, 'p, Cx: TypeCx>( mcx: MatchCtxt<'a, 'p, Cx>, - matrix: &mut Matrix<'a, 'p, Cx>, + matrix: &mut Matrix<'p, Cx>, is_top_level: bool, ) -> WitnessMatrix { debug_assert!(matrix.rows().all(|r| r.len() == matrix.column_count())); diff --git a/compiler/rustc_query_impl/src/lib.rs b/compiler/rustc_query_impl/src/lib.rs index 59812efc32465..d5883f5281998 100644 --- a/compiler/rustc_query_impl/src/lib.rs +++ b/compiler/rustc_query_impl/src/lib.rs @@ -3,9 +3,6 @@ #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] #![feature(rustdoc_internals)] -// this shouldn't be necessary, but the check for `&mut _` is too naive and denies returning a function pointer that takes a mut ref -#![feature(const_mut_refs)] -#![feature(const_refs_to_cell)] #![feature(min_specialization)] #![feature(never_type)] #![feature(rustc_attrs)] diff --git a/compiler/rustc_query_impl/src/plumbing.rs b/compiler/rustc_query_impl/src/plumbing.rs index 0971f2d75da0f..f131a0f759320 100644 --- a/compiler/rustc_query_impl/src/plumbing.rs +++ b/compiler/rustc_query_impl/src/plumbing.rs @@ -315,8 +315,11 @@ pub(crate) fn create_query_frame< ty::print::with_forced_impl_filename_line!(do_describe(tcx, key)) ) ); - let description = - if tcx.sess.verbose() { format!("{description} [{name:?}]") } else { description }; + let description = if tcx.sess.verbose_internals() { + format!("{description} [{name:?}]") + } else { + description + }; let span = if kind == dep_graph::dep_kinds::def_span || with_no_queries() { // The `def_span` query is used to calculate `default_span`, // so exit to avoid infinite recursion. diff --git a/compiler/rustc_query_system/src/query/plumbing.rs b/compiler/rustc_query_system/src/query/plumbing.rs index 34f2c01f89098..51842664eeb70 100644 --- a/compiler/rustc_query_system/src/query/plumbing.rs +++ b/compiler/rustc_query_system/src/query/plumbing.rs @@ -44,6 +44,18 @@ enum QueryResult { Poisoned, } +impl QueryResult { + /// Unwraps the query job expecting that it has started. + fn expect_job(self) -> QueryJob { + match self { + Self::Started(job) => job, + Self::Poisoned => { + panic!("job for query failed to start and was poisoned") + } + } + } +} + impl QueryState where K: Eq + Hash + Copy + Debug, @@ -169,10 +181,7 @@ where let job = { let mut lock = state.active.lock_shard_by_value(&key); - match lock.remove(&key).unwrap() { - QueryResult::Started(job) => job, - QueryResult::Poisoned => panic!(), - } + lock.remove(&key).unwrap().expect_job() }; job.signal_complete(); @@ -190,10 +199,8 @@ where let state = self.state; let job = { let mut shard = state.active.lock_shard_by_value(&self.key); - let job = match shard.remove(&self.key).unwrap() { - QueryResult::Started(job) => job, - QueryResult::Poisoned => panic!(), - }; + let job = shard.remove(&self.key).unwrap().expect_job(); + shard.insert(self.key, QueryResult::Poisoned); job }; @@ -277,11 +284,14 @@ where // We didn't find the query result in the query cache. Check if it was // poisoned due to a panic instead. let lock = query.query_state(qcx).active.get_shard_by_value(&key).lock(); + match lock.get(&key) { - // The query we waited on panicked. 
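A standalone sketch of the refactor applied to `QueryResult` above: the repeated `match` on the started/poisoned states is folded into a single panicking accessor with a clearer message. Generic names are used here, not rustc's types.

```rust
#[allow(dead_code)]
enum JobState<T> {
    Started(T),
    Poisoned,
}

impl<T> JobState<T> {
    /// Unwraps the started job, panicking with a descriptive message if the
    /// job failed to start and was poisoned.
    fn expect_job(self) -> T {
        match self {
            Self::Started(job) => job,
            Self::Poisoned => panic!("job failed to start and was poisoned"),
        }
    }
}

fn main() {
    let state = JobState::Started("typeck");
    println!("completing {}", state.expect_job());
}
```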
Continue unwinding here. - Some(QueryResult::Poisoned) => FatalError.raise(), + Some(QueryResult::Poisoned) => { + panic!("query '{}' not cached due to poisoning", query.name()) + } _ => panic!( - "query result must in the cache or the query must be poisoned after a wait" + "query '{}' result must be in the cache or the query must be poisoned after a wait", + query.name() ), } }) diff --git a/compiler/rustc_resolve/Cargo.toml b/compiler/rustc_resolve/Cargo.toml index ff9d6d8739f29..a1a353ce0574e 100644 --- a/compiler/rustc_resolve/Cargo.toml +++ b/compiler/rustc_resolve/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" [dependencies] # tidy-alphabetical-start -bitflags = "1.2.1" +bitflags = "2.4.1" pulldown-cmark = { version = "0.9.3", default-features = false } rustc_arena = { path = "../rustc_arena" } rustc_ast = { path = "../rustc_ast" } diff --git a/compiler/rustc_resolve/src/ident.rs b/compiler/rustc_resolve/src/ident.rs index e559ca8e7ccac..3a31addb10933 100644 --- a/compiler/rustc_resolve/src/ident.rs +++ b/compiler/rustc_resolve/src/ident.rs @@ -377,6 +377,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { ignore_binding: Option>, ) -> Result, Determinacy> { bitflags::bitflags! { + #[derive(Clone, Copy)] struct Flags: u8 { const MACRO_RULES = 1 << 0; const MODULE = 1 << 1; diff --git a/compiler/rustc_resolve/src/late.rs b/compiler/rustc_resolve/src/late.rs index c3026e52430e1..a82f7bdfbf3fd 100644 --- a/compiler/rustc_resolve/src/late.rs +++ b/compiler/rustc_resolve/src/late.rs @@ -3076,7 +3076,16 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> { } let feed_visibility = |this: &mut Self, def_id| { - let vis = this.r.tcx.visibility(def_id).expect_local(); + let vis = this.r.tcx.visibility(def_id); + let vis = if vis.is_visible_locally() { + vis.expect_local() + } else { + this.r.dcx().span_delayed_bug( + span, + "error should be emitted when an unexpected trait item is used", + ); + rustc_middle::ty::Visibility::Public + }; this.r.feed_visibility(this.r.local_def_id(id), vis); }; diff --git a/compiler/rustc_session/Cargo.toml b/compiler/rustc_session/Cargo.toml index 1f51dd6c975eb..ba8f67982f51a 100644 --- a/compiler/rustc_session/Cargo.toml +++ b/compiler/rustc_session/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" [dependencies] # tidy-alphabetical-start -bitflags = "1.2.1" +bitflags = "2.4.1" getopts = "0.2" rustc_ast = { path = "../rustc_ast" } rustc_data_structures = { path = "../rustc_data_structures" } diff --git a/compiler/rustc_session/src/config.rs b/compiler/rustc_session/src/config.rs index d158163f4a463..89508c77639aa 100644 --- a/compiler/rustc_session/src/config.rs +++ b/compiler/rustc_session/src/config.rs @@ -1021,6 +1021,7 @@ impl OutputFilenames { bitflags::bitflags! { /// Scopes used to determined if it need to apply to --remap-path-prefix + #[derive(Clone, Copy, PartialEq, Eq, Hash)] pub struct RemapPathScopeComponents: u8 { /// Apply remappings to the expansion of std::file!() macro const MACRO = 1 << 0; @@ -1041,7 +1042,7 @@ bitflags::bitflags! { /// An alias for macro,unsplit-debuginfo,split-debuginfo-path. This /// ensures all paths in compiled executables or libraries are remapped /// but not elsewhere. 
- const OBJECT = Self::MACRO.bits | Self::UNSPLIT_DEBUGINFO.bits | Self::SPLIT_DEBUGINFO_PATH.bits; + const OBJECT = Self::MACRO.bits() | Self::UNSPLIT_DEBUGINFO.bits() | Self::SPLIT_DEBUGINFO_PATH.bits(); } } @@ -1116,6 +1117,7 @@ impl Default for Options { working_dir: RealFileName::LocalPath(std::env::current_dir().unwrap()), color: ColorConfig::Auto, logical_env: FxIndexMap::default(), + verbose: false, } } } @@ -2916,6 +2918,8 @@ pub fn build_session_options(early_dcx: &mut EarlyDiagCtxt, matches: &getopts::M RealFileName::LocalPath(path.into_owned()) }; + let verbose = matches.opt_present("verbose") || unstable_opts.verbose_internals; + Options { assert_incr_state, crate_types, @@ -2957,6 +2961,7 @@ pub fn build_session_options(early_dcx: &mut EarlyDiagCtxt, matches: &getopts::M working_dir, color, logical_env, + verbose, } } diff --git a/compiler/rustc_session/src/options.rs b/compiler/rustc_session/src/options.rs index 10a4bdb94d46f..8274fd05bc057 100644 --- a/compiler/rustc_session/src/options.rs +++ b/compiler/rustc_session/src/options.rs @@ -223,6 +223,8 @@ top_level_options!( /// The (potentially remapped) working directory working_dir: RealFileName [TRACKED], color: ColorConfig [UNTRACKED], + + verbose: bool [UNTRACKED], } ); @@ -1991,8 +1993,8 @@ written to standard error output)"), "use legacy .ctors section for initializers rather than .init_array"), validate_mir: bool = (false, parse_bool, [UNTRACKED], "validate MIR after each transformation"), - #[rustc_lint_opt_deny_field_access("use `Session::verbose` instead of this field")] - verbose: bool = (false, parse_bool, [UNTRACKED], + #[rustc_lint_opt_deny_field_access("use `Session::verbose_internals` instead of this field")] + verbose_internals: bool = (false, parse_bool, [UNTRACKED], "in general, enable more debug printouts (default: no)"), #[rustc_lint_opt_deny_field_access("use `Session::verify_llvm_ir` instead of this field")] verify_llvm_ir: bool = (false, parse_bool, [TRACKED], diff --git a/compiler/rustc_session/src/session.rs b/compiler/rustc_session/src/session.rs index 53dd48ea272f8..c80990402a980 100644 --- a/compiler/rustc_session/src/session.rs +++ b/compiler/rustc_session/src/session.rs @@ -665,8 +665,8 @@ impl Session { // JUSTIFICATION: defn of the suggested wrapper fns #[allow(rustc::bad_opt_access)] impl Session { - pub fn verbose(&self) -> bool { - self.opts.unstable_opts.verbose + pub fn verbose_internals(&self) -> bool { + self.opts.unstable_opts.verbose_internals } pub fn print_llvm_stats(&self) -> bool { @@ -1274,7 +1274,10 @@ fn validate_commandline_args_with_session_available(sess: &Session) { } // Cannot enable crt-static with sanitizers on Linux - if sess.crt_static(None) && !sess.opts.unstable_opts.sanitizer.is_empty() { + if sess.crt_static(None) + && !sess.opts.unstable_opts.sanitizer.is_empty() + && !sess.target.is_like_msvc + { sess.dcx().emit_err(errors::CannotEnableCrtStaticLinux); } diff --git a/compiler/rustc_smir/src/rustc_internal/internal.rs b/compiler/rustc_smir/src/rustc_internal/internal.rs index bbc98af45c0d7..17162d0de25c2 100644 --- a/compiler/rustc_smir/src/rustc_internal/internal.rs +++ b/compiler/rustc_smir/src/rustc_internal/internal.rs @@ -104,11 +104,9 @@ impl<'tcx> RustcInternal<'tcx> for RigidTy { RigidTy::Closure(def, args) => { rustc_ty::TyKind::Closure(def.0.internal(tables), args.internal(tables)) } - RigidTy::Coroutine(def, args, mov) => rustc_ty::TyKind::Coroutine( - def.0.internal(tables), - args.internal(tables), - mov.internal(tables), - ), + 
RigidTy::Coroutine(def, args, _mov) => { + rustc_ty::TyKind::Coroutine(def.0.internal(tables), args.internal(tables)) + } RigidTy::CoroutineWitness(def, args) => { rustc_ty::TyKind::CoroutineWitness(def.0.internal(tables), args.internal(tables)) } diff --git a/compiler/rustc_smir/src/rustc_smir/convert/mir.rs b/compiler/rustc_smir/src/rustc_smir/convert/mir.rs index 49bf2192f8288..c5fb6f7a26f18 100644 --- a/compiler/rustc_smir/src/rustc_smir/convert/mir.rs +++ b/compiler/rustc_smir/src/rustc_smir/convert/mir.rs @@ -531,11 +531,11 @@ impl<'tcx> Stable<'tcx> for mir::AggregateKind<'tcx> { generic_arg.stable(tables), ) } - mir::AggregateKind::Coroutine(def_id, generic_arg, movability) => { + mir::AggregateKind::Coroutine(def_id, generic_arg) => { stable_mir::mir::AggregateKind::Coroutine( tables.coroutine_def(*def_id), generic_arg.stable(tables), - movability.stable(tables), + tables.tcx.coroutine_movability(*def_id).stable(tables), ) } } diff --git a/compiler/rustc_smir/src/rustc_smir/convert/mod.rs b/compiler/rustc_smir/src/rustc_smir/convert/mod.rs index 5f505ac181cad..2446671770ecb 100644 --- a/compiler/rustc_smir/src/rustc_smir/convert/mod.rs +++ b/compiler/rustc_smir/src/rustc_smir/convert/mod.rs @@ -42,7 +42,7 @@ impl<'tcx> Stable<'tcx> for rustc_hir::CoroutineKind { type T = stable_mir::mir::CoroutineKind; fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T { use rustc_hir::{CoroutineDesugaring, CoroutineKind}; - match self { + match *self { CoroutineKind::Desugared(CoroutineDesugaring::Async, source) => { stable_mir::mir::CoroutineKind::Desugared( stable_mir::mir::CoroutineDesugaring::Async, @@ -55,7 +55,9 @@ impl<'tcx> Stable<'tcx> for rustc_hir::CoroutineKind { source.stable(tables), ) } - CoroutineKind::Coroutine => stable_mir::mir::CoroutineKind::Coroutine, + CoroutineKind::Coroutine(movability) => { + stable_mir::mir::CoroutineKind::Coroutine(movability.stable(tables)) + } CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, source) => { stable_mir::mir::CoroutineKind::Desugared( stable_mir::mir::CoroutineDesugaring::AsyncGen, diff --git a/compiler/rustc_smir/src/rustc_smir/convert/ty.rs b/compiler/rustc_smir/src/rustc_smir/convert/ty.rs index cbdddc3007273..f0f1d798d44b4 100644 --- a/compiler/rustc_smir/src/rustc_smir/convert/ty.rs +++ b/compiler/rustc_smir/src/rustc_smir/convert/ty.rs @@ -386,10 +386,10 @@ impl<'tcx> Stable<'tcx> for ty::TyKind<'tcx> { tables.closure_def(*def_id), generic_args.stable(tables), )), - ty::Coroutine(def_id, generic_args, movability) => TyKind::RigidTy(RigidTy::Coroutine( + ty::Coroutine(def_id, generic_args) => TyKind::RigidTy(RigidTy::Coroutine( tables.coroutine_def(*def_id), generic_args.stable(tables), - movability.stable(tables), + tables.tcx.coroutine_movability(*def_id).stable(tables), )), ty::Never => TyKind::RigidTy(RigidTy::Never), ty::Tuple(fields) => { diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index 95106cc64c129..9af81e0630318 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -498,6 +498,7 @@ symbols! { cfg_panic, cfg_relocation_model, cfg_sanitize, + cfg_sanitizer_cfi, cfg_target_abi, cfg_target_compact, cfg_target_feature, @@ -1040,6 +1041,7 @@ symbols! 
{ mir_offset, mir_retag, mir_return, + mir_return_to, mir_set_discriminant, mir_static, mir_static_mut, diff --git a/compiler/rustc_symbol_mangling/Cargo.toml b/compiler/rustc_symbol_mangling/Cargo.toml index ff3f1ad646fab..0ce522c9cabca 100644 --- a/compiler/rustc_symbol_mangling/Cargo.toml +++ b/compiler/rustc_symbol_mangling/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" [dependencies] # tidy-alphabetical-start -bitflags = "1.2.1" +bitflags = "2.4.1" punycode = "0.4.0" rustc-demangle = "0.1.21" rustc_data_structures = { path = "../rustc_data_structures" } diff --git a/compiler/rustc_symbol_mangling/src/legacy.rs b/compiler/rustc_symbol_mangling/src/legacy.rs index 0178ff53b24a3..4a5f58443bc29 100644 --- a/compiler/rustc_symbol_mangling/src/legacy.rs +++ b/compiler/rustc_symbol_mangling/src/legacy.rs @@ -211,10 +211,10 @@ impl<'tcx> Printer<'tcx> for SymbolPrinter<'tcx> { ty::FnDef(def_id, args) | ty::Alias(ty::Projection | ty::Opaque, ty::AliasTy { def_id, args, .. }) | ty::Closure(def_id, args) - | ty::Coroutine(def_id, args, _) => self.print_def_path(def_id, args), + | ty::Coroutine(def_id, args) => self.print_def_path(def_id, args), // The `pretty_print_type` formatting of array size depends on - // -Zverbose flag, so we cannot reuse it here. + // -Zverbose-internals flag, so we cannot reuse it here. ty::Array(ty, size) => { self.write_str("[")?; self.print_type(ty)?; @@ -255,7 +255,7 @@ impl<'tcx> Printer<'tcx> for SymbolPrinter<'tcx> { // only print integers match (ct.kind(), ct.ty().kind()) { (ty::ConstKind::Value(ty::ValTree::Leaf(scalar)), ty::Int(_) | ty::Uint(_)) => { - // The `pretty_print_const` formatting depends on -Zverbose + // The `pretty_print_const` formatting depends on -Zverbose-internals // flag, so we cannot reuse it here. let signed = matches!(ct.ty().kind(), ty::Int(_)); write!( diff --git a/compiler/rustc_symbol_mangling/src/typeid.rs b/compiler/rustc_symbol_mangling/src/typeid.rs index cda16e3a3f566..838d9d774b20c 100644 --- a/compiler/rustc_symbol_mangling/src/typeid.rs +++ b/compiler/rustc_symbol_mangling/src/typeid.rs @@ -11,6 +11,7 @@ use twox_hash::XxHash64; bitflags! { /// Options for typeid_for_fnabi and typeid_for_fnsig. + #[derive(Clone, Copy, Debug)] pub struct TypeIdOptions: u32 { const GENERALIZE_POINTERS = 1; const GENERALIZE_REPR_C = 2; diff --git a/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs b/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs index d2962b2968b37..0cc82ac7506cb 100644 --- a/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs +++ b/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs @@ -895,8 +895,8 @@ fn transform_ty<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, options: TransformTyOptio ty = Ty::new_closure(tcx, *def_id, transform_args(tcx, args, options)); } - ty::Coroutine(def_id, args, movability) => { - ty = Ty::new_coroutine(tcx, *def_id, transform_args(tcx, args, options), *movability); + ty::Coroutine(def_id, args) => { + ty = Ty::new_coroutine(tcx, *def_id, transform_args(tcx, args, options)); } ty::Ref(region, ty0, ..) => { diff --git a/compiler/rustc_symbol_mangling/src/v0.rs b/compiler/rustc_symbol_mangling/src/v0.rs index e002e345ae689..e89a640767f22 100644 --- a/compiler/rustc_symbol_mangling/src/v0.rs +++ b/compiler/rustc_symbol_mangling/src/v0.rs @@ -427,7 +427,7 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { | ty::FnDef(def_id, args) | ty::Alias(ty::Projection | ty::Opaque, ty::AliasTy { def_id, args, .. 
}) | ty::Closure(def_id, args) - | ty::Coroutine(def_id, args, _) => { + | ty::Coroutine(def_id, args) => { self.print_def_path(def_id, args)?; } ty::Foreign(def_id) => { diff --git a/compiler/rustc_target/Cargo.toml b/compiler/rustc_target/Cargo.toml index 94dfeb12dc98a..2cb8ac7e8bfb3 100644 --- a/compiler/rustc_target/Cargo.toml +++ b/compiler/rustc_target/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" [dependencies] # tidy-alphabetical-start -bitflags = "1.2.1" +bitflags = "2.4.1" rustc_abi = { path = "../rustc_abi" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_feature = { path = "../rustc_feature" } diff --git a/compiler/rustc_target/src/abi/call/mod.rs b/compiler/rustc_target/src/abi/call/mod.rs index e9730947389fe..fafc10e71635a 100644 --- a/compiler/rustc_target/src/abi/call/mod.rs +++ b/compiler/rustc_target/src/abi/call/mod.rs @@ -93,9 +93,10 @@ pub use attr_impl::ArgAttribute; #[allow(unused)] mod attr_impl { // The subset of llvm::Attribute needed for arguments, packed into a bitfield. + #[derive(Clone, Copy, Default, Hash, PartialEq, Eq, HashStable_Generic)] + pub struct ArgAttribute(u8); bitflags::bitflags! { - #[derive(Default, HashStable_Generic)] - pub struct ArgAttribute: u8 { + impl ArgAttribute: u8 { const NoAlias = 1 << 1; const NoCapture = 1 << 2; const NonNull = 1 << 3; @@ -104,6 +105,7 @@ mod attr_impl { const NoUndef = 1 << 6; } } + rustc_data_structures::external_bitflags_debug! { ArgAttribute } } /// Sometimes an ABI requires small integers to be extended to a full or partial register. This enum diff --git a/compiler/rustc_target/src/asm/s390x.rs b/compiler/rustc_target/src/asm/s390x.rs index 0a50064f58755..b8afeb824d847 100644 --- a/compiler/rustc_target/src/asm/s390x.rs +++ b/compiler/rustc_target/src/asm/s390x.rs @@ -6,6 +6,7 @@ use std::fmt; def_reg_class! { S390x S390xInlineAsmRegClass { reg, + reg_addr, freg, } } @@ -36,7 +37,7 @@ impl S390xInlineAsmRegClass { arch: InlineAsmArch, ) -> &'static [(InlineAsmType, Option)] { match (self, arch) { - (Self::reg, _) => types! { _: I8, I16, I32, I64; }, + (Self::reg | Self::reg_addr, _) => types! { _: I8, I16, I32, I64; }, (Self::freg, _) => types! { _: F32, F64; }, } } @@ -45,19 +46,19 @@ impl S390xInlineAsmRegClass { def_regs! 
{ S390x S390xInlineAsmReg S390xInlineAsmRegClass { r0: reg = ["r0"], - r1: reg = ["r1"], - r2: reg = ["r2"], - r3: reg = ["r3"], - r4: reg = ["r4"], - r5: reg = ["r5"], - r6: reg = ["r6"], - r7: reg = ["r7"], - r8: reg = ["r8"], - r9: reg = ["r9"], - r10: reg = ["r10"], - r12: reg = ["r12"], - r13: reg = ["r13"], - r14: reg = ["r14"], + r1: reg, reg_addr = ["r1"], + r2: reg, reg_addr = ["r2"], + r3: reg, reg_addr = ["r3"], + r4: reg, reg_addr = ["r4"], + r5: reg, reg_addr = ["r5"], + r6: reg, reg_addr = ["r6"], + r7: reg, reg_addr = ["r7"], + r8: reg, reg_addr = ["r8"], + r9: reg, reg_addr = ["r9"], + r10: reg, reg_addr = ["r10"], + r12: reg, reg_addr = ["r12"], + r13: reg, reg_addr = ["r13"], + r14: reg, reg_addr = ["r14"], f0: freg = ["f0"], f1: freg = ["f1"], f2: freg = ["f2"], diff --git a/compiler/rustc_target/src/spec/mod.rs b/compiler/rustc_target/src/spec/mod.rs index 4789a9be151e7..9d25388b90fd0 100644 --- a/compiler/rustc_target/src/spec/mod.rs +++ b/compiler/rustc_target/src/spec/mod.rs @@ -39,7 +39,6 @@ use crate::abi::{Endian, Integer, Size, TargetDataLayout, TargetDataLayoutErrors use crate::json::{Json, ToJson}; use crate::spec::abi::{lookup as lookup_abi, Abi}; use crate::spec::crt_objects::CrtObjects; -use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_fs_util::try_canonicalize; use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; use rustc_span::symbol::{kw, sym, Symbol}; @@ -592,7 +591,7 @@ impl LinkSelfContainedDefault { } bitflags::bitflags! { - #[derive(Default)] + #[derive(Clone, Copy, PartialEq, Eq, Default)] /// The `-C link-self-contained` components that can individually be enabled or disabled. pub struct LinkSelfContainedComponents: u8 { /// CRT objects (e.g. on `windows-gnu`, `musl`, `wasi` targets) @@ -609,6 +608,7 @@ bitflags::bitflags! { const MINGW = 1 << 5; } } +rustc_data_structures::external_bitflags_debug! { LinkSelfContainedComponents } impl LinkSelfContainedComponents { /// Parses a single `-Clink-self-contained` well-known component, not a set of flags. @@ -667,19 +667,6 @@ impl LinkSelfContainedComponents { } } -impl IntoIterator for LinkSelfContainedComponents { - type Item = LinkSelfContainedComponents; - type IntoIter = std::vec::IntoIter; - - fn into_iter(self) -> Self::IntoIter { - LinkSelfContainedComponents::all_components() - .into_iter() - .filter(|&s| self.contains(s)) - .collect::>() - .into_iter() - } -} - impl ToJson for LinkSelfContainedComponents { fn to_json(&self) -> Json { let components: Vec<_> = Self::all_components() @@ -1219,9 +1206,10 @@ impl ToJson for StackProbeType { } } +#[derive(Default, Clone, Copy, PartialEq, Eq, Hash, Encodable, Decodable, HashStable_Generic)] +pub struct SanitizerSet(u16); bitflags::bitflags! { - #[derive(Default, Encodable, Decodable)] - pub struct SanitizerSet: u16 { + impl SanitizerSet: u16 { const ADDRESS = 1 << 0; const LEAK = 1 << 1; const MEMORY = 1 << 2; @@ -1235,6 +1223,7 @@ bitflags::bitflags! { const SAFESTACK = 1 << 10; } } +rustc_data_structures::external_bitflags_debug! 
{ SanitizerSet } impl SanitizerSet { /// Return sanitizer's name @@ -1274,38 +1263,6 @@ impl fmt::Display for SanitizerSet { } } -impl IntoIterator for SanitizerSet { - type Item = SanitizerSet; - type IntoIter = std::vec::IntoIter; - - fn into_iter(self) -> Self::IntoIter { - [ - SanitizerSet::ADDRESS, - SanitizerSet::CFI, - SanitizerSet::KCFI, - SanitizerSet::LEAK, - SanitizerSet::MEMORY, - SanitizerSet::MEMTAG, - SanitizerSet::SHADOWCALLSTACK, - SanitizerSet::THREAD, - SanitizerSet::HWADDRESS, - SanitizerSet::KERNELADDRESS, - SanitizerSet::SAFESTACK, - ] - .iter() - .copied() - .filter(|&s| self.contains(s)) - .collect::>() - .into_iter() - } -} - -impl HashStable for SanitizerSet { - fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { - self.bits().hash_stable(ctx, hasher); - } -} - impl ToJson for SanitizerSet { fn to_json(&self) -> Json { self.into_iter() diff --git a/compiler/rustc_target/src/spec/targets/i686_pc_windows_msvc.rs b/compiler/rustc_target/src/spec/targets/i686_pc_windows_msvc.rs index ba80c23196e1d..5abc3017bf80c 100644 --- a/compiler/rustc_target/src/spec/targets/i686_pc_windows_msvc.rs +++ b/compiler/rustc_target/src/spec/targets/i686_pc_windows_msvc.rs @@ -1,9 +1,10 @@ -use crate::spec::{base, LinkerFlavor, Lld, Target}; +use crate::spec::{base, LinkerFlavor, Lld, SanitizerSet, Target}; pub fn target() -> Target { let mut base = base::windows_msvc::opts(); base.cpu = "pentium4".into(); base.max_atomic_width = Some(64); + base.supported_sanitizers = SanitizerSet::ADDRESS; base.add_pre_link_args( LinkerFlavor::Msvc(Lld::No), diff --git a/compiler/rustc_target/src/spec/targets/i686_win7_windows_msvc.rs b/compiler/rustc_target/src/spec/targets/i686_win7_windows_msvc.rs index ba80c23196e1d..5b91682e168ab 100644 --- a/compiler/rustc_target/src/spec/targets/i686_win7_windows_msvc.rs +++ b/compiler/rustc_target/src/spec/targets/i686_win7_windows_msvc.rs @@ -4,6 +4,7 @@ pub fn target() -> Target { let mut base = base::windows_msvc::opts(); base.cpu = "pentium4".into(); base.max_atomic_width = Some(64); + base.vendor = "win7".into(); base.add_pre_link_args( LinkerFlavor::Msvc(Lld::No), diff --git a/compiler/rustc_target/src/spec/targets/x86_64_pc_windows_msvc.rs b/compiler/rustc_target/src/spec/targets/x86_64_pc_windows_msvc.rs index 7d6276a0c2d57..3a4da91c2443f 100644 --- a/compiler/rustc_target/src/spec/targets/x86_64_pc_windows_msvc.rs +++ b/compiler/rustc_target/src/spec/targets/x86_64_pc_windows_msvc.rs @@ -1,10 +1,11 @@ -use crate::spec::{base, Target}; +use crate::spec::{base, SanitizerSet, Target}; pub fn target() -> Target { let mut base = base::windows_msvc::opts(); base.cpu = "x86-64".into(); base.plt_by_default = false; base.max_atomic_width = Some(64); + base.supported_sanitizers = SanitizerSet::ADDRESS; Target { llvm_target: "x86_64-pc-windows-msvc".into(), diff --git a/compiler/rustc_trait_selection/Cargo.toml b/compiler/rustc_trait_selection/Cargo.toml index 29c0d8b5ff172..1883099d345b9 100644 --- a/compiler/rustc_trait_selection/Cargo.toml +++ b/compiler/rustc_trait_selection/Cargo.toml @@ -5,6 +5,7 @@ edition = "2021" [dependencies] # tidy-alphabetical-start +itertools = "0.11.0" rustc_ast = { path = "../rustc_ast" } rustc_attr = { path = "../rustc_attr" } rustc_data_structures = { path = "../rustc_data_structures" } diff --git a/compiler/rustc_trait_selection/src/solve/assembly/mod.rs b/compiler/rustc_trait_selection/src/solve/assembly/mod.rs index 81a766f24b048..caf9470b4c646 100644 --- 
a/compiler/rustc_trait_selection/src/solve/assembly/mod.rs +++ b/compiler/rustc_trait_selection/src/solve/assembly/mod.rs @@ -432,7 +432,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { | ty::FnPtr(_) | ty::Dynamic(_, _, _) | ty::Closure(_, _) - | ty::Coroutine(_, _, _) + | ty::Coroutine(_, _) | ty::Never | ty::Tuple(_) => { let simp = diff --git a/compiler/rustc_trait_selection/src/solve/assembly/structural_traits.rs b/compiler/rustc_trait_selection/src/solve/assembly/structural_traits.rs index f442e2a08a811..274a75a125c58 100644 --- a/compiler/rustc_trait_selection/src/solve/assembly/structural_traits.rs +++ b/compiler/rustc_trait_selection/src/solve/assembly/structural_traits.rs @@ -57,7 +57,7 @@ pub(in crate::solve) fn instantiate_constituent_tys_for_auto_trait<'tcx>( ty::Closure(_, args) => Ok(vec![args.as_closure().tupled_upvars_ty()]), - ty::Coroutine(_, args, _) => { + ty::Coroutine(_, args) => { let coroutine_args = args.as_coroutine(); Ok(vec![coroutine_args.tupled_upvars_ty(), coroutine_args.witness()]) } @@ -177,7 +177,6 @@ pub(in crate::solve) fn instantiate_constituent_tys_for_copy_clone_trait<'tcx>( ty::Dynamic(..) | ty::Str | ty::Slice(_) - | ty::Coroutine(_, _, Movability::Static) | ty::Foreign(..) | ty::Ref(_, _, Mutability::Mut) | ty::Adt(_, _) @@ -194,14 +193,17 @@ pub(in crate::solve) fn instantiate_constituent_tys_for_copy_clone_trait<'tcx>( ty::Closure(_, args) => Ok(vec![args.as_closure().tupled_upvars_ty()]), - ty::Coroutine(_, args, Movability::Movable) => { - if ecx.tcx().features().coroutine_clone { - let coroutine = args.as_coroutine(); - Ok(vec![coroutine.tupled_upvars_ty(), coroutine.witness()]) - } else { - Err(NoSolution) + ty::Coroutine(def_id, args) => match ecx.tcx().coroutine_movability(def_id) { + Movability::Static => Err(NoSolution), + Movability::Movable => { + if ecx.tcx().features().coroutine_clone { + let coroutine = args.as_coroutine(); + Ok(vec![coroutine.tupled_upvars_ty(), coroutine.witness()]) + } else { + Err(NoSolution) + } } - } + }, ty::CoroutineWitness(def_id, args) => Ok(ecx .tcx() @@ -278,7 +280,7 @@ pub(in crate::solve) fn extract_tupled_inputs_and_output_from_callable<'tcx>( | ty::RawPtr(_) | ty::Ref(_, _, _) | ty::Dynamic(_, _, _) - | ty::Coroutine(_, _, _) + | ty::Coroutine(_, _) | ty::CoroutineWitness(..) 
| ty::Never | ty::Tuple(_) diff --git a/compiler/rustc_trait_selection/src/solve/normalizes_to/mod.rs b/compiler/rustc_trait_selection/src/solve/normalizes_to/mod.rs index bac08f6588f1c..ccee6f8eb29b9 100644 --- a/compiler/rustc_trait_selection/src/solve/normalizes_to/mod.rs +++ b/compiler/rustc_trait_selection/src/solve/normalizes_to/mod.rs @@ -468,7 +468,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { goal: Goal<'tcx, Self>, ) -> QueryResult<'tcx> { let self_ty = goal.predicate.self_ty(); - let ty::Coroutine(def_id, args, _) = *self_ty.kind() else { + let ty::Coroutine(def_id, args) = *self_ty.kind() else { return Err(NoSolution); }; @@ -499,7 +499,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { goal: Goal<'tcx, Self>, ) -> QueryResult<'tcx> { let self_ty = goal.predicate.self_ty(); - let ty::Coroutine(def_id, args, _) = *self_ty.kind() else { + let ty::Coroutine(def_id, args) = *self_ty.kind() else { return Err(NoSolution); }; @@ -530,7 +530,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { goal: Goal<'tcx, Self>, ) -> QueryResult<'tcx> { let self_ty = goal.predicate.self_ty(); - let ty::Coroutine(def_id, args, _) = *self_ty.kind() else { + let ty::Coroutine(def_id, args) = *self_ty.kind() else { return Err(NoSolution); }; @@ -564,7 +564,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> { goal: Goal<'tcx, Self>, ) -> QueryResult<'tcx> { let self_ty = goal.predicate.self_ty(); - let ty::Coroutine(def_id, args, _) = *self_ty.kind() else { + let ty::Coroutine(def_id, args) = *self_ty.kind() else { return Err(NoSolution); }; diff --git a/compiler/rustc_trait_selection/src/solve/trait_goals.rs b/compiler/rustc_trait_selection/src/solve/trait_goals.rs index ac3ffd2d6c2c9..be07927568446 100644 --- a/compiler/rustc_trait_selection/src/solve/trait_goals.rs +++ b/compiler/rustc_trait_selection/src/solve/trait_goals.rs @@ -337,7 +337,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { return Err(NoSolution); } - let ty::Coroutine(def_id, _, _) = *goal.predicate.self_ty().kind() else { + let ty::Coroutine(def_id, _) = *goal.predicate.self_ty().kind() else { return Err(NoSolution); }; @@ -361,7 +361,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { return Err(NoSolution); } - let ty::Coroutine(def_id, _, _) = *goal.predicate.self_ty().kind() else { + let ty::Coroutine(def_id, _) = *goal.predicate.self_ty().kind() else { return Err(NoSolution); }; @@ -385,7 +385,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { return Err(NoSolution); } - let ty::Coroutine(def_id, _, _) = *goal.predicate.self_ty().kind() else { + let ty::Coroutine(def_id, _) = *goal.predicate.self_ty().kind() else { return Err(NoSolution); }; @@ -410,7 +410,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { } let self_ty = goal.predicate.self_ty(); - let ty::Coroutine(def_id, args, _) = *self_ty.kind() else { + let ty::Coroutine(def_id, args) = *self_ty.kind() else { return Err(NoSolution); }; @@ -927,10 +927,10 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { // Coroutines have one special built-in candidate, `Unpin`, which // takes precedence over the structural auto trait candidate being // assembled. 
- ty::Coroutine(_, _, movability) + ty::Coroutine(def_id, _) if Some(goal.predicate.def_id()) == self.tcx().lang_items().unpin_trait() => { - match movability { + match self.tcx().coroutine_movability(def_id) { Movability::Static => Some(Err(NoSolution)), Movability::Movable => { Some(self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)) @@ -959,7 +959,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { | ty::FnDef(_, _) | ty::FnPtr(_) | ty::Closure(_, _) - | ty::Coroutine(_, _, _) + | ty::Coroutine(_, _) | ty::CoroutineWitness(..) | ty::Never | ty::Tuple(_) diff --git a/compiler/rustc_trait_selection/src/traits/error_reporting/on_unimplemented.rs b/compiler/rustc_trait_selection/src/traits/error_reporting/on_unimplemented.rs index 13ac2e26e4080..52f91d282f0b6 100644 --- a/compiler/rustc_trait_selection/src/traits/error_reporting/on_unimplemented.rs +++ b/compiler/rustc_trait_selection/src/traits/error_reporting/on_unimplemented.rs @@ -8,7 +8,7 @@ use rustc_attr as attr; use rustc_data_structures::fx::FxHashMap; use rustc_errors::{struct_span_err, ErrorGuaranteed}; use rustc_hir as hir; -use rustc_hir::def_id::DefId; +use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_middle::ty::GenericArgsRef; use rustc_middle::ty::{self, GenericParamDefKind, TyCtxt}; use rustc_parse_format::{ParseMode, Parser, Piece, Position}; @@ -32,7 +32,7 @@ pub trait TypeErrCtxtExt<'tcx> { ) -> Option<(DefId, GenericArgsRef<'tcx>)>; /*private*/ - fn describe_enclosure(&self, hir_id: hir::HirId) -> Option<&'static str>; + fn describe_enclosure(&self, def_id: LocalDefId) -> Option<&'static str>; fn on_unimplemented_note( &self, @@ -101,43 +101,19 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> { /// Used to set on_unimplemented's `ItemContext` /// to be the enclosing (async) block/function/closure - fn describe_enclosure(&self, hir_id: hir::HirId) -> Option<&'static str> { - let hir = self.tcx.hir(); - let node = self.tcx.opt_hir_node(hir_id)?; - match &node { - hir::Node::Item(hir::Item { kind: hir::ItemKind::Fn(sig, _, body_id), .. }) => { - self.describe_coroutine(*body_id).or_else(|| { - Some(match sig.header { - hir::FnHeader { asyncness: hir::IsAsync::Async(_), .. } => { - "an async function" - } - _ => "a function", - }) - }) + fn describe_enclosure(&self, def_id: LocalDefId) -> Option<&'static str> { + match self.tcx.opt_hir_node_by_def_id(def_id)? { + hir::Node::Item(hir::Item { kind: hir::ItemKind::Fn(..), .. }) => Some("a function"), + hir::Node::TraitItem(hir::TraitItem { kind: hir::TraitItemKind::Fn(..), .. }) => { + Some("a trait method") + } + hir::Node::ImplItem(hir::ImplItem { kind: hir::ImplItemKind::Fn(..), .. }) => { + Some("a method") } - hir::Node::TraitItem(hir::TraitItem { - kind: hir::TraitItemKind::Fn(_, hir::TraitFn::Provided(body_id)), - .. - }) => self.describe_coroutine(*body_id).or_else(|| Some("a trait method")), - hir::Node::ImplItem(hir::ImplItem { - kind: hir::ImplItemKind::Fn(sig, body_id), - .. - }) => self.describe_coroutine(*body_id).or_else(|| { - Some(match sig.header { - hir::FnHeader { asyncness: hir::IsAsync::Async(_), .. } => "an async method", - _ => "a method", - }) - }), hir::Node::Expr(hir::Expr { - kind: hir::ExprKind::Closure(hir::Closure { body, movability, .. }), + kind: hir::ExprKind::Closure(hir::Closure { kind, .. }), .. - }) => self.describe_coroutine(*body).or_else(|| { - Some(if movability.is_some() { "an async closure" } else { "a closure" }) - }), - hir::Node::Expr(hir::Expr { .. 
}) => { - let parent_hid = hir.parent_id(hir_id); - if parent_hid != hir_id { self.describe_enclosure(parent_hid) } else { None } - } + }) => Some(self.describe_closure(*kind)), _ => None, } } @@ -156,12 +132,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> { // FIXME(-Zlower-impl-trait-in-trait-to-assoc-ty): HIR is not present for RPITITs, // but I guess we could synthesize one here. We don't see any errors that rely on // that yet, though. - let enclosure = - if let Some(body_hir) = self.tcx.opt_local_def_id_to_hir_id(obligation.cause.body_id) { - self.describe_enclosure(body_hir).map(|s| s.to_owned()) - } else { - None - }; + let enclosure = self.describe_enclosure(obligation.cause.body_id).map(|t| t.to_owned()); flags.push((sym::ItemContext, enclosure)); match obligation.cause.code() { diff --git a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs index e1f3c6d4f975a..f63314081d615 100644 --- a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs +++ b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs @@ -47,6 +47,9 @@ use crate::traits::error_reporting::type_err_ctxt_ext::InferCtxtPrivExt; use crate::traits::query::evaluate_obligation::InferCtxtExt as _; use rustc_middle::ty::print::{with_forced_trimmed_paths, with_no_trimmed_paths}; +use itertools::EitherOrBoth; +use itertools::Itertools; + #[derive(Debug)] pub enum CoroutineInteriorOrUpvar { // span of interior type @@ -723,133 +726,276 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> { err: &mut Diagnostic, trait_pred: ty::PolyTraitPredicate<'tcx>, ) -> bool { - // It only make sense when suggesting dereferences for arguments - let ObligationCauseCode::FunctionArgumentObligation { arg_hir_id, call_hir_id, .. } = - obligation.cause.code() - else { - return false; - }; - let Some(typeck_results) = &self.typeck_results else { - return false; - }; - let hir::Node::Expr(expr) = self.tcx.hir_node(*arg_hir_id) else { - return false; - }; - let Some(arg_ty) = typeck_results.expr_ty_adjusted_opt(expr) else { - return false; - }; - - let span = obligation.cause.span; - let mut real_trait_pred = trait_pred; let mut code = obligation.cause.code(); - while let Some((parent_code, parent_trait_pred)) = code.parent() { - code = parent_code; - if let Some(parent_trait_pred) = parent_trait_pred { - real_trait_pred = parent_trait_pred; - } + if let ObligationCauseCode::FunctionArgumentObligation { arg_hir_id, call_hir_id, .. } = + code + && let Some(typeck_results) = &self.typeck_results + && let hir::Node::Expr(expr) = self.tcx.hir_node(*arg_hir_id) + && let Some(arg_ty) = typeck_results.expr_ty_adjusted_opt(expr) + { + // Suggest dereferencing the argument to a function/method call if possible - // We `instantiate_bound_regions_with_erased` here because `make_subregion` does not handle - // `ReBound`, and we don't particularly care about the regions. 
- let real_ty = self.tcx.instantiate_bound_regions_with_erased(real_trait_pred.self_ty()); - if !self.can_eq(obligation.param_env, real_ty, arg_ty) { - continue; - } + let mut real_trait_pred = trait_pred; + while let Some((parent_code, parent_trait_pred)) = code.parent() { + code = parent_code; + if let Some(parent_trait_pred) = parent_trait_pred { + real_trait_pred = parent_trait_pred; + } + + // We `instantiate_bound_regions_with_erased` here because `make_subregion` does not handle + // `ReBound`, and we don't particularly care about the regions. + let real_ty = + self.tcx.instantiate_bound_regions_with_erased(real_trait_pred.self_ty()); - if let ty::Ref(region, base_ty, mutbl) = *real_ty.kind() { - let autoderef = (self.autoderef_steps)(base_ty); - if let Some(steps) = - autoderef.into_iter().enumerate().find_map(|(steps, (ty, obligations))| { - // Re-add the `&` - let ty = Ty::new_ref(self.tcx, region, TypeAndMut { ty, mutbl }); + if self.can_eq(obligation.param_env, real_ty, arg_ty) + && let ty::Ref(region, base_ty, mutbl) = *real_ty.kind() + { + let autoderef = (self.autoderef_steps)(base_ty); + if let Some(steps) = + autoderef.into_iter().enumerate().find_map(|(steps, (ty, obligations))| { + // Re-add the `&` + let ty = Ty::new_ref(self.tcx, region, TypeAndMut { ty, mutbl }); + + // Remapping bound vars here + let real_trait_pred_and_ty = real_trait_pred + .map_bound(|inner_trait_pred| (inner_trait_pred, ty)); + let obligation = self.mk_trait_obligation_with_new_self_ty( + obligation.param_env, + real_trait_pred_and_ty, + ); + let may_hold = obligations + .iter() + .chain([&obligation]) + .all(|obligation| self.predicate_may_hold(obligation)) + .then_some(steps); + may_hold + }) + { + if steps > 0 { + // Don't care about `&mut` because `DerefMut` is used less + // often and user will not expect that an autoderef happens. + if let Some(hir::Node::Expr(hir::Expr { + kind: + hir::ExprKind::AddrOf( + hir::BorrowKind::Ref, + hir::Mutability::Not, + expr, + ), + .. + })) = self.tcx.opt_hir_node(*arg_hir_id) + { + let derefs = "*".repeat(steps); + err.span_suggestion_verbose( + expr.span.shrink_to_lo(), + "consider dereferencing here", + derefs, + Applicability::MachineApplicable, + ); + return true; + } + } + } else if real_trait_pred != trait_pred { + // This branch addresses #87437. + + let span = obligation.cause.span; // Remapping bound vars here - let real_trait_pred_and_ty = - real_trait_pred.map_bound(|inner_trait_pred| (inner_trait_pred, ty)); + let real_trait_pred_and_base_ty = real_trait_pred + .map_bound(|inner_trait_pred| (inner_trait_pred, base_ty)); let obligation = self.mk_trait_obligation_with_new_self_ty( obligation.param_env, - real_trait_pred_and_ty, + real_trait_pred_and_base_ty, ); - let may_hold = obligations - .iter() - .chain([&obligation]) - .all(|obligation| self.predicate_may_hold(obligation)) - .then_some(steps); - - may_hold - }) - { - if steps > 0 { - // Don't care about `&mut` because `DerefMut` is used less - // often and user will not expect autoderef happens. - if let Some(hir::Node::Expr(hir::Expr { - kind: - hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Not, expr), - .. 
- })) = self.tcx.opt_hir_node(*arg_hir_id) + let sized_obligation = Obligation::new( + self.tcx, + obligation.cause.clone(), + obligation.param_env, + ty::TraitRef::from_lang_item( + self.tcx, + hir::LangItem::Sized, + obligation.cause.span, + [base_ty], + ), + ); + if self.predicate_may_hold(&obligation) + && self.predicate_must_hold_modulo_regions(&sized_obligation) { - let derefs = "*".repeat(steps); - err.span_suggestion_verbose( - expr.span.shrink_to_lo(), - "consider dereferencing here", - derefs, - Applicability::MachineApplicable, + let call_node = self.tcx.hir_node(*call_hir_id); + let msg = "consider dereferencing here"; + let is_receiver = matches!( + call_node, + Node::Expr(hir::Expr { + kind: hir::ExprKind::MethodCall(_, receiver_expr, ..), + .. + }) + if receiver_expr.hir_id == *arg_hir_id ); + if is_receiver { + err.multipart_suggestion_verbose( + msg, + vec![ + (span.shrink_to_lo(), "(*".to_string()), + (span.shrink_to_hi(), ")".to_string()), + ], + Applicability::MachineApplicable, + ) + } else { + err.span_suggestion_verbose( + span.shrink_to_lo(), + msg, + '*', + Applicability::MachineApplicable, + ) + }; return true; } } - } else if real_trait_pred != trait_pred { - // This branch addresses #87437. - - // Remapping bound vars here - let real_trait_pred_and_base_ty = - real_trait_pred.map_bound(|inner_trait_pred| (inner_trait_pred, base_ty)); + } + } + } else if let ( + ObligationCauseCode::BinOp { lhs_hir_id, rhs_hir_id: Some(rhs_hir_id), .. }, + predicate, + ) = code.peel_derives_with_predicate() + && let Some(typeck_results) = &self.typeck_results + && let hir::Node::Expr(lhs) = self.tcx.hir_node(*lhs_hir_id) + && let hir::Node::Expr(rhs) = self.tcx.hir_node(*rhs_hir_id) + && let Some(rhs_ty) = typeck_results.expr_ty_opt(rhs) + { + // Suggest dereferencing the LHS, RHS, or both terms of a binop if possible + + let trait_pred = predicate.unwrap_or(trait_pred); + let lhs_ty = self.tcx.instantiate_bound_regions_with_erased(trait_pred.self_ty()); + let lhs_autoderef = (self.autoderef_steps)(lhs_ty); + let rhs_autoderef = (self.autoderef_steps)(rhs_ty); + let first_lhs = lhs_autoderef.first().unwrap().clone(); + let first_rhs = rhs_autoderef.first().unwrap().clone(); + let mut autoderefs = lhs_autoderef + .into_iter() + .enumerate() + .rev() + .zip_longest(rhs_autoderef.into_iter().enumerate().rev()) + .map(|t| match t { + EitherOrBoth::Both(a, b) => (a, b), + EitherOrBoth::Left(a) => (a, (0, first_rhs.clone())), + EitherOrBoth::Right(b) => ((0, first_lhs.clone()), b), + }) + .rev(); + if let Some((lsteps, rsteps)) = + autoderefs.find_map(|((lsteps, (l_ty, _)), (rsteps, (r_ty, _)))| { + // Create a new predicate with the dereferenced LHS and RHS + // We simultaneously dereference both sides rather than doing them + // one at a time to account for cases such as &Box == &&T + let trait_pred_and_ty = trait_pred.map_bound(|inner| { + ( + ty::TraitPredicate { + trait_ref: ty::TraitRef::new( + self.tcx, + inner.trait_ref.def_id, + self.tcx.mk_args( + &[&[l_ty.into(), r_ty.into()], &inner.trait_ref.args[2..]] + .concat(), + ), + ), + ..inner + }, + l_ty, + ) + }); let obligation = self.mk_trait_obligation_with_new_self_ty( obligation.param_env, - real_trait_pred_and_base_ty, + trait_pred_and_ty, ); - let sized_obligation = Obligation::new( - self.tcx, - obligation.cause.clone(), - obligation.param_env, - ty::TraitRef::from_lang_item( - self.tcx, - hir::LangItem::Sized, - obligation.cause.span, - [base_ty], - ), - ); - if self.predicate_may_hold(&obligation) - && 
self.predicate_must_hold_modulo_regions(&sized_obligation) + self.predicate_may_hold(&obligation).then_some(match (lsteps, rsteps) { + (_, 0) => (Some(lsteps), None), + (0, _) => (None, Some(rsteps)), + _ => (Some(lsteps), Some(rsteps)), + }) + }) + { + let make_sugg = |mut expr: &Expr<'_>, mut steps| { + let mut prefix_span = expr.span.shrink_to_lo(); + let mut msg = "consider dereferencing here"; + if let hir::ExprKind::AddrOf(_, _, inner) = expr.kind { + msg = "consider removing the borrow and dereferencing instead"; + if let hir::ExprKind::AddrOf(..) = inner.kind { + msg = "consider removing the borrows and dereferencing instead"; + } + } + while let hir::ExprKind::AddrOf(_, _, inner) = expr.kind + && steps > 0 { - let call_node = self.tcx.hir_node(*call_hir_id); - let msg = "consider dereferencing here"; - let is_receiver = matches!( - call_node, - Node::Expr(hir::Expr { - kind: hir::ExprKind::MethodCall(_, receiver_expr, ..), - .. - }) - if receiver_expr.hir_id == *arg_hir_id + prefix_span = prefix_span.with_hi(inner.span.lo()); + expr = inner; + steps -= 1; + } + // Empty suggestions with empty spans ICE with debug assertions + if steps == 0 { + return ( + msg.trim_end_matches(" and dereferencing instead"), + vec![(prefix_span, String::new())], ); - if is_receiver { - err.multipart_suggestion_verbose( - msg, - vec![ - (span.shrink_to_lo(), "(*".to_string()), - (span.shrink_to_hi(), ")".to_string()), - ], - Applicability::MachineApplicable, - ) - } else { - err.span_suggestion_verbose( - span.shrink_to_lo(), - msg, - '*', - Applicability::MachineApplicable, - ) + } + let derefs = "*".repeat(steps); + let needs_parens = steps > 0 + && match expr.kind { + hir::ExprKind::Cast(_, _) | hir::ExprKind::Binary(_, _, _) => true, + _ if is_range_literal(expr) => true, + _ => false, }; - return true; + let mut suggestion = if needs_parens { + vec![ + ( + expr.span.with_lo(prefix_span.hi()).shrink_to_lo(), + format!("{derefs}("), + ), + (expr.span.shrink_to_hi(), ")".to_string()), + ] + } else { + vec![( + expr.span.with_lo(prefix_span.hi()).shrink_to_lo(), + format!("{derefs}"), + )] + }; + // Empty suggestions with empty spans ICE with debug assertions + if !prefix_span.is_empty() { + suggestion.push((prefix_span, String::new())); } + (msg, suggestion) + }; + + if let Some(lsteps) = lsteps + && let Some(rsteps) = rsteps + && lsteps > 0 + && rsteps > 0 + { + let mut suggestion = make_sugg(lhs, lsteps).1; + suggestion.append(&mut make_sugg(rhs, rsteps).1); + err.multipart_suggestion_verbose( + "consider dereferencing both sides of the expression", + suggestion, + Applicability::MachineApplicable, + ); + return true; + } else if let Some(lsteps) = lsteps + && lsteps > 0 + { + let (msg, suggestion) = make_sugg(lhs, lsteps); + err.multipart_suggestion_verbose( + msg, + suggestion, + Applicability::MachineApplicable, + ); + return true; + } else if let Some(rsteps) = rsteps + && rsteps > 0 + { + let (msg, suggestion) = make_sugg(rhs, rsteps); + err.multipart_suggestion_verbose( + msg, + suggestion, + Applicability::MachineApplicable, + ); + return true; } } } @@ -2577,7 +2723,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> { let message = outer_coroutine .and_then(|coroutine_did| { Some(match self.tcx.coroutine_kind(coroutine_did).unwrap() { - CoroutineKind::Coroutine => format!("coroutine is not {trait_name}"), + CoroutineKind::Coroutine(_) => format!("coroutine is not {trait_name}"), CoroutineKind::Desugared( CoroutineDesugaring::Async, CoroutineSource::Fn, @@ -3169,7 +3315,7 @@ 
impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> { ObligationCauseCode::SizedCoroutineInterior(coroutine_def_id) => { let what = match self.tcx.coroutine_kind(coroutine_def_id) { None - | Some(hir::CoroutineKind::Coroutine) + | Some(hir::CoroutineKind::Coroutine(_)) | Some(hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _)) => { "yield" } @@ -3260,7 +3406,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> { } err.note(msg.trim_end_matches(", ").to_string()) } - ty::Coroutine(def_id, _, _) => { + ty::Coroutine(def_id, _) => { let sp = self.tcx.def_span(def_id); // Special-case this to say "async block" instead of `[static coroutine]`. @@ -3564,55 +3710,52 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> { trait_pred: ty::PolyTraitPredicate<'tcx>, span: Span, ) { - if let Some(body_id) = self.tcx.hir().maybe_body_owned_by(obligation.cause.body_id) { - let body = self.tcx.hir().body(body_id); - if let Some(hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _)) = - body.coroutine_kind - { - let future_trait = self.tcx.require_lang_item(LangItem::Future, None); + if let Some(hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _)) = + self.tcx.coroutine_kind(obligation.cause.body_id) + { + let future_trait = self.tcx.require_lang_item(LangItem::Future, None); - let self_ty = self.resolve_vars_if_possible(trait_pred.self_ty()); - let impls_future = self.type_implements_trait( - future_trait, - [self.tcx.instantiate_bound_regions_with_erased(self_ty)], - obligation.param_env, - ); - if !impls_future.must_apply_modulo_regions() { - return; - } + let self_ty = self.resolve_vars_if_possible(trait_pred.self_ty()); + let impls_future = self.type_implements_trait( + future_trait, + [self.tcx.instantiate_bound_regions_with_erased(self_ty)], + obligation.param_env, + ); + if !impls_future.must_apply_modulo_regions() { + return; + } - let item_def_id = self.tcx.associated_item_def_ids(future_trait)[0]; - // `::Output` - let projection_ty = trait_pred.map_bound(|trait_pred| { - Ty::new_projection( - self.tcx, - item_def_id, - // Future::Output has no args - [trait_pred.self_ty()], - ) - }); - let InferOk { value: projection_ty, .. } = - self.at(&obligation.cause, obligation.param_env).normalize(projection_ty); + let item_def_id = self.tcx.associated_item_def_ids(future_trait)[0]; + // `::Output` + let projection_ty = trait_pred.map_bound(|trait_pred| { + Ty::new_projection( + self.tcx, + item_def_id, + // Future::Output has no args + [trait_pred.self_ty()], + ) + }); + let InferOk { value: projection_ty, .. 
} = + self.at(&obligation.cause, obligation.param_env).normalize(projection_ty); - debug!( - normalized_projection_type = ?self.resolve_vars_if_possible(projection_ty) - ); - let try_obligation = self.mk_trait_obligation_with_new_self_ty( - obligation.param_env, - trait_pred.map_bound(|trait_pred| (trait_pred, projection_ty.skip_binder())), + debug!( + normalized_projection_type = ?self.resolve_vars_if_possible(projection_ty) + ); + let try_obligation = self.mk_trait_obligation_with_new_self_ty( + obligation.param_env, + trait_pred.map_bound(|trait_pred| (trait_pred, projection_ty.skip_binder())), + ); + debug!(try_trait_obligation = ?try_obligation); + if self.predicate_may_hold(&try_obligation) + && let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) + && snippet.ends_with('?') + { + err.span_suggestion_verbose( + span.with_hi(span.hi() - BytePos(1)).shrink_to_hi(), + "consider `await`ing on the `Future`", + ".await", + Applicability::MaybeIncorrect, ); - debug!(try_trait_obligation = ?try_obligation); - if self.predicate_may_hold(&try_obligation) - && let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) - && snippet.ends_with('?') - { - err.span_suggestion_verbose( - span.with_hi(span.hi() - BytePos(1)).shrink_to_hi(), - "consider `await`ing on the `Future`", - ".await", - Applicability::MaybeIncorrect, - ); - } } } } @@ -3624,7 +3767,9 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> { trait_ref: &ty::PolyTraitRef<'tcx>, ) { let rhs_span = match obligation.cause.code() { - ObligationCauseCode::BinOp { rhs_span: Some(span), is_lit, .. } if *is_lit => span, + ObligationCauseCode::BinOp { rhs_span: Some(span), rhs_is_lit, .. } if *rhs_is_lit => { + span + } _ => return, }; if let ty::Float(_) = trait_ref.skip_binder().self_ty().kind() @@ -3727,6 +3872,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> { ); } } + fn note_function_argument_obligation( &self, body_id: LocalDefId, @@ -4665,13 +4811,7 @@ impl<'v> Visitor<'v> for ReturnsVisitor<'v> { fn visit_body(&mut self, body: &'v hir::Body<'v>) { assert!(!self.in_block_tail); - if body.coroutine_kind().is_none() { - if let hir::ExprKind::Block(block, None) = body.value.kind { - if block.expr.is_some() { - self.in_block_tail = true; - } - } - } + self.in_block_tail = true; hir::intravisit::walk_body(self, body); } } diff --git a/compiler/rustc_trait_selection/src/traits/error_reporting/type_err_ctxt_ext.rs b/compiler/rustc_trait_selection/src/traits/error_reporting/type_err_ctxt_ext.rs index 4b84aae70fd10..d2598b0defe7f 100644 --- a/compiler/rustc_trait_selection/src/traits/error_reporting/type_err_ctxt_ext.rs +++ b/compiler/rustc_trait_selection/src/traits/error_reporting/type_err_ctxt_ext.rs @@ -1348,7 +1348,7 @@ pub(super) trait InferCtxtPrivExt<'tcx> { ignoring_lifetimes: bool, ) -> Option; - fn describe_coroutine(&self, body_id: hir::BodyId) -> Option<&'static str>; + fn describe_closure(&self, kind: hir::ClosureKind) -> &'static str; fn find_similar_impl_candidates( &self, @@ -1925,46 +1925,49 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> { } } - fn describe_coroutine(&self, body_id: hir::BodyId) -> Option<&'static str> { - self.tcx.hir().body(body_id).coroutine_kind.map(|coroutine_source| match coroutine_source { - hir::CoroutineKind::Coroutine => "a coroutine", - hir::CoroutineKind::Desugared( - hir::CoroutineDesugaring::Async, - hir::CoroutineSource::Block, - ) => "an async block", - hir::CoroutineKind::Desugared( - hir::CoroutineDesugaring::Async, - 
hir::CoroutineSource::Fn, - ) => "an async function", - hir::CoroutineKind::Desugared( - hir::CoroutineDesugaring::Async, - hir::CoroutineSource::Closure, - ) => "an async closure", - hir::CoroutineKind::Desugared( - hir::CoroutineDesugaring::AsyncGen, - hir::CoroutineSource::Block, - ) => "an async gen block", - hir::CoroutineKind::Desugared( - hir::CoroutineDesugaring::AsyncGen, - hir::CoroutineSource::Fn, - ) => "an async gen function", - hir::CoroutineKind::Desugared( - hir::CoroutineDesugaring::AsyncGen, - hir::CoroutineSource::Closure, - ) => "an async gen closure", - hir::CoroutineKind::Desugared( - hir::CoroutineDesugaring::Gen, - hir::CoroutineSource::Block, - ) => "a gen block", - hir::CoroutineKind::Desugared( - hir::CoroutineDesugaring::Gen, - hir::CoroutineSource::Fn, - ) => "a gen function", - hir::CoroutineKind::Desugared( - hir::CoroutineDesugaring::Gen, - hir::CoroutineSource::Closure, - ) => "a gen closure", - }) + fn describe_closure(&self, kind: hir::ClosureKind) -> &'static str { + match kind { + hir::ClosureKind::Closure => "a closure", + hir::ClosureKind::Coroutine(kind) => match kind { + hir::CoroutineKind::Coroutine(_) => "a coroutine", + hir::CoroutineKind::Desugared( + hir::CoroutineDesugaring::Async, + hir::CoroutineSource::Block, + ) => "an async block", + hir::CoroutineKind::Desugared( + hir::CoroutineDesugaring::Async, + hir::CoroutineSource::Fn, + ) => "an async function", + hir::CoroutineKind::Desugared( + hir::CoroutineDesugaring::Async, + hir::CoroutineSource::Closure, + ) => "an async closure", + hir::CoroutineKind::Desugared( + hir::CoroutineDesugaring::AsyncGen, + hir::CoroutineSource::Block, + ) => "an async gen block", + hir::CoroutineKind::Desugared( + hir::CoroutineDesugaring::AsyncGen, + hir::CoroutineSource::Fn, + ) => "an async gen function", + hir::CoroutineKind::Desugared( + hir::CoroutineDesugaring::AsyncGen, + hir::CoroutineSource::Closure, + ) => "an async gen closure", + hir::CoroutineKind::Desugared( + hir::CoroutineDesugaring::Gen, + hir::CoroutineSource::Block, + ) => "a gen block", + hir::CoroutineKind::Desugared( + hir::CoroutineDesugaring::Gen, + hir::CoroutineSource::Fn, + ) => "a gen function", + hir::CoroutineKind::Desugared( + hir::CoroutineDesugaring::Gen, + hir::CoroutineSource::Closure, + ) => "a gen closure", + }, + } } fn find_similar_impl_candidates( diff --git a/compiler/rustc_trait_selection/src/traits/project.rs b/compiler/rustc_trait_selection/src/traits/project.rs index d32b4adbefcdd..dd4e69efe379e 100644 --- a/compiler/rustc_trait_selection/src/traits/project.rs +++ b/compiler/rustc_trait_selection/src/traits/project.rs @@ -2083,7 +2083,7 @@ fn confirm_coroutine_candidate<'cx, 'tcx>( nested: Vec>, ) -> Progress<'tcx> { let self_ty = selcx.infcx.shallow_resolve(obligation.predicate.self_ty()); - let ty::Coroutine(_, args, _) = self_ty.kind() else { + let ty::Coroutine(_, args) = self_ty.kind() else { unreachable!( "expected coroutine self type for built-in coroutine candidate, found {self_ty}" ) @@ -2138,7 +2138,7 @@ fn confirm_future_candidate<'cx, 'tcx>( nested: Vec>, ) -> Progress<'tcx> { let self_ty = selcx.infcx.shallow_resolve(obligation.predicate.self_ty()); - let ty::Coroutine(_, args, _) = self_ty.kind() else { + let ty::Coroutine(_, args) = self_ty.kind() else { unreachable!( "expected coroutine self type for built-in async future candidate, found {self_ty}" ) @@ -2182,7 +2182,7 @@ fn confirm_iterator_candidate<'cx, 'tcx>( nested: Vec>, ) -> Progress<'tcx> { let self_ty = 
selcx.infcx.shallow_resolve(obligation.predicate.self_ty()); - let ty::Coroutine(_, args, _) = self_ty.kind() else { + let ty::Coroutine(_, args) = self_ty.kind() else { unreachable!("expected coroutine self type for built-in gen candidate, found {self_ty}") }; let gen_sig = args.as_coroutine().sig(); @@ -2223,8 +2223,7 @@ fn confirm_async_iterator_candidate<'cx, 'tcx>( obligation: &ProjectionTyObligation<'tcx>, nested: Vec>, ) -> Progress<'tcx> { - let ty::Coroutine(_, args, _) = - selcx.infcx.shallow_resolve(obligation.predicate.self_ty()).kind() + let ty::Coroutine(_, args) = selcx.infcx.shallow_resolve(obligation.predicate.self_ty()).kind() else { unreachable!() }; diff --git a/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs b/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs index f9c8f3d14c3c1..138bc6129f7c7 100644 --- a/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs +++ b/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs @@ -259,7 +259,7 @@ pub fn dtorck_constraint_for_ty_inner<'tcx>( })? } - ty::Coroutine(_, args, _movability) => { + ty::Coroutine(_, args) => { // rust-lang/rust#49918: types can be constructed, stored // in the interior, and sit idle when coroutine yields // (and is subsequently dropped). diff --git a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs index 3a37bc518ef5f..54b91ab1d4d81 100644 --- a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs +++ b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs @@ -263,7 +263,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { candidates: &mut SelectionCandidateSet<'tcx>, ) { let self_ty = obligation.self_ty().skip_binder(); - if let ty::Coroutine(did, args, _) = *self_ty.kind() { + if let ty::Coroutine(did, args) = *self_ty.kind() { // gen constructs get lowered to a special kind of coroutine that // should directly `impl AsyncIterator`. if self.tcx().coroutine_is_async_gen(did) { @@ -486,7 +486,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { | ty::RawPtr(_) | ty::Ref(_, _, _) | ty::Closure(_, _) - | ty::Coroutine(_, _, _) + | ty::Coroutine(_, _) | ty::CoroutineWitness(..) | ty::Never | ty::Tuple(_) @@ -529,7 +529,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let def_id = obligation.predicate.def_id(); if self.tcx().trait_is_auto(def_id) { - match self_ty.kind() { + match *self_ty.kind() { ty::Dynamic(..) => { // For object types, we don't know what the closed // over types are. This means we conservatively @@ -564,10 +564,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // The auto impl might apply; we don't know. candidates.ambiguous = true; } - ty::Coroutine(_, _, movability) + ty::Coroutine(coroutine_def_id, _) if self.tcx().lang_items().unpin_trait() == Some(def_id) => { - match movability { + match self.tcx().coroutine_movability(coroutine_def_id) { hir::Movability::Static => { // Immovable coroutines are never `Unpin`, so // suppress the normal auto-impl candidate for it. @@ -1023,7 +1023,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { | ty::FnPtr(_) | ty::Dynamic(_, _, _) | ty::Closure(_, _) - | ty::Coroutine(_, _, _) + | ty::Coroutine(_, _) | ty::CoroutineWitness(..) | ty::Never | ty::Alias(..) 
diff --git a/compiler/rustc_trait_selection/src/traits/select/confirmation.rs b/compiler/rustc_trait_selection/src/traits/select/confirmation.rs index f1da1c046d45a..e20bb06d7770a 100644 --- a/compiler/rustc_trait_selection/src/traits/select/confirmation.rs +++ b/compiler/rustc_trait_selection/src/traits/select/confirmation.rs @@ -730,7 +730,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // touch bound regions, they just capture the in-scope // type/region parameters. let self_ty = self.infcx.shallow_resolve(obligation.self_ty().skip_binder()); - let ty::Coroutine(coroutine_def_id, args, _) = *self_ty.kind() else { + let ty::Coroutine(coroutine_def_id, args) = *self_ty.kind() else { bug!("closure candidate for non-closure {:?}", obligation); }; @@ -768,7 +768,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // touch bound regions, they just capture the in-scope // type/region parameters. let self_ty = self.infcx.shallow_resolve(obligation.self_ty().skip_binder()); - let ty::Coroutine(coroutine_def_id, args, _) = *self_ty.kind() else { + let ty::Coroutine(coroutine_def_id, args) = *self_ty.kind() else { bug!("closure candidate for non-closure {:?}", obligation); }; @@ -797,7 +797,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // touch bound regions, they just capture the in-scope // type/region parameters. let self_ty = self.infcx.shallow_resolve(obligation.self_ty().skip_binder()); - let ty::Coroutine(coroutine_def_id, args, _) = *self_ty.kind() else { + let ty::Coroutine(coroutine_def_id, args) = *self_ty.kind() else { bug!("closure candidate for non-closure {:?}", obligation); }; @@ -826,7 +826,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // touch bound regions, they just capture the in-scope // type/region parameters. let self_ty = self.infcx.shallow_resolve(obligation.self_ty().skip_binder()); - let ty::Coroutine(coroutine_def_id, args, _) = *self_ty.kind() else { + let ty::Coroutine(coroutine_def_id, args) = *self_ty.kind() else { bug!("closure candidate for non-closure {:?}", obligation); }; @@ -1298,7 +1298,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { ty::Closure(_, args) => { stack.push(args.as_closure().tupled_upvars_ty()); } - ty::Coroutine(_, args, _) => { + ty::Coroutine(_, args) => { let coroutine = args.as_coroutine(); stack.extend([coroutine.tupled_upvars_ty(), coroutine.witness()]); } diff --git a/compiler/rustc_trait_selection/src/traits/select/mod.rs b/compiler/rustc_trait_selection/src/traits/select/mod.rs index 336c0c5299fc9..c45925295ee72 100644 --- a/compiler/rustc_trait_selection/src/traits/select/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/select/mod.rs @@ -2176,7 +2176,6 @@ impl<'tcx> SelectionContext<'_, 'tcx> { ty::Dynamic(..) | ty::Str | ty::Slice(..) - | ty::Coroutine(_, _, hir::Movability::Static) | ty::Foreign(..) | ty::Ref(_, _, hir::Mutability::Mut) => None, @@ -2185,26 +2184,31 @@ impl<'tcx> SelectionContext<'_, 'tcx> { Where(obligation.predicate.rebind(tys.iter().collect())) } - ty::Coroutine(_, args, hir::Movability::Movable) => { - if self.tcx().features().coroutine_clone { - let resolved_upvars = - self.infcx.shallow_resolve(args.as_coroutine().tupled_upvars_ty()); - let resolved_witness = - self.infcx.shallow_resolve(args.as_coroutine().witness()); - if resolved_upvars.is_ty_var() || resolved_witness.is_ty_var() { - // Not yet resolved. 
- Ambiguous - } else { - let all = args - .as_coroutine() - .upvar_tys() - .iter() - .chain([args.as_coroutine().witness()]) - .collect::>(); - Where(obligation.predicate.rebind(all)) + ty::Coroutine(coroutine_def_id, args) => { + match self.tcx().coroutine_movability(coroutine_def_id) { + hir::Movability::Static => None, + hir::Movability::Movable => { + if self.tcx().features().coroutine_clone { + let resolved_upvars = + self.infcx.shallow_resolve(args.as_coroutine().tupled_upvars_ty()); + let resolved_witness = + self.infcx.shallow_resolve(args.as_coroutine().witness()); + if resolved_upvars.is_ty_var() || resolved_witness.is_ty_var() { + // Not yet resolved. + Ambiguous + } else { + let all = args + .as_coroutine() + .upvar_tys() + .iter() + .chain([args.as_coroutine().witness()]) + .collect::>(); + Where(obligation.predicate.rebind(all)) + } + } else { + None + } } - } else { - None } } @@ -2307,7 +2311,7 @@ impl<'tcx> SelectionContext<'_, 'tcx> { t.rebind(vec![ty]) } - ty::Coroutine(_, args, _) => { + ty::Coroutine(_, args) => { let ty = self.infcx.shallow_resolve(args.as_coroutine().tupled_upvars_ty()); let witness = args.as_coroutine().witness(); t.rebind([ty].into_iter().chain(iter::once(witness)).collect()) diff --git a/compiler/rustc_ty_utils/src/abi.rs b/compiler/rustc_ty_utils/src/abi.rs index 86501b5a72d15..2772831e731ec 100644 --- a/compiler/rustc_ty_utils/src/abi.rs +++ b/compiler/rustc_ty_utils/src/abi.rs @@ -97,7 +97,7 @@ fn fn_sig_for_fn_abi<'tcx>( bound_vars, ) } - ty::Coroutine(did, args, _) => { + ty::Coroutine(did, args) => { let coroutine_kind = tcx.coroutine_kind(did).unwrap(); let sig = args.as_coroutine().sig(); @@ -121,7 +121,7 @@ fn fn_sig_for_fn_abi<'tcx>( } hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _) | hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _) - | hir::CoroutineKind::Coroutine => Ty::new_adt(tcx, pin_adt_ref, pin_args), + | hir::CoroutineKind::Coroutine(_) => Ty::new_adt(tcx, pin_adt_ref, pin_args), }; // The `FnSig` and the `ret_ty` here is for a coroutines main @@ -192,7 +192,7 @@ fn fn_sig_for_fn_abi<'tcx>( (Some(context_mut_ref), ret_ty) } - hir::CoroutineKind::Coroutine => { + hir::CoroutineKind::Coroutine(_) => { // The signature should be `Coroutine::resume(_, Resume) -> CoroutineState` let state_did = tcx.require_lang_item(LangItem::CoroutineState, None); let state_adt_ref = tcx.adt_def(state_did); diff --git a/compiler/rustc_ty_utils/src/consts.rs b/compiler/rustc_ty_utils/src/consts.rs index c9f99012b290c..2a2e53a81ed81 100644 --- a/compiler/rustc_ty_utils/src/consts.rs +++ b/compiler/rustc_ty_utils/src/consts.rs @@ -379,7 +379,7 @@ impl<'a, 'tcx> visit::Visitor<'a, 'tcx> for IsThirPolymorphic<'a, 'tcx> { } #[instrument(skip(self), level = "debug")] - fn visit_expr(&mut self, expr: &thir::Expr<'tcx>) { + fn visit_expr(&mut self, expr: &'a thir::Expr<'tcx>) { self.is_poly |= self.expr_is_poly(expr); if !self.is_poly { visit::walk_expr(self, expr) @@ -387,7 +387,7 @@ impl<'a, 'tcx> visit::Visitor<'a, 'tcx> for IsThirPolymorphic<'a, 'tcx> { } #[instrument(skip(self), level = "debug")] - fn visit_pat(&mut self, pat: &thir::Pat<'tcx>) { + fn visit_pat(&mut self, pat: &'a thir::Pat<'tcx>) { self.is_poly |= self.pat_is_poly(pat); if !self.is_poly { visit::walk_pat(self, pat); diff --git a/compiler/rustc_ty_utils/src/instance.rs b/compiler/rustc_ty_utils/src/instance.rs index 42db43caf9f6a..81d5304b81265 100644 --- a/compiler/rustc_ty_utils/src/instance.rs +++ b/compiler/rustc_ty_utils/src/instance.rs @@ 
-246,7 +246,7 @@ fn resolve_associated_item<'tcx>( }) } } else if Some(trait_ref.def_id) == lang_items.future_trait() { - let ty::Coroutine(coroutine_def_id, args, _) = *rcvr_args.type_at(0).kind() else { + let ty::Coroutine(coroutine_def_id, args) = *rcvr_args.type_at(0).kind() else { bug!() }; if Some(trait_item_id) == tcx.lang_items().future_poll_fn() { @@ -259,7 +259,7 @@ fn resolve_associated_item<'tcx>( Some(Instance::new(trait_item_id, rcvr_args)) } } else if Some(trait_ref.def_id) == lang_items.iterator_trait() { - let ty::Coroutine(coroutine_def_id, args, _) = *rcvr_args.type_at(0).kind() else { + let ty::Coroutine(coroutine_def_id, args) = *rcvr_args.type_at(0).kind() else { bug!() }; if Some(trait_item_id) == tcx.lang_items().next_fn() { @@ -272,7 +272,7 @@ fn resolve_associated_item<'tcx>( Some(Instance::new(trait_item_id, rcvr_args)) } } else if Some(trait_ref.def_id) == lang_items.async_iterator_trait() { - let ty::Coroutine(coroutine_def_id, args, _) = *rcvr_args.type_at(0).kind() else { + let ty::Coroutine(coroutine_def_id, args) = *rcvr_args.type_at(0).kind() else { bug!() }; @@ -287,7 +287,7 @@ fn resolve_associated_item<'tcx>( // `AsyncIterator::poll_next` is generated by the compiler. Some(Instance { def: ty::InstanceDef::Item(coroutine_def_id), args }) } else if Some(trait_ref.def_id) == lang_items.coroutine_trait() { - let ty::Coroutine(coroutine_def_id, args, _) = *rcvr_args.type_at(0).kind() else { + let ty::Coroutine(coroutine_def_id, args) = *rcvr_args.type_at(0).kind() else { bug!() }; if cfg!(debug_assertions) && tcx.item_name(trait_item_id) != sym::resume { diff --git a/compiler/rustc_ty_utils/src/layout.rs b/compiler/rustc_ty_utils/src/layout.rs index d39377a1acb8d..db89fba2a893e 100644 --- a/compiler/rustc_ty_utils/src/layout.rs +++ b/compiler/rustc_ty_utils/src/layout.rs @@ -316,7 +316,7 @@ fn layout_of_uncached<'tcx>( tcx.mk_layout(unit) } - ty::Coroutine(def_id, args, _) => coroutine_layout(cx, ty, def_id, args)?, + ty::Coroutine(def_id, args) => coroutine_layout(cx, ty, def_id, args)?, ty::Closure(_, args) => { let tys = args.as_closure().upvar_tys(); @@ -961,7 +961,7 @@ fn record_layout_for_printing<'tcx>(cx: &LayoutCx<'tcx, TyCtxt<'tcx>>, layout: T record(adt_kind.into(), adt_packed, opt_discr_size, variant_infos); } - ty::Coroutine(def_id, args, _) => { + ty::Coroutine(def_id, args) => { debug!("print-type-size t: `{:?}` record coroutine", layout.ty); // Coroutines always have a begin/poisoned/end state with additional suspend points let (variant_infos, opt_discr_size) = diff --git a/compiler/rustc_ty_utils/src/needs_drop.rs b/compiler/rustc_ty_utils/src/needs_drop.rs index 8d118e6dfeff8..08e5476ae43b7 100644 --- a/compiler/rustc_ty_utils/src/needs_drop.rs +++ b/compiler/rustc_ty_utils/src/needs_drop.rs @@ -145,7 +145,7 @@ where // for the coroutine witness and check whether any of the contained types // need to be dropped, and only require the captured types to be live // if they do. 
- ty::Coroutine(_, args, _) => { + ty::Coroutine(_, args) => { if self.reveal_coroutine_witnesses { queue_type(self, args.as_coroutine().witness()); } else { diff --git a/compiler/rustc_type_ir/Cargo.toml b/compiler/rustc_type_ir/Cargo.toml index 3a08d89cc448f..38f0eb8218015 100644 --- a/compiler/rustc_type_ir/Cargo.toml +++ b/compiler/rustc_type_ir/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" [dependencies] # tidy-alphabetical-start -bitflags = "1.2.1" +bitflags = "2.4.1" derivative = "2.2.0" rustc_data_structures = { path = "../rustc_data_structures", optional = true } rustc_index = { path = "../rustc_index", default-features = false } diff --git a/compiler/rustc_type_ir/src/flags.rs b/compiler/rustc_type_ir/src/flags.rs index af741a0a3a0bc..bc57d6daf4779 100644 --- a/compiler/rustc_type_ir/src/flags.rs +++ b/compiler/rustc_type_ir/src/flags.rs @@ -3,6 +3,7 @@ bitflags! { /// through the type during type construction, so that we can quickly check /// whether the type has various kinds of types in it without recursing /// over the type itself. + #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub struct TypeFlags: u32 { // Does this have parameters? Used to determine whether substitution is // required. @@ -13,9 +14,9 @@ bitflags! { /// Does this have `ConstKind::Param`? const HAS_CT_PARAM = 1 << 2; - const HAS_PARAM = TypeFlags::HAS_TY_PARAM.bits - | TypeFlags::HAS_RE_PARAM.bits - | TypeFlags::HAS_CT_PARAM.bits; + const HAS_PARAM = TypeFlags::HAS_TY_PARAM.bits() + | TypeFlags::HAS_RE_PARAM.bits() + | TypeFlags::HAS_CT_PARAM.bits(); /// Does this have `Infer`? const HAS_TY_INFER = 1 << 3; @@ -26,9 +27,9 @@ bitflags! { /// Does this have inference variables? Used to determine whether /// inference is required. - const HAS_INFER = TypeFlags::HAS_TY_INFER.bits - | TypeFlags::HAS_RE_INFER.bits - | TypeFlags::HAS_CT_INFER.bits; + const HAS_INFER = TypeFlags::HAS_TY_INFER.bits() + | TypeFlags::HAS_RE_INFER.bits() + | TypeFlags::HAS_CT_INFER.bits(); /// Does this have `Placeholder`? const HAS_TY_PLACEHOLDER = 1 << 6; @@ -38,9 +39,9 @@ bitflags! { const HAS_CT_PLACEHOLDER = 1 << 8; /// Does this have placeholders? - const HAS_PLACEHOLDER = TypeFlags::HAS_TY_PLACEHOLDER.bits - | TypeFlags::HAS_RE_PLACEHOLDER.bits - | TypeFlags::HAS_CT_PLACEHOLDER.bits; + const HAS_PLACEHOLDER = TypeFlags::HAS_TY_PLACEHOLDER.bits() + | TypeFlags::HAS_RE_PLACEHOLDER.bits() + | TypeFlags::HAS_CT_PLACEHOLDER.bits(); /// `true` if there are "names" of regions and so forth /// that are local to a particular fn/inferctxt @@ -48,12 +49,12 @@ bitflags! { /// `true` if there are "names" of types and regions and so forth /// that are local to a particular fn - const HAS_FREE_LOCAL_NAMES = TypeFlags::HAS_TY_PARAM.bits - | TypeFlags::HAS_CT_PARAM.bits - | TypeFlags::HAS_TY_INFER.bits - | TypeFlags::HAS_CT_INFER.bits - | TypeFlags::HAS_TY_PLACEHOLDER.bits - | TypeFlags::HAS_CT_PLACEHOLDER.bits + const HAS_FREE_LOCAL_NAMES = TypeFlags::HAS_TY_PARAM.bits() + | TypeFlags::HAS_CT_PARAM.bits() + | TypeFlags::HAS_TY_INFER.bits() + | TypeFlags::HAS_CT_INFER.bits() + | TypeFlags::HAS_TY_PLACEHOLDER.bits() + | TypeFlags::HAS_CT_PLACEHOLDER.bits() // We consider 'freshened' types and constants // to depend on a particular fn. // The freshening process throws away information, @@ -61,10 +62,10 @@ bitflags! { // cache. Note that there is no 'fresh lifetime' flag - // freshening replaces all lifetimes with `ReErased`, // which is different from how types/const are freshened. 
- | TypeFlags::HAS_TY_FRESH.bits - | TypeFlags::HAS_CT_FRESH.bits - | TypeFlags::HAS_FREE_LOCAL_REGIONS.bits - | TypeFlags::HAS_RE_ERASED.bits; + | TypeFlags::HAS_TY_FRESH.bits() + | TypeFlags::HAS_CT_FRESH.bits() + | TypeFlags::HAS_FREE_LOCAL_REGIONS.bits() + | TypeFlags::HAS_RE_ERASED.bits(); /// Does this have `Projection`? const HAS_TY_PROJECTION = 1 << 10; @@ -76,10 +77,10 @@ bitflags! { const HAS_CT_PROJECTION = 1 << 13; /// Could this type be normalized further? - const HAS_PROJECTION = TypeFlags::HAS_TY_PROJECTION.bits - | TypeFlags::HAS_TY_OPAQUE.bits - | TypeFlags::HAS_TY_INHERENT.bits - | TypeFlags::HAS_CT_PROJECTION.bits; + const HAS_PROJECTION = TypeFlags::HAS_TY_PROJECTION.bits() + | TypeFlags::HAS_TY_OPAQUE.bits() + | TypeFlags::HAS_TY_INHERENT.bits() + | TypeFlags::HAS_CT_PROJECTION.bits(); /// Is an error type/const reachable? const HAS_ERROR = 1 << 14; @@ -96,9 +97,9 @@ bitflags! { const HAS_CT_BOUND = 1 << 18; /// Does this have any bound variables? /// Used to check if a global bound is safe to evaluate. - const HAS_BOUND_VARS = TypeFlags::HAS_RE_BOUND.bits - | TypeFlags::HAS_TY_BOUND.bits - | TypeFlags::HAS_CT_BOUND.bits; + const HAS_BOUND_VARS = TypeFlags::HAS_RE_BOUND.bits() + | TypeFlags::HAS_TY_BOUND.bits() + | TypeFlags::HAS_CT_BOUND.bits(); /// Does this have any `ReErased` regions? const HAS_RE_ERASED = 1 << 19; diff --git a/compiler/rustc_type_ir/src/ty_kind.rs b/compiler/rustc_type_ir/src/ty_kind.rs index 70adfbee2edc6..859000fb6cb18 100644 --- a/compiler/rustc_type_ir/src/ty_kind.rs +++ b/compiler/rustc_type_ir/src/ty_kind.rs @@ -207,7 +207,7 @@ pub enum TyKind { /// /// For more info about coroutine args, visit the documentation for /// `CoroutineArgs`. - Coroutine(I::DefId, I::GenericArgs, Movability), + Coroutine(I::DefId, I::GenericArgs), /// A type representing the types stored inside a coroutine. /// This should only appear as part of the `CoroutineArgs`. @@ -317,7 +317,7 @@ const fn tykind_discriminant(value: &TyKind) -> usize { FnPtr(_) => 13, Dynamic(..) 
=> 14, Closure(_, _) => 15, - Coroutine(_, _, _) => 16, + Coroutine(_, _) => 16, CoroutineWitness(_, _) => 17, Never => 18, Tuple(_) => 19, @@ -356,9 +356,7 @@ impl PartialEq for TyKind { a_p == b_p && a_r == b_r && a_repr == b_repr } (Closure(a_d, a_s), Closure(b_d, b_s)) => a_d == b_d && a_s == b_s, - (Coroutine(a_d, a_s, a_m), Coroutine(b_d, b_s, b_m)) => { - a_d == b_d && a_s == b_s && a_m == b_m - } + (Coroutine(a_d, a_s), Coroutine(b_d, b_s)) => a_d == b_d && a_s == b_s, (CoroutineWitness(a_d, a_s), CoroutineWitness(b_d, b_s)) => a_d == b_d && a_s == b_s, (Tuple(a_t), Tuple(b_t)) => a_t == b_t, (Alias(a_i, a_p), Alias(b_i, b_p)) => a_i == b_i && a_p == b_p, @@ -432,9 +430,7 @@ impl DebugWithInfcx for TyKind { } }, Closure(d, s) => f.debug_tuple("Closure").field(d).field(&this.wrap(s)).finish(), - Coroutine(d, s, m) => { - f.debug_tuple("Coroutine").field(d).field(&this.wrap(s)).field(m).finish() - } + Coroutine(d, s) => f.debug_tuple("Coroutine").field(d).field(&this.wrap(s)).finish(), CoroutineWitness(d, s) => { f.debug_tuple("CoroutineWitness").field(d).field(&this.wrap(s)).finish() } diff --git a/compiler/stable_mir/src/mir/body.rs b/compiler/stable_mir/src/mir/body.rs index 89d75569ce3db..72227a04bf189 100644 --- a/compiler/stable_mir/src/mir/body.rs +++ b/compiler/stable_mir/src/mir/body.rs @@ -285,7 +285,7 @@ impl AssertMessage { AssertMessage::RemainderByZero(_) => { Ok("attempt to calculate the remainder with a divisor of zero") } - AssertMessage::ResumedAfterReturn(CoroutineKind::Coroutine) => { + AssertMessage::ResumedAfterReturn(CoroutineKind::Coroutine(_)) => { Ok("coroutine resumed after completion") } AssertMessage::ResumedAfterReturn(CoroutineKind::Desugared( @@ -300,7 +300,7 @@ impl AssertMessage { CoroutineDesugaring::AsyncGen, _, )) => Ok("`gen fn` should just keep returning `AssertMessage::None` after completion"), - AssertMessage::ResumedAfterPanic(CoroutineKind::Coroutine) => { + AssertMessage::ResumedAfterPanic(CoroutineKind::Coroutine(_)) => { Ok("coroutine resumed after panicking") } AssertMessage::ResumedAfterPanic(CoroutineKind::Desugared( @@ -399,7 +399,7 @@ pub enum UnOp { #[derive(Clone, Debug, Eq, PartialEq)] pub enum CoroutineKind { Desugared(CoroutineDesugaring, CoroutineSource), - Coroutine, + Coroutine(Movability), } #[derive(Copy, Clone, Debug, Eq, PartialEq)] @@ -662,6 +662,7 @@ pub enum AggregateKind { Tuple, Adt(AdtDef, VariantIdx, GenericArgs, Option, Option), Closure(ClosureDef, GenericArgs), + // FIXME(stable_mir): Movability here is redundant Coroutine(CoroutineDef, GenericArgs, Movability), } diff --git a/compiler/stable_mir/src/ty.rs b/compiler/stable_mir/src/ty.rs index 1d4d7b6d3520f..9e6ecbe8315b5 100644 --- a/compiler/stable_mir/src/ty.rs +++ b/compiler/stable_mir/src/ty.rs @@ -460,6 +460,7 @@ pub enum RigidTy { FnDef(FnDef, GenericArgs), FnPtr(PolyFnSig), Closure(ClosureDef, GenericArgs), + // FIXME(stable_mir): Movability here is redundant Coroutine(CoroutineDef, GenericArgs, Movability), Dynamic(Vec>, Region, DynKind), Never, diff --git a/config.example.toml b/config.example.toml index 4cf7c1e81990c..f1ea6bac3ca16 100644 --- a/config.example.toml +++ b/config.example.toml @@ -30,7 +30,7 @@ # # If `change-id` does not match the version that is currently running, # `x.py` will inform you about the changes made on bootstrap. 
-# change-id = +#change-id = # ============================================================================= # Tweaking how LLVM is compiled @@ -661,7 +661,7 @@ # Indicates whether some LLVM tools, like llvm-objdump, will be made available in the # sysroot. -#llvm-tools = false +#llvm-tools = true # Whether to deny warnings in crates #deny-warnings = true diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs index 99ec68f5aa53d..74fa30456eb95 100644 --- a/library/alloc/src/raw_vec.rs +++ b/library/alloc/src/raw_vec.rs @@ -284,7 +284,7 @@ impl RawVec { /// /// # Panics /// - /// Panics if the new capacity exceeds `isize::MAX` bytes. + /// Panics if the new capacity exceeds `isize::MAX` _bytes_. /// /// # Aborts /// @@ -342,7 +342,7 @@ impl RawVec { /// /// # Panics /// - /// Panics if the new capacity exceeds `isize::MAX` bytes. + /// Panics if the new capacity exceeds `isize::MAX` _bytes_. /// /// # Aborts /// diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index 59f3a50ddb722..263b1449de156 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -2778,7 +2778,7 @@ impl Weak { } } -pub(crate) fn is_dangling(ptr: *mut T) -> bool { +pub(crate) fn is_dangling(ptr: *const T) -> bool { (ptr.cast::<()>()).addr() == usize::MAX } @@ -3003,7 +3003,7 @@ impl Weak { pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self { // See Weak::as_ptr for context on how the input pointer is derived. - let ptr = if is_dangling(ptr as *mut T) { + let ptr = if is_dangling(ptr) { // This is a dangling Weak. ptr as *mut RcBox } else { diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index 85df491636ab7..5273b3cb2dafa 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -2722,7 +2722,7 @@ impl Weak { pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self { // See Weak::as_ptr for context on how the input pointer is derived. - let ptr = if is_dangling(ptr as *mut T) { + let ptr = if is_dangling(ptr) { // This is a dangling Weak. ptr as *mut ArcInner } else { @@ -2917,20 +2917,17 @@ impl Clone for Weak { /// ``` #[inline] fn clone(&self) -> Weak { - let inner = if let Some(inner) = self.inner() { - inner - } else { - return Weak { ptr: self.ptr, alloc: self.alloc.clone() }; - }; - // See comments in Arc::clone() for why this is relaxed. This can use a - // fetch_add (ignoring the lock) because the weak count is only locked - // where are *no other* weak pointers in existence. (So we can't be - // running this code in that case). - let old_size = inner.weak.fetch_add(1, Relaxed); - - // See comments in Arc::clone() for why we do this (for mem::forget). - if old_size > MAX_REFCOUNT { - abort(); + if let Some(inner) = self.inner() { + // See comments in Arc::clone() for why this is relaxed. This can use a + // fetch_add (ignoring the lock) because the weak count is only locked + // where are *no other* weak pointers in existence. (So we can't be + // running this code in that case). + let old_size = inner.weak.fetch_add(1, Relaxed); + + // See comments in Arc::clone() for why we do this (for mem::forget). + if old_size > MAX_REFCOUNT { + abort(); + } } Weak { ptr: self.ptr, alloc: self.alloc.clone() } diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs index fca85c6123b3f..e8a096cac869e 100644 --- a/library/alloc/src/vec/mod.rs +++ b/library/alloc/src/vec/mod.rs @@ -445,7 +445,7 @@ impl Vec { /// /// # Panics /// - /// Panics if the new capacity exceeds `isize::MAX` bytes. 
+ /// Panics if the new capacity exceeds `isize::MAX` _bytes_. /// /// # Examples /// @@ -633,7 +633,7 @@ impl Vec { /// /// # Panics /// - /// Panics if the new capacity exceeds `isize::MAX` bytes. + /// Panics if the new capacity exceeds `isize::MAX` _bytes_. /// /// # Examples /// @@ -896,7 +896,7 @@ impl Vec { /// /// # Panics /// - /// Panics if the new capacity exceeds `isize::MAX` bytes. + /// Panics if the new capacity exceeds `isize::MAX` _bytes_. /// /// # Examples /// @@ -926,7 +926,7 @@ impl Vec { /// /// # Panics /// - /// Panics if the new capacity exceeds `isize::MAX` bytes. + /// Panics if the new capacity exceeds `isize::MAX` _bytes_. /// /// # Examples /// @@ -1900,7 +1900,7 @@ impl Vec { /// /// # Panics /// - /// Panics if the new capacity exceeds `isize::MAX` bytes. + /// Panics if the new capacity exceeds `isize::MAX` _bytes_. /// /// # Examples /// @@ -2003,7 +2003,7 @@ impl Vec { /// /// # Panics /// - /// Panics if the new capacity exceeds `isize::MAX` bytes. + /// Panics if the new capacity exceeds `isize::MAX` _bytes_. /// /// # Examples /// diff --git a/library/core/src/hint.rs b/library/core/src/hint.rs index ff177c70d39c9..4a73f160b6568 100644 --- a/library/core/src/hint.rs +++ b/library/core/src/hint.rs @@ -106,6 +106,54 @@ pub const unsafe fn unreachable_unchecked() -> ! { } } +/// Makes a *soundness* promise to the compiler that `cond` holds. +/// +/// This may allow the optimizer to simplify things, +/// but it might also make the generated code slower. +/// Either way, calling it will most likely make compilation take longer. +/// +/// This is a situational tool for micro-optimization, and is allowed to do nothing. +/// Any use should come with a repeatable benchmark to show the value +/// and allow removing it later should the optimizer get smarter and no longer need it. +/// +/// The more complicated the condition the less likely this is to be fruitful. +/// For example, `assert_unchecked(foo.is_sorted())` is a complex enough value +/// that the compiler is unlikely to be able to take advantage of it. +/// +/// There's also no need to `assert_unchecked` basic properties of things. For +/// example, the compiler already knows the range of `count_ones`, so there's no +/// benefit to `let n = u32::count_ones(x); assert_unchecked(n <= u32::BITS);`. +/// +/// If ever you're tempted to write `assert_unchecked(false)`, then you're +/// actually looking for [`unreachable_unchecked()`]. +/// +/// You may know this from other places +/// as [`llvm.assume`](https://llvm.org/docs/LangRef.html#llvm-assume-intrinsic) +/// or [`__builtin_assume`](https://clang.llvm.org/docs/LanguageExtensions.html#builtin-assume). +/// +/// This promotes a correctness requirement to a soundness requirement. +/// Don't do that without very good reason. +/// +/// # Safety +/// +/// `cond` must be `true`. It's immediate UB to call this with `false`. +/// +#[inline(always)] +#[doc(alias = "assume")] +#[track_caller] +#[unstable(feature = "hint_assert_unchecked", issue = "119131")] +#[rustc_const_unstable(feature = "const_hint_assert_unchecked", issue = "119131")] +pub const unsafe fn assert_unchecked(cond: bool) { + // SAFETY: The caller promised `cond` is true. + unsafe { + intrinsics::assert_unsafe_precondition!( + "hint::assert_unchecked must never be called when the condition is false", + (cond: bool) => cond, + ); + crate::intrinsics::assume(cond); + } +} + /// Emits a machine instruction to signal the processor that it is running in /// a busy-wait spin-loop ("spin lock"). 
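The new `hint::assert_unchecked` added above is the safe-looking front end over `intrinsics::assume`. A small sketch of the intended use on a nightly toolchain with the `hint_assert_unchecked` feature from this change enabled; the function and scenario are illustrative, not taken from the patch:

```rust
#![feature(hint_assert_unchecked)]

use std::hint;

/// Returns the element at `idx % slice.len()`.
fn wrapped(slice: &[u32], idx: usize) -> u32 {
    // This line panics (division by zero) if the slice is empty, so past this
    // point the slice is known to be non-empty.
    let i = idx % slice.len();
    // SAFETY: `i` was just computed as `idx % len`, so `i < len` always holds.
    // The promise may let the optimizer drop the bounds check on `slice[i]`.
    unsafe { hint::assert_unchecked(i < slice.len()) };
    slice[i]
}

fn main() {
    assert_eq!(wrapped(&[10, 20, 30], 7), 20);
}
```

As the doc comment above stresses, such a promise should be backed by a benchmark showing it actually helps; otherwise the plain indexing is preferable.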
/// diff --git a/library/core/src/intrinsics.rs b/library/core/src/intrinsics.rs index 5107ba1a9e1be..031c8d9984cf3 100644 --- a/library/core/src/intrinsics.rs +++ b/library/core/src/intrinsics.rs @@ -2534,7 +2534,7 @@ extern "rust-intrinsic" { /// the occasional mistake, and this check should help them figure things out. #[allow_internal_unstable(const_eval_select)] // permit this to be called in stably-const fn macro_rules! assert_unsafe_precondition { - ($name:expr, $([$($tt:tt)*])?($($i:ident:$ty:ty),*$(,)?) => $e:expr) => { + ($name:expr, $([$($tt:tt)*])?($($i:ident:$ty:ty),*$(,)?) => $e:expr $(,)?) => { if cfg!(debug_assertions) { // allow non_snake_case to allow capturing const generics #[allow(non_snake_case)] diff --git a/library/core/src/intrinsics/mir.rs b/library/core/src/intrinsics/mir.rs index 34a61e76fcf66..c6401ec1e3333 100644 --- a/library/core/src/intrinsics/mir.rs +++ b/library/core/src/intrinsics/mir.rs @@ -104,21 +104,22 @@ //! } //! //! #[custom_mir(dialect = "runtime", phase = "optimized")] +#![cfg_attr(bootstrap, doc = "#[cfg(any())]")] // disable the following function in doctests when `bootstrap` is set //! fn push_and_pop(v: &mut Vec, value: T) { //! mir!( //! let _unused; //! let popped; //! //! { -//! Call(_unused = Vec::push(v, value), pop, UnwindContinue()) +//! Call(_unused = Vec::push(v, value), ReturnTo(pop), UnwindContinue()) //! } //! //! pop = { -//! Call(popped = Vec::pop(v), drop, UnwindContinue()) +//! Call(popped = Vec::pop(v), ReturnTo(drop), UnwindContinue()) //! } //! //! drop = { -//! Drop(popped, ret, UnwindContinue()) +//! Drop(popped, ReturnTo(ret), UnwindContinue()) //! } //! //! ret = { @@ -242,9 +243,8 @@ //! - `match some_int_operand` becomes a `SwitchInt`. Each arm should be `literal => basic_block` //! - The exception is the last arm, which must be `_ => basic_block` and corresponds to the //! otherwise branch. -//! - [`Call`] has an associated function as well. The third argument of this function is a normal -//! function call expression, for example `my_other_function(a, 5)`. -//! +//! - [`Call`] has an associated function as well, with special syntax: +//! `Call(ret_val = function(arg1, arg2, ...), ReturnTo(next_block), UnwindContinue())`. #![unstable( feature = "custom_mir", @@ -287,35 +287,68 @@ macro_rules! define { } // Unwind actions +pub struct UnwindActionArg; define!( "mir_unwind_continue", /// An unwind action that continues unwinding. - fn UnwindContinue() + fn UnwindContinue() -> UnwindActionArg ); define!( "mir_unwind_unreachable", /// An unwind action that triggers undefined behaviour. - fn UnwindUnreachable() -> BasicBlock + fn UnwindUnreachable() -> UnwindActionArg ); define!( "mir_unwind_terminate", /// An unwind action that terminates the execution. /// /// `UnwindTerminate` can also be used as a terminator. - fn UnwindTerminate(reason: UnwindTerminateReason) + fn UnwindTerminate(reason: UnwindTerminateReason) -> UnwindActionArg ); define!( "mir_unwind_cleanup", /// An unwind action that continues execution in a given basic blok. 
- fn UnwindCleanup(goto: BasicBlock) + fn UnwindCleanup(goto: BasicBlock) -> UnwindActionArg ); +// Return destination for `Call` +pub struct ReturnToArg; +define!("mir_return_to", fn ReturnTo(goto: BasicBlock) -> ReturnToArg); + // Terminators define!("mir_return", fn Return() -> BasicBlock); define!("mir_goto", fn Goto(destination: BasicBlock) -> BasicBlock); define!("mir_unreachable", fn Unreachable() -> BasicBlock); -define!("mir_drop", fn Drop(place: T, goto: BasicBlock, unwind_action: U)); -define!("mir_call", fn Call(call: (), goto: BasicBlock, unwind_action: U)); +define!("mir_drop", + /// Drop the contents of a place. + /// + /// The first argument must be a place. + /// + /// The second argument must be of the form `ReturnTo(bb)`, where `bb` is the basic block that + /// will be jumped to after the destructor returns. + /// + /// The third argument describes what happens on unwind. It can be one of: + /// - [`UnwindContinue`] + /// - [`UnwindUnreachable`] + /// - [`UnwindTerminate`] + /// - [`UnwindCleanup`] + fn Drop(place: T, goto: ReturnToArg, unwind_action: UnwindActionArg) +); +define!("mir_call", + /// Call a function. + /// + /// The first argument must be of the form `ret_val = fun(arg1, arg2, ...)`. + /// + /// The second argument must be of the form `ReturnTo(bb)`, where `bb` is the basic block that + /// will be jumped to after the function returns. + /// + /// The third argument describes what happens on unwind. It can be one of: + /// - [`UnwindContinue`] + /// - [`UnwindUnreachable`] + /// - [`UnwindTerminate`] + /// - [`UnwindCleanup`] + fn Call(call: (), goto: ReturnToArg, unwind_action: UnwindActionArg) +); define!("mir_unwind_resume", /// A terminator that resumes the unwinding. fn UnwindResume() diff --git a/library/core/src/mem/mod.rs b/library/core/src/mem/mod.rs index c1687abb7cb9c..407954001e4ce 100644 --- a/library/core/src/mem/mod.rs +++ b/library/core/src/mem/mod.rs @@ -1395,8 +1395,18 @@ impl SizedTypeProperties for T {} /// /// assert_eq!(mem::offset_of!(Option<&u8>, Some.0), 0); /// ``` +#[cfg(not(bootstrap))] #[unstable(feature = "offset_of", issue = "106655")] #[allow_internal_unstable(builtin_syntax, hint_must_use)] +pub macro offset_of($Container:ty, $($fields:expr)+ $(,)?) { + // The `{}` is for better error messages + crate::hint::must_use({builtin # offset_of($Container, $($fields)+)}) +} + +#[cfg(bootstrap)] +#[unstable(feature = "offset_of", issue = "106655")] +#[allow_internal_unstable(builtin_syntax, hint_must_use)] +#[allow(missing_docs)] pub macro offset_of($Container:ty, $($fields:tt).+ $(,)?) { // The `{}` is for better error messages crate::hint::must_use({builtin # offset_of($Container, $($fields).+)}) diff --git a/library/core/src/net/ip_addr.rs b/library/core/src/net/ip_addr.rs index 8bf15c736c34b..1ef876a3163c9 100644 --- a/library/core/src/net/ip_addr.rs +++ b/library/core/src/net/ip_addr.rs @@ -407,7 +407,7 @@ impl IpAddr { } /// Converts this address to an `IpAddr::V4` if it is an IPv4-mapped IPv6 addresses, otherwise it - /// return `self` as-is. + /// returns `self` as-is. 
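The `offset_of!` hunk above only reshuffles the macro between bootstrap and non-bootstrap builds and changes how the field list is parsed; the macro itself was still unstable at this point. A minimal, illustrative use on a nightly of that era (the struct name and layout are invented for the example):

```rust
#![feature(offset_of)]

use std::mem;

#[repr(C)]
struct Header {
    tag: u8,
    len: u32,
}

fn main() {
    assert_eq!(mem::offset_of!(Header, tag), 0);
    // With repr(C), `len` is placed after `tag` plus the padding required for
    // u32 alignment, i.e. at byte offset 4.
    assert_eq!(mem::offset_of!(Header, len), 4);
}
```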
/// /// # Examples /// diff --git a/library/core/src/primitive_docs.rs b/library/core/src/primitive_docs.rs index 99208fba67059..bd2851a26fb8c 100644 --- a/library/core/src/primitive_docs.rs +++ b/library/core/src/primitive_docs.rs @@ -1348,7 +1348,6 @@ mod prim_usize {} /// * [`Fn`] \(in addition, `&T` references get [`FnMut`] and [`FnOnce`] if `T: Fn`) /// * [`Hash`] /// * [`ToSocketAddrs`] -/// * [`Send`] \(`&T` references also require T: [Sync]) /// * [`Sync`] /// /// [`std::fmt`]: fmt @@ -1366,6 +1365,7 @@ mod prim_usize {} /// * [`ExactSizeIterator`] /// * [`FusedIterator`] /// * [`TrustedLen`] +/// * [`Send`] /// * [`io::Write`] /// * [`Read`] /// * [`Seek`] @@ -1378,6 +1378,8 @@ mod prim_usize {} /// [`Read`]: ../std/io/trait.Read.html /// [`io::Write`]: ../std/io/trait.Write.html /// +/// In addition, `&T` references implement [`Send`] if and only if `T` implements [`Sync`]. +/// /// Note that due to method call deref coercion, simply calling a trait method will act like they /// work on references as well as they do on owned values! The implementations described here are /// meant for generic contexts, where the final type `T` is a type parameter or otherwise not diff --git a/library/core/src/slice/iter.rs b/library/core/src/slice/iter.rs index fc54ea2377096..3d58afd26eacc 100644 --- a/library/core/src/slice/iter.rs +++ b/library/core/src/slice/iter.rs @@ -458,8 +458,12 @@ where match self.v.iter().position(|x| (self.pred)(x)) { None => self.finish(), Some(idx) => { - let ret = Some(&self.v[..idx]); - self.v = &self.v[idx + 1..]; + let (left, right) = + // SAFETY: if v.iter().position returns Some(idx), that + // idx is definitely a valid index for v + unsafe { (self.v.get_unchecked(..idx), self.v.get_unchecked(idx + 1..)) }; + let ret = Some(left); + self.v = right; ret } } @@ -491,8 +495,12 @@ where match self.v.iter().rposition(|x| (self.pred)(x)) { None => self.finish(), Some(idx) => { - let ret = Some(&self.v[idx + 1..]); - self.v = &self.v[..idx]; + let (left, right) = + // SAFETY: if v.iter().rposition returns Some(idx), then + // idx is definitely a valid index for v + unsafe { (self.v.get_unchecked(..idx), self.v.get_unchecked(idx + 1..)) }; + let ret = Some(right); + self.v = left; ret } } diff --git a/library/std/src/fs.rs b/library/std/src/fs.rs index 38050cf684f0d..c4a92927937a1 100644 --- a/library/std/src/fs.rs +++ b/library/std/src/fs.rs @@ -31,6 +31,10 @@ use crate::time::SystemTime; /// on closing are ignored by the implementation of `Drop`. Use the method /// [`sync_all`] if these errors must be manually handled. /// +/// `File` does not buffer reads and writes. For efficiency, consider wrapping the +/// file in a [`BufReader`] or [`BufWriter`] when performing many small [`read`] +/// or [`write`] calls, unless unbuffered reads and writes are required. +/// /// # Examples /// /// Creates a new file and write bytes to it (you can also use [`write()`]): @@ -61,8 +65,7 @@ use crate::time::SystemTime; /// } /// ``` /// -/// It can be more efficient to read the contents of a file with a buffered -/// [`Read`]er. This can be accomplished with [`BufReader`]: +/// Using a buffered [`Read`]er: /// /// ```no_run /// use std::fs::File; @@ -93,8 +96,11 @@ use crate::time::SystemTime; /// perform synchronous I/O operations. Therefore the underlying file must not /// have been opened for asynchronous I/O (e.g. by using `FILE_FLAG_OVERLAPPED`). 
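The primitive-docs hunk above restates the auto-trait rule for shared references: `&T` implements `Send` exactly when `T` implements `Sync`. A tiny self-contained check of that claim (names are illustrative):

```rust
use std::cell::Cell;

fn assert_send<T: Send>(_: T) {}

fn main() {
    // u32 is Sync, so &u32 is Send.
    let shared = 1u32;
    assert_send(&shared);

    // Cell<u32> is not Sync, so &Cell<u32> is not Send; uncommenting the
    // call below makes this fail to compile.
    let not_sync = Cell::new(1u32);
    // assert_send(&not_sync);
    let _ = &not_sync;
}
```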
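The `File` documentation above now points users at buffered wrappers for many small reads or writes. A short sketch of the write side (the file name is arbitrary):

```rust
use std::fs::File;
use std::io::{self, BufWriter, Write};

fn main() -> io::Result<()> {
    // `File` itself is unbuffered, so each small write would be its own
    // syscall; `BufWriter` batches them in memory and flushes larger chunks.
    let file = File::create("numbers.txt")?;
    let mut out = BufWriter::new(file);
    for i in 0..1_000 {
        writeln!(out, "{i}")?;
    }
    out.flush()?; // push whatever is still sitting in the buffer to the file
    Ok(())
}
```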
/// -/// [`BufReader`]: io::BufReader +/// [`BufReader`]: io::BufReader +/// [`BufWriter`]: io::BufWriter /// [`sync_all`]: File::sync_all +/// [`write`]: File::write +/// [`read`]: File::read #[stable(feature = "rust1", since = "1.0.0")] #[cfg_attr(not(test), rustc_diagnostic_item = "File")] pub struct File { diff --git a/library/std/src/lib.rs b/library/std/src/lib.rs index 6365366297c43..95ee6a9b29c9c 100644 --- a/library/std/src/lib.rs +++ b/library/std/src/lib.rs @@ -265,6 +265,7 @@ // // Language features: // tidy-alphabetical-start +#![cfg_attr(not(bootstrap), feature(cfg_sanitizer_cfi))] #![feature(alloc_error_handler)] #![feature(allocator_internals)] #![feature(allow_internal_unsafe)] diff --git a/library/std/src/os/unix/fs.rs b/library/std/src/os/unix/fs.rs index 0eb4e88cfad96..e995d5133f8a7 100644 --- a/library/std/src/os/unix/fs.rs +++ b/library/std/src/os/unix/fs.rs @@ -68,7 +68,7 @@ pub trait FileExt { io::default_read_vectored(|b| self.read_at(b, offset), bufs) } - /// Reads the exact number of byte required to fill `buf` from the given offset. + /// Reads the exact number of bytes required to fill `buf` from the given offset. /// /// The offset is relative to the start of the file and thus independent /// from the current cursor. diff --git a/library/std/src/os/unix/net/listener.rs b/library/std/src/os/unix/net/listener.rs index 5be8aebc70fd5..1b70b669c7790 100644 --- a/library/std/src/os/unix/net/listener.rs +++ b/library/std/src/os/unix/net/listener.rs @@ -73,8 +73,18 @@ impl UnixListener { unsafe { let inner = Socket::new_raw(libc::AF_UNIX, libc::SOCK_STREAM)?; let (addr, len) = sockaddr_un(path.as_ref())?; - const backlog: libc::c_int = - if cfg!(any(target_os = "linux", target_os = "freebsd")) { -1 } else { 128 }; + #[cfg(any(target_os = "windows", target_os = "redox"))] + const backlog: libc::c_int = 128; + #[cfg(any(target_os = "linux", target_os = "freebsd", target_os = "openbsd"))] + const backlog: libc::c_int = -1; + #[cfg(not(any( + target_os = "windows", + target_os = "redox", + target_os = "linux", + target_os = "freebsd", + target_os = "openbsd" + )))] + const backlog: libc::c_int = libc::SOMAXCONN; cvt(libc::bind(inner.as_inner().as_raw_fd(), &addr as *const _ as *const _, len as _))?; cvt(libc::listen(inner.as_inner().as_raw_fd(), backlog))?; diff --git a/library/std/src/sys/unix/thread.rs b/library/std/src/sys/unix/thread.rs index 76b96bb37df1c..7e4a01a5ecd33 100644 --- a/library/std/src/sys/unix/thread.rs +++ b/library/std/src/sys/unix/thread.rs @@ -354,7 +354,12 @@ pub fn available_parallelism() -> io::Result { Ok(unsafe { NonZeroUsize::new_unchecked(count) }) } } - } else if #[cfg(any(target_os = "freebsd", target_os = "dragonfly", target_os = "netbsd"))] { + } else if #[cfg(any( + target_os = "freebsd", + target_os = "dragonfly", + target_os = "openbsd", + target_os = "netbsd", + ))] { use crate::ptr; #[cfg(target_os = "freebsd")] @@ -427,31 +432,6 @@ pub fn available_parallelism() -> io::Result { return Err(io::const_io_error!(io::ErrorKind::NotFound, "The number of hardware threads is not known for the target platform")); } } - Ok(unsafe { NonZeroUsize::new_unchecked(cpus as usize) }) - } else if #[cfg(target_os = "openbsd")] { - use crate::ptr; - - let mut cpus: libc::c_uint = 0; - let mut cpus_size = crate::mem::size_of_val(&cpus); - let mut mib = [libc::CTL_HW, libc::HW_NCPU, 0, 0]; - - let res = unsafe { - libc::sysctl( - mib.as_mut_ptr(), - 2, - &mut cpus as *mut _ as *mut _, - &mut cpus_size as *mut _ as *mut _, - ptr::null_mut(), - 0, - ) - };
- - // Handle errors if any. - if res == -1 { - return Err(io::Error::last_os_error()); - } else if cpus == 0 { - return Err(io::const_io_error!(io::ErrorKind::NotFound, "The number of hardware threads is not known for the target platform")); - } Ok(unsafe { NonZeroUsize::new_unchecked(cpus as usize) }) } else if #[cfg(target_os = "nto")] { diff --git a/library/std/src/sys/unix/thread_local_dtor.rs b/library/std/src/sys/unix/thread_local_dtor.rs index ac85531c372ea..58f7ab84101ae 100644 --- a/library/std/src/sys/unix/thread_local_dtor.rs +++ b/library/std/src/sys/unix/thread_local_dtor.rs @@ -11,7 +11,7 @@ // Note, however, that we run on lots older linuxes, as well as cross // compiling from a newer linux to an older linux, so we also have a // fallback implementation to use as well. -#[allow(unexpected_cfgs)] +#[cfg_attr(bootstrap, allow(unexpected_cfgs))] #[cfg(any( target_os = "linux", target_os = "android", diff --git a/library/std/src/thread/local.rs b/library/std/src/thread/local.rs index def94acd45727..9cf37b0e6347c 100644 --- a/library/std/src/thread/local.rs +++ b/library/std/src/thread/local.rs @@ -16,7 +16,8 @@ use crate::fmt; /// /// This key uses the fastest possible implementation available to it for the /// target platform. It is instantiated with the [`thread_local!`] macro and the -/// primary method is the [`with`] method. +/// primary method is the [`with`] method, though there are helpers to make +/// working with [`Cell`] types easier. /// /// The [`with`] method yields a reference to the contained value which cannot /// outlive the current thread or escape the given closure. @@ -25,14 +26,30 @@ use crate::fmt; /// /// # Initialization and Destruction /// -/// Initialization is dynamically performed on the first call to [`with`] -/// within a thread, and values that implement [`Drop`] get destructed when a -/// thread exits. Some caveats apply, which are explained below. +/// Initialization is dynamically performed on the first call to a setter (e.g. +/// [`with`]) within a thread, and values that implement [`Drop`] get +/// destructed when a thread exits. Some caveats apply, which are explained below. /// /// A `LocalKey`'s initializer cannot recursively depend on itself. Using a /// `LocalKey` in this way may cause panics, aborts or infinite recursion on /// the first call to `with`. /// +/// # Single-thread Synchronization +/// +/// Though there is no potential race with other threads, it is still possible to +/// obtain multiple references to the thread-local data in different places on +/// the call stack. For this reason, only shared (`&T`) references may be obtained. +/// +/// To allow obtaining an exclusive mutable reference (`&mut T`), typically a +/// [`Cell`] or [`RefCell`] is used (see the [`std::cell`] for more information +/// on how exactly this works). To make this easier there are specialized +/// implementations for [`LocalKey>`] and [`LocalKey>`]. 
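The `LocalKey` docs above mention specialized helpers for `Cell` and `RefCell` contents, and the rewritten examples below use `with_borrow`/`with_borrow_mut`. As a complement, a small sketch of the `Cell`-flavoured helpers (`get`/`set`), which avoid the closure-based `with` entirely; the static and function here are illustrative only:

```rust
use std::cell::Cell;

thread_local! {
    // `const { .. }` initialization enables the cheaper thread-local layout
    // mentioned later in these docs.
    static COUNTER: Cell<u32> = const { Cell::new(0) };
}

fn bump() -> u32 {
    // LocalKey<Cell<T>> exposes get/set directly, no closure needed.
    COUNTER.set(COUNTER.get() + 1);
    COUNTER.get()
}

fn main() {
    assert_eq!(bump(), 1);
    assert_eq!(bump(), 2);
}
```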
+/// +/// [`std::cell`]: `crate::cell` +/// [`LocalKey>`]: struct.LocalKey.html#impl-LocalKey> +/// [`LocalKey>`]: struct.LocalKey.html#impl-LocalKey> +/// +/// /// # Examples /// /// ``` @@ -41,26 +58,20 @@ use crate::fmt; /// /// thread_local!(static FOO: RefCell = RefCell::new(1)); /// -/// FOO.with(|f| { -/// assert_eq!(*f.borrow(), 1); -/// *f.borrow_mut() = 2; -/// }); +/// FOO.with_borrow(|v| assert_eq!(*v, 1)); +/// FOO.with_borrow_mut(|v| *v = 2); /// /// // each thread starts out with the initial value of 1 /// let t = thread::spawn(move|| { -/// FOO.with(|f| { -/// assert_eq!(*f.borrow(), 1); -/// *f.borrow_mut() = 3; -/// }); +/// FOO.with_borrow(|v| assert_eq!(*v, 1)); +/// FOO.with_borrow_mut(|v| *v = 3); /// }); /// /// // wait for the thread to complete and bail out on panic /// t.join().unwrap(); /// /// // we retain our original value of 2 despite the child thread -/// FOO.with(|f| { -/// assert_eq!(*f.borrow(), 2); -/// }); +/// FOO.with_borrow(|v| assert_eq!(*v, 2)); /// ``` /// /// # Platform-specific behavior @@ -137,10 +148,13 @@ impl fmt::Debug for LocalKey { /// static BAR: RefCell = RefCell::new(1.0); /// } /// -/// FOO.with(|foo| assert_eq!(*foo.borrow(), 1)); -/// BAR.with(|bar| assert_eq!(*bar.borrow(), 1.0)); +/// FOO.with_borrow(|v| assert_eq!(*v, 1)); +/// BAR.with_borrow(|v| assert_eq!(*v, 1.0)); /// ``` /// +/// Note that only shared references (`&T`) to the inner data may be obtained, so a +/// type such as [`Cell`] or [`RefCell`] is typically used to allow mutating access. +/// /// This macro supports a special `const {}` syntax that can be used /// when the initialization expression can be evaluated as a constant. /// This can enable a more efficient thread local implementation that @@ -155,7 +169,7 @@ impl fmt::Debug for LocalKey { /// pub static FOO: Cell = const { Cell::new(1) }; /// } /// -/// FOO.with(|foo| assert_eq!(foo.get(), 1)); +/// assert_eq!(FOO.get(), 1); /// ``` /// /// See [`LocalKey` documentation][`std::thread::LocalKey`] for more diff --git a/library/test/Cargo.toml b/library/test/Cargo.toml index 91a1abde059f6..92c535501bf9c 100644 --- a/library/test/Cargo.toml +++ b/library/test/Cargo.toml @@ -3,9 +3,6 @@ name = "test" version = "0.0.0" edition = "2021" -[lib] -crate-type = ["dylib", "rlib"] - [dependencies] getopts = { version = "0.2.21", features = ['rustc-dep-of-std'] } std = { path = "../std" } diff --git a/src/bootstrap/Cargo.lock b/src/bootstrap/Cargo.lock index 63190fc318020..95553e7ea0254 100644 --- a/src/bootstrap/Cargo.lock +++ b/src/bootstrap/Cargo.lock @@ -246,9 +246,9 @@ checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" [[package]] name = "errno" -version = "0.3.5" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3e13f66a2f95e32a39eaa81f6b95d42878ca0e1db0c7543723dfe12557e860" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" dependencies = [ "libc", "windows-sys", @@ -256,9 +256,9 @@ dependencies = [ [[package]] name = "fd-lock" -version = "3.0.13" +version = "4.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef033ed5e9bad94e55838ca0ca906db0e043f517adda0c8b79c7a8c66c93c1b5" +checksum = "7e5768da2206272c81ef0b5e951a41862938a6070da63bcea197899942d3b947" dependencies = [ "cfg-if", "rustix", @@ -369,9 +369,9 @@ checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c" [[package]] name = "linux-raw-sys" -version = "0.4.10" +version = "0.4.12" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f" +checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" [[package]] name = "log" @@ -530,9 +530,9 @@ checksum = "49b3de9ec5dc0a3417da371aab17d729997c15010e7fd24ff707773a33bddb64" [[package]] name = "rustix" -version = "0.38.19" +version = "0.38.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "745ecfa778e66b2b63c88a61cb36e0eea109e803b0b86bf9879fbc77c70e86ed" +checksum = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316" dependencies = [ "bitflags 2.4.1", "errno", @@ -620,9 +620,9 @@ dependencies = [ [[package]] name = "sysinfo" -version = "0.30.0" +version = "0.30.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c68492e7268037de59ae153d7efb79546cf94a18a9548235420d3d8d2436b4b1" +checksum = "ba2dbd2894d23b2d78dae768d85e323b557ac3ac71a5d917a31536d8f77ebada" dependencies = [ "cfg-if", "core-foundation-sys", @@ -733,9 +733,9 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows" -version = "0.51.1" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca229916c5ee38c2f2bc1e9d8f04df975b4bd93f9955dc69fabb5d91270045c9" +checksum = "e48a53791691ab099e5e2ad123536d0fff50652600abaf43bbf952894110d0be" dependencies = [ "windows-core", "windows-targets", @@ -743,27 +743,27 @@ dependencies = [ [[package]] name = "windows-core" -version = "0.51.1" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1f8cf84f35d2db49a46868f947758c7a1138116f7fac3bc844f43ade1292e64" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ "windows-targets", ] [[package]] name = "windows-sys" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ "windows-targets", ] [[package]] name = "windows-targets" -version = "0.48.5" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" dependencies = [ "windows_aarch64_gnullvm", "windows_aarch64_msvc", @@ -776,45 +776,45 @@ dependencies = [ [[package]] name = "windows_aarch64_gnullvm" -version = "0.48.5" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" +checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" [[package]] name = "windows_aarch64_msvc" -version = "0.48.5" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" +checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" [[package]] name = "windows_i686_gnu" -version = "0.48.5" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" +checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" [[package]] name = 
"windows_i686_msvc" -version = "0.48.5" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" +checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" [[package]] name = "windows_x86_64_gnu" -version = "0.48.5" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" +checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" [[package]] name = "windows_x86_64_gnullvm" -version = "0.48.5" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" +checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" [[package]] name = "windows_x86_64_msvc" -version = "0.48.5" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" +checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" [[package]] name = "xattr" diff --git a/src/bootstrap/Cargo.toml b/src/bootstrap/Cargo.toml index 225eccca40f77..8e09f216d7493 100644 --- a/src/bootstrap/Cargo.toml +++ b/src/bootstrap/Cargo.toml @@ -38,6 +38,7 @@ cc = "1.0.69" clap = { version = "4.4.7", default-features = false, features = ["std", "usage", "help", "derive", "error-context"] } clap_complete = "4.4.3" cmake = "0.1.38" +fd-lock = "4.0" filetime = "0.2" home = "0.5.4" ignore = "0.4.10" @@ -59,17 +60,13 @@ walkdir = "2" xz2 = "0.1" # Dependencies needed by the build-metrics feature -sysinfo = { version = "0.30.0", optional = true } - -# Solaris doesn't support flock() and thus fd-lock is not option now -[target.'cfg(not(target_os = "solaris"))'.dependencies] -fd-lock = "3.0.13" +sysinfo = { version = "0.30", optional = true } [target.'cfg(windows)'.dependencies.junction] version = "1.0.0" [target.'cfg(windows)'.dependencies.windows] -version = "0.51.1" +version = "0.52" features = [ "Win32_Foundation", "Win32_Security", diff --git a/src/bootstrap/configure.py b/src/bootstrap/configure.py index 544a42d9ada1a..d34c19a47e3fb 100755 --- a/src/bootstrap/configure.py +++ b/src/bootstrap/configure.py @@ -55,7 +55,6 @@ def v(*args): o("full-tools", None, "enable all tools") o("lld", "rust.lld", "build lld") o("clang", "llvm.clang", "build clang") -o("missing-tools", "dist.missing-tools", "allow failures when building tools") o("use-libcxx", "llvm.use-libcxx", "build LLVM with libc++") o("control-flow-guard", "rust.control-flow-guard", "Enable Control Flow Guard") o("patch-binaries-for-nix", "build.patch-binaries-for-nix", "whether patch binaries for usage with Nix toolchains") diff --git a/src/bootstrap/src/bin/main.rs b/src/bootstrap/src/bin/main.rs index b1ab8dae535db..b97f73aa65275 100644 --- a/src/bootstrap/src/bin/main.rs +++ b/src/bootstrap/src/bin/main.rs @@ -6,7 +6,6 @@ //! directory in each respective module. 
use std::io::Write; -#[cfg(all(any(unix, windows), not(target_os = "solaris")))] use std::process; use std::{ env, @@ -22,43 +21,35 @@ fn main() { let args = env::args().skip(1).collect::>(); let config = Config::parse(&args); - #[cfg(all(any(unix, windows), not(target_os = "solaris")))] let mut build_lock; - #[cfg(all(any(unix, windows), not(target_os = "solaris")))] let _build_lock_guard; if !config.bypass_bootstrap_lock { // Display PID of process holding the lock // PID will be stored in a lock file - #[cfg(all(any(unix, windows), not(target_os = "solaris")))] - { - let path = config.out.join("lock"); - let pid = match fs::read_to_string(&path) { - Ok(contents) => contents, - Err(_) => String::new(), - }; - - build_lock = fd_lock::RwLock::new(t!(fs::OpenOptions::new() - .write(true) - .create(true) - .open(&path))); - _build_lock_guard = match build_lock.try_write() { - Ok(mut lock) => { - t!(lock.write(&process::id().to_string().as_ref())); - lock - } - err => { - drop(err); - println!("WARNING: build directory locked by process {pid}, waiting for lock"); - let mut lock = t!(build_lock.write()); - t!(lock.write(&process::id().to_string().as_ref())); - lock - } - }; - } - - #[cfg(any(not(any(unix, windows)), target_os = "solaris"))] - println!("WARNING: file locking not supported for target, not locking build directory"); + let lock_path = config.out.join("lock"); + let pid = match fs::read_to_string(&lock_path) { + Ok(contents) => contents, + Err(_) => String::new(), + }; + + build_lock = fd_lock::RwLock::new(t!(fs::OpenOptions::new() + .write(true) + .create(true) + .open(&lock_path))); + _build_lock_guard = match build_lock.try_write() { + Ok(mut lock) => { + t!(lock.write(&process::id().to_string().as_ref())); + lock + } + err => { + drop(err); + println!("WARNING: build directory locked by process {pid}, waiting for lock"); + let mut lock = t!(build_lock.write()); + t!(lock.write(&process::id().to_string().as_ref())); + lock + } + }; } // check_version warnings are not printed during setup @@ -158,25 +149,27 @@ fn check_version(config: &Config) -> Option { let changes = find_recent_config_change_ids(id); - if !changes.is_empty() { - msg.push_str("There have been changes to x.py since you last updated:\n"); + if changes.is_empty() { + return None; + } - for change in changes { - msg.push_str(&format!(" [{}] {}\n", change.severity.to_string(), change.summary)); - msg.push_str(&format!( - " - PR Link https://github.com/rust-lang/rust/pull/{}\n", - change.change_id - )); - } + msg.push_str("There have been changes to x.py since you last updated:\n"); - msg.push_str("NOTE: to silence this warning, "); + for change in changes { + msg.push_str(&format!(" [{}] {}\n", change.severity.to_string(), change.summary)); msg.push_str(&format!( - "update `config.toml` to use `change-id = {latest_change_id}` instead" + " - PR Link https://github.com/rust-lang/rust/pull/{}\n", + change.change_id )); + } - if io::stdout().is_terminal() && !config.dry_run() { - t!(fs::write(warned_id_path, latest_change_id.to_string())); - } + msg.push_str("NOTE: to silence this warning, "); + msg.push_str(&format!( + "update `config.toml` to use `change-id = {latest_change_id}` instead" + )); + + if io::stdout().is_terminal() && !config.dry_run() { + t!(fs::write(warned_id_path, latest_change_id.to_string())); } } else { msg.push_str("WARNING: The `change-id` is missing in the `config.toml`. 
This means that you will not be able to track the major changes made to the bootstrap configurations.\n"); diff --git a/src/bootstrap/src/core/build_steps/check.rs b/src/bootstrap/src/core/build_steps/check.rs index ecaaf91aec12f..f1f2f28909dee 100644 --- a/src/bootstrap/src/core/build_steps/check.rs +++ b/src/bootstrap/src/core/build_steps/check.rs @@ -386,7 +386,7 @@ impl Step for RustAnalyzer { cargo_subcommand(builder.kind), "src/tools/rust-analyzer", SourceType::InTree, - &["rust-analyzer/in-rust-tree".to_owned()], + &["in-rust-tree".to_owned()], ); cargo.allow_features(crate::core::build_steps::tool::RustAnalyzer::ALLOW_FEATURES); diff --git a/src/bootstrap/src/core/build_steps/compile.rs b/src/bootstrap/src/core/build_steps/compile.rs index df4d1a43dabc7..d699c4fe536c5 100644 --- a/src/bootstrap/src/core/build_steps/compile.rs +++ b/src/bootstrap/src/core/build_steps/compile.rs @@ -274,7 +274,7 @@ fn copy_third_party_objects( ) -> Vec<(PathBuf, DependencyType)> { let mut target_deps = vec![]; - if builder.config.sanitizers_enabled(target) && compiler.stage != 0 { + if builder.config.needs_sanitizer_runtime_built(target) && compiler.stage != 0 { // The sanitizers are only copied in stage1 or above, // to avoid creating dependency on LLVM. target_deps.extend( @@ -1738,7 +1738,7 @@ impl Step for Assemble { if builder.config.rust_codegen_backends.contains(&INTERNER.intern_str("llvm")) { let llvm::LlvmResult { llvm_config, .. } = builder.ensure(llvm::Llvm { target: target_compiler.host }); - if !builder.config.dry_run() { + if !builder.config.dry_run() && builder.config.llvm_tools_enabled { let llvm_bin_dir = output(Command::new(llvm_config).arg("--bindir")); let llvm_bin_dir = Path::new(llvm_bin_dir.trim()); diff --git a/src/bootstrap/src/core/build_steps/dist.rs b/src/bootstrap/src/core/build_steps/dist.rs index 98e267713daf7..d87651cb367e6 100644 --- a/src/bootstrap/src/core/build_steps/dist.rs +++ b/src/bootstrap/src/core/build_steps/dist.rs @@ -1110,9 +1110,7 @@ impl Step for Rls { let compiler = self.compiler; let target = self.target; - let rls = builder - .ensure(tool::Rls { compiler, target, extra_features: Vec::new() }) - .expect("rls expected to build"); + let rls = builder.ensure(tool::Rls { compiler, target, extra_features: Vec::new() }); let mut tarball = Tarball::new(builder, "rls", &target.triple); tarball.set_overlay(OverlayKind::RLS); @@ -1154,9 +1152,7 @@ impl Step for RustAnalyzer { let compiler = self.compiler; let target = self.target; - let rust_analyzer = builder - .ensure(tool::RustAnalyzer { compiler, target }) - .expect("rust-analyzer always builds"); + let rust_analyzer = builder.ensure(tool::RustAnalyzer { compiler, target }); let mut tarball = Tarball::new(builder, "rust-analyzer", &target.triple); tarball.set_overlay(OverlayKind::RustAnalyzer); @@ -1201,12 +1197,9 @@ impl Step for Clippy { // Prepare the image directory // We expect clippy to build, because we've exited this step above if tool // state for clippy isn't testing. 
- let clippy = builder - .ensure(tool::Clippy { compiler, target, extra_features: Vec::new() }) - .expect("clippy expected to build - essential tool"); - let cargoclippy = builder - .ensure(tool::CargoClippy { compiler, target, extra_features: Vec::new() }) - .expect("clippy expected to build - essential tool"); + let clippy = builder.ensure(tool::Clippy { compiler, target, extra_features: Vec::new() }); + let cargoclippy = + builder.ensure(tool::CargoClippy { compiler, target, extra_features: Vec::new() }); let mut tarball = Tarball::new(builder, "clippy", &target.triple); tarball.set_overlay(OverlayKind::Clippy); @@ -1255,9 +1248,9 @@ impl Step for Miri { let compiler = self.compiler; let target = self.target; - let miri = builder.ensure(tool::Miri { compiler, target, extra_features: Vec::new() })?; + let miri = builder.ensure(tool::Miri { compiler, target, extra_features: Vec::new() }); let cargomiri = - builder.ensure(tool::CargoMiri { compiler, target, extra_features: Vec::new() })?; + builder.ensure(tool::CargoMiri { compiler, target, extra_features: Vec::new() }); let mut tarball = Tarball::new(builder, "miri", &target.triple); tarball.set_overlay(OverlayKind::Miri); @@ -1396,12 +1389,10 @@ impl Step for Rustfmt { let compiler = self.compiler; let target = self.target; - let rustfmt = builder - .ensure(tool::Rustfmt { compiler, target, extra_features: Vec::new() }) - .expect("rustfmt expected to build - essential tool"); - let cargofmt = builder - .ensure(tool::Cargofmt { compiler, target, extra_features: Vec::new() }) - .expect("cargo fmt expected to build - essential tool"); + let rustfmt = + builder.ensure(tool::Rustfmt { compiler, target, extra_features: Vec::new() }); + let cargofmt = + builder.ensure(tool::Cargofmt { compiler, target, extra_features: Vec::new() }); let mut tarball = Tarball::new(builder, "rustfmt", &target.triple); tarball.set_overlay(OverlayKind::Rustfmt); tarball.is_preview(true); @@ -1455,9 +1446,8 @@ impl Step for RustDemangler { return None; } - let rust_demangler = builder - .ensure(tool::RustDemangler { compiler, target, extra_features: Vec::new() }) - .expect("rust-demangler expected to build - in-tree tool"); + let rust_demangler = + builder.ensure(tool::RustDemangler { compiler, target, extra_features: Vec::new() }); // Prepare the image directory let mut tarball = Tarball::new(builder, "rust-demangler", &target.triple); @@ -2157,12 +2147,14 @@ impl Step for LlvmTools { tarball.set_overlay(OverlayKind::LLVM); tarball.is_preview(true); - // Prepare the image directory - let src_bindir = builder.llvm_out(target).join("bin"); - let dst_bindir = format!("lib/rustlib/{}/bin", target.triple); - for tool in LLVM_TOOLS { - let exe = src_bindir.join(exe(tool, target)); - tarball.add_file(&exe, &dst_bindir, 0o755); + if builder.config.llvm_tools_enabled { + // Prepare the image directory + let src_bindir = builder.llvm_out(target).join("bin"); + let dst_bindir = format!("lib/rustlib/{}/bin", target.triple); + for tool in LLVM_TOOLS { + let exe = src_bindir.join(exe(tool, target)); + tarball.add_file(&exe, &dst_bindir, 0o755); + } } // Copy libLLVM.so to the target lib dir as well, so the RPATH like diff --git a/src/bootstrap/src/core/build_steps/run.rs b/src/bootstrap/src/core/build_steps/run.rs index d1d6b7e869ecb..d9e0da14a70ba 100644 --- a/src/bootstrap/src/core/build_steps/run.rs +++ b/src/bootstrap/src/core/build_steps/run.rs @@ -148,9 +148,8 @@ impl Step for Miri { let target = self.target; let compiler = builder.compiler(stage, host); - let miri = 
builder - .ensure(tool::Miri { compiler, target: self.host, extra_features: Vec::new() }) - .expect("in-tree tool"); + let miri = + builder.ensure(tool::Miri { compiler, target: self.host, extra_features: Vec::new() }); let miri_sysroot = test::Miri::build_miri_sysroot(builder, compiler, &miri, target); // # Run miri. diff --git a/src/bootstrap/src/core/build_steps/test.rs b/src/bootstrap/src/core/build_steps/test.rs index d0f36f99342fe..4fadc4201c8d2 100644 --- a/src/bootstrap/src/core/build_steps/test.rs +++ b/src/bootstrap/src/core/build_steps/test.rs @@ -385,7 +385,7 @@ impl Step for RustAnalyzer { "test", crate_path, SourceType::InTree, - &["sysroot-abi".to_owned()], + &["in-rust-tree".to_owned()], ); cargo.allow_features(tool::RustAnalyzer::ALLOW_FEATURES); @@ -427,9 +427,7 @@ impl Step for Rustfmt { let host = self.host; let compiler = builder.compiler(stage, host); - builder - .ensure(tool::Rustfmt { compiler, target: self.host, extra_features: Vec::new() }) - .expect("in-tree tool"); + builder.ensure(tool::Rustfmt { compiler, target: self.host, extra_features: Vec::new() }); let mut cargo = tool::prepare_tool_cargo( builder, @@ -476,9 +474,11 @@ impl Step for RustDemangler { let host = self.host; let compiler = builder.compiler(stage, host); - let rust_demangler = builder - .ensure(tool::RustDemangler { compiler, target: self.host, extra_features: Vec::new() }) - .expect("in-tree tool"); + let rust_demangler = builder.ensure(tool::RustDemangler { + compiler, + target: self.host, + extra_features: Vec::new(), + }); let mut cargo = tool::prepare_tool_cargo( builder, compiler, @@ -609,12 +609,13 @@ impl Step for Miri { // Except if we are at stage 2, the bootstrap loop is complete and we can stick with our current stage. let compiler_std = builder.compiler(if stage < 2 { stage + 1 } else { stage }, host); - let miri = builder - .ensure(tool::Miri { compiler, target: self.host, extra_features: Vec::new() }) - .expect("in-tree tool"); - let _cargo_miri = builder - .ensure(tool::CargoMiri { compiler, target: self.host, extra_features: Vec::new() }) - .expect("in-tree tool"); + let miri = + builder.ensure(tool::Miri { compiler, target: self.host, extra_features: Vec::new() }); + let _cargo_miri = builder.ensure(tool::CargoMiri { + compiler, + target: self.host, + extra_features: Vec::new(), + }); // The stdlib we need might be at a different stage. And just asking for the // sysroot does not seem to populate it, so we do that first. builder.ensure(compile::Std::new(compiler_std, host)); @@ -788,9 +789,7 @@ impl Step for Clippy { let host = self.host; let compiler = builder.compiler(stage, host); - builder - .ensure(tool::Clippy { compiler, target: self.host, extra_features: Vec::new() }) - .expect("in-tree tool"); + builder.ensure(tool::Clippy { compiler, target: self.host, extra_features: Vec::new() }); let mut cargo = tool::prepare_tool_cargo( builder, compiler, @@ -1668,13 +1667,11 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the if mode == "coverage-run" { // The demangler doesn't need the current compiler, so we can avoid // unnecessary rebuilds by using the bootstrap compiler instead. 
- let rust_demangler = builder - .ensure(tool::RustDemangler { - compiler: compiler.with_stage(0), - target: compiler.host, - extra_features: Vec::new(), - }) - .expect("in-tree tool"); + let rust_demangler = builder.ensure(tool::RustDemangler { + compiler: compiler.with_stage(0), + target: compiler.host, + extra_features: Vec::new(), + }); cmd.arg("--rust-demangler-path").arg(rust_demangler); } @@ -1940,6 +1937,29 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the } } + // Special setup to enable running with sanitizers on MSVC. + if !builder.config.dry_run() + && target.contains("msvc") + && builder.config.sanitizers_enabled(target) + { + // Ignore interception failures: not all dlls in the process will have been built with + // address sanitizer enabled (e.g., ntdll.dll). + cmd.env("ASAN_WIN_CONTINUE_ON_INTERCEPTION_FAILURE", "1"); + // Add the address sanitizer runtime to the PATH - it is located next to cl.exe. + let asan_runtime_path = + builder.cc.borrow()[&target].path().parent().unwrap().to_path_buf(); + let old_path = cmd + .get_envs() + .find_map(|(k, v)| (k == "PATH").then_some(v)) + .flatten() + .map_or_else(|| env::var_os("PATH").unwrap_or_default(), |v| v.to_owned()); + let new_path = env::join_paths( + env::split_paths(&old_path).chain(std::iter::once(asan_runtime_path)), + ) + .expect("Could not add ASAN runtime path to PATH"); + cmd.env("PATH", new_path); + } + // Some UI tests trigger behavior in rustc where it reads $CARGO and changes behavior if it exists. // To make the tests work that rely on it not being set, make sure it is not set. cmd.env_remove("CARGO"); diff --git a/src/bootstrap/src/core/build_steps/tool.rs b/src/bootstrap/src/core/build_steps/tool.rs index 8e3941dbedaca..5d8d10a7debc3 100644 --- a/src/bootstrap/src/core/build_steps/tool.rs +++ b/src/bootstrap/src/core/build_steps/tool.rs @@ -27,7 +27,6 @@ struct ToolBuild { tool: &'static str, path: &'static str, mode: Mode, - is_optional_tool: bool, source_type: SourceType, extra_features: Vec, /// Nightly-only features that are allowed (comma-separated list). @@ -60,7 +59,7 @@ impl Builder<'_> { } impl Step for ToolBuild { - type Output = Option; + type Output = PathBuf; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { run.never() @@ -70,12 +69,11 @@ impl Step for ToolBuild { /// /// This will build the specified tool with the specified `host` compiler in /// `stage` into the normal cargo output directory. 
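The test-harness hunk above splices the ASan runtime directory into `PATH` with `env::split_paths`/`env::join_paths` so the existing entries survive. A standalone sketch of that pattern; the directory used here is hypothetical, not the real MSVC location, and the result would normally be handed to the child via `cmd.env("PATH", ...)` rather than printed:

```rust
use std::env;
use std::path::PathBuf;

fn main() {
    // Hypothetical directory to append; the bootstrap code derives the real
    // one from the configured C compiler's location.
    let extra = PathBuf::from(r"C:\toolchain\asan\bin");

    let old_path = env::var_os("PATH").unwrap_or_default();
    // Keep every existing entry and add the new one at the end.
    let entries = env::split_paths(&old_path).chain(std::iter::once(extra));
    let new_path = env::join_paths(entries).expect("PATH entry contained an invalid character");

    println!("{}", new_path.to_string_lossy());
}
```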
- fn run(self, builder: &Builder<'_>) -> Option { + fn run(self, builder: &Builder<'_>) -> PathBuf { let compiler = self.compiler; let target = self.target; let mut tool = self.tool; let path = self.path; - let is_optional_tool = self.is_optional_tool; match self.mode { Mode::ToolRustc => { @@ -109,20 +107,16 @@ impl Step for ToolBuild { ); let mut cargo = Command::from(cargo); - // we check this in `is_optional_tool` in a second - let is_expected = builder.run_cmd(BootstrapCommand::from(&mut cargo).allow_failure()); + // we check this below + let build_success = builder.run_cmd(BootstrapCommand::from(&mut cargo).allow_failure()); builder.save_toolstate( tool, - if is_expected { ToolState::TestFail } else { ToolState::BuildFail }, + if build_success { ToolState::TestFail } else { ToolState::BuildFail }, ); - if !is_expected { - if !is_optional_tool { - crate::exit!(1); - } else { - None - } + if !build_success { + crate::exit!(1); } else { // HACK(#82501): on Windows, the tools directory gets added to PATH when running tests, and // compiletest confuses HTML tidy with the in-tree tidy. Name the in-tree tidy something @@ -133,7 +127,7 @@ impl Step for ToolBuild { let cargo_out = builder.cargo_out(compiler, self.mode, target).join(exe(tool, target)); let bin = builder.tools_dir(compiler).join(exe(tool, target)); builder.copy(&cargo_out, &bin); - Some(bin) + bin } } } @@ -278,7 +272,6 @@ macro_rules! bootstrap_tool { Mode::ToolBootstrap }, path: $path, - is_optional_tool: false, source_type: if false $(|| $external)* { SourceType::Submodule } else { @@ -286,7 +279,7 @@ macro_rules! bootstrap_tool { }, extra_features: vec![], allow_features: concat!($($allow_features)*), - }).expect("expected to build -- essential tool") + }) } } )+ @@ -361,19 +354,16 @@ impl Step for ErrorIndex { } fn run(self, builder: &Builder<'_>) -> PathBuf { - builder - .ensure(ToolBuild { - compiler: self.compiler, - target: self.compiler.host, - tool: "error_index_generator", - mode: Mode::ToolRustc, - path: "src/tools/error_index_generator", - is_optional_tool: false, - source_type: SourceType::InTree, - extra_features: Vec::new(), - allow_features: "", - }) - .expect("expected to build -- essential tool") + builder.ensure(ToolBuild { + compiler: self.compiler, + target: self.compiler.host, + tool: "error_index_generator", + mode: Mode::ToolRustc, + path: "src/tools/error_index_generator", + source_type: SourceType::InTree, + extra_features: Vec::new(), + allow_features: "", + }) } } @@ -398,19 +388,16 @@ impl Step for RemoteTestServer { } fn run(self, builder: &Builder<'_>) -> PathBuf { - builder - .ensure(ToolBuild { - compiler: self.compiler, - target: self.target, - tool: "remote-test-server", - mode: Mode::ToolStd, - path: "src/tools/remote-test-server", - is_optional_tool: false, - source_type: SourceType::InTree, - extra_features: Vec::new(), - allow_features: "", - }) - .expect("expected to build -- essential tool") + builder.ensure(ToolBuild { + compiler: self.compiler, + target: self.target, + tool: "remote-test-server", + mode: Mode::ToolStd, + path: "src/tools/remote-test-server", + source_type: SourceType::InTree, + extra_features: Vec::new(), + allow_features: "", + }) } } @@ -557,19 +544,16 @@ impl Step for Cargo { } fn run(self, builder: &Builder<'_>) -> PathBuf { - let cargo_bin_path = builder - .ensure(ToolBuild { - compiler: self.compiler, - target: self.target, - tool: "cargo", - mode: Mode::ToolRustc, - path: "src/tools/cargo", - is_optional_tool: false, - source_type: SourceType::Submodule, - 
extra_features: Vec::new(), - allow_features: "", - }) - .expect("expected to build -- essential tool"); + let cargo_bin_path = builder.ensure(ToolBuild { + compiler: self.compiler, + target: self.target, + tool: "cargo", + mode: Mode::ToolRustc, + path: "src/tools/cargo", + source_type: SourceType::Submodule, + extra_features: Vec::new(), + allow_features: "", + }); cargo_bin_path } } @@ -588,19 +572,16 @@ impl Step for LldWrapper { } fn run(self, builder: &Builder<'_>) -> PathBuf { - let src_exe = builder - .ensure(ToolBuild { - compiler: self.compiler, - target: self.target, - tool: "lld-wrapper", - mode: Mode::ToolStd, - path: "src/tools/lld-wrapper", - is_optional_tool: false, - source_type: SourceType::InTree, - extra_features: Vec::new(), - allow_features: "", - }) - .expect("expected to build -- essential tool"); + let src_exe = builder.ensure(ToolBuild { + compiler: self.compiler, + target: self.target, + tool: "lld-wrapper", + mode: Mode::ToolStd, + path: "src/tools/lld-wrapper", + source_type: SourceType::InTree, + extra_features: Vec::new(), + allow_features: "", + }); src_exe } @@ -613,11 +594,11 @@ pub struct RustAnalyzer { } impl RustAnalyzer { - pub const ALLOW_FEATURES: &'static str = "rustc_private,proc_macro_internals,proc_macro_diagnostic,proc_macro_span,proc_macro_span_shrink"; + pub const ALLOW_FEATURES: &'static str = "rustc_private,proc_macro_internals,proc_macro_diagnostic,proc_macro_span,proc_macro_span_shrink,proc_macro_def_site"; } impl Step for RustAnalyzer { - type Output = Option; + type Output = PathBuf; const DEFAULT: bool = true; const ONLY_HOSTS: bool = true; @@ -640,15 +621,14 @@ impl Step for RustAnalyzer { }); } - fn run(self, builder: &Builder<'_>) -> Option { + fn run(self, builder: &Builder<'_>) -> PathBuf { builder.ensure(ToolBuild { compiler: self.compiler, target: self.target, tool: "rust-analyzer", mode: Mode::ToolRustc, path: "src/tools/rust-analyzer", - extra_features: vec!["rust-analyzer/in-rust-tree".to_owned()], - is_optional_tool: false, + extra_features: vec!["in-rust-tree".to_owned()], source_type: SourceType::InTree, allow_features: RustAnalyzer::ALLOW_FEATURES, }) @@ -693,13 +673,12 @@ impl Step for RustAnalyzerProcMacroSrv { compiler: self.compiler, target: self.target, tool: "rust-analyzer-proc-macro-srv", - mode: Mode::ToolStd, + mode: Mode::ToolRustc, path: "src/tools/rust-analyzer/crates/proc-macro-srv-cli", - extra_features: vec!["sysroot-abi".to_owned()], - is_optional_tool: false, + extra_features: vec!["in-rust-tree".to_owned()], source_type: SourceType::InTree, allow_features: RustAnalyzer::ALLOW_FEATURES, - })?; + }); // Copy `rust-analyzer-proc-macro-srv` to `/libexec/` // so that r-a can use it. @@ -730,7 +709,7 @@ macro_rules! tool_extended { } impl Step for $name { - type Output = Option; + type Output = PathBuf; const DEFAULT: bool = true; // Overwritten below const ONLY_HOSTS: bool = true; @@ -761,7 +740,7 @@ macro_rules! tool_extended { } #[allow(unused_mut)] - fn run(mut $sel, $builder: &Builder<'_>) -> Option { + fn run(mut $sel, $builder: &Builder<'_>) -> PathBuf { let tool = $builder.ensure(ToolBuild { compiler: $sel.compiler, target: $sel.target, @@ -769,10 +748,9 @@ macro_rules! tool_extended { mode: if false $(|| $tool_std)? { Mode::ToolStd } else { Mode::ToolRustc }, path: $path, extra_features: $sel.extra_features, - is_optional_tool: true, source_type: SourceType::InTree, allow_features: concat!($($allow_features)*), - })?; + }); if (false $(|| !$add_bins_to_sysroot.is_empty())?) 
&& $sel.compiler.stage > 0 { let bindir = $builder.sysroot($sel.compiler).join("bin"); @@ -789,9 +767,9 @@ macro_rules! tool_extended { })? let tool = bindir.join(exe($tool_name, $sel.compiler.host)); - Some(tool) + tool } else { - Some(tool) + tool } } } diff --git a/src/bootstrap/src/core/builder.rs b/src/bootstrap/src/core/builder.rs index 753b41abaf489..4e20babc55a68 100644 --- a/src/bootstrap/src/core/builder.rs +++ b/src/bootstrap/src/core/builder.rs @@ -21,7 +21,7 @@ use crate::core::config::{DryRun, SplitDebuginfo, TargetSelection}; use crate::prepare_behaviour_dump_dir; use crate::utils::cache::{Cache, Interned, INTERNER}; use crate::utils::helpers::{self, add_dylib_path, add_link_lib_path, exe, linker_args}; -use crate::utils::helpers::{libdir, linker_flags, output, t, LldThreads}; +use crate::utils::helpers::{check_cfg_arg, libdir, linker_flags, output, t, LldThreads}; use crate::EXTRA_CHECK_CFGS; use crate::{Build, CLang, Crate, DocTests, GitRepo, Mode}; @@ -1198,7 +1198,7 @@ impl<'a> Builder<'a> { let mut dylib_path = helpers::dylib_path(); dylib_path.insert(0, self.sysroot(run_compiler).join("lib")); - let mut cmd = Command::new(cargo_clippy.unwrap()); + let mut cmd = Command::new(cargo_clippy); cmd.env(helpers::dylib_path_var(), env::join_paths(&dylib_path).unwrap()); cmd.env("PATH", path); cmd @@ -1467,18 +1467,7 @@ impl<'a> Builder<'a> { rustflags.arg("-Zunstable-options"); for (restricted_mode, name, values) in EXTRA_CHECK_CFGS { if *restricted_mode == None || *restricted_mode == Some(mode) { - // Creating a string of the values by concatenating each value: - // ',"tvos","watchos"' or '' (nothing) when there are no values - let values = match values { - Some(values) => values - .iter() - .map(|val| [",", "\"", val, "\""]) - .flatten() - .collect::(), - None => String::new(), - }; - let values = values.strip_prefix(",").unwrap_or(&values); // remove the first `,` - rustflags.arg(&format!("--check-cfg=cfg({name},values({values}))")); + rustflags.arg(&check_cfg_arg(name, *values)); } } diff --git a/src/bootstrap/src/core/config/config.rs b/src/bootstrap/src/core/config/config.rs index f1e1b89d9ba71..3ac3e54563148 100644 --- a/src/bootstrap/src/core/config/config.rs +++ b/src/bootstrap/src/core/config/config.rs @@ -305,7 +305,7 @@ pub struct Config { pub save_toolstates: Option, pub print_step_timings: bool, pub print_step_rusage: bool, - pub missing_tools: bool, + pub missing_tools: bool, // FIXME: Deprecated field. Remove it at 2024. // Fallback musl-root for all targets pub musl_root: Option, @@ -1630,7 +1630,7 @@ impl Config { ); } - set(&mut config.llvm_tools_enabled, llvm_tools); + config.llvm_tools_enabled = llvm_tools.unwrap_or(true); config.rustc_parallel = parallel_compiler.unwrap_or(config.channel == "dev" || config.channel == "nightly"); config.rustc_default_linker = default_linker; @@ -2180,8 +2180,15 @@ impl Config { self.target_config.get(&target).map(|t| t.sanitizers).flatten().unwrap_or(self.sanitizers) } - pub fn any_sanitizers_enabled(&self) -> bool { - self.target_config.values().any(|t| t.sanitizers == Some(true)) || self.sanitizers + pub fn needs_sanitizer_runtime_built(&self, target: TargetSelection) -> bool { + // MSVC uses the Microsoft-provided sanitizer runtime, but all other runtimes we build. 
+ !target.is_msvc() && self.sanitizers_enabled(target) + } + + pub fn any_sanitizers_to_build(&self) -> bool { + self.target_config + .iter() + .any(|(ts, t)| !ts.is_msvc() && t.sanitizers.unwrap_or(self.sanitizers)) } pub fn profiler_path(&self, target: TargetSelection) -> Option<&str> { diff --git a/src/bootstrap/src/core/sanity.rs b/src/bootstrap/src/core/sanity.rs index 9101d94ea881e..82755f418000d 100644 --- a/src/bootstrap/src/core/sanity.rs +++ b/src/bootstrap/src/core/sanity.rs @@ -96,7 +96,7 @@ pub fn check(build: &mut Build) { }) .any(|build_llvm_ourselves| build_llvm_ourselves); - let need_cmake = building_llvm || build.config.any_sanitizers_enabled(); + let need_cmake = building_llvm || build.config.any_sanitizers_to_build(); if need_cmake && cmd_finder.maybe_have("cmake").is_none() { eprintln!( " diff --git a/src/bootstrap/src/tests/helpers.rs b/src/bootstrap/src/tests/helpers.rs index afe18aebafada..163594dbb2f14 100644 --- a/src/bootstrap/src/tests/helpers.rs +++ b/src/bootstrap/src/tests/helpers.rs @@ -1,4 +1,4 @@ -use crate::utils::helpers::{extract_beta_rev, hex_encode, make}; +use crate::utils::helpers::{extract_beta_rev, hex_encode, make, check_cfg_arg}; use std::path::PathBuf; #[test] @@ -57,3 +57,16 @@ fn test_string_to_hex_encode() { let hex_string = hex_encode(input_string); assert_eq!(hex_string, "48656c6c6f2c20576f726c6421"); } + +#[test] +fn test_check_cfg_arg() { + assert_eq!(check_cfg_arg("bootstrap", None), "--check-cfg=cfg(bootstrap)"); + assert_eq!( + check_cfg_arg("target_arch", Some(&["s360"])), + "--check-cfg=cfg(target_arch,values(\"s360\"))" + ); + assert_eq!( + check_cfg_arg("target_os", Some(&["nixos", "nix2"])), + "--check-cfg=cfg(target_os,values(\"nixos\",\"nix2\"))" + ); +} diff --git a/src/bootstrap/src/utils/change_tracker.rs b/src/bootstrap/src/utils/change_tracker.rs index 1eadc036b5e6e..25efa5079c873 100644 --- a/src/bootstrap/src/utils/change_tracker.rs +++ b/src/bootstrap/src/utils/change_tracker.rs @@ -101,4 +101,9 @@ pub const CONFIG_CHANGE_HISTORY: &[ChangeInfo] = &[ severity: ChangeSeverity::Warning, summary: "rust-analyzer-proc-macro-srv is no longer enabled by default. To build it, you must either enable it in the configuration or explicitly invoke it with x.py.", }, + ChangeInfo { + change_id: 119373, + severity: ChangeSeverity::Info, + summary: "The dist.missing-tools config option was deprecated, as it was unused. If you are using it, remove it from your config, it will be removed soon.", + }, ]; diff --git a/src/bootstrap/src/utils/helpers.rs b/src/bootstrap/src/utils/helpers.rs index 0c4297db6cc7b..0c917c3d57933 100644 --- a/src/bootstrap/src/utils/helpers.rs +++ b/src/bootstrap/src/utils/helpers.rs @@ -552,3 +552,22 @@ where { input.as_ref().iter().map(|x| format!("{:02x}", x)).collect() } + +/// Create a `--check-cfg` argument invocation for a given name +/// and it's values. +pub fn check_cfg_arg(name: &str, values: Option<&[&str]>) -> String { + // Creating a string of the values by concatenating each value: + // ',values("tvos","watchos")' or '' (nothing) when there are no values. 
+ let next = match values { + Some(values) => { + let mut tmp = + values.iter().map(|val| [",", "\"", val, "\""]).flatten().collect::(); + + tmp.insert_str(1, "values("); + tmp.push_str(")"); + tmp + } + None => "".to_string(), + }; + format!("--check-cfg=cfg({name}{next})") +} diff --git a/src/ci/docker/host-x86_64/dist-powerpc-linux/Dockerfile b/src/ci/docker/host-x86_64/dist-powerpc-linux/Dockerfile index b546f571f66bd..7081d9527f060 100644 --- a/src/ci/docker/host-x86_64/dist-powerpc-linux/Dockerfile +++ b/src/ci/docker/host-x86_64/dist-powerpc-linux/Dockerfile @@ -26,5 +26,5 @@ ENV \ ENV HOSTS=powerpc-unknown-linux-gnu -ENV RUST_CONFIGURE_ARGS --enable-extended --disable-docs +ENV RUST_CONFIGURE_ARGS --enable-extended --enable-profiler --disable-docs ENV SCRIPT python3 ../x.py dist --host $HOSTS --target $HOSTS diff --git a/src/ci/github-actions/ci.yml b/src/ci/github-actions/ci.yml index 3af370bf006ad..68a3afc910f22 100644 --- a/src/ci/github-actions/ci.yml +++ b/src/ci/github-actions/ci.yml @@ -553,14 +553,17 @@ jobs: # This target only needs to support 11.0 and up as nothing else supports the hardware - name: dist-aarch64-apple env: - SCRIPT: ./x.py dist bootstrap --include-default-paths --host=aarch64-apple-darwin --target=aarch64-apple-darwin + SCRIPT: ./x.py dist bootstrap --include-default-paths --stage 2 RUST_CONFIGURE_ARGS: >- + --build=x86_64-apple-darwin + --host=aarch64-apple-darwin + --target=aarch64-apple-darwin --enable-full-tools --enable-sanitizers --enable-profiler + --disable-docs --set rust.jemalloc --set llvm.ninja=false - --set rust.lto=thin RUSTC_RETRY_LINKER_ON_SEGFAULT: 1 SELECT_XCODE: /Applications/Xcode_13.4.1.app USE_XCODE_CLANG: 1 @@ -570,26 +573,15 @@ jobs: NO_DEBUG_ASSERTIONS: 1 NO_OVERFLOW_CHECKS: 1 DIST_REQUIRE_ALL_TOOLS: 1 - <<: *job-macos-m1 - - # This target only needs to support 11.0 and up as nothing else supports the hardware - - name: aarch64-apple - env: - SCRIPT: ./x.py --stage 2 test --host=aarch64-apple-darwin --target=aarch64-apple-darwin - RUST_CONFIGURE_ARGS: >- - --enable-sanitizers - --enable-profiler - --set rust.jemalloc - --set llvm.ninja=false - RUSTC_RETRY_LINKER_ON_SEGFAULT: 1 - SELECT_XCODE: /Applications/Xcode_13.4.1.app - USE_XCODE_CLANG: 1 - MACOSX_DEPLOYMENT_TARGET: 11.0 - MACOSX_STD_DEPLOYMENT_TARGET: 11.0 - NO_LLVM_ASSERTIONS: 1 - NO_DEBUG_ASSERTIONS: 1 - NO_OVERFLOW_CHECKS: 1 - <<: *job-macos-m1 + # Corresponds to 16K page size + # + # Shouldn't be needed if jemalloc-sys is updated to + # handle this platform like iOS or if we build on + # aarch64-apple-darwin itself. + # + # https://github.com/gnzlbg/jemallocator/blob/c27a859e98e3cb790dc269773d9da71a1e918458/jemalloc-sys/build.rs#L237 + JEMALLOC_SYS_WITH_LG_PAGE: 14 + <<: *job-macos-xl ###################### # Windows Builders # diff --git a/src/ci/run.sh b/src/ci/run.sh index 5700172fd3ec4..dc0d5e02cb1bc 100755 --- a/src/ci/run.sh +++ b/src/ci/run.sh @@ -47,6 +47,11 @@ source "$ci_dir/shared.sh" export CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse +# suppress change-tracker warnings on CI +if [ "$CI" != "" ]; then + RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set change-id=99999999" +fi + if ! 
isCI || isCiBranch auto || isCiBranch beta || isCiBranch try || isCiBranch try-perf || \ isCiBranch automation/bors/try; then RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set build.print-step-timings --enable-verbose-tests" @@ -153,10 +158,6 @@ else fi fi -if [ "$RUST_RELEASE_CHANNEL" = "nightly" ] || [ "$DIST_REQUIRE_ALL_TOOLS" = "" ]; then - RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-missing-tools" -fi - # Unless we're using an older version of LLVM, check that all LLVM components # used by tests are available. if [ "$IS_NOT_LATEST_LLVM" = "" ]; then @@ -241,7 +242,7 @@ fi if [ "$RUN_CHECK_WITH_PARALLEL_QUERIES" != "" ]; then rm -f config.toml - $SRC/configure --set rust.parallel-compiler + $SRC/configure --set change-id=99999999 --set rust.parallel-compiler # Save the build metrics before we wipe the directory if [ "$HAS_METRICS" = 1 ]; then diff --git a/src/doc/reference b/src/doc/reference index f9f5b5babd955..3565c7978cfc9 160000 --- a/src/doc/reference +++ b/src/doc/reference @@ -1 +1 @@ -Subproject commit f9f5b5babd95515e7028c32d6ca4d9790f64c146 +Subproject commit 3565c7978cfc9662f5963b135690ff9cbbfa0318 diff --git a/src/doc/rust-by-example b/src/doc/rust-by-example index 4c2b24ff9d9cf..c0be6299e52e4 160000 --- a/src/doc/rust-by-example +++ b/src/doc/rust-by-example @@ -1 +1 @@ -Subproject commit 4c2b24ff9d9cf19f2fcff799a3a49b9a2c50ae8e +Subproject commit c0be6299e52e4164c30ba6f41bd0ad0aaee64972 diff --git a/src/doc/rustc-dev-guide b/src/doc/rustc-dev-guide index 0610665a8687b..d13e85152a977 160000 --- a/src/doc/rustc-dev-guide +++ b/src/doc/rustc-dev-guide @@ -1 +1 @@ -Subproject commit 0610665a8687b1b0aa037917a1598b9f2a21e3ef +Subproject commit d13e85152a977cd0bcaf583cf5f49e86225697de diff --git a/src/doc/rustc/src/command-line-arguments.md b/src/doc/rustc/src/command-line-arguments.md index 4d32897cc14c7..5e02453e23667 100644 --- a/src/doc/rustc/src/command-line-arguments.md +++ b/src/doc/rustc/src/command-line-arguments.md @@ -343,7 +343,7 @@ _Note:_ The order of these lint level arguments is taken into account, see [lint ## `-Z`: set unstable options This flag will allow you to set unstable options of rustc. In order to set multiple options, -the -Z flag can be used multiple times. For example: `rustc -Z verbose -Z time-passes`. +the -Z flag can be used multiple times. For example: `rustc -Z verbose-internals -Z time-passes`. Specifying options with -Z is only available on nightly. To view all available options run: `rustc -Z help`, or see [The Unstable Book](../unstable-book/index.html). 
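Note on the MSVC sanitizer hunk earlier in this patch: it makes the ASAN runtime (which ships next to cl.exe) visible to test binaries by extending PATH with std::env's path helpers. Below is a minimal standalone sketch of that split_paths/join_paths pattern, assuming a hypothetical runtime directory; it is illustrative only and is not the bootstrap code itself.

    use std::env;
    use std::ffi::OsString;
    use std::path::PathBuf;

    /// Append `dir` to the current PATH value and return the combined string.
    fn add_to_path(dir: PathBuf) -> OsString {
        let old_path = env::var_os("PATH").unwrap_or_default();
        // `split_paths`/`join_paths` handle the platform separator
        // (';' on Windows, ':' elsewhere) for us.
        let entries = env::split_paths(&old_path).chain(std::iter::once(dir));
        env::join_paths(entries).expect("PATH entry contained an invalid character")
    }

    fn main() {
        // Hypothetical location; the real patch derives the directory from the
        // configured cl.exe path instead.
        let new_path = add_to_path(PathBuf::from(r"C:\sanitizer\runtime"));
        println!("{}", new_path.to_string_lossy());
    }

The same pattern generalizes to any case where a child process needs an extra directory on PATH without clobbering the inherited entries.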
diff --git a/src/doc/rustc/src/platform-support.md b/src/doc/rustc/src/platform-support.md index 09070168b67b9..0d8213f048d94 100644 --- a/src/doc/rustc/src/platform-support.md +++ b/src/doc/rustc/src/platform-support.md @@ -161,7 +161,9 @@ target | std | notes [`nvptx64-nvidia-cuda`](platform-support/nvptx64-nvidia-cuda.md) | * | --emit=asm generates PTX code that [runs on NVIDIA GPUs] [`riscv32imac-unknown-none-elf`](platform-support/riscv32imac-unknown-none-elf.md) | * | Bare RISC-V (RV32IMAC ISA) [`riscv32i-unknown-none-elf`](platform-support/riscv32imac-unknown-none-elf.md) | * | Bare RISC-V (RV32I ISA) +[`riscv32im-unknown-none-elf`](platform-support/riscv32imac-unknown-none-elf.md) | * | | Bare RISC-V (RV32IM ISA) [`riscv32imc-unknown-none-elf`](platform-support/riscv32imac-unknown-none-elf.md) | * | Bare RISC-V (RV32IMC ISA) +[`riscv32imafc-unknown-none-elf`](platform-support/riscv32imac-unknown-none-elf.md) | * | Bare RISC-V (RV32IMAFC ISA) `riscv64gc-unknown-none-elf` | * | Bare RISC-V (RV64IMAFDC ISA) `riscv64imac-unknown-none-elf` | * | Bare RISC-V (RV64IMAC ISA) `sparc64-unknown-linux-gnu` | ✓ | SPARC Linux (kernel 4.4, glibc 2.23) @@ -318,8 +320,6 @@ target | std | host | notes [`powerpc64-ibm-aix`](platform-support/aix.md) | ? | | 64-bit AIX (7.2 and newer) `riscv32gc-unknown-linux-gnu` | | | RISC-V Linux (kernel 5.4, glibc 2.33) `riscv32gc-unknown-linux-musl` | | | RISC-V Linux (kernel 5.4, musl + RISCV32 support patches) -[`riscv32imafc-unknown-none-elf`](platform-support/riscv32imac-unknown-none-elf.md) | * | Bare RISC-V (RV32IMAFC ISA) -[`riscv32im-unknown-none-elf`](platform-support/riscv32imac-unknown-none-elf.md) | * | | Bare RISC-V (RV32IM ISA) [`riscv32imac-unknown-xous-elf`](platform-support/riscv32imac-unknown-xous-elf.md) | ? | | RISC-V Xous (RV32IMAC ISA) [`riscv32imc-esp-espidf`](platform-support/esp-idf.md) | ✓ | | RISC-V ESP-IDF [`riscv32imac-esp-espidf`](platform-support/esp-idf.md) | ✓ | | RISC-V ESP-IDF diff --git a/src/doc/rustc/src/platform-support/riscv32imac-unknown-none-elf.md b/src/doc/rustc/src/platform-support/riscv32imac-unknown-none-elf.md index a069f3d3aa9c3..739b12bad8b47 100644 --- a/src/doc/rustc/src/platform-support/riscv32imac-unknown-none-elf.md +++ b/src/doc/rustc/src/platform-support/riscv32imac-unknown-none-elf.md @@ -1,6 +1,6 @@ # `riscv32{i,im,imc,imac,imafc}-unknown-none-elf` -**Tier: 2/3** +**Tier: 2** Bare-metal target for RISC-V CPUs with the RV32I, RV32IM, RV32IMC, RV32IMAFC and RV32IMAC ISAs. @@ -24,11 +24,11 @@ This target is included in Rust and can be installed via `rustup`. ## Testing -This is a cross-compiled no-std target, which must be run either in a simulator +This is a cross-compiled `no-std` target, which must be run either in a simulator or by programming them onto suitable hardware. It is not possible to run the -Rust testsuite on this target. +Rust test-suite on this target. ## Cross-compilation toolchains and C code This target supports C code. If interlinking with C or C++, you may need to use -riscv64-unknown-elf-gcc as a linker instead of rust-lld. +`riscv64-unknown-elf-gcc` as a linker instead of `rust-lld`. 
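For the RISC-V rows added above, the `*` in the `std` column marks `no_std`-only targets: only `core` is shipped, so programs are built without the standard library. The following is a generic bare-metal skeleton of the kind these targets expect; it is an illustrative sketch rather than code from this patch, and real firmware would also add a linker script and a runtime crate such as `riscv-rt` to provide the reset handler.

    #![no_std]
    #![no_main]

    use core::panic::PanicInfo;

    // With no OS to unwind into, a bare-metal target must supply its own panic handler.
    #[panic_handler]
    fn panic(_info: &PanicInfo) -> ! {
        loop {}
    }

    // Entry point reached from the reset handler; it must never return.
    #[no_mangle]
    pub extern "C" fn main() -> ! {
        loop {}
    }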
diff --git a/src/doc/unstable-book/src/language-features/asm-experimental-arch.md b/src/doc/unstable-book/src/language-features/asm-experimental-arch.md index 968c9bb4ebb86..59acbc73db461 100644 --- a/src/doc/unstable-book/src/language-features/asm-experimental-arch.md +++ b/src/doc/unstable-book/src/language-features/asm-experimental-arch.md @@ -84,7 +84,7 @@ This feature tracks `asm!` and `global_asm!` support for the following architect | M68k | `reg_data` | None | `i8`, `i16`, `i32` | | CSKY | `reg` | None | `i8`, `i16`, `i32` | | CSKY | `freg` | None | `f32`, | -| s390x | `reg` | None | `i8`, `i16`, `i32`, `i64` | +| s390x | `reg`, `reg_addr` | None | `i8`, `i16`, `i32`, `i64` | | s390x | `freg` | None | `f32`, `f64` | ## Register aliases @@ -158,9 +158,10 @@ This feature tracks `asm!` and `global_asm!` support for the following architect | NVPTX | `reg64` | None | `rd0` | None | | Hexagon | `reg` | None | `r0` | None | | PowerPC | `reg` | None | `0` | None | -| PowerPC | `reg_nonzero` | None | `3` | `b` | +| PowerPC | `reg_nonzero` | None | `3` | None | | PowerPC | `freg` | None | `0` | None | | s390x | `reg` | None | `%r0` | None | +| s390x | `reg_addr` | None | `%r1` | None | | s390x | `freg` | None | `%f0` | None | | CSKY | `reg` | None | `r0` | None | | CSKY | `freg` | None | `f0` | None | diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index 102a9f40e9b32..11a035ad3e338 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -323,7 +323,9 @@ pub(crate) fn run_global_ctxt( tcx.hir().try_par_for_each_module(|module| tcx.ensure().check_mod_type_wf(module)) }); tcx.sess.time("item_types_checking", || { - tcx.hir().for_each_module(|module| tcx.ensure().check_mod_item_types(module)) + tcx.hir().for_each_module(|module| { + let _ = tcx.ensure().check_mod_type_wf(module); + }); }); tcx.dcx().abort_if_errors(); diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs index a9c0ab557cb33..1923fc1511970 100644 --- a/src/librustdoc/html/format.rs +++ b/src/librustdoc/html/format.rs @@ -449,8 +449,8 @@ impl clean::GenericBound { hir::TraitBoundModifier::None => "", hir::TraitBoundModifier::Maybe => "?", hir::TraitBoundModifier::Negative => "!", - // ~const is experimental; do not display those bounds in rustdoc - hir::TraitBoundModifier::MaybeConst => "", + // `const` and `~const` trait bounds are experimental; don't render them. + hir::TraitBoundModifier::Const | hir::TraitBoundModifier::MaybeConst => "", }; if f.alternate() { write!(f, "{modifier_str}{ty:#}", ty = ty.print(cx)) diff --git a/src/librustdoc/html/static/css/noscript.css b/src/librustdoc/html/static/css/noscript.css index 390e812772a70..f425f3ec95c31 100644 --- a/src/librustdoc/html/static/css/noscript.css +++ b/src/librustdoc/html/static/css/noscript.css @@ -11,7 +11,7 @@ rules. #copy-path, #sidebar-button, .sidebar-resizer { /* It requires JS to work so no need to display it in this case. 
*/ - display: none; + display: none !important; } nav.sub { @@ -54,6 +54,7 @@ nav.sub { --code-attribute-color: #999; --toggles-color: #999; --toggle-filter: none; + --mobile-sidebar-menu-filter: none; --search-input-focused-border-color: #66afe9; --copy-path-button-color: #999; --copy-path-img-filter: invert(50%); @@ -159,6 +160,7 @@ nav.sub { --code-attribute-color: #999; --toggles-color: #999; --toggle-filter: invert(100%); + --mobile-sidebar-menu-filter: invert(100%); --search-input-focused-border-color: #008dfd; --copy-path-button-color: #999; --copy-path-img-filter: invert(50%); diff --git a/src/librustdoc/html/static/css/rustdoc.css b/src/librustdoc/html/static/css/rustdoc.css index 665bb5d42bcc4..cd53fcb8b7c16 100644 --- a/src/librustdoc/html/static/css/rustdoc.css +++ b/src/librustdoc/html/static/css/rustdoc.css @@ -14,6 +14,7 @@ and on the RUSTDOC_MOBILE_BREAKPOINT */ --desktop-sidebar-width: 200px; --src-sidebar-width: 300px; + --desktop-sidebar-z-index: 100; } /* See FiraSans-LICENSE.txt for the Fira Sans license. */ @@ -366,22 +367,12 @@ img { max-width: 100%; } -.sub-logo-container, .logo-container { +.logo-container { /* zero text boxes so that computed line height = image height exactly */ line-height: 0; display: block; } -.sub-logo-container { - margin-right: 32px; -} - -.sub-logo-container > img { - height: 60px; - width: 60px; - object-fit: contain; -} - .rust-logo { filter: var(--rust-logo-filter); } @@ -396,11 +387,12 @@ img { height: 100vh; top: 0; left: 0; - z-index: 100; + z-index: var(--desktop-sidebar-z-index); } .rustdoc.src .sidebar { flex-basis: 50px; + width: 50px; border-right: 1px solid; overflow-x: hidden; /* The sidebar is by default hidden */ @@ -416,7 +408,7 @@ img { touch-action: none; width: 9px; cursor: col-resize; - z-index: 200; + z-index: calc(var(--desktop-sidebar-z-index) + 1); position: fixed; height: 100%; /* make sure there's a 1px gap between the scrollbar and resize handle */ @@ -424,12 +416,12 @@ img { } .rustdoc.src .sidebar-resizer { - /* when closed, place resizer glow on top of the normal src sidebar border (no need to worry - about sidebar) */ + /* when closed, place resizer glow on top of the normal src sidebar border (no need to + worry about sidebar) */ left: 49px; } -.src-sidebar-expanded .rustdoc.src .sidebar-resizer { +.src-sidebar-expanded .src .sidebar-resizer { /* for src sidebar, gap is already provided by 1px border on sidebar itself, so place resizer to right of it */ left: var(--src-sidebar-width); @@ -448,7 +440,6 @@ img { .sidebar-resizing .sidebar { position: fixed; - z-index: 100; } .sidebar-resizing > body { padding-left: var(--resizing-sidebar-width); @@ -497,15 +488,11 @@ img { } .sidebar, .mobile-topbar, .sidebar-menu-toggle, -#src-sidebar-toggle, #src-sidebar { +#src-sidebar { background-color: var(--sidebar-background-color); } -#src-sidebar-toggle > button:hover, #src-sidebar-toggle > button:focus { - background-color: var(--sidebar-background-color-hover); -} - -.src .sidebar > *:not(#src-sidebar-toggle) { +.src .sidebar > * { visibility: hidden; } @@ -515,7 +502,7 @@ img { width: var(--src-sidebar-width); } -.src-sidebar-expanded .src .sidebar > *:not(#src-sidebar-toggle) { +.src-sidebar-expanded .src .sidebar > * { visibility: visible; } @@ -1059,7 +1046,7 @@ so that we can apply CSS-filters to change the arrow color in themes */ position: absolute; top: 100%; right: 0; - z-index: 2; + z-index: calc(var(--desktop-sidebar-z-index) + 1); margin-top: 7px; border-radius: 3px; border: 1px solid 
var(--border-color); @@ -1531,28 +1518,10 @@ a.tooltip:hover::after { font-weight: normal; } -#src-sidebar-toggle { - position: sticky; - top: 0; - left: 0; - font-size: 1.25rem; - border-bottom: 1px solid; - display: flex; - height: 40px; - justify-content: stretch; - align-items: stretch; - z-index: 10; -} #src-sidebar { width: 100%; overflow: auto; } -#src-sidebar > .title { - font-size: 1.5rem; - text-align: center; - border-bottom: 1px solid var(--border-color); - margin-bottom: 6px; -} #src-sidebar div.files > a:hover, details.dir-entry summary:hover, #src-sidebar div.files > a:focus, details.dir-entry summary:focus { background-color: var(--src-sidebar-background-hover); @@ -1560,18 +1529,15 @@ a.tooltip:hover::after { #src-sidebar div.files > a.selected { background-color: var(--src-sidebar-background-selected); } -#src-sidebar-toggle > button { - font-size: inherit; - font-weight: bold; - background: none; - color: inherit; - text-align: center; - border: none; - outline: none; - flex: 1 1; - /* iOS button gradient: https://stackoverflow.com/q/5438567 */ - -webkit-appearance: none; - opacity: 1; + +.src-sidebar-title { + position: sticky; + top: 0; + display: flex; + padding: 8px 8px 0 48px; + margin-bottom: 7px; + background: var(--sidebar-background-color); + border-bottom: 1px solid var(--border-color); } #settings-menu, #help-button { @@ -1580,8 +1546,10 @@ a.tooltip:hover::after { } #sidebar-button { display: none; + line-height: 0; } -.hide-sidebar #sidebar-button { +.hide-sidebar #sidebar-button, +.src #sidebar-button { display: flex; margin-right: 4px; position: fixed; @@ -1591,6 +1559,13 @@ a.tooltip:hover::after { background-color: var(--main-background-color); z-index: 1; } +.src #sidebar-button { + left: 8px; + z-index: calc(var(--desktop-sidebar-z-index) + 1); +} +.hide-sidebar .src #sidebar-button { + position: static; +} #settings-menu > a, #help-button > a, #sidebar-button > a { display: flex; align-items: center; @@ -1823,6 +1798,30 @@ However, it's not needed with smaller screen width because the doc/code block is margin-top: 16px; } +/* sidebar button opens modal + use hamburger button */ +.src #sidebar-button > a:before, .sidebar-menu-toggle:before { + content: url('data:image/svg+xml,\ + '); + opacity: 0.75; +} +.sidebar-menu-toggle:hover:before, +.sidebar-menu-toggle:active:before, +.sidebar-menu-toggle:focus:before { + opacity: 1; +} + +/* src sidebar button opens a folder view */ +.src #sidebar-button > a:before { + content: url('data:image/svg+xml,\ + \ + \ + '); + opacity: 0.75; +} + /* Media Queries */ /* Make sure all the buttons line wrap at the same time */ @@ -1846,10 +1845,6 @@ in src-script.js and main.js scroll-margin-top: 45px; } - .hide-sidebar #sidebar-button { - position: static; - } - .rustdoc { /* Sidebar should overlay main content, rather than pushing main content to the right. Turn off `display: flex` on the body element. */ @@ -1904,6 +1899,15 @@ in src-script.js and main.js height: 100vh; border: 0; } + .src .search-form { + margin-left: 40px; + } + .hide-sidebar .search-form { + margin-left: 32px; + } + .hide-sidebar .src .search-form { + margin-left: 0; + } .sidebar.shown, .src-sidebar-expanded .src .sidebar, @@ -1953,11 +1957,8 @@ in src-script.js and main.js .sidebar-menu-toggle { width: 45px; - /* Rare exception to specifying font sizes in rem. Since this is acting - as an icon, it's okay to specify its sizes in pixels. 
*/ - font-size: 32px; border: none; - color: var(--main-color); + line-height: 0; } .hide-sidebar .sidebar-menu-toggle { @@ -1977,31 +1978,6 @@ in src-script.js and main.js left: -11px; } - #src-sidebar-toggle { - position: fixed; - left: 1px; - top: 100px; - width: 30px; - font-size: 1.5rem; - padding: 0; - z-index: 10; - border-top-right-radius: 3px; - border-bottom-right-radius: 3px; - border: 1px solid; - border-left: 0; - } - - .src-sidebar-expanded #src-sidebar-toggle { - left: unset; - top: unset; - width: unset; - border-top-right-radius: unset; - border-bottom-right-radius: unset; - position: sticky; - border: 0; - border-bottom: 1px solid; - } - /* We don't display these buttons on mobile devices. */ #copy-path, #help-button { display: none; @@ -2017,6 +1993,12 @@ in src-script.js and main.js width: 22px; height: 22px; } + .sidebar-menu-toggle:before { + filter: var(--mobile-sidebar-menu-filter); + } + .sidebar-menu-toggle:hover { + background: var(--main-background-color); + } /* Display an alternating layout on tablets and phones */ .item-table, .item-row, .item-table > li, .item-table > li > div, @@ -2043,9 +2025,13 @@ in src-script.js and main.js } .src-sidebar-expanded .src .sidebar { + position: fixed; max-width: 100vw; width: 100vw; } + .src .src-sidebar-title { + padding-top: 0; + } /* Position of the "[-]" element. */ details.toggle:not(.top-doc) > summary { @@ -2117,12 +2103,6 @@ in src-script.js and main.js .search-form { align-self: stretch; } - - .sub-logo-container > img { - height: 35px; - width: 35px; - margin-bottom: var(--nav-sub-mobile-padding); - } } .variant, @@ -2344,6 +2324,7 @@ in src-script.js and main.js --code-attribute-color: #999; --toggles-color: #999; --toggle-filter: none; + --mobile-sidebar-menu-filter: none; --search-input-focused-border-color: #66afe9; --copy-path-button-color: #999; --copy-path-img-filter: invert(50%); @@ -2448,6 +2429,7 @@ in src-script.js and main.js --code-attribute-color: #999; --toggles-color: #999; --toggle-filter: invert(100%); + --mobile-sidebar-menu-filter: invert(100%); --search-input-focused-border-color: #008dfd; --copy-path-button-color: #999; --copy-path-img-filter: invert(50%); @@ -2559,6 +2541,7 @@ Original by Dempfi (https://github.com/dempfi/ayu) --code-attribute-color: #999; --toggles-color: #999; --toggle-filter: invert(100%); + --mobile-sidebar-menu-filter: invert(100%); --search-input-focused-border-color: #5c6773; /* Same as `--border-color`. */ --copy-path-button-color: #fff; --copy-path-img-filter: invert(70%); @@ -2650,8 +2633,7 @@ Original by Dempfi (https://github.com/dempfi/ayu) :root[data-theme="ayu"] h4, :where(:root[data-theme="ayu"]) h1 a, :root[data-theme="ayu"] .sidebar h2 a, -:root[data-theme="ayu"] .sidebar h3 a, -:root[data-theme="ayu"] #source-sidebar > .title { +:root[data-theme="ayu"] .sidebar h3 a { color: #fff; } diff --git a/src/librustdoc/html/static/js/main.js b/src/librustdoc/html/static/js/main.js index 63ab56053af9f..88901191cda25 100644 --- a/src/librustdoc/html/static/js/main.js +++ b/src/librustdoc/html/static/js/main.js @@ -279,7 +279,8 @@ function preLoadCss(cssUrl) { const params = {}; window.location.search.substring(1).split("&"). map(s => { - const pair = s.split("="); + // https://github.com/rust-lang/rust/issues/119219 + const pair = s.split("=").map(x => x.replace(/\+/g, " ")); params[decodeURIComponent(pair[0])] = typeof pair[1] === "undefined" ? 
null : decodeURIComponent(pair[1]); }); @@ -1522,6 +1523,9 @@ href="https://doc.rust-lang.org/${channel}/rustdoc/read-documentation/search.htm sidebarButton.addEventListener("click", e => { removeClass(document.documentElement, "hide-sidebar"); updateLocalStorage("hide-sidebar", "false"); + if (document.querySelector(".rustdoc.src")) { + window.rustdocToggleSrcSidebar(); + } e.preventDefault(); }); } @@ -1646,7 +1650,7 @@ href="https://doc.rust-lang.org/${channel}/rustdoc/read-documentation/search.htm return; } e.preventDefault(); - const pos = e.clientX - sidebar.offsetLeft - 3; + const pos = e.clientX - 3; if (pos < SIDEBAR_VANISH_THRESHOLD) { hideSidebar(); } else if (pos >= SIDEBAR_MIN) { diff --git a/src/librustdoc/html/static/js/search.js b/src/librustdoc/html/static/js/search.js index e824a1fd4bda1..e6263db32835d 100644 --- a/src/librustdoc/html/static/js/search.js +++ b/src/librustdoc/html/static/js/search.js @@ -1805,11 +1805,20 @@ function initSearch(rawSearchIndex) { return unifyFunctionTypes([row], [elem], whereClause, mgens); } - function checkPath(contains, ty, maxEditDistance) { + /** + * Compute an "edit distance" that ignores missing path elements. + * @param {string[]} contains search query path + * @param {Row} ty indexed item + * @returns {null|number} edit distance + */ + function checkPath(contains, ty) { if (contains.length === 0) { return 0; } - let ret_dist = maxEditDistance + 1; + const maxPathEditDistance = Math.floor( + contains.reduce((acc, next) => acc + next.length, 0) / 3 + ); + let ret_dist = maxPathEditDistance + 1; const path = ty.path.split("::"); if (ty.parent && ty.parent.name) { @@ -1821,15 +1830,23 @@ function initSearch(rawSearchIndex) { pathiter: for (let i = length - clength; i >= 0; i -= 1) { let dist_total = 0; for (let x = 0; x < clength; ++x) { - const dist = editDistance(path[i + x], contains[x], maxEditDistance); - if (dist > maxEditDistance) { - continue pathiter; + const [p, c] = [path[i + x], contains[x]]; + if (Math.floor((p.length - c.length) / 3) <= maxPathEditDistance && + p.indexOf(c) !== -1 + ) { + // discount distance on substring match + dist_total += Math.floor((p.length - c.length) / 3); + } else { + const dist = editDistance(p, c, maxPathEditDistance); + if (dist > maxPathEditDistance) { + continue pathiter; + } + dist_total += dist; } - dist_total += dist; } ret_dist = Math.min(ret_dist, Math.round(dist_total / clength)); } - return ret_dist; + return ret_dist > maxPathEditDistance ? 
null : ret_dist; } function typePassesFilter(filter, type) { @@ -2030,8 +2047,8 @@ function initSearch(rawSearchIndex) { } if (elem.fullPath.length > 1) { - path_dist = checkPath(elem.pathWithoutLast, row, maxEditDistance); - if (path_dist > maxEditDistance) { + path_dist = checkPath(elem.pathWithoutLast, row); + if (path_dist === null) { return; } } @@ -2045,7 +2062,7 @@ function initSearch(rawSearchIndex) { const dist = editDistance(row.normalizedName, elem.normalizedPathLast, maxEditDistance); - if (index === -1 && dist + path_dist > maxEditDistance) { + if (index === -1 && dist > maxEditDistance) { return; } @@ -2100,13 +2117,9 @@ function initSearch(rawSearchIndex) { } function innerRunQuery() { - let queryLen = 0; - for (const elem of parsedQuery.elems) { - queryLen += elem.name.length; - } - for (const elem of parsedQuery.returned) { - queryLen += elem.name.length; - } + const queryLen = + parsedQuery.elems.reduce((acc, next) => acc + next.pathLast.length, 0) + + parsedQuery.returned.reduce((acc, next) => acc + next.pathLast.length, 0); const maxEditDistance = Math.floor(queryLen / 3); /** diff --git a/src/librustdoc/html/static/js/src-script.js b/src/librustdoc/html/static/js/src-script.js index fc1d2d37845eb..3003f4c150338 100644 --- a/src/librustdoc/html/static/js/src-script.js +++ b/src/librustdoc/html/static/js/src-script.js @@ -2,7 +2,7 @@ /* global srcIndex */ // Local js definitions: -/* global addClass, getCurrentValue, onEachLazy, removeClass, browserSupportsHistoryApi */ +/* global addClass, onEachLazy, removeClass, browserSupportsHistoryApi */ /* global updateLocalStorage, getVar */ "use strict"; @@ -71,68 +71,34 @@ function createDirEntry(elem, parent, fullPath, hasFoundFile) { return hasFoundFile; } -let toggleLabel; - -function getToggleLabel() { - toggleLabel = toggleLabel || document.querySelector("#src-sidebar-toggle button"); - return toggleLabel; -} - window.rustdocCloseSourceSidebar = () => { removeClass(document.documentElement, "src-sidebar-expanded"); - getToggleLabel().innerText = ">"; updateLocalStorage("source-sidebar-show", "false"); }; window.rustdocShowSourceSidebar = () => { addClass(document.documentElement, "src-sidebar-expanded"); - getToggleLabel().innerText = "<"; updateLocalStorage("source-sidebar-show", "true"); }; -function toggleSidebar() { - const child = this.parentNode.children[0]; - if (child.innerText === ">") { - window.rustdocShowSourceSidebar(); - } else { +window.rustdocToggleSrcSidebar = () => { + if (document.documentElement.classList.contains("src-sidebar-expanded")) { window.rustdocCloseSourceSidebar(); - } -} - -function createSidebarToggle() { - const sidebarToggle = document.createElement("div"); - sidebarToggle.id = "src-sidebar-toggle"; - - const inner = document.createElement("button"); - - if (getCurrentValue("source-sidebar-show") === "true") { - inner.innerText = "<"; } else { - inner.innerText = ">"; + window.rustdocShowSourceSidebar(); } - inner.onclick = toggleSidebar; - - sidebarToggle.appendChild(inner); - return sidebarToggle; -} +}; // This function is called from "src-files.js", generated in `html/render/write_shared.rs`. 
// eslint-disable-next-line no-unused-vars function createSrcSidebar() { const container = document.querySelector("nav.sidebar"); - const sidebarToggle = createSidebarToggle(); - container.insertBefore(sidebarToggle, container.firstChild); - const sidebar = document.createElement("div"); sidebar.id = "src-sidebar"; let hasFoundFile = false; - const title = document.createElement("div"); - title.className = "title"; - title.innerText = "Files"; - sidebar.appendChild(title); for (const [key, source] of srcIndex) { source[NAME_OFFSET] = key; hasFoundFile = createDirEntry(source, sidebar, "", hasFoundFile); diff --git a/src/librustdoc/html/templates/page.html b/src/librustdoc/html/templates/page.html index 60ca5660c0205..e5bb8e6d19cec 100644 --- a/src/librustdoc/html/templates/page.html +++ b/src/librustdoc/html/templates/page.html @@ -77,7 +77,7 @@ {{ layout.external_html.before_content|safe }} {% if page.css_class != "src" %} {# #} @@ -118,22 +122,11 @@

{# #}
{# #} {% if page.css_class != "src" %}
{% endif %}
- - Fork me on GitHub + + + diff --git a/src/tools/compiletest/src/errors.rs b/src/tools/compiletest/src/errors.rs index c33e66e02ac41..e0ec76aa027b7 100644 --- a/src/tools/compiletest/src/errors.rs +++ b/src/tools/compiletest/src/errors.rs @@ -11,7 +11,7 @@ use once_cell::sync::Lazy; use regex::Regex; use tracing::*; -#[derive(Clone, Debug, PartialEq)] +#[derive(Copy, Clone, Debug, PartialEq)] pub enum ErrorKind { Help, Error, diff --git a/src/tools/compiletest/src/runtest.rs b/src/tools/compiletest/src/runtest.rs index ca80328f3ac03..1f5f77839de41 100644 --- a/src/tools/compiletest/src/runtest.rs +++ b/src/tools/compiletest/src/runtest.rs @@ -3977,23 +3977,29 @@ impl<'test> TestCx<'test> { proc_res.status, self.props.error_patterns ); - if !explicit && self.config.compare_mode.is_none() { - let check_patterns = should_run == WillExecute::No - && (!self.props.error_patterns.is_empty() - || !self.props.regex_error_patterns.is_empty()); + let check_patterns = should_run == WillExecute::No + && (!self.props.error_patterns.is_empty() + || !self.props.regex_error_patterns.is_empty()); + if !explicit && self.config.compare_mode.is_none() { let check_annotations = !check_patterns || !expected_errors.is_empty(); - if check_patterns { - // "// error-pattern" comments - let output_to_check = self.get_output(&proc_res); - self.check_all_error_patterns(&output_to_check, &proc_res, pm); - } - if check_annotations { // "//~ERROR comments" self.check_expected_errors(expected_errors, &proc_res); } + } else if explicit && !expected_errors.is_empty() { + let msg = format!( + "line {}: cannot combine `--error-format` with {} annotations; use `error-pattern` instead", + expected_errors[0].line_num, + expected_errors[0].kind.unwrap_or(ErrorKind::Error), + ); + self.fatal(&msg); + } + if check_patterns { + // "// error-pattern" comments + let output_to_check = self.get_output(&proc_res); + self.check_all_error_patterns(&output_to_check, &proc_res, pm); } if self.props.run_rustfix && self.config.compare_mode.is_none() { diff --git a/src/tools/miri/rust-version b/src/tools/miri/rust-version index 5298ff36f2141..19618f6d76f20 100644 --- a/src/tools/miri/rust-version +++ b/src/tools/miri/rust-version @@ -1 +1 @@ -2271c26e4a8e062bb00d709d0ccb5846e0c341b9 +a59a98024e3fe317e37e218392f5c34e932b2394 diff --git a/src/tools/miri/tests/compiletest.rs b/src/tools/miri/tests/compiletest.rs index 3394c4a49f836..074808b11227f 100644 --- a/src/tools/miri/tests/compiletest.rs +++ b/src/tools/miri/tests/compiletest.rs @@ -111,6 +111,8 @@ fn test_config(target: &str, path: &str, mode: Mode, with_dependencies: bool) -> "run".into(), // There is no `cargo miri build` so we just use `cargo miri run`. ]); config.dependency_builder.args = builder_args.into_iter().map(Into::into).collect(); + // Reset `RUSTFLAGS` to work around . + config.dependency_builder.envs.push(("RUSTFLAGS".into(), None)); } config } diff --git a/src/tools/miri/tests/fail/function_calls/arg_inplace_mutate.rs b/src/tools/miri/tests/fail/function_calls/arg_inplace_mutate.rs index e79bd70e915e5..8a5c10913b48a 100644 --- a/src/tools/miri/tests/fail/function_calls/arg_inplace_mutate.rs +++ b/src/tools/miri/tests/fail/function_calls/arg_inplace_mutate.rs @@ -14,7 +14,7 @@ fn main() { let ptr = std::ptr::addr_of_mut!(non_copy); // Inside `callee`, the first argument and `*ptr` are basically // aliasing places! 
- Call(_unit = callee(Move(*ptr), ptr), after_call, UnwindContinue()) + Call(_unit = callee(Move(*ptr), ptr), ReturnTo(after_call), UnwindContinue()) } after_call = { Return() diff --git a/src/tools/miri/tests/fail/function_calls/arg_inplace_mutate.stack.stderr b/src/tools/miri/tests/fail/function_calls/arg_inplace_mutate.stack.stderr index 1756123c84ebc..422dc24343612 100644 --- a/src/tools/miri/tests/fail/function_calls/arg_inplace_mutate.stack.stderr +++ b/src/tools/miri/tests/fail/function_calls/arg_inplace_mutate.stack.stderr @@ -27,8 +27,8 @@ LL | unsafe { ptr.write(S(0)) }; note: inside `main` --> $DIR/arg_inplace_mutate.rs:LL:CC | -LL | Call(_unit = callee(Move(*ptr), ptr), after_call, UnwindContinue()) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | Call(_unit = callee(Move(*ptr), ptr), ReturnTo(after_call), UnwindContinue()) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ = note: this error originates in the macro `::core::intrinsics::mir::__internal_remove_let` which comes from the expansion of the macro `mir` (in Nightly builds, run with -Z macro-backtrace for more info) note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace diff --git a/src/tools/miri/tests/fail/function_calls/arg_inplace_mutate.tree.stderr b/src/tools/miri/tests/fail/function_calls/arg_inplace_mutate.tree.stderr index 76f7ee189e3d4..4fe9b7b4728da 100644 --- a/src/tools/miri/tests/fail/function_calls/arg_inplace_mutate.tree.stderr +++ b/src/tools/miri/tests/fail/function_calls/arg_inplace_mutate.tree.stderr @@ -35,8 +35,8 @@ LL | unsafe { ptr.write(S(0)) }; note: inside `main` --> $DIR/arg_inplace_mutate.rs:LL:CC | -LL | Call(_unit = callee(Move(*ptr), ptr), after_call, UnwindContinue()) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | Call(_unit = callee(Move(*ptr), ptr), ReturnTo(after_call), UnwindContinue()) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ = note: this error originates in the macro `::core::intrinsics::mir::__internal_remove_let` which comes from the expansion of the macro `mir` (in Nightly builds, run with -Z macro-backtrace for more info) note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace diff --git a/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_after.rs b/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_after.rs index e4c00fdd84561..18daf9497a15b 100644 --- a/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_after.rs +++ b/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_after.rs @@ -11,7 +11,7 @@ fn main() { { let non_copy = S(42); // This could change `non_copy` in-place - Call(_unit = change_arg(Move(non_copy)), after_call, UnwindContinue()) + Call(_unit = change_arg(Move(non_copy)), ReturnTo(after_call), UnwindContinue()) } after_call = { // So now we must not be allowed to observe non-copy again. 
diff --git a/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_during.none.stderr b/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_during.none.stderr index 723ca75daef89..1c73577f5cd5d 100644 --- a/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_during.none.stderr +++ b/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_during.none.stderr @@ -11,8 +11,8 @@ LL | unsafe { ptr.read() }; note: inside `main` --> $DIR/arg_inplace_observe_during.rs:LL:CC | -LL | Call(_unit = change_arg(Move(*ptr), ptr), after_call, UnwindContinue()) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | Call(_unit = change_arg(Move(*ptr), ptr), ReturnTo(after_call), UnwindContinue()) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace diff --git a/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_during.rs b/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_during.rs index 517abd733a9ce..2201bf17bfc78 100644 --- a/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_during.rs +++ b/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_during.rs @@ -14,7 +14,7 @@ fn main() { let non_copy = S(42); let ptr = std::ptr::addr_of_mut!(non_copy); // This could change `non_copy` in-place - Call(_unit = change_arg(Move(*ptr), ptr), after_call, UnwindContinue()) + Call(_unit = change_arg(Move(*ptr), ptr), ReturnTo(after_call), UnwindContinue()) } after_call = { Return() diff --git a/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_during.stack.stderr b/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_during.stack.stderr index 401e8c6d5a8a6..09c9a777eca46 100644 --- a/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_during.stack.stderr +++ b/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_during.stack.stderr @@ -27,8 +27,8 @@ LL | x.0 = 0; note: inside `main` --> $DIR/arg_inplace_observe_during.rs:LL:CC | -LL | Call(_unit = change_arg(Move(*ptr), ptr), after_call, UnwindContinue()) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | Call(_unit = change_arg(Move(*ptr), ptr), ReturnTo(after_call), UnwindContinue()) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ = note: this error originates in the macro `::core::intrinsics::mir::__internal_remove_let` which comes from the expansion of the macro `mir` (in Nightly builds, run with -Z macro-backtrace for more info) note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace diff --git a/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_during.tree.stderr b/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_during.tree.stderr index 3529ddd3c53cb..67906f24bbd03 100644 --- a/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_during.tree.stderr +++ b/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_during.tree.stderr @@ -35,8 +35,8 @@ LL | x.0 = 0; note: inside `main` --> $DIR/arg_inplace_observe_during.rs:LL:CC | -LL | Call(_unit = change_arg(Move(*ptr), ptr), after_call, UnwindContinue()) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | Call(_unit = change_arg(Move(*ptr), ptr), ReturnTo(after_call), UnwindContinue()) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ = note: 
this error originates in the macro `::core::intrinsics::mir::__internal_remove_let` which comes from the expansion of the macro `mir` (in Nightly builds, run with -Z macro-backtrace for more info) note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace diff --git a/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing.none.stderr b/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing.none.stderr index 48db898a250fc..eb215a2d2e805 100644 --- a/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing.none.stderr +++ b/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing.none.stderr @@ -11,8 +11,8 @@ LL | unsafe { ptr.read() }; note: inside `main` --> $DIR/return_pointer_aliasing.rs:LL:CC | -LL | Call(*ptr = myfun(ptr), after_call, UnwindContinue()) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | Call(*ptr = myfun(ptr), ReturnTo(after_call), UnwindContinue()) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace diff --git a/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing.rs b/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing.rs index 23b1e38b99f72..c8e0782eff2ff 100644 --- a/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing.rs +++ b/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing.rs @@ -15,7 +15,7 @@ pub fn main() { let ptr = &raw mut x; // We arrange for `myfun` to have a pointer that aliases // its return place. Even just reading from that pointer is UB. - Call(*ptr = myfun(ptr), after_call, UnwindContinue()) + Call(*ptr = myfun(ptr), ReturnTo(after_call), UnwindContinue()) } after_call = { diff --git a/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing.stack.stderr b/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing.stack.stderr index 85dcd29ba5513..01357f430fc71 100644 --- a/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing.stack.stderr +++ b/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing.stack.stderr @@ -27,8 +27,8 @@ LL | unsafe { ptr.read() }; note: inside `main` --> $DIR/return_pointer_aliasing.rs:LL:CC | -LL | Call(*ptr = myfun(ptr), after_call, UnwindContinue()) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | Call(*ptr = myfun(ptr), ReturnTo(after_call), UnwindContinue()) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ = note: this error originates in the macro `::core::intrinsics::mir::__internal_remove_let` which comes from the expansion of the macro `mir` (in Nightly builds, run with -Z macro-backtrace for more info) note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace diff --git a/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing.tree.stderr b/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing.tree.stderr index ea1867b1a7153..6b3f5fbedee9c 100644 --- a/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing.tree.stderr +++ b/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing.tree.stderr @@ -35,8 +35,8 @@ LL | unsafe { ptr.read() }; note: inside `main` --> $DIR/return_pointer_aliasing.rs:LL:CC | -LL | Call(*ptr = myfun(ptr), after_call, UnwindContinue()) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | Call(*ptr = myfun(ptr), ReturnTo(after_call), UnwindContinue()) + | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ = note: this error originates in the macro `::core::intrinsics::mir::__internal_remove_let` which comes from the expansion of the macro `mir` (in Nightly builds, run with -Z macro-backtrace for more info) note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace diff --git a/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing2.rs b/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing2.rs index 56706cdb63bad..7db641538ce55 100644 --- a/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing2.rs +++ b/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing2.rs @@ -15,7 +15,7 @@ pub fn main() { let ptr = &raw mut _x; // We arrange for `myfun` to have a pointer that aliases // its return place. Even just reading from that pointer is UB. - Call(_x = myfun(ptr), after_call, UnwindContinue()) + Call(_x = myfun(ptr), ReturnTo(after_call), UnwindContinue()) } after_call = { diff --git a/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing2.stack.stderr b/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing2.stack.stderr index 12a99fbf2931f..04040827b0f97 100644 --- a/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing2.stack.stderr +++ b/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing2.stack.stderr @@ -30,8 +30,8 @@ LL | unsafe { ptr.write(0) }; note: inside `main` --> $DIR/return_pointer_aliasing2.rs:LL:CC | -LL | Call(_x = myfun(ptr), after_call, UnwindContinue()) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | Call(_x = myfun(ptr), ReturnTo(after_call), UnwindContinue()) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ = note: this error originates in the macro `::core::intrinsics::mir::__internal_remove_let` which comes from the expansion of the macro `mir` (in Nightly builds, run with -Z macro-backtrace for more info) note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace diff --git a/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing2.tree.stderr b/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing2.tree.stderr index 926303bb48948..37c98eabbec83 100644 --- a/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing2.tree.stderr +++ b/src/tools/miri/tests/fail/function_calls/return_pointer_aliasing2.tree.stderr @@ -35,8 +35,8 @@ LL | unsafe { ptr.write(0) }; note: inside `main` --> $DIR/return_pointer_aliasing2.rs:LL:CC | -LL | Call(_x = myfun(ptr), after_call, UnwindContinue()) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | Call(_x = myfun(ptr), ReturnTo(after_call), UnwindContinue()) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ = note: this error originates in the macro `::core::intrinsics::mir::__internal_remove_let` which comes from the expansion of the macro `mir` (in Nightly builds, run with -Z macro-backtrace for more info) note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace diff --git a/src/tools/miri/tests/fail/function_calls/return_pointer_on_unwind.rs b/src/tools/miri/tests/fail/function_calls/return_pointer_on_unwind.rs index 923c59e74299a..244acd8f2be5e 100644 --- a/src/tools/miri/tests/fail/function_calls/return_pointer_on_unwind.rs +++ b/src/tools/miri/tests/fail/function_calls/return_pointer_on_unwind.rs @@ -14,7 +14,7 @@ struct S(i32, [u8; 128]); fn docall(out: &mut S) { mir! 
{ { - Call(*out = callee(), after_call, UnwindContinue()) + Call(*out = callee(), ReturnTo(after_call), UnwindContinue()) } after_call = { @@ -37,7 +37,7 @@ fn callee() -> S { // become visible to the outside. In codegen we can see them // but Miri should detect this as UB! RET.0 = 42; - Call(_unit = startpanic(), after_call, UnwindContinue()) + Call(_unit = startpanic(), ReturnTo(after_call), UnwindContinue()) } after_call = { diff --git a/src/tools/miri/tests/fail/validity/cast_fn_ptr_invalid_caller_arg.rs b/src/tools/miri/tests/fail/validity/cast_fn_ptr_invalid_caller_arg.rs index 9357b37250508..3a87bb786776e 100644 --- a/src/tools/miri/tests/fail/validity/cast_fn_ptr_invalid_caller_arg.rs +++ b/src/tools/miri/tests/fail/validity/cast_fn_ptr_invalid_caller_arg.rs @@ -20,7 +20,7 @@ fn call(f: fn(NonZeroU32)) { let tmp = ptr::addr_of!(c); let ptr = tmp as *const NonZeroU32; // The call site now is a NonZeroU32-to-u32 transmute. - Call(_res = f(*ptr), retblock, UnwindContinue()) //~ERROR: expected something greater or equal to 1 + Call(_res = f(*ptr), ReturnTo(retblock), UnwindContinue()) //~ERROR: expected something greater or equal to 1 } retblock = { Return() diff --git a/src/tools/miri/tests/fail/validity/cast_fn_ptr_invalid_caller_arg.stderr b/src/tools/miri/tests/fail/validity/cast_fn_ptr_invalid_caller_arg.stderr index 9e9ea710f0e47..b1a2bd2c79a40 100644 --- a/src/tools/miri/tests/fail/validity/cast_fn_ptr_invalid_caller_arg.stderr +++ b/src/tools/miri/tests/fail/validity/cast_fn_ptr_invalid_caller_arg.stderr @@ -1,8 +1,8 @@ error: Undefined Behavior: constructing invalid value: encountered 0, but expected something greater or equal to 1 --> $DIR/cast_fn_ptr_invalid_caller_arg.rs:LL:CC | -LL | Call(_res = f(*ptr), retblock, UnwindContinue()) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 0, but expected something greater or equal to 1 +LL | Call(_res = f(*ptr), ReturnTo(retblock), UnwindContinue()) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 0, but expected something greater or equal to 1 | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/pass/function_calls/return_place_on_heap.rs b/src/tools/miri/tests/pass/function_calls/return_place_on_heap.rs index 89ee689fabe1f..a5cbe2a0d1d9e 100644 --- a/src/tools/miri/tests/pass/function_calls/return_place_on_heap.rs +++ b/src/tools/miri/tests/pass/function_calls/return_place_on_heap.rs @@ -11,7 +11,7 @@ pub fn main() { { let x = 0; let ptr = &raw mut x; - Call(*ptr = myfun(), after_call, UnwindContinue()) + Call(*ptr = myfun(), ReturnTo(after_call), UnwindContinue()) } after_call = { diff --git a/src/tools/rust-analyzer/.cargo/config.toml b/src/tools/rust-analyzer/.cargo/config.toml index c9ad7803951ad..c3cfda85517e4 100644 --- a/src/tools/rust-analyzer/.cargo/config.toml +++ b/src/tools/rust-analyzer/.cargo/config.toml @@ -2,7 +2,7 @@ xtask = "run --package xtask --bin xtask --" tq = "test -- -q" qt = "tq" -lint = "clippy --all-targets -- -Aclippy::collapsible_if -Aclippy::needless_pass_by_value -Aclippy::nonminimal_bool -Aclippy::redundant_pattern_matching --cap-lints warn" +lint = "clippy --all-targets -- --cap-lints warn" [target.x86_64-pc-windows-msvc] linker = "rust-lld" diff --git 
a/src/tools/rust-analyzer/.github/workflows/ci.yaml b/src/tools/rust-analyzer/.github/workflows/ci.yaml index 1f2a7796d114e..be830415f9cb7 100644 --- a/src/tools/rust-analyzer/.github/workflows/ci.yaml +++ b/src/tools/rust-analyzer/.github/workflows/ci.yaml @@ -38,7 +38,6 @@ jobs: - 'crates/proc-macro-api/**' - 'crates/proc-macro-srv/**' - 'crates/proc-macro-srv-cli/**' - - 'crates/proc-macro-test/**' rust: needs: changes diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index 227d1db0ec72d..c7d110eafb66f 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -74,11 +74,11 @@ dependencies = [ "profile", "rust-analyzer-salsa", "rustc-hash", + "semver", + "span", "stdx", "syntax", - "test-utils", "triomphe", - "tt", "vfs", ] @@ -516,8 +516,10 @@ dependencies = [ "rustc-dependencies", "rustc-hash", "smallvec", + "span", "stdx", "syntax", + "test-fixture", "test-utils", "tracing", "triomphe", @@ -542,6 +544,7 @@ dependencies = [ "profile", "rustc-hash", "smallvec", + "span", "stdx", "syntax", "tracing", @@ -581,6 +584,7 @@ dependencies = [ "smallvec", "stdx", "syntax", + "test-fixture", "test-utils", "tracing", "tracing-subscriber", @@ -624,6 +628,7 @@ dependencies = [ "smallvec", "stdx", "syntax", + "test-fixture", "test-utils", "text-edit", "toolchain", @@ -647,6 +652,7 @@ dependencies = [ "sourcegen", "stdx", "syntax", + "test-fixture", "test-utils", "text-edit", ] @@ -666,6 +672,7 @@ dependencies = [ "smallvec", "stdx", "syntax", + "test-fixture", "test-utils", "text-edit", ] @@ -694,8 +701,10 @@ dependencies = [ "rayon", "rustc-hash", "sourcegen", + "span", "stdx", "syntax", + "test-fixture", "test-utils", "text-edit", "tracing", @@ -720,6 +729,7 @@ dependencies = [ "sourcegen", "stdx", "syntax", + "test-fixture", "test-utils", "text-edit", ] @@ -737,6 +747,7 @@ dependencies = [ "parser", "stdx", "syntax", + "test-fixture", "test-utils", "text-edit", "triomphe", @@ -903,11 +914,13 @@ version = "0.0.0" dependencies = [ "anyhow", "crossbeam-channel", + "hir-expand", "ide", "ide-db", "itertools", "proc-macro-api", "project-model", + "span", "tracing", "tt", "vfs", @@ -932,33 +945,33 @@ checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4" [[package]] name = "lsp-server" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b52dccdf3302eefab8c8a1273047f0a3c3dca4b527c8458d00c09484c8371928" +version = "0.7.6" dependencies = [ "crossbeam-channel", + "ctrlc", "log", + "lsp-types", "serde", "serde_json", ] [[package]] name = "lsp-server" -version = "0.7.5" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "248f65b78f6db5d8e1b1604b4098a28b43d21a8eb1deeca22b1c421b276c7095" dependencies = [ "crossbeam-channel", - "ctrlc", "log", - "lsp-types", "serde", "serde_json", ] [[package]] name = "lsp-types" -version = "0.94.0" +version = "0.95.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b63735a13a1f9cd4f4835223d828ed9c2e35c8c5e61837774399f558b6a1237" +checksum = "158c1911354ef73e8fe42da6b10c0484cb65c7f1007f28022e847706c1ab6984" dependencies = [ "bitflags 1.3.2", "serde", @@ -975,6 +988,7 @@ dependencies = [ "parser", "rustc-hash", "smallvec", + "span", "stdx", "syntax", "test-utils", @@ -1251,6 +1265,7 @@ dependencies = [ "serde", "serde_json", "snap", + "span", "stdx", "text-size", "tracing", @@ -1262,6 +1277,7 @@ dependencies = [ name = "proc-macro-srv" version = "0.0.0" dependencies = 
[ + "base-db", "expect-test", "libloading", "mbe", @@ -1270,6 +1286,7 @@ dependencies = [ "paths", "proc-macro-api", "proc-macro-test", + "span", "stdx", "tt", ] @@ -1287,14 +1304,9 @@ name = "proc-macro-test" version = "0.0.0" dependencies = [ "cargo_metadata", - "proc-macro-test-impl", "toolchain", ] -[[package]] -name = "proc-macro-test-impl" -version = "0.0.0" - [[package]] name = "proc-macro2" version = "1.0.69" @@ -1514,7 +1526,7 @@ dependencies = [ "ide-ssr", "itertools", "load-cargo", - "lsp-server 0.7.4", + "lsp-server 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", "lsp-types", "mbe", "mimalloc", @@ -1535,6 +1547,7 @@ dependencies = [ "sourcegen", "stdx", "syntax", + "test-fixture", "test-utils", "tikv-jemallocator", "toolchain", @@ -1726,6 +1739,17 @@ dependencies = [ "xshell", ] +[[package]] +name = "span" +version = "0.0.0" +dependencies = [ + "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rust-analyzer-salsa", + "stdx", + "syntax", + "vfs", +] + [[package]] name = "static_assertions" version = "1.1.0" @@ -1796,6 +1820,20 @@ dependencies = [ "ungrammar", ] +[[package]] +name = "test-fixture" +version = "0.0.0" +dependencies = [ + "base-db", + "cfg", + "hir-expand", + "rustc-hash", + "span", + "stdx", + "test-utils", + "tt", +] + [[package]] name = "test-utils" version = "0.0.0" @@ -1998,6 +2036,7 @@ name = "tt" version = "0.0.0" dependencies = [ "smol_str", + "span", "stdx", "text-size", ] diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml index 1213979c390f6..7054020086ea6 100644 --- a/src/tools/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/Cargo.toml @@ -1,10 +1,10 @@ [workspace] members = ["xtask/", "lib/*", "crates/*"] -exclude = ["crates/proc-macro-test/imp"] +exclude = ["crates/proc-macro-srv/proc-macro-test/"] resolver = "2" [workspace.package] -rust-version = "1.70" +rust-version = "1.74" edition = "2021" license = "MIT OR Apache-2.0" authors = ["rust-analyzer team"] @@ -70,10 +70,9 @@ proc-macro-srv = { path = "./crates/proc-macro-srv", version = "0.0.0" } proc-macro-srv-cli = { path = "./crates/proc-macro-srv-cli", version = "0.0.0" } profile = { path = "./crates/profile", version = "0.0.0" } project-model = { path = "./crates/project-model", version = "0.0.0" } -sourcegen = { path = "./crates/sourcegen", version = "0.0.0" } +span = { path = "./crates/span", version = "0.0.0" } stdx = { path = "./crates/stdx", version = "0.0.0" } syntax = { path = "./crates/syntax", version = "0.0.0" } -test-utils = { path = "./crates/test-utils", version = "0.0.0" } text-edit = { path = "./crates/text-edit", version = "0.0.0" } toolchain = { path = "./crates/toolchain", version = "0.0.0" } tt = { path = "./crates/tt", version = "0.0.0" } @@ -82,19 +81,25 @@ vfs = { path = "./crates/vfs", version = "0.0.0" } rustc-dependencies = { path = "./crates/rustc-dependencies", version = "0.0.0" } # local crates that aren't published to crates.io. These should not have versions. -proc-macro-test = { path = "./crates/proc-macro-test" } +sourcegen = { path = "./crates/sourcegen" } +test-fixture = { path = "./crates/test-fixture" } +test-utils = { path = "./crates/test-utils" } # In-tree crates that are published separately and follow semver. 
See lib/README.md line-index = { version = "0.1.1" } la-arena = { version = "0.3.1" } -lsp-server = { version = "0.7.4" } +lsp-server = { version = "0.7.6" } # non-local crates anyhow = "1.0.75" +arrayvec = "0.7.4" bitflags = "2.4.1" cargo_metadata = "0.18.1" +command-group = "2.0.1" +crossbeam-channel = "0.5.8" dissimilar = "1.0.7" either = "1.9.0" +expect-test = "1.4.0" hashbrown = { version = "0.14", features = [ "inline-more", ], default-features = false } @@ -105,6 +110,7 @@ nohash-hasher = "0.2.0" rayon = "1.8.0" rust-analyzer-salsa = "0.17.0-pre.4" rustc-hash = "1.1.0" +semver = "1.0.14" serde = { version = "1.0.192", features = ["derive"] } serde_json = "1.0.108" smallvec = { version = "1.10.0", features = [ @@ -124,5 +130,12 @@ tracing-subscriber = { version = "0.3.18", default-features = false, features = triomphe = { version = "0.1.10", default-features = false, features = ["std"] } xshell = "0.2.5" + # We need to freeze the version of the crate, as the raw-api feature is considered unstable dashmap = { version = "=5.5.3", features = ["raw-api"] } + +[workspace.lints.clippy] +collapsible_if = "allow" +needless_pass_by_value = "allow" +nonminimal_bool = "allow" +redundant_pattern_matching = "allow" \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/base-db/Cargo.toml b/src/tools/rust-analyzer/crates/base-db/Cargo.toml index 393ffe155ba88..1aa43175f90b2 100644 --- a/src/tools/rust-analyzer/crates/base-db/Cargo.toml +++ b/src/tools/rust-analyzer/crates/base-db/Cargo.toml @@ -16,12 +16,15 @@ la-arena.workspace = true rust-analyzer-salsa.workspace = true rustc-hash.workspace = true triomphe.workspace = true +semver.workspace = true # local deps cfg.workspace = true profile.workspace = true stdx.workspace = true syntax.workspace = true -test-utils.workspace = true -tt.workspace = true vfs.workspace = true +span.workspace = true + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/base-db/src/change.rs b/src/tools/rust-analyzer/crates/base-db/src/change.rs index 6a3b36b231280..4332e572e2092 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/change.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/change.rs @@ -7,18 +7,17 @@ use salsa::Durability; use triomphe::Arc; use vfs::FileId; -use crate::{CrateGraph, ProcMacros, SourceDatabaseExt, SourceRoot, SourceRootId}; +use crate::{CrateGraph, SourceDatabaseExt, SourceRoot, SourceRootId}; /// Encapsulate a bunch of raw `.set` calls on the database. 
#[derive(Default)] -pub struct Change { +pub struct FileChange { pub roots: Option>, pub files_changed: Vec<(FileId, Option>)>, pub crate_graph: Option, - pub proc_macros: Option, } -impl fmt::Debug for Change { +impl fmt::Debug for FileChange { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { let mut d = fmt.debug_struct("Change"); if let Some(roots) = &self.roots { @@ -34,9 +33,9 @@ impl fmt::Debug for Change { } } -impl Change { +impl FileChange { pub fn new() -> Self { - Change::default() + FileChange::default() } pub fn set_roots(&mut self, roots: Vec) { @@ -51,10 +50,6 @@ impl Change { self.crate_graph = Some(graph); } - pub fn set_proc_macros(&mut self, proc_macros: ProcMacros) { - self.proc_macros = Some(proc_macros); - } - pub fn apply(self, db: &mut dyn SourceDatabaseExt) { let _p = profile::span("RootDatabase::apply_change"); if let Some(roots) = self.roots { @@ -79,9 +74,6 @@ impl Change { if let Some(crate_graph) = self.crate_graph { db.set_crate_graph_with_durability(Arc::new(crate_graph), Durability::HIGH); } - if let Some(proc_macros) = self.proc_macros { - db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH); - } } } diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs index c2472363aacd3..e45a81238ac9b 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/input.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs @@ -6,22 +6,19 @@ //! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how //! actual IO is done and lowered to input. -use std::{fmt, mem, ops, panic::RefUnwindSafe, str::FromStr, sync}; +use std::{fmt, mem, ops, str::FromStr}; use cfg::CfgOptions; use la_arena::{Arena, Idx}; use rustc_hash::{FxHashMap, FxHashSet}; +use semver::Version; use syntax::SmolStr; use triomphe::Arc; use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath}; -use crate::span::SpanData; - // Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`, // then the crate for the proc-macro hasn't been build yet as the build data is missing. pub type ProcMacroPaths = FxHashMap, AbsPathBuf), String>>; -pub type ProcMacros = FxHashMap; - /// Files are grouped into source roots. A source root is a directory on the /// file systems which is watched for changes. Typically it corresponds to a /// Rust crate. Source roots *might* be nested: in this case, a file belongs to @@ -242,49 +239,8 @@ impl CrateDisplayName { CrateDisplayName { crate_name, canonical_name } } } - -// FIXME: These should not be defined in here? Why does base db know about proc-macros -// ProcMacroKind is used in [`fixture`], but that module probably shouldn't be in this crate either. - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct ProcMacroId(pub u32); - -#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] -pub enum ProcMacroKind { - CustomDerive, - FuncLike, - Attr, -} - -pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe { - fn expand( - &self, - subtree: &tt::Subtree, - attrs: Option<&tt::Subtree>, - env: &Env, - def_site: SpanData, - call_site: SpanData, - mixed_site: SpanData, - ) -> Result, ProcMacroExpansionError>; -} - -#[derive(Debug)] -pub enum ProcMacroExpansionError { - Panic(String), - /// Things like "proc macro server was killed by OOM". 
- System(String), -} - -pub type ProcMacroLoadResult = Result, String>; pub type TargetLayoutLoadResult = Result, Arc>; -#[derive(Debug, Clone)] -pub struct ProcMacro { - pub name: SmolStr, - pub kind: ProcMacroKind, - pub expander: sync::Arc, -} - #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub enum ReleaseChannel { Stable, @@ -303,7 +259,7 @@ impl ReleaseChannel { pub fn from_str(str: &str) -> Option { Some(match str { - "" => ReleaseChannel::Stable, + "" | "stable" => ReleaseChannel::Stable, "nightly" => ReleaseChannel::Nightly, _ if str.starts_with("beta") => ReleaseChannel::Beta, _ => return None, @@ -334,7 +290,7 @@ pub struct CrateData { // things. This info does need to be somewhat present though as to prevent deduplication from // happening across different workspaces with different layouts. pub target_layout: TargetLayoutLoadResult, - pub channel: Option, + pub toolchain: Option, } impl CrateData { @@ -391,6 +347,10 @@ impl CrateData { slf_deps.eq(other_deps) } + + pub fn channel(&self) -> Option { + self.toolchain.as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre)) + } } #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] @@ -398,10 +358,12 @@ pub enum Edition { Edition2015, Edition2018, Edition2021, + Edition2024, } impl Edition { pub const CURRENT: Edition = Edition::Edition2021; + pub const DEFAULT: Edition = Edition::Edition2015; } #[derive(Default, Debug, Clone, PartialEq, Eq)] @@ -472,7 +434,7 @@ impl CrateGraph { is_proc_macro: bool, origin: CrateOrigin, target_layout: Result, Arc>, - channel: Option, + toolchain: Option, ) -> CrateId { let data = CrateData { root_file_id, @@ -486,7 +448,7 @@ impl CrateGraph { origin, target_layout, is_proc_macro, - channel, + toolchain, }; self.arena.alloc(data) } @@ -784,6 +746,7 @@ impl FromStr for Edition { "2015" => Edition::Edition2015, "2018" => Edition::Edition2018, "2021" => Edition::Edition2021, + "2024" => Edition::Edition2024, _ => return Err(ParseEditionError { invalid_input: s.to_string() }), }; Ok(res) @@ -796,6 +759,7 @@ impl fmt::Display for Edition { Edition::Edition2015 => "2015", Edition::Edition2018 => "2018", Edition::Edition2021 => "2021", + Edition::Edition2024 => "2024", }) } } diff --git a/src/tools/rust-analyzer/crates/base-db/src/lib.rs b/src/tools/rust-analyzer/crates/base-db/src/lib.rs index 57e7934367bb2..a0a55df5f99af 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/lib.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/lib.rs @@ -4,27 +4,27 @@ mod input; mod change; -pub mod fixture; -pub mod span; use std::panic; use rustc_hash::FxHashSet; -use syntax::{ast, Parse, SourceFile, TextRange, TextSize}; +use syntax::{ast, Parse, SourceFile}; use triomphe::Arc; pub use crate::{ - change::Change, + change::FileChange, input::{ CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, - DependencyKind, Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, - ProcMacroExpansionError, ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths, - ProcMacros, ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult, + DependencyKind, Edition, Env, LangCrateOrigin, ProcMacroPaths, ReleaseChannel, SourceRoot, + SourceRootId, TargetLayoutLoadResult, }, }; pub use salsa::{self, Cancelled}; +pub use span::{FilePosition, FileRange}; pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, FileId, VfsPath}; +pub use semver::{BuildMetadata, Prerelease, Version, VersionReq}; + #[macro_export] macro_rules! 
impl_intern_key { ($name:ident) => { @@ -43,18 +43,6 @@ pub trait Upcast { fn upcast(&self) -> &T; } -#[derive(Clone, Copy, Debug)] -pub struct FilePosition { - pub file_id: FileId, - pub offset: TextSize, -} - -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] -pub struct FileRange { - pub file_id: FileId, - pub range: TextRange, -} - pub const DEFAULT_PARSE_LRU_CAP: usize = 128; pub trait FileLoader { @@ -74,10 +62,6 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug { /// The crate graph. #[salsa::input] fn crate_graph(&self) -> Arc; - - /// The proc macros. - #[salsa::input] - fn proc_macros(&self) -> Arc; } fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse { diff --git a/src/tools/rust-analyzer/crates/cfg/Cargo.toml b/src/tools/rust-analyzer/crates/cfg/Cargo.toml index 4324584df39d6..fbda065b10f34 100644 --- a/src/tools/rust-analyzer/crates/cfg/Cargo.toml +++ b/src/tools/rust-analyzer/crates/cfg/Cargo.toml @@ -12,7 +12,7 @@ rust-version.workspace = true doctest = false [dependencies] -rustc-hash = "1.1.0" +rustc-hash.workspace = true # locals deps tt.workspace = true @@ -29,3 +29,6 @@ derive_arbitrary = "1.3.2" # local deps mbe.workspace = true syntax.workspace = true + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/cfg/src/tests.rs b/src/tools/rust-analyzer/crates/cfg/src/tests.rs index c7ac1af934a08..62fb429a63fab 100644 --- a/src/tools/rust-analyzer/crates/cfg/src/tests.rs +++ b/src/tools/rust-analyzer/crates/cfg/src/tests.rs @@ -1,6 +1,6 @@ use arbitrary::{Arbitrary, Unstructured}; use expect_test::{expect, Expect}; -use mbe::{syntax_node_to_token_tree, DummyTestSpanMap}; +use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY}; use syntax::{ast, AstNode}; use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr}; @@ -8,7 +8,7 @@ use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr}; fn assert_parse_result(input: &str, expected: CfgExpr) { let source_file = ast::SourceFile::parse(input).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap); + let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY); let cfg = CfgExpr::parse(&tt); assert_eq!(cfg, expected); } @@ -16,7 +16,7 @@ fn assert_parse_result(input: &str, expected: CfgExpr) { fn check_dnf(input: &str, expect: Expect) { let source_file = ast::SourceFile::parse(input).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap); + let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY); let cfg = CfgExpr::parse(&tt); let actual = format!("#![cfg({})]", DnfExpr::new(cfg)); expect.assert_eq(&actual); @@ -25,7 +25,7 @@ fn check_dnf(input: &str, expect: Expect) { fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) { let source_file = ast::SourceFile::parse(input).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap); + let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY); let cfg = CfgExpr::parse(&tt); let dnf = DnfExpr::new(cfg); let why_inactive = dnf.why_inactive(opts).unwrap().to_string(); @@ -36,7 +36,7 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) { fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) { let 
source_file = ast::SourceFile::parse(input).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap); + let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY); let cfg = CfgExpr::parse(&tt); let dnf = DnfExpr::new(cfg); let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::>(); diff --git a/src/tools/rust-analyzer/crates/flycheck/Cargo.toml b/src/tools/rust-analyzer/crates/flycheck/Cargo.toml index 4322d2d966a11..b8c10da1b6e34 100644 --- a/src/tools/rust-analyzer/crates/flycheck/Cargo.toml +++ b/src/tools/rust-analyzer/crates/flycheck/Cargo.toml @@ -13,14 +13,17 @@ doctest = false [dependencies] cargo_metadata.workspace = true -crossbeam-channel = "0.5.8" +crossbeam-channel.workspace = true tracing.workspace = true -rustc-hash = "1.1.0" +rustc-hash.workspace = true serde_json.workspace = true serde.workspace = true -command-group = "2.0.1" +command-group.workspace = true # local deps paths.workspace = true stdx.workspace = true toolchain.workspace = true + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml index 2d174517605f1..5933d30040fad 100644 --- a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml @@ -12,7 +12,7 @@ rust-version.workspace = true doctest = false [dependencies] -arrayvec = "0.7.2" +arrayvec.workspace = true bitflags.workspace = true cov-mark = "2.0.0-pre.1" dashmap.workspace = true @@ -23,7 +23,7 @@ indexmap.workspace = true itertools.workspace = true la-arena.workspace = true once_cell = "1.17.0" -rustc-hash = "1.1.0" +rustc-hash.workspace = true tracing.workspace = true smallvec.workspace = true hashbrown.workspace = true @@ -42,13 +42,18 @@ mbe.workspace = true cfg.workspace = true tt.workspace = true limit.workspace = true +span.workspace = true [dev-dependencies] -expect-test = "1.4.0" +expect-test.workspace = true # local deps test-utils.workspace = true +test-fixture.workspace = true [features] in-rust-tree = ["rustc-dependencies/in-rust-tree"] + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs index 942b28fc1450e..26f76afb1f09f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs @@ -637,9 +637,12 @@ impl<'attr> AttrQuery<'attr> { } } -fn any_has_attrs( - db: &dyn DefDatabase, - id: impl Lookup>, +fn any_has_attrs<'db>( + db: &(dyn DefDatabase + 'db), + id: impl Lookup< + Database<'db> = dyn DefDatabase + 'db, + Data = impl HasSource, + >, ) -> InFile { id.lookup(db).source(db).map(ast::AnyHasAttrs::new) } @@ -650,17 +653,17 @@ fn attrs_from_item_tree(db: &dyn DefDatabase, id: ItemTreeId tree.raw_attrs(mod_item.into()).clone() } -fn attrs_from_item_tree_loc( - db: &dyn DefDatabase, - lookup: impl Lookup>, +fn attrs_from_item_tree_loc<'db, N: ItemTreeNode>( + db: &(dyn DefDatabase + 'db), + lookup: impl Lookup = dyn DefDatabase + 'db, Data = ItemLoc>, ) -> RawAttrs { let id = lookup.lookup(db).id; attrs_from_item_tree(db, id) } -fn attrs_from_item_tree_assoc( - db: &dyn DefDatabase, - lookup: impl Lookup>, +fn attrs_from_item_tree_assoc<'db, N: ItemTreeNode>( + db: &(dyn DefDatabase + 'db), + lookup: impl Lookup = dyn DefDatabase + 'db, Data = 
AssocItemLoc>, ) -> RawAttrs { let id = lookup.lookup(db).id; attrs_from_item_tree(db, id) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr/tests.rs index 0f98a4ec93c63..1a63e96bfa921 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/attr/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/attr/tests.rs @@ -1,19 +1,23 @@ //! This module contains tests for doc-expression parsing. //! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`. +use triomphe::Arc; + use base_db::FileId; -use hir_expand::span::{RealSpanMap, SpanMapRef}; +use hir_expand::span_map::{RealSpanMap, SpanMap}; use mbe::syntax_node_to_token_tree; -use syntax::{ast, AstNode}; +use syntax::{ast, AstNode, TextRange}; use crate::attr::{DocAtom, DocExpr}; fn assert_parse_result(input: &str, expected: DocExpr) { let source_file = ast::SourceFile::parse(input).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0)))); let tt = syntax_node_to_token_tree( tt.syntax(), - SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId::from_raw(0))), + map.as_ref(), + map.span_for_range(TextRange::empty(0.into())), ); let cfg = DocExpr::parse(&tt); assert_eq!(cfg, expected); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs index c6a9093201592..a45ec844aba0f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs @@ -8,7 +8,7 @@ use either::Either; use hir_expand::{ ast_id_map::AstIdMap, name::{name, AsName, Name}, - AstId, ExpandError, InFile, + ExpandError, InFile, }; use intern::Interned; use profile::Count; @@ -66,7 +66,7 @@ pub(super) fn lower( krate, def_map: expander.module.def_map(db), source_map: BodySourceMap::default(), - ast_id_map: db.ast_id_map(expander.current_file_id), + ast_id_map: db.ast_id_map(expander.current_file_id()), body: Body { exprs: Default::default(), pats: Default::default(), @@ -408,7 +408,7 @@ impl ExprCollector<'_> { ast::Expr::ParenExpr(e) => { let inner = self.collect_expr_opt(e.expr()); // make the paren expr point to the inner expression as well - let src = self.expander.to_source(syntax_ptr); + let src = self.expander.in_file(syntax_ptr); self.source_map.expr_map.insert(src, inner); inner } @@ -441,7 +441,7 @@ impl ExprCollector<'_> { Some(e) => self.collect_expr(e), None => self.missing_expr(), }; - let src = self.expander.to_source(AstPtr::new(&field)); + let src = self.expander.in_file(AstPtr::new(&field)); self.source_map.field_map_back.insert(expr, src); Some(RecordLitField { name, expr }) }) @@ -644,7 +644,7 @@ impl ExprCollector<'_> { Some(id) => { // Make the macro-call point to its expanded expression so we can query // semantics on syntax pointers to the macro - let src = self.expander.to_source(syntax_ptr); + let src = self.expander.in_file(syntax_ptr); self.source_map.expr_map.insert(src, id); id } @@ -957,22 +957,31 @@ impl ExprCollector<'_> { T: ast::AstNode, { // File containing the macro call. Expansion errors will be attached here. 
- let outer_file = self.expander.current_file_id; + let outer_file = self.expander.current_file_id(); - let macro_call_ptr = self.expander.to_source(AstPtr::new(&mcall)); + let macro_call_ptr = self.expander.in_file(syntax_ptr); let module = self.expander.module.local_id; - let res = self.expander.enter_expand(self.db, mcall, |path| { - self.def_map - .resolve_path( - self.db, - module, - &path, - crate::item_scope::BuiltinShadowMode::Other, - Some(MacroSubNs::Bang), - ) - .0 - .take_macros() - }); + + let res = match self.def_map.modules[module] + .scope + .macro_invocations + .get(&InFile::new(outer_file, self.ast_id_map.ast_id_for_ptr(syntax_ptr))) + { + // fast path, macro call is in a block module + Some(&call) => Ok(self.expander.enter_expand_id(self.db, call)), + None => self.expander.enter_expand(self.db, mcall, |path| { + self.def_map + .resolve_path( + self.db, + module, + &path, + crate::item_scope::BuiltinShadowMode::Other, + Some(MacroSubNs::Bang), + ) + .0 + .take_macros() + }), + }; let res = match res { Ok(res) => res, @@ -986,7 +995,6 @@ impl ExprCollector<'_> { return collector(self, None); } }; - if record_diagnostics { match &res.err { Some(ExpandError::UnresolvedProcMacro(krate)) => { @@ -1013,10 +1021,10 @@ impl ExprCollector<'_> { Some((mark, expansion)) => { // Keep collecting even with expansion errors so we can provide completions and // other services in incomplete macro expressions. - self.source_map.expansions.insert(macro_call_ptr, self.expander.current_file_id); + self.source_map.expansions.insert(macro_call_ptr, self.expander.current_file_id()); let prev_ast_id_map = mem::replace( &mut self.ast_id_map, - self.db.ast_id_map(self.expander.current_file_id), + self.db.ast_id_map(self.expander.current_file_id()), ); if record_diagnostics { @@ -1066,7 +1074,7 @@ impl ExprCollector<'_> { Some(tail) => { // Make the macro-call point to its expanded expression so we can query // semantics on syntax pointers to the macro - let src = self.expander.to_source(syntax_ptr); + let src = self.expander.in_file(syntax_ptr); self.source_map.expr_map.insert(src, tail); Some(tail) } @@ -1140,7 +1148,7 @@ impl ExprCollector<'_> { let block_id = if block_has_items { let file_local_id = self.ast_id_map.ast_id(&block); - let ast_id = AstId::new(self.expander.current_file_id, file_local_id); + let ast_id = self.expander.in_file(file_local_id); Some(self.db.intern_block(BlockLoc { ast_id, module: self.expander.module })) } else { None @@ -1333,7 +1341,7 @@ impl ExprCollector<'_> { let ast_pat = f.pat()?; let pat = self.collect_pat(ast_pat, binding_list); let name = f.field_name()?.as_name(); - let src = self.expander.to_source(AstPtr::new(&f)); + let src = self.expander.in_file(AstPtr::new(&f)); self.source_map.pat_field_map_back.insert(pat, src); Some(RecordFieldPat { name, pat }) }) @@ -1391,7 +1399,7 @@ impl ExprCollector<'_> { ast::Pat::MacroPat(mac) => match mac.macro_call() { Some(call) => { let macro_ptr = AstPtr::new(&call); - let src = self.expander.to_source(AstPtr::new(&Either::Left(pat))); + let src = self.expander.in_file(AstPtr::new(&Either::Left(pat))); let pat = self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| { this.collect_pat_opt(expanded_pat, binding_list) @@ -1472,10 +1480,7 @@ impl ExprCollector<'_> { } self.source_map.diagnostics.push(BodyDiagnostic::InactiveCode { - node: InFile::new( - self.expander.current_file_id, - SyntaxNodePtr::new(owner.syntax()), - ), + node: self.expander.in_file(SyntaxNodePtr::new(owner.syntax())), cfg, opts: 
self.expander.cfg_options().clone(), }); @@ -1514,10 +1519,7 @@ impl ExprCollector<'_> { } else { Err(BodyDiagnostic::UnreachableLabel { name, - node: InFile::new( - self.expander.current_file_id, - AstPtr::new(&lifetime), - ), + node: self.expander.in_file(AstPtr::new(&lifetime)), }) }; } @@ -1526,7 +1528,7 @@ impl ExprCollector<'_> { Err(BodyDiagnostic::UndeclaredLabel { name, - node: InFile::new(self.expander.current_file_id, AstPtr::new(&lifetime)), + node: self.expander.in_file(AstPtr::new(&lifetime)), }) } @@ -1990,7 +1992,7 @@ fn pat_literal_to_hir(lit: &ast::LiteralPat) -> Option<(Literal, ast::Literal)> impl ExprCollector<'_> { fn alloc_expr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId { - let src = self.expander.to_source(ptr); + let src = self.expander.in_file(ptr); let id = self.body.exprs.alloc(expr); self.source_map.expr_map_back.insert(id, src.clone()); self.source_map.expr_map.insert(src, id); @@ -2018,7 +2020,7 @@ impl ExprCollector<'_> { } fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId { - let src = self.expander.to_source(ptr); + let src = self.expander.in_file(ptr); let id = self.body.pats.alloc(pat); self.source_map.pat_map_back.insert(id, src.clone()); self.source_map.pat_map.insert(src, id); @@ -2033,7 +2035,7 @@ impl ExprCollector<'_> { } fn alloc_label(&mut self, label: Label, ptr: LabelPtr) -> LabelId { - let src = self.expander.to_source(ptr); + let src = self.expander.in_file(ptr); let id = self.body.labels.alloc(label); self.source_map.label_map_back.insert(id, src.clone()); self.source_map.label_map.insert(src, id); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs index baca293e29041..ab623250d4072 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs @@ -267,9 +267,10 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope #[cfg(test)] mod tests { - use base_db::{fixture::WithFixture, FileId, SourceDatabase}; + use base_db::{FileId, SourceDatabase}; use hir_expand::{name::AsName, InFile}; use syntax::{algo::find_node_at_offset, ast, AstNode}; + use test_fixture::WithFixture; use test_utils::{assert_eq_text, extract_offset}; use crate::{db::DefDatabase, test_db::TestDB, FunctionId, ModuleDefId}; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs index 2b432dfbb92bc..a76ddffb41166 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs @@ -1,7 +1,8 @@ mod block; -use base_db::{fixture::WithFixture, SourceDatabase}; +use base_db::SourceDatabase; use expect_test::{expect, Expect}; +use test_fixture::WithFixture; use crate::{test_db::TestDB, ModuleDefId}; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data.rs b/src/tools/rust-analyzer/crates/hir-def/src/data.rs index 635d13f24ad81..9c183c9332ba8 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/data.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/data.rs @@ -16,7 +16,7 @@ use crate::{ db::DefDatabase, expander::{Expander, Mark}, item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, MacroCall, ModItem, TreeId}, - macro_call_as_call_id, macro_id_to_def_id, + macro_call_as_call_id, nameres::{ attr_resolution::ResolvedAttr, diagnostics::DefDiagnostic, @@ -720,7 +720,7 @@ impl<'a> AssocItemCollector<'a> { ) .0 .take_macros() - .map(|it| 
macro_id_to_def_id(self.db, it)) + .map(|it| self.db.macro_def(it)) }; match macro_call_as_call_id( self.db.upcast(), diff --git a/src/tools/rust-analyzer/crates/hir-def/src/db.rs b/src/tools/rust-analyzer/crates/hir-def/src/db.rs index 31c1a713031cb..d5831022f28f7 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/db.rs @@ -1,7 +1,7 @@ //! Defines database & queries for name resolution. use base_db::{salsa, CrateId, SourceDatabase, Upcast}; use either::Either; -use hir_expand::{db::ExpandDatabase, HirFileId}; +use hir_expand::{db::ExpandDatabase, HirFileId, MacroDefId}; use intern::Interned; use la_arena::ArenaMap; use syntax::{ast, AstPtr}; @@ -24,9 +24,10 @@ use crate::{ AttrDefId, BlockId, BlockLoc, ConstBlockId, ConstBlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, ExternCrateLoc, FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc, InTypeConstId, InTypeConstLoc, LocalEnumVariantId, - LocalFieldId, Macro2Id, Macro2Loc, MacroRulesId, MacroRulesLoc, ProcMacroId, ProcMacroLoc, - StaticId, StaticLoc, StructId, StructLoc, TraitAliasId, TraitAliasLoc, TraitId, TraitLoc, - TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, VariantId, + LocalFieldId, Macro2Id, Macro2Loc, MacroId, MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, + ProcMacroId, ProcMacroLoc, StaticId, StaticLoc, StructId, StructLoc, TraitAliasId, + TraitAliasLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, + VariantId, }; #[salsa::query_group(InternDatabaseStorage)] @@ -110,6 +111,8 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast Arc; + fn macro_def(&self, m: MacroId) -> MacroDefId; + // region:data #[salsa::invoke(StructData::struct_data_query)] @@ -239,12 +242,6 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast Arc; - #[salsa::transparent] - fn crate_limits(&self, crate_id: CrateId) -> CrateLimits; - - #[salsa::transparent] - fn recursion_limit(&self, crate_id: CrateId) -> u32; - fn crate_supports_no_std(&self, crate_id: CrateId) -> bool; } @@ -253,24 +250,6 @@ fn crate_def_map_wait(db: &dyn DefDatabase, krate: CrateId) -> Arc { db.crate_def_map_query(krate) } -pub struct CrateLimits { - /// The maximum depth for potentially infinitely-recursive compile-time operations like macro expansion or auto-dereference. - pub recursion_limit: u32, -} - -fn crate_limits(db: &dyn DefDatabase, crate_id: CrateId) -> CrateLimits { - let def_map = db.crate_def_map(crate_id); - - CrateLimits { - // 128 is the default in rustc. 
- recursion_limit: def_map.recursion_limit().unwrap_or(128), - } -} - -fn recursion_limit(db: &dyn DefDatabase, crate_id: CrateId) -> u32 { - db.crate_limits(crate_id).recursion_limit -} - fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool { let file = db.crate_graph()[crate_id].root_file_id; let item_tree = db.file_item_tree(file.into()); @@ -305,3 +284,78 @@ fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool { false } + +fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId { + use hir_expand::InFile; + + use crate::{Lookup, MacroDefKind, MacroExpander}; + + let kind = |expander, file_id, m| { + let in_file = InFile::new(file_id, m); + match expander { + MacroExpander::Declarative => MacroDefKind::Declarative(in_file), + MacroExpander::BuiltIn(it) => MacroDefKind::BuiltIn(it, in_file), + MacroExpander::BuiltInAttr(it) => MacroDefKind::BuiltInAttr(it, in_file), + MacroExpander::BuiltInDerive(it) => MacroDefKind::BuiltInDerive(it, in_file), + MacroExpander::BuiltInEager(it) => MacroDefKind::BuiltInEager(it, in_file), + } + }; + + match id { + MacroId::Macro2Id(it) => { + let loc: Macro2Loc = it.lookup(db); + + let item_tree = loc.id.item_tree(db); + let makro = &item_tree[loc.id.value]; + MacroDefId { + krate: loc.container.krate, + kind: kind(loc.expander, loc.id.file_id(), makro.ast_id.upcast()), + local_inner: false, + allow_internal_unsafe: loc.allow_internal_unsafe, + span: db + .span_map(loc.id.file_id()) + .span_for_range(db.ast_id_map(loc.id.file_id()).get(makro.ast_id).text_range()), + edition: loc.edition, + } + } + + MacroId::MacroRulesId(it) => { + let loc: MacroRulesLoc = it.lookup(db); + + let item_tree = loc.id.item_tree(db); + let makro = &item_tree[loc.id.value]; + MacroDefId { + krate: loc.container.krate, + kind: kind(loc.expander, loc.id.file_id(), makro.ast_id.upcast()), + local_inner: loc.flags.contains(MacroRulesLocFlags::LOCAL_INNER), + allow_internal_unsafe: loc + .flags + .contains(MacroRulesLocFlags::ALLOW_INTERNAL_UNSAFE), + span: db + .span_map(loc.id.file_id()) + .span_for_range(db.ast_id_map(loc.id.file_id()).get(makro.ast_id).text_range()), + edition: loc.edition, + } + } + MacroId::ProcMacroId(it) => { + let loc = it.lookup(db); + + let item_tree = loc.id.item_tree(db); + let makro = &item_tree[loc.id.value]; + MacroDefId { + krate: loc.container.krate, + kind: MacroDefKind::ProcMacro( + loc.expander, + loc.kind, + InFile::new(loc.id.file_id(), makro.ast_id), + ), + local_inner: false, + allow_internal_unsafe: false, + span: db + .span_map(loc.id.file_id()) + .span_for_range(db.ast_id_map(loc.id.file_id()).get(makro.ast_id).text_range()), + edition: loc.edition, + } + } + } +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs b/src/tools/rust-analyzer/crates/hir-def/src/expander.rs index 398f116d83135..b83feeedc34c1 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expander.rs @@ -4,15 +4,15 @@ use base_db::CrateId; use cfg::CfgOptions; use drop_bomb::DropBomb; use hir_expand::{ - attrs::RawAttrs, mod_path::ModPath, span::SpanMap, ExpandError, ExpandResult, HirFileId, + attrs::RawAttrs, mod_path::ModPath, span_map::SpanMap, ExpandError, ExpandResult, HirFileId, InFile, MacroCallId, }; use limit::Limit; -use syntax::{ast, Parse, SyntaxNode}; +use syntax::{ast, Parse}; use crate::{ - attr::Attrs, db::DefDatabase, lower::LowerCtx, macro_id_to_def_id, path::Path, AsMacroCall, - MacroId, ModuleId, UnresolvedMacro, + 
attr::Attrs, db::DefDatabase, lower::LowerCtx, path::Path, AsMacroCall, MacroId, ModuleId, + UnresolvedMacro, }; #[derive(Debug)] @@ -20,7 +20,7 @@ pub struct Expander { cfg_options: CfgOptions, span_map: SpanMap, krate: CrateId, - pub(crate) current_file_id: HirFileId, + current_file_id: HirFileId, pub(crate) module: ModuleId, /// `recursion_depth == usize::MAX` indicates that the recursion limit has been reached. recursion_depth: u32, @@ -29,12 +29,13 @@ pub struct Expander { impl Expander { pub fn new(db: &dyn DefDatabase, current_file_id: HirFileId, module: ModuleId) -> Expander { - let recursion_limit = db.recursion_limit(module.krate); - #[cfg(not(test))] - let recursion_limit = Limit::new(recursion_limit as usize); - // Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug - #[cfg(test)] - let recursion_limit = Limit::new(std::cmp::min(32, recursion_limit as usize)); + let recursion_limit = module.def_map(db).recursion_limit() as usize; + let recursion_limit = Limit::new(if cfg!(test) { + // Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug + std::cmp::min(32, recursion_limit) + } else { + recursion_limit + }); Expander { current_file_id, module, @@ -56,9 +57,9 @@ impl Expander { let mut unresolved_macro_err = None; let result = self.within_limit(db, |this| { - let macro_call = InFile::new(this.current_file_id, ¯o_call); + let macro_call = this.in_file(¯o_call); match macro_call.as_call_id_with_errors(db.upcast(), this.module.krate(), |path| { - resolver(path).map(|it| macro_id_to_def_id(db, it)) + resolver(path).map(|it| db.macro_def(it)) }) { Ok(call_id) => call_id, Err(resolve_err) => { @@ -83,17 +84,6 @@ impl Expander { self.within_limit(db, |_this| ExpandResult::ok(Some(call_id))) } - fn enter_expand_inner( - db: &dyn DefDatabase, - call_id: MacroCallId, - error: Option, - ) -> ExpandResult>>> { - let macro_file = call_id.as_macro_file(); - let ExpandResult { value, err } = db.parse_macro_expansion(macro_file); - - ExpandResult { value: Some(InFile::new(macro_file.into(), value.0)), err: error.or(err) } - } - pub fn exit(&mut self, mut mark: Mark) { self.span_map = mark.span_map; self.current_file_id = mark.file_id; @@ -113,7 +103,7 @@ impl Expander { LowerCtx::new(db, self.span_map.clone(), self.current_file_id) } - pub(crate) fn to_source(&self, value: T) -> InFile { + pub(crate) fn in_file(&self, value: T) -> InFile { InFile { file_id: self.current_file_id, value } } @@ -164,26 +154,34 @@ impl Expander { return ExpandResult { value: None, err }; }; - let res = Self::enter_expand_inner(db, call_id, err); - match res.err { - // If proc-macro is disabled or unresolved, we want to expand to a missing expression - // instead of an empty tree which might end up in an empty block. - Some(ExpandError::UnresolvedProcMacro(_)) => res.map(|_| None), - _ => res.map(|value| { - value.and_then(|InFile { file_id, value }| { - let parse = value.cast::()?; + let macro_file = call_id.as_macro_file(); + let res = db.parse_macro_expansion(macro_file); + + let err = err.or(res.err); + ExpandResult { + value: match err { + // If proc-macro is disabled or unresolved, we want to expand to a missing expression + // instead of an empty tree which might end up in an empty block. 
+ Some(ExpandError::UnresolvedProcMacro(_)) => None, + _ => (|| { + let parse = res.value.0.cast::()?; self.recursion_depth += 1; - let old_span_map = std::mem::replace(&mut self.span_map, db.span_map(file_id)); - let old_file_id = std::mem::replace(&mut self.current_file_id, file_id); + let old_span_map = std::mem::replace( + &mut self.span_map, + SpanMap::ExpansionSpanMap(res.value.1), + ); + let old_file_id = + std::mem::replace(&mut self.current_file_id, macro_file.into()); let mark = Mark { file_id: old_file_id, span_map: old_span_map, bomb: DropBomb::new("expansion mark dropped"), }; Some((mark, parse)) - }) - }), + })(), + }, + err, } } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs index 13af0b0218e89..4737b48703db3 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs @@ -585,9 +585,9 @@ fn find_local_import_locations( #[cfg(test)] mod tests { - use base_db::fixture::WithFixture; use hir_expand::db::ExpandDatabase; use syntax::ast::AstNode; + use test_fixture::WithFixture; use crate::test_db::TestDB; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs index 26d333f9a0b0b..aea7229bd6481 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs @@ -469,8 +469,9 @@ pub fn search_dependencies( #[cfg(test)] mod tests { - use base_db::{fixture::WithFixture, SourceDatabase, Upcast}; + use base_db::{SourceDatabase, Upcast}; use expect_test::{expect, Expect}; + use test_fixture::WithFixture; use crate::{db::DefDatabase, test_db::TestDB, ItemContainerId, Lookup}; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs index ce83cb435e2e8..4902f24e2e3ab 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs @@ -102,8 +102,10 @@ pub struct ItemScope { // FIXME: Macro shadowing in one module is not properly handled. Non-item place macros will // be all resolved to the last one defined if shadowing happens. legacy_macros: FxHashMap>, - /// The derive macro invocations in this scope. + /// The attribute macro invocations in this scope. attr_macros: FxHashMap, MacroCallId>, + /// The macro invocations in this scope. + pub macro_invocations: FxHashMap, MacroCallId>, /// The derive macro invocations in this scope, keyed by the owner item over the actual derive attributes /// paired with the derive macro invocations for the specific attribute. 
derive_macros: FxHashMap, SmallVec<[DeriveMacroInvocation; 1]>>, @@ -345,6 +347,10 @@ impl ItemScope { self.attr_macros.insert(item, call); } + pub(crate) fn add_macro_invoc(&mut self, call: AstId, call_id: MacroCallId) { + self.macro_invocations.insert(call, call_id); + } + pub(crate) fn attr_macro_invocs( &self, ) -> impl Iterator, MacroCallId)> + '_ { @@ -692,6 +698,7 @@ impl ItemScope { use_imports_values, use_imports_types, use_imports_macros, + macro_invocations, } = self; types.shrink_to_fit(); values.shrink_to_fit(); @@ -709,6 +716,7 @@ impl ItemScope { derive_macros.shrink_to_fit(); extern_crate_decls.shrink_to_fit(); use_decls.shrink_to_fit(); + macro_invocations.shrink_to_fit(); } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs index 3d2cddffa3ba2..20e4e44339e9a 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs @@ -29,6 +29,9 @@ //! //! In general, any item in the `ItemTree` stores its `AstId`, which allows mapping it back to its //! surface syntax. +//! +//! Note that we cannot store [`span::Span`]s inside of this, as typing in an item invalidates its +//! encompassing span! mod lower; mod pretty; @@ -42,7 +45,7 @@ use std::{ }; use ast::{AstNode, HasName, StructKind}; -use base_db::{span::SyntaxContextId, CrateId}; +use base_db::CrateId; use either::Either; use hir_expand::{ ast_id_map::{AstIdNode, FileAstId}, @@ -55,6 +58,7 @@ use la_arena::{Arena, Idx, IdxRange, RawIdx}; use profile::Count; use rustc_hash::FxHashMap; use smallvec::SmallVec; +use span::Span; use stdx::never; use syntax::{ast, match_ast, SyntaxKind}; use triomphe::Arc; @@ -280,7 +284,7 @@ struct ItemTreeData { mods: Arena, macro_calls: Arena, macro_rules: Arena, - macro_defs: Arena, + macro_defs: Arena, vis: ItemVisibilities, } @@ -513,7 +517,7 @@ mod_items! { Mod in mods -> ast::Module, MacroCall in macro_calls -> ast::MacroCall, MacroRules in macro_rules -> ast::MacroRules, - MacroDef in macro_defs -> ast::MacroDef, + Macro2 in macro_defs -> ast::MacroDef, } macro_rules! impl_index { @@ -746,7 +750,8 @@ pub struct MacroCall { pub path: Interned, pub ast_id: FileAstId, pub expand_to: ExpandTo, - pub call_site: SyntaxContextId, + // FIXME: We need to move this out. It invalidates the item tree when typing inside the macro call. + pub call_site: Span, } #[derive(Debug, Clone, Eq, PartialEq)] @@ -758,7 +763,7 @@ pub struct MacroRules { /// "Macros 2.0" macro definition. 
#[derive(Debug, Clone, Eq, PartialEq)] -pub struct MacroDef { +pub struct Macro2 { pub name: Name, pub visibility: RawVisibilityId, pub ast_id: FileAstId, @@ -917,7 +922,7 @@ impl ModItem { | ModItem::Impl(_) | ModItem::Mod(_) | ModItem::MacroRules(_) - | ModItem::MacroDef(_) => None, + | ModItem::Macro2(_) => None, ModItem::MacroCall(call) => Some(AssocItem::MacroCall(*call)), ModItem::Const(konst) => Some(AssocItem::Const(*konst)), ModItem::TypeAlias(alias) => Some(AssocItem::TypeAlias(*alias)), @@ -943,7 +948,7 @@ impl ModItem { ModItem::Mod(it) => tree[it.index()].ast_id().upcast(), ModItem::MacroCall(it) => tree[it.index()].ast_id().upcast(), ModItem::MacroRules(it) => tree[it.index()].ast_id().upcast(), - ModItem::MacroDef(it) => tree[it.index()].ast_id().upcast(), + ModItem::Macro2(it) => tree[it.index()].ast_id().upcast(), } } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs index 83a2790ce8f1f..8e2fafe81b50f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs @@ -2,7 +2,7 @@ use std::collections::hash_map::Entry; -use hir_expand::{ast_id_map::AstIdMap, span::SpanMapRef, HirFileId}; +use hir_expand::{ast_id_map::AstIdMap, span_map::SpanMapRef, HirFileId}; use syntax::ast::{self, HasModuleItem, HasTypeBounds}; use crate::{ @@ -549,7 +549,7 @@ impl<'a> Ctx<'a> { path, ast_id, expand_to, - call_site: span_map.span_for_range(m.syntax().text_range()).ctx, + call_site: span_map.span_for_range(m.syntax().text_range()), }; Some(id(self.data().macro_calls.alloc(res))) } @@ -562,13 +562,13 @@ impl<'a> Ctx<'a> { Some(id(self.data().macro_rules.alloc(res))) } - fn lower_macro_def(&mut self, m: &ast::MacroDef) -> Option> { + fn lower_macro_def(&mut self, m: &ast::MacroDef) -> Option> { let name = m.name().map(|it| it.as_name())?; let ast_id = self.source_ast_id_map.ast_id(m); let visibility = self.lower_visibility(m); - let res = MacroDef { name, ast_id, visibility }; + let res = Macro2 { name, ast_id, visibility }; Some(id(self.data().macro_defs.alloc(res))) } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs index 244111d202ceb..6d92fce07272f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs @@ -464,8 +464,8 @@ impl Printer<'_> { let MacroRules { name, ast_id: _ } = &self.tree[it]; wln!(self, "macro_rules! {} {{ ... }}", name.display(self.db.upcast())); } - ModItem::MacroDef(it) => { - let MacroDef { name, visibility, ast_id: _ } = &self.tree[it]; + ModItem::Macro2(it) => { + let Macro2 { name, visibility, ast_id: _ } = &self.tree[it]; self.print_visibility(*visibility); wln!(self, "macro {} {{ ... 
}}", name.display(self.db.upcast())); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs index 96c65b941c1d0..f97ae0d8e434d 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs @@ -1,5 +1,5 @@ -use base_db::fixture::WithFixture; use expect_test::{expect, Expect}; +use test_fixture::WithFixture; use crate::{db::DefDatabase, test_db::TestDB}; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs index b5333861cc8a6..22ba3aab4e9fd 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs @@ -63,7 +63,7 @@ use std::{ panic::{RefUnwindSafe, UnwindSafe}, }; -use base_db::{impl_intern_key, salsa, span::SyntaxContextId, CrateId, ProcMacroKind}; +use base_db::{impl_intern_key, salsa, CrateId, Edition}; use hir_expand::{ ast_id_map::{AstIdNode, FileAstId}, attrs::{Attr, AttrId, AttrInput}, @@ -72,24 +72,27 @@ use hir_expand::{ builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, db::ExpandDatabase, eager::expand_eager_macro_input, + impl_intern_lookup, name::Name, - proc_macro::ProcMacroExpander, + proc_macro::{CustomProcMacroExpander, ProcMacroKind}, AstId, ExpandError, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind, }; use item_tree::ExternBlock; use la_arena::Idx; use nameres::DefMap; +use span::Span; use stdx::impl_from; use syntax::{ast, AstNode}; -pub use hir_expand::tt; +pub use hir_expand::{tt, Intern, Lookup}; use crate::{ builtin_type::BuiltinType, data::adt::VariantData, + db::DefDatabase, item_tree::{ - Const, Enum, ExternCrate, Function, Impl, ItemTreeId, ItemTreeNode, MacroDef, MacroRules, + Const, Enum, ExternCrate, Function, Impl, ItemTreeId, ItemTreeNode, Macro2, MacroRules, Static, Struct, Trait, TraitAlias, TypeAlias, Union, Use, }, }; @@ -101,7 +104,7 @@ pub struct CrateRootModuleId { } impl CrateRootModuleId { - pub fn def_map(&self, db: &dyn db::DefDatabase) -> Arc { + pub fn def_map(&self, db: &dyn DefDatabase) -> Arc { db.crate_def_map(self.krate) } @@ -163,7 +166,7 @@ pub struct ModuleId { } impl ModuleId { - pub fn def_map(self, db: &dyn db::DefDatabase) -> Arc { + pub fn def_map(self, db: &dyn DefDatabase) -> Arc { match self.block { Some(block) => db.block_def_map(block), None => db.crate_def_map(self.krate), @@ -174,7 +177,7 @@ impl ModuleId { self.krate } - pub fn name(self, db: &dyn db::DefDatabase) -> Option { + pub fn name(self, db: &dyn DefDatabase) -> Option { let def_map = self.def_map(db); let parent = def_map[self.local_id].parent?; def_map[parent].children.iter().find_map(|(name, module_id)| { @@ -186,7 +189,7 @@ impl ModuleId { }) } - pub fn containing_module(self, db: &dyn db::DefDatabase) -> Option { + pub fn containing_module(self, db: &dyn DefDatabase) -> Option { self.def_map(db).containing_module(self.local_id) } @@ -263,20 +266,7 @@ impl Hash for AssocItemLoc { macro_rules! 
impl_intern { ($id:ident, $loc:ident, $intern:ident, $lookup:ident) => { impl_intern_key!($id); - - impl Intern for $loc { - type ID = $id; - fn intern(self, db: &dyn db::DefDatabase) -> $id { - db.$intern(self) - } - } - - impl Lookup for $id { - type Data = $loc; - fn lookup(&self, db: &dyn db::DefDatabase) -> $loc { - db.$lookup(*self) - } - } + impl_intern_lookup!(DefDatabase, $id, $loc, $intern, $lookup); }; } @@ -376,9 +366,10 @@ pub struct Macro2Id(salsa::InternId); #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct Macro2Loc { pub container: ModuleId, - pub id: ItemTreeId, + pub id: ItemTreeId, pub expander: MacroExpander, pub allow_internal_unsafe: bool, + pub edition: Edition, } impl_intern!(Macro2Id, Macro2Loc, intern_macro2, lookup_intern_macro2); @@ -389,19 +380,28 @@ pub struct MacroRulesLoc { pub container: ModuleId, pub id: ItemTreeId, pub expander: MacroExpander, - pub allow_internal_unsafe: bool, - pub local_inner: bool, + pub flags: MacroRulesLocFlags, + pub edition: Edition, } impl_intern!(MacroRulesId, MacroRulesLoc, intern_macro_rules, lookup_intern_macro_rules); +bitflags::bitflags! { + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] + pub struct MacroRulesLocFlags: u8 { + const ALLOW_INTERNAL_UNSAFE = 1 << 0; + const LOCAL_INNER = 1 << 1; + } +} + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] pub struct ProcMacroId(salsa::InternId); #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct ProcMacroLoc { pub container: CrateRootModuleId, pub id: ItemTreeId, - pub expander: ProcMacroExpander, + pub expander: CustomProcMacroExpander, pub kind: ProcMacroKind, + pub edition: Edition, } impl_intern!(ProcMacroId, ProcMacroLoc, intern_proc_macro, lookup_intern_proc_macro); @@ -510,7 +510,7 @@ pub enum MacroId { impl_from!(Macro2Id, MacroRulesId, ProcMacroId for MacroId); impl MacroId { - pub fn is_attribute(self, db: &dyn db::DefDatabase) -> bool { + pub fn is_attribute(self, db: &dyn DefDatabase) -> bool { matches!(self, MacroId::ProcMacroId(it) if it.lookup(db).kind == ProcMacroKind::Attr) } } @@ -722,7 +722,7 @@ impl PartialEq for InTypeConstLoc { } impl InTypeConstId { - pub fn source(&self, db: &dyn db::DefDatabase) -> ast::ConstArg { + pub fn source(&self, db: &dyn DefDatabase) -> ast::ConstArg { let src = self.lookup(db).id; let file_id = src.file_id; let root = &db.parse_or_expand(file_id); @@ -742,7 +742,7 @@ pub enum GeneralConstId { impl_from!(ConstId, ConstBlockId, InTypeConstId for GeneralConstId); impl GeneralConstId { - pub fn generic_def(self, db: &dyn db::DefDatabase) -> Option { + pub fn generic_def(self, db: &dyn DefDatabase) -> Option { match self { GeneralConstId::ConstId(it) => Some(it.into()), GeneralConstId::ConstBlockId(it) => it.lookup(db).parent.as_generic_def_id(), @@ -750,7 +750,7 @@ impl GeneralConstId { } } - pub fn name(self, db: &dyn db::DefDatabase) -> String { + pub fn name(self, db: &dyn DefDatabase) -> String { match self { GeneralConstId::ConstId(const_id) => db .const_data(const_id) @@ -933,7 +933,7 @@ pub enum VariantId { impl_from!(EnumVariantId, StructId, UnionId for VariantId); impl VariantId { - pub fn variant_data(self, db: &dyn db::DefDatabase) -> Arc { + pub fn variant_data(self, db: &dyn DefDatabase) -> Arc { match self { VariantId::StructId(it) => db.struct_data(it).variant_data.clone(), VariantId::UnionId(it) => db.union_data(it).variant_data.clone(), @@ -943,7 +943,7 @@ impl VariantId { } } - pub fn file_id(self, db: &dyn db::DefDatabase) -> HirFileId { + pub fn file_id(self, db: 
&dyn DefDatabase) -> HirFileId { match self { VariantId::EnumVariantId(it) => it.parent.lookup(db).id.file_id(), VariantId::StructId(it) => it.lookup(db).id.file_id(), @@ -960,22 +960,12 @@ impl VariantId { } } -trait Intern { - type ID; - fn intern(self, db: &dyn db::DefDatabase) -> Self::ID; -} - -pub trait Lookup { - type Data; - fn lookup(&self, db: &dyn db::DefDatabase) -> Self::Data; -} - pub trait HasModule { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId; + fn module(&self, db: &dyn DefDatabase) -> ModuleId; } impl HasModule for ItemContainerId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { match *self { ItemContainerId::ModuleId(it) => it, ItemContainerId::ImplId(it) => it.lookup(db).container, @@ -986,13 +976,13 @@ impl HasModule for ItemContainerId { } impl HasModule for AssocItemLoc { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { self.container.module(db) } } impl HasModule for AdtId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { match self { AdtId::StructId(it) => it.lookup(db).container, AdtId::UnionId(it) => it.lookup(db).container, @@ -1002,13 +992,13 @@ impl HasModule for AdtId { } impl HasModule for ExternCrateId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { self.lookup(db).container } } impl HasModule for VariantId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { match self { VariantId::EnumVariantId(it) => it.parent.lookup(db).container, VariantId::StructId(it) => it.lookup(db).container, @@ -1018,7 +1008,7 @@ impl HasModule for VariantId { } impl HasModule for MacroId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { match self { MacroId::MacroRulesId(it) => it.lookup(db).container, MacroId::Macro2Id(it) => it.lookup(db).container, @@ -1028,7 +1018,7 @@ impl HasModule for MacroId { } impl HasModule for TypeOwnerId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { match self { TypeOwnerId::FunctionId(it) => it.lookup(db).module(db), TypeOwnerId::StaticId(it) => it.lookup(db).module(db), @@ -1045,7 +1035,7 @@ impl HasModule for TypeOwnerId { } impl HasModule for DefWithBodyId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { match self { DefWithBodyId::FunctionId(it) => it.lookup(db).module(db), DefWithBodyId::StaticId(it) => it.lookup(db).module(db), @@ -1057,7 +1047,7 @@ impl HasModule for DefWithBodyId { } impl HasModule for GenericDefId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { match self { GenericDefId::FunctionId(it) => it.lookup(db).module(db), GenericDefId::AdtId(it) => it.module(db), @@ -1072,13 +1062,13 @@ impl HasModule for GenericDefId { } impl HasModule for TypeAliasId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { self.lookup(db).module(db) } } impl HasModule for TraitId { - fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + fn module(&self, db: &dyn DefDatabase) -> ModuleId { self.lookup(db).container } } @@ -1087,7 +1077,7 @@ impl ModuleDefId { /// Returns the module 
containing `self` (or `self`, if `self` is itself a module). /// /// Returns `None` if `self` refers to a primitive type. - pub fn module(&self, db: &dyn db::DefDatabase) -> Option { + pub fn module(&self, db: &dyn DefDatabase) -> Option { Some(match self { ModuleDefId::ModuleId(id) => *id, ModuleDefId::FunctionId(id) => id.lookup(db).module(db), @@ -1105,7 +1095,7 @@ impl ModuleDefId { } impl AttrDefId { - pub fn krate(&self, db: &dyn db::DefDatabase) -> CrateId { + pub fn krate(&self, db: &dyn DefDatabase) -> CrateId { match self { AttrDefId::ModuleId(it) => it.krate, AttrDefId::FieldId(it) => it.parent.module(db).krate, @@ -1171,7 +1161,7 @@ impl AsMacroCall for InFile<&ast::MacroCall> { return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation"))); }; - let call_site = span_map.span_for_range(self.value.syntax().text_range()).ctx; + let call_site = span_map.span_for_range(self.value.syntax().text_range()); macro_call_as_call_id_with_eager( db, @@ -1201,7 +1191,7 @@ impl AstIdWithPath { fn macro_call_as_call_id( db: &dyn ExpandDatabase, call: &AstIdWithPath, - call_site: SyntaxContextId, + call_site: Span, expand_to: ExpandTo, krate: CrateId, resolver: impl Fn(path::ModPath) -> Option + Copy, @@ -1213,7 +1203,7 @@ fn macro_call_as_call_id( fn macro_call_as_call_id_with_eager( db: &dyn ExpandDatabase, call: &AstIdWithPath, - call_site: SyntaxContextId, + call_site: Span, expand_to: ExpandTo, krate: CrateId, resolver: impl FnOnce(path::ModPath) -> Option, @@ -1243,83 +1233,12 @@ fn macro_call_as_call_id_with_eager( Ok(res) } -pub fn macro_id_to_def_id(db: &dyn db::DefDatabase, id: MacroId) -> MacroDefId { - match id { - MacroId::Macro2Id(it) => { - let loc = it.lookup(db); - - let item_tree = loc.id.item_tree(db); - let makro = &item_tree[loc.id.value]; - let in_file = |m: FileAstId| InFile::new(loc.id.file_id(), m.upcast()); - MacroDefId { - krate: loc.container.krate, - kind: match loc.expander { - MacroExpander::Declarative => MacroDefKind::Declarative(in_file(makro.ast_id)), - MacroExpander::BuiltIn(it) => MacroDefKind::BuiltIn(it, in_file(makro.ast_id)), - MacroExpander::BuiltInAttr(it) => { - MacroDefKind::BuiltInAttr(it, in_file(makro.ast_id)) - } - MacroExpander::BuiltInDerive(it) => { - MacroDefKind::BuiltInDerive(it, in_file(makro.ast_id)) - } - MacroExpander::BuiltInEager(it) => { - MacroDefKind::BuiltInEager(it, in_file(makro.ast_id)) - } - }, - local_inner: false, - allow_internal_unsafe: loc.allow_internal_unsafe, - } - } - MacroId::MacroRulesId(it) => { - let loc = it.lookup(db); - - let item_tree = loc.id.item_tree(db); - let makro = &item_tree[loc.id.value]; - let in_file = |m: FileAstId| InFile::new(loc.id.file_id(), m.upcast()); - MacroDefId { - krate: loc.container.krate, - kind: match loc.expander { - MacroExpander::Declarative => MacroDefKind::Declarative(in_file(makro.ast_id)), - MacroExpander::BuiltIn(it) => MacroDefKind::BuiltIn(it, in_file(makro.ast_id)), - MacroExpander::BuiltInAttr(it) => { - MacroDefKind::BuiltInAttr(it, in_file(makro.ast_id)) - } - MacroExpander::BuiltInDerive(it) => { - MacroDefKind::BuiltInDerive(it, in_file(makro.ast_id)) - } - MacroExpander::BuiltInEager(it) => { - MacroDefKind::BuiltInEager(it, in_file(makro.ast_id)) - } - }, - local_inner: loc.local_inner, - allow_internal_unsafe: loc.allow_internal_unsafe, - } - } - MacroId::ProcMacroId(it) => { - let loc = it.lookup(db); - - let item_tree = loc.id.item_tree(db); - let makro = &item_tree[loc.id.value]; - MacroDefId { - krate: loc.container.krate, - kind: 
MacroDefKind::ProcMacro( - loc.expander, - loc.kind, - InFile::new(loc.id.file_id(), makro.ast_id), - ), - local_inner: false, - allow_internal_unsafe: false, - } - } - } -} - fn derive_macro_as_call_id( - db: &dyn db::DefDatabase, + db: &dyn DefDatabase, item_attr: &AstIdWithPath, derive_attr_index: AttrId, derive_pos: u32, - call_site: SyntaxContextId, + call_site: Span, krate: CrateId, resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>, ) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> { @@ -1340,7 +1259,7 @@ fn derive_macro_as_call_id( } fn attr_macro_as_call_id( - db: &dyn db::DefDatabase, + db: &dyn DefDatabase, item_attr: &AstIdWithPath, macro_attr: &Attr, krate: CrateId, @@ -1349,7 +1268,7 @@ fn attr_macro_as_call_id( let arg = match macro_attr.input.as_deref() { Some(AttrInput::TokenTree(tt)) => { let mut tt = tt.as_ref().clone(); - tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE; + tt.delimiter = tt::Delimiter::invisible_spanned(macro_attr.span); Some(tt) } @@ -1364,7 +1283,7 @@ fn attr_macro_as_call_id( attr_args: arg.map(Arc::new), invoc_attr_index: macro_attr.id, }, - macro_attr.ctxt, + macro_attr.span, ) } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/lower.rs index a3505b65fe722..395b69d284f5b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lower.rs @@ -3,7 +3,7 @@ use std::cell::OnceCell; use hir_expand::{ ast_id_map::{AstIdMap, AstIdNode}, - span::{SpanMap, SpanMapRef}, + span_map::{SpanMap, SpanMapRef}, AstId, HirFileId, InFile, }; use syntax::ast; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs index 514219ee71505..d4798f4507d12 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs @@ -163,31 +163,43 @@ fn main() { ""; } fn test_assert_expand() { check( r#" -#[rustc_builtin_macro] -macro_rules! assert { - ($cond:expr) => ({ /* compiler built-in */ }); - ($cond:expr, $($args:tt)*) => ({ /* compiler built-in */ }) -} - +//- minicore: assert fn main() { assert!(true, "{} {:?}", arg1(a, b, c), arg2); } "#, - expect![[r##" -#[rustc_builtin_macro] -macro_rules! assert { - ($cond:expr) => ({ /* compiler built-in */ }); - ($cond:expr, $($args:tt)*) => ({ /* compiler built-in */ }) + expect![[r#" +fn main() { + { + if !(true ) { + $crate::panic::panic_2021!("{} {:?}", arg1(a, b, c), arg2); + } + }; +} +"#]], + ); } +// FIXME: This is the wrong expansion, see FIXME on `builtin_fn_macro::use_panic_2021` +#[test] +fn test_assert_expand_2015() { + check( + r#" +//- minicore: assert +//- /main.rs edition:2015 +fn main() { + assert!(true, "{} {:?}", arg1(a, b, c), arg2); +} +"#, + expect![[r#" fn main() { { if !(true ) { - $crate::panic!("{} {:?}", arg1(a, b, c), arg2); + $crate::panic::panic_2021!("{} {:?}", arg1(a, b, c), arg2); } }; } -"##]], +"#]], ); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs index 9bf2a50d57c96..f2046bfbce4de 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -1218,8 +1218,10 @@ m! { macro_rules! 
m { ($(#[$m:meta])+) => ( $(#[$m])+ fn bar() {} ) } -#[doc = " Single Line Doc 1"] -#[doc = "\n MultiLines Doc\n "] fn bar() {} +#[doc = r" Single Line Doc 1"] +#[doc = r" + MultiLines Doc + "] fn bar() {} "##]], ); } @@ -1260,8 +1262,10 @@ m! { macro_rules! m { ($(#[$ m:meta])+) => ( $(#[$m])+ fn bar() {} ) } -#[doc = " 錦瑟無端五十弦,一弦一柱思華年。"] -#[doc = "\n 莊生曉夢迷蝴蝶,望帝春心託杜鵑。\n "] fn bar() {} +#[doc = r" 錦瑟無端五十弦,一弦一柱思華年。"] +#[doc = r" + 莊生曉夢迷蝴蝶,望帝春心託杜鵑。 + "] fn bar() {} "##]], ); } @@ -1281,7 +1285,7 @@ m! { macro_rules! m { ($(#[$m:meta])+) => ( $(#[$m])+ fn bar() {} ) } -#[doc = " \\ \" \'"] fn bar() {} +#[doc = r#" \ " '"#] fn bar() {} "##]], ); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs index 7e7b400442187..e875950e4e5f9 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs @@ -18,7 +18,7 @@ macro_rules! m { ($($false:ident)*) => ($false); (double_dollar) => ($$); ($) => (m!($);); - ($($t:tt)*) => ($( ${ignore(t)} ${index()} )-*); + ($($t:tt)*) => ($( ${ignore($t)} ${index()} )-*); } m!($); "#, @@ -33,7 +33,7 @@ macro_rules! m { ($($false:ident)*) => ($false); (double_dollar) => ($$); ($) => (m!($);); - ($($t:tt)*) => ($( ${ignore(t)} ${index()} )-*); + ($($t:tt)*) => ($( ${ignore($t)} ${index()} )-*); } m!($); "#]], diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs index 967b5ad36babf..6560d0ec4664b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs @@ -77,13 +77,13 @@ fn test_metavar_exprs() { check( r#" macro_rules! m { - ( $( $t:tt )* ) => ( $( ${ignore(t)} -${index()} )-* ); + ( $( $t:tt )* ) => ( $( ${ignore($t)} -${index()} )-* ); } const _: i32 = m!(a b c); "#, expect![[r#" macro_rules! m { - ( $( $t:tt )* ) => ( $( ${ignore(t)} -${index()} )-* ); + ( $( $t:tt )* ) => ( $( ${ignore($t)} -${index()} )-* ); } const _: i32 = -0--1--2; "#]], @@ -96,7 +96,7 @@ fn count_basic() { r#" macro_rules! m { ($($t:ident),*) => { - ${count(t)} + ${count($t)} } } @@ -109,7 +109,7 @@ fn test() { expect![[r#" macro_rules! m { ($($t:ident),*) => { - ${count(t)} + ${count($t)} } } @@ -130,9 +130,9 @@ macro_rules! foo { ($( $( $($t:ident)* ),* );*) => { $( { - let depth_none = ${count(t)}; - let depth_zero = ${count(t, 0)}; - let depth_one = ${count(t, 1)}; + let depth_none = ${count($t)}; + let depth_zero = ${count($t, 0)}; + let depth_one = ${count($t, 1)}; } )* } @@ -150,9 +150,9 @@ macro_rules! foo { ($( $( $($t:ident)* ),* );*) => { $( { - let depth_none = ${count(t)}; - let depth_zero = ${count(t, 0)}; - let depth_one = ${count(t, 1)}; + let depth_none = ${count($t)}; + let depth_zero = ${count($t, 0)}; + let depth_one = ${count($t, 1)}; } )* } @@ -160,11 +160,11 @@ macro_rules! foo { fn bar() { { - let depth_none = 6; + let depth_none = 3; let depth_zero = 3; let depth_one = 6; } { - let depth_none = 3; + let depth_none = 1; let depth_zero = 1; let depth_one = 3; } @@ -178,12 +178,12 @@ fn count_depth_out_of_bounds() { check( r#" macro_rules! 
foo { - ($($t:ident)*) => { ${count(t, 1)} }; - ($( $( $l:literal )* );*) => { $(${count(l, 1)};)* } + ($($t:ident)*) => { ${count($t, 1)} }; + ($( $( $l:literal )* );*) => { $(${count($l, 1)};)* } } macro_rules! bar { - ($($t:ident)*) => { ${count(t, 1024)} }; - ($( $( $l:literal )* );*) => { $(${count(l, 8192)};)* } + ($($t:ident)*) => { ${count($t, 1024)} }; + ($( $( $l:literal )* );*) => { $(${count($l, 8192)};)* } } fn test() { @@ -195,19 +195,21 @@ fn test() { "#, expect![[r#" macro_rules! foo { - ($($t:ident)*) => { ${count(t, 1)} }; - ($( $( $l:literal )* );*) => { $(${count(l, 1)};)* } + ($($t:ident)*) => { ${count($t, 1)} }; + ($( $( $l:literal )* );*) => { $(${count($l, 1)};)* } } macro_rules! bar { - ($($t:ident)*) => { ${count(t, 1024)} }; - ($( $( $l:literal )* );*) => { $(${count(l, 8192)};)* } + ($($t:ident)*) => { ${count($t, 1024)} }; + ($( $( $l:literal )* );*) => { $(${count($l, 8192)};)* } } fn test() { - /* error: ${count} out of bounds */; - /* error: ${count} out of bounds */; - /* error: ${count} out of bounds */; - /* error: ${count} out of bounds */; + 2; + 2; + 1;; + 2; + 2; + 1;; } "#]], ); @@ -218,8 +220,8 @@ fn misplaced_count() { check( r#" macro_rules! foo { - ($($t:ident)*) => { $(${count(t)})* }; - ($l:literal) => { ${count(l)} } + ($($t:ident)*) => { $(${count($t)})* }; + ($l:literal) => { ${count($l)} } } fn test() { @@ -229,13 +231,13 @@ fn test() { "#, expect![[r#" macro_rules! foo { - ($($t:ident)*) => { $(${count(t)})* }; - ($l:literal) => { ${count(l)} } + ($($t:ident)*) => { $(${count($t)})* }; + ($l:literal) => { ${count($l)} } } fn test() { - /* error: ${count} misplaced */; - /* error: ${count} misplaced */; + 1 1 1; + 1; } "#]], ); @@ -246,13 +248,13 @@ fn malformed_count() { check( r#" macro_rules! too_many_args { - ($($t:ident)*) => { ${count(t, 1, leftover)} } + ($($t:ident)*) => { ${count($t, 1, leftover)} } } macro_rules! depth_suffixed { - ($($t:ident)*) => { ${count(t, 0usize)} } + ($($t:ident)*) => { ${count($t, 0usize)} } } macro_rules! depth_too_large { - ($($t:ident)*) => { ${count(t, 18446744073709551616)} } + ($($t:ident)*) => { ${count($t, 18446744073709551616)} } } fn test() { @@ -263,13 +265,13 @@ fn test() { "#, expect![[r#" macro_rules! too_many_args { - ($($t:ident)*) => { ${count(t, 1, leftover)} } + ($($t:ident)*) => { ${count($t, 1, leftover)} } } macro_rules! depth_suffixed { - ($($t:ident)*) => { ${count(t, 0usize)} } + ($($t:ident)*) => { ${count($t, 0usize)} } } macro_rules! depth_too_large { - ($($t:ident)*) => { ${count(t, 18446744073709551616)} } + ($($t:ident)*) => { ${count($t, 18446744073709551616)} } } fn test() { @@ -288,7 +290,7 @@ fn count_interaction_with_empty_binding() { r#" macro_rules! m { ($($t:ident),*) => { - ${count(t, 100)} + ${count($t, 100)} } } @@ -299,7 +301,7 @@ fn test() { expect![[r#" macro_rules! 
m { ($($t:ident),*) => { - ${count(t, 100)} + ${count($t, 100)} } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs index be2a503d82b15..ee806361237a1 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -16,9 +16,15 @@ mod proc_macros; use std::{iter, ops::Range, sync}; -use base_db::{fixture::WithFixture, span::SpanData, ProcMacro, SourceDatabase}; +use base_db::SourceDatabase; use expect_test::Expect; -use hir_expand::{db::ExpandDatabase, span::SpanMapRef, InFile, MacroFileId, MacroFileIdExt}; +use hir_expand::{ + db::ExpandDatabase, + proc_macro::{ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind}, + span_map::SpanMapRef, + InFile, MacroFileId, MacroFileIdExt, +}; +use span::Span; use stdx::format_to; use syntax::{ ast::{self, edit::IndentLevel}, @@ -26,10 +32,10 @@ use syntax::{ SyntaxKind::{COMMENT, EOF, IDENT, LIFETIME_IDENT}, SyntaxNode, T, }; +use test_fixture::WithFixture; use crate::{ db::DefDatabase, - macro_id_to_def_id, nameres::{DefMap, MacroSubNs, ModuleSource}, resolver::HasResolver, src::HasSource, @@ -50,7 +56,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream .into(), ProcMacro { name: "identity_when_valid".into(), - kind: base_db::ProcMacroKind::Attr, + kind: ProcMacroKind::Attr, expander: sync::Arc::new(IdentityWhenValidProcMacroExpander), }, )]; @@ -90,7 +96,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream .as_call_id_with_errors(&db, krate, |path| { resolver .resolve_path_as_macro(&db, &path, Some(MacroSubNs::Bang)) - .map(|(it, _)| macro_id_to_def_id(&db, it)) + .map(|(it, _)| db.macro_def(it)) }) .unwrap(); let macro_call_id = res.value.unwrap(); @@ -307,16 +313,16 @@ fn pretty_print_macro_expansion( // compile errors. #[derive(Debug)] struct IdentityWhenValidProcMacroExpander; -impl base_db::ProcMacroExpander for IdentityWhenValidProcMacroExpander { +impl ProcMacroExpander for IdentityWhenValidProcMacroExpander { fn expand( &self, subtree: &Subtree, _: Option<&Subtree>, _: &base_db::Env, - _: SpanData, - _: SpanData, - _: SpanData, - ) -> Result { + _: Span, + _: Span, + _: Span, + ) -> Result { let (parse, _) = ::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems); if parse.errors().is_empty() { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs index 9a9fa0e02b082..52a981fd19ebc 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs @@ -59,8 +59,11 @@ mod tests; use std::{cmp::Ord, ops::Deref}; -use base_db::{CrateId, Edition, FileId, ProcMacroKind}; -use hir_expand::{ast_id_map::FileAstId, name::Name, HirFileId, InFile, MacroCallId, MacroDefId}; +use base_db::{CrateId, Edition, FileId}; +use hir_expand::{ + ast_id_map::FileAstId, name::Name, proc_macro::ProcMacroKind, HirFileId, InFile, MacroCallId, + MacroDefId, +}; use itertools::Itertools; use la_arena::Arena; use profile::Count; @@ -97,7 +100,7 @@ pub struct DefMap { /// contains this block. block: Option, /// The modules and their data declared in this crate. - modules: Arena, + pub modules: Arena, krate: CrateId, /// The prelude module for this crate. 
This either comes from an import /// marked with the `prelude_import` attribute, or (in the normal case) from @@ -623,8 +626,9 @@ impl DefMap { self.diagnostics.as_slice() } - pub fn recursion_limit(&self) -> Option { - self.data.recursion_limit + pub fn recursion_limit(&self) -> u32 { + // 128 is the default in rustc + self.data.recursion_limit.unwrap_or(128) } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs index a7abf445918aa..6288b8366bf05 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs @@ -8,7 +8,6 @@ use crate::{ attr_macro_as_call_id, db::DefDatabase, item_scope::BuiltinShadowMode, - macro_id_to_def_id, nameres::path_resolution::ResolveMode, path::{ModPath, PathKind}, AstIdWithPath, LocalModuleId, UnresolvedMacro, @@ -63,7 +62,7 @@ impl DefMap { &ast_id, attr, self.krate, - macro_id_to_def_id(db, def), + db.macro_def(def), ))) } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs index b3a10a3869a43..3763bfcbcfaf2 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs @@ -5,7 +5,7 @@ use std::{cmp::Ordering, iter, mem}; -use base_db::{span::SyntaxContextId, CrateId, Dependency, Edition, FileId}; +use base_db::{CrateId, Dependency, Edition, FileId}; use cfg::{CfgExpr, CfgOptions}; use either::Either; use hir_expand::{ @@ -15,7 +15,7 @@ use hir_expand::{ builtin_derive_macro::find_builtin_derive, builtin_fn_macro::find_builtin_macro, name::{name, AsName, Name}, - proc_macro::ProcMacroExpander, + proc_macro::CustomProcMacroExpander, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, }; @@ -23,6 +23,7 @@ use itertools::{izip, Itertools}; use la_arena::Idx; use limit::Limit; use rustc_hash::{FxHashMap, FxHashSet}; +use span::{Span, SyntaxContextId}; use stdx::always; use syntax::{ast, SmolStr}; use triomphe::Arc; @@ -35,9 +36,9 @@ use crate::{ item_scope::{ImportId, ImportOrExternCrate, ImportType, PerNsGlobImports}, item_tree::{ self, ExternCrate, Fields, FileItemTreeId, ImportKind, ItemTree, ItemTreeId, ItemTreeNode, - MacroCall, MacroDef, MacroRules, Mod, ModItem, ModKind, TreeId, + Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, TreeId, }, - macro_call_as_call_id, macro_call_as_call_id_with_eager, macro_id_to_def_id, + macro_call_as_call_id, macro_call_as_call_id_with_eager, nameres::{ diagnostics::DefDiagnostic, mod_resolution::ModDir, @@ -53,8 +54,9 @@ use crate::{ AdtId, AstId, AstIdWithPath, ConstLoc, CrateRootModuleId, EnumLoc, EnumVariantId, ExternBlockLoc, ExternCrateId, ExternCrateLoc, FunctionId, FunctionLoc, ImplLoc, Intern, ItemContainerId, LocalModuleId, Lookup, Macro2Id, Macro2Loc, MacroExpander, MacroId, - MacroRulesId, MacroRulesLoc, ModuleDefId, ModuleId, ProcMacroId, ProcMacroLoc, StaticLoc, - StructLoc, TraitAliasLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro, UseId, UseLoc, + MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ModuleDefId, ModuleId, ProcMacroId, + ProcMacroLoc, StaticLoc, StructLoc, TraitAliasLoc, TraitLoc, TypeAliasLoc, UnionLoc, + UnresolvedMacro, UseId, UseLoc, }; static GLOB_RECURSION_LIMIT: Limit = Limit::new(100); @@ -86,16 +88,21 @@ pub(super) fn collect_defs(db: &dyn 
DefDatabase, def_map: DefMap, tree_id: TreeI // FIXME: a hacky way to create a Name from string. let name = tt::Ident { text: it.name.clone(), - span: tt::SpanData { + span: Span { range: syntax::TextRange::empty(syntax::TextSize::new(0)), - anchor: base_db::span::SpanAnchor { + anchor: span::SpanAnchor { file_id: FileId::BOGUS, - ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID, + ast_id: span::ROOT_ERASED_FILE_AST_ID, }, ctx: SyntaxContextId::ROOT, }, }; - (name.as_name(), ProcMacroExpander::new(base_db::ProcMacroId(idx as u32))) + ( + name.as_name(), + CustomProcMacroExpander::new(hir_expand::proc_macro::ProcMacroId( + idx as u32, + )), + ) }) .collect()) } @@ -222,13 +229,13 @@ enum MacroDirectiveKind { FnLike { ast_id: AstIdWithPath, expand_to: ExpandTo, - call_site: SyntaxContextId, + call_site: Span, }, Derive { ast_id: AstIdWithPath, derive_attr: AttrId, derive_pos: usize, - call_site: SyntaxContextId, + call_site: Span, }, Attr { ast_id: AstIdWithPath, @@ -253,7 +260,7 @@ struct DefCollector<'a> { /// built by the build system, and is the list of proc. macros we can actually expand. It is /// empty when proc. macro support is disabled (in which case we still do name resolution for /// them). - proc_macros: Result, Box>, + proc_macros: Result, Box>, is_proc_macro: bool, from_glob_import: PerNsGlobImports, /// If we fail to resolve an attribute on a `ModItem`, we fall back to ignoring the attribute. @@ -545,6 +552,8 @@ impl DefCollector<'_> { Edition::Edition2015 => name![rust_2015], Edition::Edition2018 => name![rust_2018], Edition::Edition2021 => name![rust_2021], + // FIXME: update this when rust_2024 exists + Edition::Edition2024 => name![rust_2021], }; let path_kind = match self.def_map.data.edition { @@ -603,18 +612,21 @@ impl DefCollector<'_> { let (expander, kind) = match self.proc_macros.as_ref().map(|it| it.iter().find(|(n, _)| n == &def.name)) { Ok(Some(&(_, expander))) => (expander, kind), - _ => (ProcMacroExpander::dummy(), kind), + _ => (CustomProcMacroExpander::dummy(), kind), }; - let proc_macro_id = - ProcMacroLoc { container: self.def_map.crate_root(), id, expander, kind } - .intern(self.db); + let proc_macro_id = ProcMacroLoc { + container: self.def_map.crate_root(), + id, + expander, + kind, + edition: self.def_map.data.edition, + } + .intern(self.db); self.define_proc_macro(def.name.clone(), proc_macro_id); let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap(); if let ProcMacroKind::CustomDerive { helpers } = def.kind { - crate_data - .exported_derives - .insert(macro_id_to_def_id(self.db, proc_macro_id.into()), helpers); + crate_data.exported_derives.insert(self.db.macro_def(proc_macro_id.into()), helpers); } crate_data.fn_proc_macro_mapping.insert(fn_id, proc_macro_id); } @@ -1125,10 +1137,7 @@ impl DefCollector<'_> { BuiltinShadowMode::Module, Some(subns), ); - resolved_res - .resolved_def - .take_macros() - .map(|it| (it, macro_id_to_def_id(self.db, it))) + resolved_res.resolved_def.take_macros().map(|it| (it, self.db.macro_def(it))) }; let resolver_def_id = |path| resolver(path).map(|(_, it)| it); @@ -1143,6 +1152,9 @@ impl DefCollector<'_> { resolver_def_id, ); if let Ok(Some(call_id)) = call_id { + self.def_map.modules[directive.module_id] + .scope + .add_macro_invoc(ast_id.ast_id, call_id); push_resolved(directive, call_id); res = ReachedFixedPoint::No; @@ -1299,14 +1311,13 @@ impl DefCollector<'_> { // Not resolved to a derive helper or the derive attribute, so try to treat as a normal attribute. 
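Both this hunk and the matching one further down in `collector.rs` mint a `Name` for a declared proc macro by hand-building the same placeholder span: an empty range anchored to a bogus file, with the root syntax context. A minimal sketch of a shared constructor, assuming the same imports `collector.rs` already has; the helper name and its placement are hypothetical, not part of the patch:

```rust
use span::{Span, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};

// Hypothetical helper: a span that points at nothing, suitable for
// identifiers that have no real source location.
fn dummy_name_span() -> Span {
    Span {
        range: syntax::TextRange::empty(syntax::TextSize::new(0)),
        anchor: SpanAnchor {
            file_id: base_db::FileId::BOGUS,
            ast_id: ROOT_ERASED_FILE_AST_ID,
        },
        ctx: SyntaxContextId::ROOT,
    }
}
```

With something like this, both `tt::Ident` constructions would reduce to `tt::Ident { text, span: dummy_name_span() }`.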
let call_id = attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def); - let loc: MacroCallLoc = self.db.lookup_intern_macro_call(call_id); // If proc attribute macro expansion is disabled, skip expanding it here if !self.db.expand_proc_attr_macros() { self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro( directive.module_id, - loc.kind, - loc.def.krate, + self.db.lookup_intern_macro_call(call_id).kind, + def.krate, )); return recollect_without(self); } @@ -1314,14 +1325,14 @@ impl DefCollector<'_> { // Skip #[test]/#[bench] expansion, which would merely result in more memory usage // due to duplicating functions into macro expansions if matches!( - loc.def.kind, + def.kind, MacroDefKind::BuiltInAttr(expander, _) if expander.is_test() || expander.is_bench() ) { return recollect_without(self); } - if let MacroDefKind::ProcMacro(exp, ..) = loc.def.kind { + if let MacroDefKind::ProcMacro(exp, ..) = def.kind { if exp.is_dummy() { // If there's no expander for the proc macro (e.g. // because proc macros are disabled, or building the @@ -1329,8 +1340,8 @@ impl DefCollector<'_> { // expansion like we would if it was disabled self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro( directive.module_id, - loc.kind, - loc.def.krate, + self.db.lookup_intern_macro_call(call_id).kind, + def.krate, )); return recollect_without(self); @@ -1436,10 +1447,7 @@ impl DefCollector<'_> { BuiltinShadowMode::Module, Some(MacroSubNs::Bang), ); - resolved_res - .resolved_def - .take_macros() - .map(|it| macro_id_to_def_id(self.db, it)) + resolved_res.resolved_def.take_macros().map(|it| self.db.macro_def(it)) }, ); if let Err(UnresolvedMacro { path }) = macro_call_as_call_id { @@ -1645,7 +1653,7 @@ impl ModCollector<'_, '_> { ), ModItem::MacroCall(mac) => self.collect_macro_call(&self.item_tree[mac], container), ModItem::MacroRules(id) => self.collect_macro_rules(id, module), - ModItem::MacroDef(id) => self.collect_macro_def(id, module), + ModItem::Macro2(id) => self.collect_macro_def(id, module), ModItem::Impl(imp) => { let impl_id = ImplLoc { container: module, id: ItemTreeId::new(self.tree_id, imp) } @@ -2090,11 +2098,11 @@ impl ModCollector<'_, '_> { // FIXME: a hacky way to create a Name from string. 
name = tt::Ident { text: it.clone(), - span: tt::SpanData { + span: Span { range: syntax::TextRange::empty(syntax::TextSize::new(0)), - anchor: base_db::span::SpanAnchor { + anchor: span::SpanAnchor { file_id: FileId::BOGUS, - ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID, + ast_id: span::ROOT_ERASED_FILE_AST_ID, }, ctx: SyntaxContextId::ROOT, }, @@ -2136,12 +2144,16 @@ impl ModCollector<'_, '_> { }; let allow_internal_unsafe = attrs.by_key("allow_internal_unsafe").exists(); + let mut flags = MacroRulesLocFlags::empty(); + flags.set(MacroRulesLocFlags::LOCAL_INNER, local_inner); + flags.set(MacroRulesLocFlags::ALLOW_INTERNAL_UNSAFE, allow_internal_unsafe); + let macro_id = MacroRulesLoc { container: module, id: ItemTreeId::new(self.tree_id, id), - local_inner, - allow_internal_unsafe, + flags, expander, + edition: self.def_collector.def_map.data.edition, } .intern(self.def_collector.db); self.def_collector.define_macro_rules( @@ -2152,7 +2164,7 @@ impl ModCollector<'_, '_> { ); } - fn collect_macro_def(&mut self, id: FileItemTreeId, module: ModuleId) { + fn collect_macro_def(&mut self, id: FileItemTreeId, module: ModuleId) { let krate = self.def_collector.def_map.krate; let mac = &self.item_tree[id]; let ast_id = InFile::new(self.file_id(), mac.ast_id.upcast()); @@ -2207,6 +2219,7 @@ impl ModCollector<'_, '_> { id: ItemTreeId::new(self.tree_id, id), expander, allow_internal_unsafe, + edition: self.def_collector.def_map.data.edition, } .intern(self.def_collector.db); self.def_collector.define_macro_def( @@ -2220,7 +2233,7 @@ impl ModCollector<'_, '_> { Arc::get_mut(&mut self.def_collector.def_map.data) .unwrap() .exported_derives - .insert(macro_id_to_def_id(self.def_collector.db, macro_id.into()), helpers); + .insert(self.def_collector.db.macro_def(macro_id.into()), helpers); } } } @@ -2259,7 +2272,7 @@ impl ModCollector<'_, '_> { Some(MacroSubNs::Bang), ) }) - .map(|it| macro_id_to_def_id(self.def_collector.db, it)) + .map(|it| self.def_collector.db.macro_def(it)) }) }, |path| { @@ -2271,7 +2284,7 @@ impl ModCollector<'_, '_> { BuiltinShadowMode::Module, Some(MacroSubNs::Bang), ); - resolved_res.resolved_def.take_macros().map(|it| macro_id_to_def_id(db, it)) + resolved_res.resolved_def.take_macros().map(|it| db.macro_def(it)) }, ) { // FIXME: if there were errors, this mightve been in the eager expansion from an @@ -2279,10 +2292,13 @@ impl ModCollector<'_, '_> { if res.err.is_none() { // Legacy macros need to be expanded immediately, so that any macros they produce // are in scope. 
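The two `bool`s that `MacroRulesLoc` used to carry (`allow_internal_unsafe`, `local_inner`) are now packed into the `MacroRulesLocFlags` bitflags set defined earlier in `lib.rs`. A minimal round-trip sketch, assuming the `bitflags` 2.x crate; the free functions are illustrative only and do not appear in the patch:

```rust
bitflags::bitflags! {
    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    pub struct MacroRulesLocFlags: u8 {
        const ALLOW_INTERNAL_UNSAFE = 1 << 0;
        const LOCAL_INNER = 1 << 1;
    }
}

// Writing side, in the same style as `collect_macro_rules` above.
fn pack(local_inner: bool, allow_internal_unsafe: bool) -> MacroRulesLocFlags {
    let mut flags = MacroRulesLocFlags::empty();
    flags.set(MacroRulesLocFlags::LOCAL_INNER, local_inner);
    flags.set(MacroRulesLocFlags::ALLOW_INTERNAL_UNSAFE, allow_internal_unsafe);
    flags
}

// Reading side: consumers test individual bits instead of separate struct fields.
fn main() {
    let flags = pack(true, false);
    assert!(flags.contains(MacroRulesLocFlags::LOCAL_INNER));
    assert!(!flags.contains(MacroRulesLocFlags::ALLOW_INTERNAL_UNSAFE));
}
```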
- if let Some(val) = res.value { + if let Some(call_id) = res.value { + self.def_collector.def_map.modules[self.module_id] + .scope + .add_macro_invoc(ast_id.ast_id, call_id); self.def_collector.collect_macro_expansion( self.module_id, - val, + call_id, self.macro_depth + 1, container, ); @@ -2296,7 +2312,7 @@ impl ModCollector<'_, '_> { self.def_collector.unresolved_macros.push(MacroDirective { module_id: self.module_id, depth: self.macro_depth + 1, - kind: MacroDirectiveKind::FnLike { ast_id, expand_to: expand_to, call_site }, + kind: MacroDirectiveKind::FnLike { ast_id, expand_to, call_site }, container, }); } @@ -2363,8 +2379,10 @@ impl ModCollector<'_, '_> { #[cfg(test)] mod tests { + use base_db::SourceDatabase; + use test_fixture::WithFixture; + use crate::{db::DefDatabase, test_db::TestDB}; - use base_db::{fixture::WithFixture, SourceDatabase}; use super::*; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs index 751b7beaac153..c126fdac1c62f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs @@ -19,11 +19,13 @@ pub enum ProcMacroKind { } impl ProcMacroKind { - pub(super) fn to_basedb_kind(&self) -> base_db::ProcMacroKind { + pub(super) fn to_basedb_kind(&self) -> hir_expand::proc_macro::ProcMacroKind { match self { - ProcMacroKind::CustomDerive { .. } => base_db::ProcMacroKind::CustomDerive, - ProcMacroKind::FnLike => base_db::ProcMacroKind::FuncLike, - ProcMacroKind::Attr => base_db::ProcMacroKind::Attr, + ProcMacroKind::CustomDerive { .. } => { + hir_expand::proc_macro::ProcMacroKind::CustomDerive + } + ProcMacroKind::FnLike => hir_expand::proc_macro::ProcMacroKind::FuncLike, + ProcMacroKind::Attr => hir_expand::proc_macro::ProcMacroKind::Attr, } } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs index b2ffbbe4c5d8f..17e82dc16c420 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs @@ -4,8 +4,9 @@ mod macros; mod mod_resolution; mod primitives; -use base_db::{fixture::WithFixture, SourceDatabase}; +use base_db::SourceDatabase; use expect_test::{expect, Expect}; +use test_fixture::WithFixture; use triomphe::Arc; use crate::{db::DefDatabase, nameres::DefMap, test_db::TestDB}; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs index 78cb78e833ec5..6efced02718a7 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs @@ -1,11 +1,8 @@ use base_db::{SourceDatabase, SourceDatabaseExt}; +use test_fixture::WithFixture; use triomphe::Arc; -use crate::{ - db::DefDatabase, - nameres::tests::{TestDB, WithFixture}, - AdtId, ModuleDefId, -}; +use crate::{db::DefDatabase, nameres::tests::TestDB, AdtId, ModuleDefId}; fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: &str) { let (mut db, pos) = TestDB::with_position(ra_fixture_initial); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs index e64fa0b46f136..48fe43450a710 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs 
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs @@ -1,6 +1,12 @@ -use super::*; +use expect_test::expect; +use test_fixture::WithFixture; + use itertools::Itertools; +use crate::nameres::tests::check; + +use super::*; + #[test] fn macro_rules_are_globally_visible() { check( diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs index 2ac1516ec07be..301391516d64e 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs @@ -2,7 +2,10 @@ use std::{fmt, hash::BuildHasherDefault}; use base_db::CrateId; -use hir_expand::name::{name, Name}; +use hir_expand::{ + name::{name, Name}, + MacroDefId, +}; use indexmap::IndexMap; use intern::Interned; use rustc_hash::FxHashSet; @@ -406,6 +409,15 @@ impl Resolver { .take_macros_import() } + pub fn resolve_path_as_macro_def( + &self, + db: &dyn DefDatabase, + path: &ModPath, + expected_macro_kind: Option, + ) -> Option { + self.resolve_path_as_macro(db, path, expected_macro_kind).map(|(it, _)| db.macro_def(it)) + } + /// Returns a set of names available in the current scope. /// /// Note that this is a somewhat fuzzy concept -- internally, the compiler diff --git a/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs b/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs index f5803653c73be..49688c5ee9c27 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs @@ -2,7 +2,7 @@ use std::iter; -use hir_expand::{span::SpanMapRef, InFile}; +use hir_expand::{span_map::SpanMapRef, InFile}; use la_arena::ArenaMap; use syntax::ast; use triomphe::Arc; diff --git a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml index 361bbec4318f3..506a188a211dc 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml @@ -15,7 +15,7 @@ doctest = false cov-mark = "2.0.0-pre.1" tracing.workspace = true either.workspace = true -rustc-hash = "1.1.0" +rustc-hash.workspace = true la-arena.workspace = true itertools.workspace = true hashbrown.workspace = true @@ -32,6 +32,10 @@ profile.workspace = true tt.workspace = true mbe.workspace = true limit.workspace = true +span.workspace = true [dev-dependencies] expect-test = "1.4.0" + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs b/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs index be0b72f9dfa43..d0d229e1319ff 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs @@ -5,6 +5,8 @@ //! item as an ID. That way, id's don't change unless the set of items itself //! changes. +// FIXME: Consider moving this into the span crate + use std::{ any::type_name, fmt, @@ -17,9 +19,9 @@ use profile::Count; use rustc_hash::FxHasher; use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr}; -use crate::db; +use crate::db::ExpandDatabase; -pub use base_db::span::ErasedFileAstId; +pub use span::ErasedFileAstId; /// `AstId` points to an AST node in any file. 
/// @@ -27,13 +29,13 @@ pub use base_db::span::ErasedFileAstId; pub type AstId = crate::InFile>; impl AstId { - pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N { + pub fn to_node(&self, db: &dyn ExpandDatabase) -> N { self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)) } - pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> crate::InFile { + pub fn to_in_file_node(&self, db: &dyn ExpandDatabase) -> crate::InFile { crate::InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))) } - pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr { + pub fn to_ptr(&self, db: &dyn ExpandDatabase) -> AstPtr { db.ast_id_map(self.file_id).get(self.value) } } @@ -41,7 +43,7 @@ impl AstId { pub type ErasedAstId = crate::InFile; impl ErasedAstId { - pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr { + pub fn to_ptr(&self, db: &dyn ExpandDatabase) -> SyntaxNodePtr { db.ast_id_map(self.file_id).get_erased(self.value) } } @@ -197,6 +199,19 @@ impl AstIdMap { FileAstId { raw, covariant: PhantomData } } + pub fn ast_id_for_ptr(&self, ptr: AstPtr) -> FileAstId { + let ptr = ptr.syntax_node_ptr(); + let hash = hash_ptr(&ptr); + match self.map.raw_entry().from_hash(hash, |&idx| self.arena[idx] == ptr) { + Some((&raw, &())) => FileAstId { raw, covariant: PhantomData }, + None => panic!( + "Can't find {:?} in AstIdMap:\n{:?}", + ptr, + self.arena.iter().map(|(_id, i)| i).collect::>(), + ), + } + } + pub fn get(&self, id: FileAstId) -> AstPtr { AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap() } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs index b8fc30c91189a..bd0f81881ee4c 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs @@ -1,19 +1,20 @@ //! A higher level attributes based on TokenTree, with also some shortcuts. 
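The new `AstIdMap::ast_id_for_ptr` goes in the reverse direction of `get`: from a syntax pointer back to its stable `FileAstId`, reusing the map's hash-then-compare-through-the-arena lookup. A hedged usage sketch; the wrapper function is hypothetical and assumes `ast::Fn` satisfies the `AstIdNode` bound like the other item nodes:

```rust
use hir_expand::{ast_id_map::FileAstId, db::ExpandDatabase, HirFileId};
use syntax::{ast, AstPtr};

/// Recover the stable id of a function from a pointer to it, without
/// re-walking the file. Panics (like `ast_id_for_ptr` itself) if `ptr`
/// does not belong to `file_id`.
fn fn_ast_id(
    db: &dyn ExpandDatabase,
    file_id: HirFileId,
    ptr: AstPtr<ast::Fn>,
) -> FileAstId<ast::Fn> {
    db.ast_id_map(file_id).ast_id_for_ptr(ptr)
}
```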
use std::{fmt, ops}; -use base_db::{span::SyntaxContextId, CrateId}; +use base_db::CrateId; use cfg::CfgExpr; use either::Either; use intern::Interned; use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct}; use smallvec::{smallvec, SmallVec}; +use span::Span; use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode}; use triomphe::Arc; use crate::{ db::ExpandDatabase, mod_path::ModPath, - span::SpanMapRef, + span_map::SpanMapRef, tt::{self, Subtree}, InFile, }; @@ -52,7 +53,7 @@ impl RawAttrs { id, input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))), path: Interned::new(ModPath::from(crate::name!(doc))), - ctxt: span_map.span_for_range(comment.syntax().text_range()).ctx, + span: span_map.span_for_range(comment.syntax().text_range()), }), }); let entries: Arc<[Attr]> = Arc::from_iter(entries); @@ -119,7 +120,7 @@ impl RawAttrs { let attrs = parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(|(idx, attr)| { let tree = Subtree { - delimiter: tt::Delimiter::dummy_invisible(), + delimiter: tt::Delimiter::invisible_spanned(attr.first()?.first_span()), token_trees: attr.to_vec(), }; Attr::from_tt(db, &tree, index.with_cfg_attr(idx)) @@ -176,7 +177,7 @@ pub struct Attr { pub id: AttrId, pub path: Interned, pub input: Option>, - pub ctxt: SyntaxContextId, + pub span: Span, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -205,6 +206,7 @@ impl Attr { id: AttrId, ) -> Option { let path = Interned::new(ModPath::from_src(db, ast.path()?, span_map)?); + let span = span_map.span_for_range(ast.syntax().text_range()); let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() { let value = match lit.kind() { ast::LiteralKind::String(string) => string.value()?.into(), @@ -212,12 +214,12 @@ impl Attr { }; Some(Interned::new(AttrInput::Literal(value))) } else if let Some(tt) = ast.token_tree() { - let tree = syntax_node_to_token_tree(tt.syntax(), span_map); + let tree = syntax_node_to_token_tree(tt.syntax(), span_map, span); Some(Interned::new(AttrInput::TokenTree(Box::new(tree)))) } else { None }; - Some(Attr { id, path, input, ctxt: span_map.span_for_range(ast.syntax().text_range()).ctx }) + Some(Attr { id, path, input, span }) } fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option { @@ -265,7 +267,7 @@ impl Attr { pub fn parse_path_comma_token_tree<'a>( &'a self, db: &'a dyn ExpandDatabase, - ) -> Option + 'a> { + ) -> Option + 'a> { let args = self.token_tree_value()?; if args.delimiter.kind != DelimiterKind::Parenthesis { @@ -281,7 +283,7 @@ impl Attr { // FIXME: This is necessarily a hack. 
It'd be nice if we could avoid allocation // here or maybe just parse a mod path from a token tree directly let subtree = tt::Subtree { - delimiter: tt::Delimiter::dummy_invisible(), + delimiter: tt::Delimiter::invisible_spanned(tts.first()?.first_span()), token_trees: tts.to_vec(), }; let (parse, span_map) = @@ -293,7 +295,7 @@ impl Attr { return None; } let path = meta.path()?; - let call_site = span_map.span_at(path.syntax().text_range().start()).ctx; + let call_site = span_map.span_at(path.syntax().text_range().start()); Some(( ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(&span_map))?, call_site, diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs index de58a495fef4f..55157abe671ad 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs @@ -1,12 +1,7 @@ //! Builtin attributes. +use span::{MacroCallId, Span}; -use base_db::{ - span::{SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, - FileId, -}; -use syntax::{TextRange, TextSize}; - -use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind}; +use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallKind}; macro_rules! register_builtin { ($expand_fn:ident: $(($name:ident, $variant:ident) => $expand:ident),* ) => { @@ -106,7 +101,12 @@ fn derive_attr_expand( MacroCallKind::Attr { attr_args: Some(attr_args), .. } if loc.def.is_attribute_derive() => { attr_args } - _ => return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan::DUMMY)), + _ => { + return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan { + open: loc.call_site, + close: loc.call_site, + })) + } }; pseudo_derive_attr_expansion(tt, derives, loc.call_site) } @@ -114,20 +114,13 @@ fn derive_attr_expand( pub fn pseudo_derive_attr_expansion( tt: &tt::Subtree, args: &tt::Subtree, - call_site: SyntaxContextId, + call_site: Span, ) -> ExpandResult { let mk_leaf = |char| { tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char, spacing: tt::Spacing::Alone, - span: tt::SpanData { - range: TextRange::empty(TextSize::new(0)), - anchor: base_db::span::SpanAnchor { - file_id: FileId::BOGUS, - ast_id: ROOT_ERASED_FILE_AST_ID, - }, - ctx: call_site, - }, + span: call_site, })) }; diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs index 410aa4d289ebc..8f240ef07320a 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs @@ -1,20 +1,21 @@ //! Builtin derives. -use base_db::{span::SpanData, CrateOrigin, LangCrateOrigin}; +use base_db::{CrateOrigin, LangCrateOrigin}; use itertools::izip; use rustc_hash::FxHashSet; +use span::{MacroCallId, Span}; use stdx::never; use tracing::debug; use crate::{ hygiene::span_with_def_site_ctxt, name::{AsName, Name}, - span::SpanMapRef, + span_map::SpanMapRef, tt, }; use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds}; -use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult, MacroCallId}; +use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult}; macro_rules! register_builtin { ( $($trait:ident => $expand:ident),* ) => { @@ -35,7 +36,7 @@ macro_rules! 
register_builtin { $( BuiltinDeriveExpander::$trait => $expand, )* }; - let span = db.lookup_intern_macro_call(id).span(db); + let span = db.lookup_intern_macro_call(id).call_site; let span = span_with_def_site_ctxt(db, span, id); expander(db, id, span, tt, token_map) } @@ -73,16 +74,16 @@ enum VariantShape { Unit, } -fn tuple_field_iterator(span: SpanData, n: usize) -> impl Iterator { +fn tuple_field_iterator(span: Span, n: usize) -> impl Iterator { (0..n).map(move |it| tt::Ident::new(format!("f{it}"), span)) } impl VariantShape { - fn as_pattern(&self, path: tt::Subtree, span: SpanData) -> tt::Subtree { + fn as_pattern(&self, path: tt::Subtree, span: Span) -> tt::Subtree { self.as_pattern_map(path, span, |it| quote!(span => #it)) } - fn field_names(&self, span: SpanData) -> Vec { + fn field_names(&self, span: Span) -> Vec { match self { VariantShape::Struct(s) => s.clone(), VariantShape::Tuple(n) => tuple_field_iterator(span, *n).collect(), @@ -93,7 +94,7 @@ impl VariantShape { fn as_pattern_map( &self, path: tt::Subtree, - span: SpanData, + span: Span, field_map: impl Fn(&tt::Ident) -> tt::Subtree, ) -> tt::Subtree { match self { @@ -143,11 +144,11 @@ enum AdtShape { } impl AdtShape { - fn as_pattern(&self, span: SpanData, name: &tt::Ident) -> Vec { + fn as_pattern(&self, span: Span, name: &tt::Ident) -> Vec { self.as_pattern_map(name, |it| quote!(span =>#it), span) } - fn field_names(&self, span: SpanData) -> Vec> { + fn field_names(&self, span: Span) -> Vec> { match self { AdtShape::Struct(s) => { vec![s.field_names(span)] @@ -166,7 +167,7 @@ impl AdtShape { &self, name: &tt::Ident, field_map: impl Fn(&tt::Ident) -> tt::Subtree, - span: SpanData, + span: Span, ) -> Vec { match self { AdtShape::Struct(s) => { @@ -199,7 +200,7 @@ struct BasicAdtInfo { fn parse_adt( tm: SpanMapRef<'_>, adt: &ast::Adt, - call_site: SpanData, + call_site: Span, ) -> Result { let (name, generic_param_list, shape) = match adt { ast::Adt::Struct(it) => ( @@ -245,7 +246,7 @@ fn parse_adt( match this { Some(it) => { param_type_set.insert(it.as_name()); - mbe::syntax_node_to_token_tree(it.syntax(), tm) + mbe::syntax_node_to_token_tree(it.syntax(), tm, call_site) } None => { tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site }) @@ -253,15 +254,15 @@ fn parse_adt( } }; let bounds = match ¶m { - ast::TypeOrConstParam::Type(it) => { - it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm)) - } + ast::TypeOrConstParam::Type(it) => it + .type_bound_list() + .map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm, call_site)), ast::TypeOrConstParam::Const(_) => None, }; let ty = if let ast::TypeOrConstParam::Const(param) = param { let ty = param .ty() - .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax(), tm)) + .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax(), tm, call_site)) .unwrap_or_else(|| { tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site }) }); @@ -297,7 +298,7 @@ fn parse_adt( let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name(); param_type_set.contains(&name).then_some(p) }) - .map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm)) + .map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm, call_site)) .collect(); let name_token = name_to_token(tm, name)?; Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types }) @@ -349,7 +350,7 @@ fn name_to_token( /// therefore does not get bound by the derived trait. 
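For tuple variants the derive expanders synthesize positional binder names with `tuple_field_iterator`, all stamped with the single `Span` that is now threaded through every helper. Illustration only, assuming the imports of `builtin_derive_macro.rs`; the demo function is not part of the patch:

```rust
// For arity 3 this yields identifiers `f0`, `f1`, `f2`, each carrying `span`;
// `as_pattern_map` then splices them into patterns such as `Enum::Variant(f0, f1, f2)`.
fn tuple_binders_demo(span: Span) -> Vec<String> {
    tuple_field_iterator(span, 3)
        .map(|ident| ident.text.to_string())
        .collect() // ["f0", "f1", "f2"]
}
```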
fn expand_simple_derive( // FIXME: use - invoc_span: SpanData, + invoc_span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>, trait_path: tt::Subtree, @@ -397,7 +398,7 @@ fn expand_simple_derive( ExpandResult::ok(expanded) } -fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId, span: SpanData) -> tt::TokenTree { +fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId, span: Span) -> tt::TokenTree { // FIXME: make hygiene works for builtin derive macro // such that $crate can be used here. let cg = db.crate_graph(); @@ -416,7 +417,7 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId, span: SpanData) fn copy_expand( db: &dyn ExpandDatabase, id: MacroCallId, - span: SpanData, + span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult { @@ -427,7 +428,7 @@ fn copy_expand( fn clone_expand( db: &dyn ExpandDatabase, id: MacroCallId, - span: SpanData, + span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult { @@ -470,13 +471,13 @@ fn clone_expand( } /// This function exists since `quote! {span => => }` doesn't work. -fn fat_arrow(span: SpanData) -> tt::Subtree { +fn fat_arrow(span: Span) -> tt::Subtree { let eq = tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span }; quote! {span => #eq> } } /// This function exists since `quote! {span => && }` doesn't work. -fn and_and(span: SpanData) -> tt::Subtree { +fn and_and(span: Span) -> tt::Subtree { let and = tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span }; quote! {span => #and& } } @@ -484,7 +485,7 @@ fn and_and(span: SpanData) -> tt::Subtree { fn default_expand( db: &dyn ExpandDatabase, id: MacroCallId, - span: SpanData, + span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult { @@ -529,7 +530,7 @@ fn default_expand( fn debug_expand( db: &dyn ExpandDatabase, id: MacroCallId, - span: SpanData, + span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult { @@ -607,7 +608,7 @@ fn debug_expand( fn hash_expand( db: &dyn ExpandDatabase, id: MacroCallId, - span: SpanData, + span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult { @@ -660,7 +661,7 @@ fn hash_expand( fn eq_expand( db: &dyn ExpandDatabase, id: MacroCallId, - span: SpanData, + span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult { @@ -671,7 +672,7 @@ fn eq_expand( fn partial_eq_expand( db: &dyn ExpandDatabase, id: MacroCallId, - span: SpanData, + span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult { @@ -725,7 +726,7 @@ fn partial_eq_expand( fn self_and_other_patterns( adt: &BasicAdtInfo, name: &tt::Ident, - span: SpanData, + span: Span, ) -> (Vec, Vec) { let self_patterns = adt.shape.as_pattern_map( name, @@ -749,7 +750,7 @@ fn self_and_other_patterns( fn ord_expand( db: &dyn ExpandDatabase, id: MacroCallId, - span: SpanData, + span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult { @@ -760,7 +761,7 @@ fn ord_expand( left: tt::Subtree, right: tt::Subtree, rest: tt::Subtree, - span: SpanData, + span: Span, ) -> tt::Subtree { let fat_arrow1 = fat_arrow(span); let fat_arrow2 = fat_arrow(span); @@ -813,7 +814,7 @@ fn ord_expand( fn partial_ord_expand( db: &dyn ExpandDatabase, id: MacroCallId, - span: SpanData, + span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult { @@ -824,7 +825,7 @@ fn partial_ord_expand( left: tt::Subtree, right: tt::Subtree, rest: tt::Subtree, - span: SpanData, + span: Span, ) -> tt::Subtree { let fat_arrow1 = fat_arrow(span); let fat_arrow2 = fat_arrow(span); diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs 
b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs index c8f04bfee54f1..f99a891762332 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs @@ -1,13 +1,11 @@ //! Builtin macro -use base_db::{ - span::{SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, - AnchoredPath, Edition, FileId, -}; +use base_db::{AnchoredPath, Edition, FileId}; use cfg::CfgExpr; use either::Either; use itertools::Itertools; use mbe::{parse_exprs_with_sep, parse_to_token_tree}; +use span::{Span, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}; use syntax::{ ast::{self, AstToken}, SmolStr, @@ -15,10 +13,11 @@ use syntax::{ use crate::{ db::ExpandDatabase, - hygiene::span_with_def_site_ctxt, - name, quote, + hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt}, + name::{self, known}, + quote, tt::{self, DelimSpan}, - ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroCallLoc, + ExpandError, ExpandResult, HirFileIdExt, MacroCallId, }; macro_rules! register_builtin { @@ -44,7 +43,7 @@ macro_rules! register_builtin { $( BuiltinFnLikeExpander::$kind => $expand, )* }; - let span = db.lookup_intern_macro_call(id).span(db); + let span = db.lookup_intern_macro_call(id).call_site; let span = span_with_def_site_ctxt(db, span, id); expander(db, id, tt, span) } @@ -61,7 +60,7 @@ macro_rules! register_builtin { $( EagerExpander::$e_kind => $e_expand, )* }; - let span = db.lookup_intern_macro_call(id).span(db); + let span = db.lookup_intern_macro_call(id).call_site; let span = span_with_def_site_ctxt(db, span, id); expander(db, id, tt, span) } @@ -109,6 +108,7 @@ register_builtin! { (format_args, FormatArgs) => format_args_expand, (const_format_args, ConstFormatArgs) => format_args_expand, (format_args_nl, FormatArgsNl) => format_args_nl_expand, + (quote, Quote) => quote_expand, EAGER: (compile_error, CompileError) => compile_error_expand, @@ -122,7 +122,7 @@ register_builtin! { (option_env, OptionEnv) => option_env_expand } -fn mk_pound(span: SpanData) -> tt::Subtree { +fn mk_pound(span: Span) -> tt::Subtree { crate::quote::IntoTt::to_subtree( vec![crate::tt::Leaf::Punct(crate::tt::Punct { char: '#', @@ -138,7 +138,7 @@ fn module_path_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult { // Just return a dummy result. ExpandResult::ok(quote! {span => @@ -150,13 +150,13 @@ fn line_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult { // dummy implementation for type-checking purposes // Note that `line!` and `column!` will never be implemented properly, as they are by definition // not incremental ExpandResult::ok(tt::Subtree { - delimiter: tt::Delimiter::dummy_invisible(), + delimiter: tt::Delimiter::invisible_spanned(span), token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: "0u32".into(), span, @@ -168,7 +168,7 @@ fn log_syntax_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult { ExpandResult::ok(quote! {span =>}) } @@ -177,7 +177,7 @@ fn trace_macros_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult { ExpandResult::ok(quote! 
{span =>}) } @@ -186,7 +186,7 @@ fn stringify_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult { let pretty = ::tt::pretty(&tt.token_trees); @@ -198,32 +198,38 @@ fn stringify_expand( } fn assert_expand( - _db: &dyn ExpandDatabase, - _id: MacroCallId, + db: &dyn ExpandDatabase, + id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult { - let args = parse_exprs_with_sep(tt, ','); + let call_site_span = span_with_call_site_ctxt(db, span, id); + let args = parse_exprs_with_sep(tt, ',', call_site_span); let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span }; let expanded = match &*args { [cond, panic_args @ ..] => { let comma = tt::Subtree { - delimiter: tt::Delimiter::dummy_invisible(), + delimiter: tt::Delimiter::invisible_spanned(call_site_span), token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', spacing: tt::Spacing::Alone, - span, + span: call_site_span, }))], }; let cond = cond.clone(); let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma); - quote! {span =>{ + let mac = if use_panic_2021(db, span) { + quote! {call_site_span => #dollar_crate::panic::panic_2021!(##panic_args) } + } else { + quote! {call_site_span => #dollar_crate::panic!(##panic_args) } + }; + quote! {call_site_span =>{ if !(#cond) { - #dollar_crate::panic!(##panic_args); + #mac; } }} } - [] => quote! {span =>{}}, + [] => quote! {call_site_span =>{}}, }; ExpandResult::ok(expanded) @@ -233,7 +239,7 @@ fn file_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult { // FIXME: RA purposefully lacks knowledge of absolute file names // so just return "". @@ -250,7 +256,7 @@ fn format_args_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult { format_args_expand_general(db, id, tt, "", span) } @@ -259,7 +265,7 @@ fn format_args_nl_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult { format_args_expand_general(db, id, tt, "\\n", span) } @@ -270,7 +276,7 @@ fn format_args_expand_general( tt: &tt::Subtree, // FIXME: Make use of this so that mir interpretation works properly _end_string: &str, - span: SpanData, + span: Span, ) -> ExpandResult { let pound = mk_pound(span); let mut tt = tt.clone(); @@ -284,7 +290,7 @@ fn asm_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult { // We expand all assembly snippets to `format_args!` invocations to get format syntax // highlighting for them. @@ -314,7 +320,7 @@ fn global_asm_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult { // Expand to nothing (at item-level) ExpandResult::ok(quote! 
{span =>}) @@ -324,7 +330,7 @@ fn cfg_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult { let loc = db.lookup_intern_macro_call(id); let expr = CfgExpr::parse(tt); @@ -337,19 +343,25 @@ fn panic_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult { - let loc: MacroCallLoc = db.lookup_intern_macro_call(id); let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span }; + let call_site_span = span_with_call_site_ctxt(db, span, id); + + let mac = + if use_panic_2021(db, call_site_span) { known::panic_2021 } else { known::panic_2015 }; + // Expand to a macro call `$crate::panic::panic_{edition}` - let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 { - quote!(span =>#dollar_crate::panic::panic_2021!) - } else { - quote!(span =>#dollar_crate::panic::panic_2015!) - }; + let mut call = quote!(call_site_span =>#dollar_crate::panic::#mac!); // Pass the original arguments - call.token_trees.push(tt::TokenTree::Subtree(tt.clone())); + let mut subtree = tt.clone(); + subtree.delimiter = tt::Delimiter { + open: call_site_span, + close: call_site_span, + kind: tt::DelimiterKind::Parenthesis, + }; + call.token_trees.push(tt::TokenTree::Subtree(subtree)); ExpandResult::ok(call) } @@ -357,22 +369,52 @@ fn unreachable_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult { - let loc: MacroCallLoc = db.lookup_intern_macro_call(id); - // Expand to a macro call `$crate::panic::unreachable_{edition}` let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span }; - let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 { - quote!(span =>#dollar_crate::panic::unreachable_2021!) + let call_site_span = span_with_call_site_ctxt(db, span, id); + + let mac = if use_panic_2021(db, call_site_span) { + known::unreachable_2021 } else { - quote!(span =>#dollar_crate::panic::unreachable_2015!) + known::unreachable_2015 }; + // Expand to a macro call `$crate::panic::panic_{edition}` + let mut call = quote!(call_site_span =>#dollar_crate::panic::#mac!); + // Pass the original arguments - call.token_trees.push(tt::TokenTree::Subtree(tt.clone())); + let mut subtree = tt.clone(); + subtree.delimiter = tt::Delimiter { + open: call_site_span, + close: call_site_span, + kind: tt::DelimiterKind::Parenthesis, + }; + call.token_trees.push(tt::TokenTree::Subtree(subtree)); ExpandResult::ok(call) } +fn use_panic_2021(db: &dyn ExpandDatabase, span: Span) -> bool { + // To determine the edition, we check the first span up the expansion + // stack that does not have #[allow_internal_unstable(edition_panic)]. + // (To avoid using the edition of e.g. the assert!() or debug_assert!() definition.) + loop { + let Some(expn) = db.lookup_intern_syntax_context(span.ctx).outer_expn else { + break false; + }; + let expn = db.lookup_intern_macro_call(expn); + // FIXME: Record allow_internal_unstable in the macro def (not been done yet because it + // would consume quite a bit extra memory for all call locs...) 
+ // if let Some(features) = expn.def.allow_internal_unstable { + // if features.iter().any(|&f| f == sym::edition_panic) { + // span = expn.call_site; + // continue; + // } + // } + break expn.def.edition >= Edition::Edition2021; + } +} + fn unquote_str(lit: &tt::Literal) -> Option { let lit = ast::make::tokens::literal(&lit.to_string()); let token = ast::String::cast(lit)?; @@ -395,7 +437,7 @@ fn compile_error_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult { let err = match &*tt.token_trees { [tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) { @@ -412,7 +454,7 @@ fn concat_expand( _db: &dyn ExpandDatabase, _arg_id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult { let mut err = None; let mut text = String::new(); @@ -459,7 +501,7 @@ fn concat_bytes_expand( _db: &dyn ExpandDatabase, _arg_id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult { let mut bytes = Vec::new(); let mut err = None; @@ -543,7 +585,7 @@ fn concat_idents_expand( _db: &dyn ExpandDatabase, _arg_id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult { let mut err = None; let mut ident = String::new(); @@ -596,7 +638,7 @@ fn include_expand( db: &dyn ExpandDatabase, arg_id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult { let file_id = match include_input_to_file_id(db, arg_id, tt) { Ok(it) => it, @@ -629,11 +671,11 @@ fn include_bytes_expand( _db: &dyn ExpandDatabase, _arg_id: MacroCallId, _tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult { // FIXME: actually read the file here if the user asked for macro expansion let res = tt::Subtree { - delimiter: tt::Delimiter::dummy_invisible(), + delimiter: tt::Delimiter::invisible_spanned(span), token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: r#"b"""#.into(), span, @@ -646,7 +688,7 @@ fn include_str_expand( db: &dyn ExpandDatabase, arg_id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult { let path = match parse_string(tt) { Ok(it) => it, @@ -681,7 +723,7 @@ fn env_expand( db: &dyn ExpandDatabase, arg_id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult { let key = match parse_string(tt) { Ok(it) => it, @@ -713,7 +755,7 @@ fn option_env_expand( db: &dyn ExpandDatabase, arg_id: MacroCallId, tt: &tt::Subtree, - span: SpanData, + span: Span, ) -> ExpandResult { let key = match parse_string(tt) { Ok(it) => it, @@ -729,3 +771,15 @@ fn option_env_expand( ExpandResult::ok(expanded) } + +fn quote_expand( + _db: &dyn ExpandDatabase, + _arg_id: MacroCallId, + _tt: &tt::Subtree, + span: Span, +) -> ExpandResult { + ExpandResult::new( + tt::Subtree::empty(tt::DelimSpan { open: span, close: span }), + ExpandError::other("quote! is not implemented"), + ) +} diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/change.rs b/src/tools/rust-analyzer/crates/hir-expand/src/change.rs new file mode 100644 index 0000000000000..67b7df198e93e --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-expand/src/change.rs @@ -0,0 +1,42 @@ +//! Defines a unit of change that can applied to the database to get the next +//! state. Changes are transactional. 
+use base_db::{salsa::Durability, CrateGraph, FileChange, SourceDatabaseExt, SourceRoot}; +use span::FileId; +use triomphe::Arc; + +use crate::{db::ExpandDatabase, proc_macro::ProcMacros}; + +#[derive(Debug, Default)] +pub struct Change { + pub source_change: FileChange, + pub proc_macros: Option, +} + +impl Change { + pub fn new() -> Self { + Self::default() + } + + pub fn apply(self, db: &mut (impl ExpandDatabase + SourceDatabaseExt)) { + self.source_change.apply(db); + if let Some(proc_macros) = self.proc_macros { + db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH); + } + } + + pub fn change_file(&mut self, file_id: FileId, new_text: Option>) { + self.source_change.change_file(file_id, new_text) + } + + pub fn set_crate_graph(&mut self, graph: CrateGraph) { + self.source_change.set_crate_graph(graph) + } + + pub fn set_proc_macros(&mut self, proc_macros: ProcMacros) { + self.proc_macros = Some(proc_macros); + } + + pub fn set_roots(&mut self, roots: Vec) { + self.source_change.set_roots(roots) + } +} diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs index 935669d49b5b3..f7a26e436dee7 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs @@ -1,14 +1,16 @@ //! Defines database & queries for macro expansion. +use std::sync::OnceLock; + use base_db::{ salsa::{self, debug::DebugQueryTable}, - span::SyntaxContextId, - CrateId, Edition, FileId, SourceDatabase, + CrateId, Edition, FileId, SourceDatabase, VersionReq, }; use either::Either; use limit::Limit; use mbe::{syntax_node_to_token_tree, ValueResult}; use rustc_hash::FxHashSet; +use span::{Span, SyntaxContextId}; use syntax::{ ast::{self, HasAttrs}, AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T, @@ -21,11 +23,16 @@ use crate::{ builtin_attr_macro::pseudo_derive_attr_expansion, builtin_fn_macro::EagerExpander, fixup::{self, reverse_fixups, SyntaxFixupUndoInfo}, - hygiene::{apply_mark, SyntaxContextData, Transparency}, - span::{RealSpanMap, SpanMap, SpanMapRef}, - tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, - ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, HirFileId, HirFileIdRepr, MacroCallId, - MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFileId, ProcMacroExpander, + hygiene::{ + apply_mark, span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt, + SyntaxContextData, Transparency, + }, + proc_macro::ProcMacros, + span_map::{RealSpanMap, SpanMap, SpanMapRef}, + tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, + CustomProcMacroExpander, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, + HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, + MacroFileId, }; /// Total limit on the number of tokens produced by any macro invocation. 
@@ -39,10 +46,13 @@ static TOKEN_LIMIT: Limit = Limit::new(1_048_576); #[derive(Debug, Clone, Eq, PartialEq)] /// Old-style `macro_rules` or the new macros 2.0 pub struct DeclarativeMacroExpander { - pub mac: mbe::DeclarativeMacro, + pub mac: mbe::DeclarativeMacro, pub transparency: Transparency, } +// FIXME: Remove this once we drop support for 1.76 +static REQUIREMENT: OnceLock = OnceLock::new(); + impl DeclarativeMacroExpander { pub fn expand( &self, @@ -50,25 +60,61 @@ impl DeclarativeMacroExpander { tt: tt::Subtree, call_id: MacroCallId, ) -> ExpandResult { + let loc = db.lookup_intern_macro_call(call_id); + let toolchain = &db.crate_graph()[loc.def.krate].toolchain; + let new_meta_vars = toolchain.as_ref().map_or(false, |version| { + REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( + &base_db::Version { + pre: base_db::Prerelease::EMPTY, + build: base_db::BuildMetadata::EMPTY, + major: version.major, + minor: version.minor, + patch: version.patch, + }, + ) + }); match self.mac.err() { Some(e) => ExpandResult::new( - tt::Subtree::empty(tt::DelimSpan::DUMMY), + tt::Subtree::empty(tt::DelimSpan { open: loc.call_site, close: loc.call_site }), ExpandError::other(format!("invalid macro definition: {e}")), ), None => self .mac - .expand(&tt, |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency)) + .expand( + &tt, + |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency), + new_meta_vars, + loc.call_site, + ) .map_err(Into::into), } } - pub fn expand_unhygienic(&self, tt: tt::Subtree) -> ExpandResult { + pub fn expand_unhygienic( + &self, + db: &dyn ExpandDatabase, + tt: tt::Subtree, + krate: CrateId, + call_site: Span, + ) -> ExpandResult { + let toolchain = &db.crate_graph()[krate].toolchain; + let new_meta_vars = toolchain.as_ref().map_or(false, |version| { + REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( + &base_db::Version { + pre: base_db::Prerelease::EMPTY, + build: base_db::BuildMetadata::EMPTY, + major: version.major, + minor: version.minor, + patch: version.patch, + }, + ) + }); match self.mac.err() { Some(e) => ExpandResult::new( - tt::Subtree::empty(tt::DelimSpan::DUMMY), + tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), ExpandError::other(format!("invalid macro definition: {e}")), ), - None => self.mac.expand(&tt, |_| ()).map_err(Into::into), + None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into), } } } @@ -86,11 +132,15 @@ pub enum TokenExpander { /// `derive(Copy)` and such. BuiltInDerive(BuiltinDeriveExpander), /// The thing we love the most here in rust-analyzer -- procedural macros. - ProcMacro(ProcMacroExpander), + ProcMacro(CustomProcMacroExpander), } #[salsa::query_group(ExpandDatabaseStorage)] pub trait ExpandDatabase: SourceDatabase { + /// The proc macros. 
+ #[salsa::input] + fn proc_macros(&self) -> Arc; + fn ast_id_map(&self, file_id: HirFileId) -> Arc; /// Main public API -- parses a hir file, not caring whether it's a real @@ -164,7 +214,20 @@ pub fn span_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap { } pub fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc { - Arc::new(RealSpanMap::from_file(db, file_id)) + use syntax::ast::HasModuleItem; + let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)]; + let ast_id_map = db.ast_id_map(file_id.into()); + let tree = db.parse(file_id).tree(); + pairs.extend( + tree.items() + .map(|item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())), + ); + + Arc::new(RealSpanMap::from_file( + file_id, + pairs.into_boxed_slice(), + tree.syntax().text_range().end(), + )) } /// This expands the given macro call, but with different arguments. This is @@ -184,12 +247,13 @@ pub fn expand_speculative( // Build the subtree and token mapping for the speculative args let (mut tt, undo_info) = match loc.kind { - MacroCallKind::FnLike { .. } => { - (mbe::syntax_node_to_token_tree(speculative_args, span_map), SyntaxFixupUndoInfo::NONE) - } + MacroCallKind::FnLike { .. } => ( + mbe::syntax_node_to_token_tree(speculative_args, span_map, loc.call_site), + SyntaxFixupUndoInfo::NONE, + ), MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => { let censor = censor_for_macro_input(&loc, speculative_args); - let mut fixups = fixup::fixup_syntax(span_map, speculative_args); + let mut fixups = fixup::fixup_syntax(span_map, speculative_args, loc.call_site); fixups.append.retain(|it, _| match it { syntax::NodeOrToken::Node(it) => !censor.contains(it), syntax::NodeOrToken::Token(_) => true, @@ -201,6 +265,7 @@ pub fn expand_speculative( span_map, fixups.append, fixups.remove, + loc.call_site, ), fixups.undo_info, ) @@ -222,8 +287,9 @@ pub fn expand_speculative( }?; match attr.token_tree() { Some(token_tree) => { - let mut tree = syntax_node_to_token_tree(token_tree.syntax(), span_map); - tree.delimiter = tt::Delimiter::DUMMY_INVISIBLE; + let mut tree = + syntax_node_to_token_tree(token_tree.syntax(), span_map, loc.call_site); + tree.delimiter = tt::Delimiter::invisible_spanned(loc.call_site); Some(tree) } @@ -237,17 +303,16 @@ pub fn expand_speculative( // Otherwise the expand query will fetch the non speculative attribute args and pass those instead. let mut speculative_expansion = match loc.def.kind { MacroDefKind::ProcMacro(expander, ..) 
=> { - tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE; - let call_site = loc.span(db); + tt.delimiter = tt::Delimiter::invisible_spanned(loc.call_site); expander.expand( db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref(), - call_site, - call_site, - call_site, + span_with_def_site_ctxt(db, loc.def.span, actual_macro_call), + span_with_call_site_ctxt(db, loc.def.span, actual_macro_call), + span_with_mixed_site_ctxt(db, loc.def.span, actual_macro_call), ) } MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => { @@ -258,9 +323,12 @@ pub fn expand_speculative( let adt = ast::Adt::cast(speculative_args.clone()).unwrap(); expander.expand(db, actual_macro_call, &adt, span_map) } - MacroDefKind::Declarative(it) => { - db.decl_macro_expander(loc.krate, it).expand_unhygienic(tt) - } + MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand_unhygienic( + db, + tt, + loc.def.krate, + loc.call_site, + ), MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into), MacroDefKind::BuiltInEager(it, _) => { it.expand(db, actual_macro_call, &tt).map_err(Into::into) @@ -410,12 +478,13 @@ fn macro_arg( MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(), }; let (mut tt, undo_info) = match loc.kind { - MacroCallKind::FnLike { .. } => { - (mbe::syntax_node_to_token_tree(&syntax, map.as_ref()), SyntaxFixupUndoInfo::NONE) - } + MacroCallKind::FnLike { .. } => ( + mbe::syntax_node_to_token_tree(&syntax, map.as_ref(), loc.call_site), + SyntaxFixupUndoInfo::NONE, + ), MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => { let censor = censor_for_macro_input(&loc, &syntax); - let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax); + let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax, loc.call_site); fixups.append.retain(|it, _| match it { syntax::NodeOrToken::Node(it) => !censor.contains(it), syntax::NodeOrToken::Token(_) => true, @@ -427,6 +496,7 @@ fn macro_arg( map.as_ref(), fixups.append.clone(), fixups.remove.clone(), + loc.call_site, ); reverse_fixups(&mut tt, &fixups.undo_info); } @@ -436,6 +506,7 @@ fn macro_arg( map, fixups.append, fixups.remove, + loc.call_site, ), fixups.undo_info, ) @@ -444,7 +515,7 @@ fn macro_arg( if loc.def.is_proc_macro() { // proc macros expect their inputs without parentheses, MBEs expect it with them included - tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE; + tt.delimiter.kind = tt::DelimiterKind::Invisible; } if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) { @@ -506,7 +577,8 @@ fn decl_macro_expander( def_crate: CrateId, id: AstId, ) -> Arc { - let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021; + let crate_data = &db.crate_graph()[def_crate]; + let is_2021 = crate_data.edition >= Edition::Edition2021; let (root, map) = parse_with_map(db, id.file_id); let root = root.syntax_node(); @@ -530,13 +602,29 @@ fn decl_macro_expander( _ => None, } }; + let toolchain = crate_data.toolchain.as_ref(); + let new_meta_vars = toolchain.as_ref().map_or(false, |version| { + REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( + &base_db::Version { + pre: base_db::Prerelease::EMPTY, + build: base_db::BuildMetadata::EMPTY, + major: version.major, + minor: version.minor, + patch: version.patch, + }, + ) + }); let (mac, transparency) = match id.to_ptr(db).to_node(&root) { ast::Macro::MacroRules(macro_rules) => ( match macro_rules.token_tree() { Some(arg) => { - let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref()); - 
let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021); + let tt = mbe::syntax_node_to_token_tree( + arg.syntax(), + map.as_ref(), + map.span_for_range(macro_rules.macro_rules_token().unwrap().text_range()), + ); + let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021, new_meta_vars); mac } None => mbe::DeclarativeMacro::from_err( @@ -549,8 +637,12 @@ fn decl_macro_expander( ast::Macro::MacroDef(macro_def) => ( match macro_def.body() { Some(arg) => { - let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref()); - let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021); + let tt = mbe::syntax_node_to_token_tree( + arg.syntax(), + map.as_ref(), + map.span_for_range(macro_def.macro_token().unwrap().text_range()), + ); + let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021, new_meta_vars); mac } None => mbe::DeclarativeMacro::from_err( @@ -601,7 +693,7 @@ fn macro_expand( let Some((macro_arg, undo_info)) = value else { return ExpandResult { value: Arc::new(tt::Subtree { - delimiter: tt::Delimiter::DUMMY_INVISIBLE, + delimiter: tt::Delimiter::invisible_spanned(loc.call_site), token_trees: Vec::new(), }), // FIXME: We should make sure to enforce an invariant that invalid macro @@ -660,7 +752,7 @@ fn macro_expand( // Skip checking token tree limit for include! macro call if !loc.def.is_include() { // Set a hard limit for the expanded tt - if let Err(value) = check_tt_count(&tt) { + if let Err(value) = check_tt_count(&tt, loc.call_site) { return value; } } @@ -673,7 +765,7 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult ExpandResult None, }; - let call_site = loc.span(db); let ExpandResult { value: mut tt, err } = expander.expand( db, loc.def.krate, loc.krate, ¯o_arg, attr_arg, - // FIXME - call_site, - call_site, - // FIXME - call_site, + span_with_def_site_ctxt(db, loc.def.span, id), + span_with_call_site_ctxt(db, loc.def.span, id), + span_with_mixed_site_ctxt(db, loc.def.span, id), ); // Set a hard limit for the expanded tt - if let Err(value) = check_tt_count(&tt) { + if let Err(value) = check_tt_count(&tt, loc.call_site) { return value; } @@ -730,12 +819,12 @@ fn token_tree_to_syntax_node( mbe::token_tree_to_syntax_node(tt, entry_point) } -fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult>> { +fn check_tt_count(tt: &tt::Subtree, call_site: Span) -> Result<(), ExpandResult>> { let count = tt.count(); if TOKEN_LIMIT.check(count).is_err() { Err(ExpandResult { value: Arc::new(tt::Subtree { - delimiter: tt::Delimiter::DUMMY_INVISIBLE, + delimiter: tt::Delimiter::invisible_spanned(call_site), token_trees: vec![], }), err: Some(ExpandError::other(format!( diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs index 8d55240aef57b..da85c2ec7ac8f 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs @@ -18,7 +18,8 @@ //! //! //! 
See the full discussion : -use base_db::{span::SyntaxContextId, CrateId}; +use base_db::CrateId; +use span::Span; use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent}; use triomphe::Arc; @@ -26,9 +27,9 @@ use crate::{ ast::{self, AstNode}, db::ExpandDatabase, mod_path::ModPath, - span::SpanMapRef, - EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, MacroCallId, - MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, + span_map::SpanMapRef, + EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, Intern, + MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, }; pub fn expand_eager_macro_input( @@ -36,7 +37,7 @@ pub fn expand_eager_macro_input( krate: CrateId, macro_call: InFile, def: MacroDefId, - call_site: SyntaxContextId, + call_site: Span, resolver: &dyn Fn(ModPath) -> Option, ) -> ExpandResult> { let ast_map = db.ast_id_map(macro_call.file_id); @@ -48,13 +49,14 @@ pub fn expand_eager_macro_input( // When `lazy_expand` is called, its *parent* file must already exist. // Here we store an eager macro id for the argument expanded subtree // for that purpose. - let arg_id = db.intern_macro_call(MacroCallLoc { + let arg_id = MacroCallLoc { def, krate, eager: None, kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr }, call_site, - }); + } + .intern(db); let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } = db.parse_macro_expansion(arg_id.as_macro_file()); @@ -81,9 +83,9 @@ pub fn expand_eager_macro_input( return ExpandResult { value: None, err }; }; - let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map); + let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map, call_site); - subtree.delimiter = crate::tt::Delimiter::DUMMY_INVISIBLE; + subtree.delimiter.kind = crate::tt::DelimiterKind::Invisible; let loc = MacroCallLoc { def, @@ -93,7 +95,7 @@ pub fn expand_eager_macro_input( call_site, }; - ExpandResult { value: Some(db.intern_macro_call(loc)), err } + ExpandResult { value: Some(loc.intern(db)), err } } fn lazy_expand( @@ -101,7 +103,7 @@ fn lazy_expand( def: &MacroDefId, macro_call: InFile, krate: CrateId, - call_site: SyntaxContextId, + call_site: Span, ) -> ExpandResult<(InFile>, Arc)> { let ast_id = db.ast_id_map(macro_call.file_id).ast_id(¯o_call.value); @@ -121,7 +123,7 @@ fn eager_macro_recur( mut offset: TextSize, curr: InFile, krate: CrateId, - call_site: SyntaxContextId, + call_site: Span, macro_resolver: &dyn Fn(ModPath) -> Option, ) -> ExpandResult> { let original = curr.value.clone_for_update(); diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs index 89f0685d5b679..d0a1bef11c3b7 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs @@ -1,11 +1,8 @@ //! Things to wrap other things in file ids. 
use std::iter; -use base_db::{ - span::{HirFileId, HirFileIdRepr, MacroFileId, SyntaxContextId}, - FileId, FileRange, -}; use either::Either; +use span::{FileId, FileRange, HirFileId, HirFileIdRepr, MacroFileId, SyntaxContextId}; use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize}; use crate::{db, ExpansionInfo, MacroFileIdExt}; @@ -345,7 +342,7 @@ impl InFile { } impl InFile { - pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option> { + pub fn original_ast_node_rooted(self, db: &dyn db::ExpandDatabase) -> Option> { // This kind of upmapping can only be achieved in attribute expanded files, // as we don't have node inputs otherwise and therefore can't find an `N` node in the input let file_id = match self.file_id.repr() { diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs index 346cd39a7675d..d241d94b8c40a 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs @@ -1,23 +1,19 @@ //! To make attribute macros work reliably when typing, we need to take care to //! fix up syntax errors in the code we're passing to them. -use base_db::{ - span::{ErasedFileAstId, SpanAnchor, SpanData}, - FileId, -}; -use la_arena::RawIdx; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::SmallVec; +use span::{ErasedFileAstId, Span, SpanAnchor, SpanData, FIXUP_ERASED_FILE_AST_ID_MARKER}; use stdx::never; use syntax::{ ast::{self, AstNode, HasLoopBody}, match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize, }; use triomphe::Arc; -use tt::{Spacing, Span}; +use tt::Spacing; use crate::{ - span::SpanMapRef, + span_map::SpanMapRef, tt::{Ident, Leaf, Punct, Subtree}, }; @@ -42,28 +38,30 @@ impl SyntaxFixupUndoInfo { pub(crate) const NONE: Self = SyntaxFixupUndoInfo { original: None }; } -// censoring -> just don't convert the node -// replacement -> censor + append -// append -> insert a fake node, here we need to assemble some dummy span that we can figure out how -// to remove later -const FIXUP_DUMMY_FILE: FileId = FileId::from_raw(FileId::MAX_FILE_ID); -const FIXUP_DUMMY_AST_ID: ErasedFileAstId = ErasedFileAstId::from_raw(RawIdx::from_u32(!0)); +// We mark spans with `FIXUP_DUMMY_AST_ID` to indicate that they are fake. +const FIXUP_DUMMY_AST_ID: ErasedFileAstId = FIXUP_ERASED_FILE_AST_ID_MARKER; const FIXUP_DUMMY_RANGE: TextRange = TextRange::empty(TextSize::new(0)); +// If the fake span has this range end, that means that the range start is an index into the +// `original` list in `SyntaxFixupUndoInfo`. 
const FIXUP_DUMMY_RANGE_END: TextSize = TextSize::new(!0); -pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> SyntaxFixups { +pub(crate) fn fixup_syntax( + span_map: SpanMapRef<'_>, + node: &SyntaxNode, + call_site: Span, +) -> SyntaxFixups { let mut append = FxHashMap::::default(); let mut remove = FxHashSet::::default(); let mut preorder = node.preorder(); let mut original = Vec::new(); let dummy_range = FIXUP_DUMMY_RANGE; - // we use a file id of `FileId(!0)` to signal a fake node, and the text range's start offset as - // the index into the replacement vec but only if the end points to !0 - let dummy_anchor = SpanAnchor { file_id: FIXUP_DUMMY_FILE, ast_id: FIXUP_DUMMY_AST_ID }; - let fake_span = |range| SpanData { - range: dummy_range, - anchor: dummy_anchor, - ctx: span_map.span_for_range(range).ctx, + let fake_span = |range| { + let span = span_map.span_for_range(range); + SpanData { + range: dummy_range, + anchor: SpanAnchor { ast_id: FIXUP_DUMMY_AST_ID, ..span.anchor }, + ctx: span.ctx, + } }; while let Some(event) = preorder.next() { let syntax::WalkEvent::Enter(node) = event else { continue }; @@ -72,15 +70,16 @@ pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> Synta if can_handle_error(&node) && has_error_to_handle(&node) { remove.insert(node.clone().into()); // the node contains an error node, we have to completely replace it by something valid - let original_tree = mbe::syntax_node_to_token_tree(&node, span_map); + let original_tree = mbe::syntax_node_to_token_tree(&node, span_map, call_site); let idx = original.len() as u32; original.push(original_tree); + let span = span_map.span_for_range(node_range); let replacement = Leaf::Ident(Ident { text: "__ra_fixup".into(), span: SpanData { range: TextRange::new(TextSize::new(idx), FIXUP_DUMMY_RANGE_END), - anchor: dummy_anchor, - ctx: span_map.span_for_range(node_range).ctx, + anchor: SpanAnchor { ast_id: FIXUP_DUMMY_AST_ID, ..span.anchor }, + ctx: span.ctx, }, }); append.insert(node.clone().into(), vec![replacement]); @@ -301,9 +300,10 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool { pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) { let Some(undo_info) = undo_info.original.as_deref() else { return }; let undo_info = &**undo_info; + #[allow(deprecated)] if never!( - tt.delimiter.close.anchor.file_id == FIXUP_DUMMY_FILE - || tt.delimiter.open.anchor.file_id == FIXUP_DUMMY_FILE + tt.delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID + || tt.delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID ) { tt.delimiter.close = SpanData::DUMMY; tt.delimiter.open = SpanData::DUMMY; @@ -319,7 +319,7 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) { .filter(|tt| match tt { tt::TokenTree::Leaf(leaf) => { let span = leaf.span(); - let is_real_leaf = span.anchor.file_id != FIXUP_DUMMY_FILE; + let is_real_leaf = span.anchor.ast_id != FIXUP_DUMMY_AST_ID; let is_replaced_node = span.range.end() == FIXUP_DUMMY_RANGE_END; is_real_leaf || is_replaced_node } @@ -327,8 +327,8 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) { }) .flat_map(|tt| match tt { tt::TokenTree::Subtree(mut tt) => { - if tt.delimiter.close.anchor.file_id == FIXUP_DUMMY_FILE - || tt.delimiter.open.anchor.file_id == FIXUP_DUMMY_FILE + if tt.delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID + || tt.delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID { // Even though fixup never creates subtrees with fixup spans, the old proc-macro server // might copy them if the 
proc-macro asks for it, so we need to filter those out @@ -339,7 +339,7 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) { SmallVec::from_const([tt.into()]) } tt::TokenTree::Leaf(leaf) => { - if leaf.span().anchor.file_id == FIXUP_DUMMY_FILE { + if leaf.span().anchor.ast_id == FIXUP_DUMMY_AST_ID { // we have a fake node here, we need to replace it again with the original let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone(); if original.delimiter.kind == tt::DelimiterKind::Invisible { @@ -360,11 +360,12 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) { mod tests { use base_db::FileId; use expect_test::{expect, Expect}; + use syntax::TextRange; use triomphe::Arc; use crate::{ fixup::reverse_fixups, - span::{RealSpanMap, SpanMap}, + span_map::{RealSpanMap, SpanMap}, tt, }; @@ -397,12 +398,17 @@ mod tests { fn check(ra_fixture: &str, mut expect: Expect) { let parsed = syntax::SourceFile::parse(ra_fixture); let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0)))); - let fixups = super::fixup_syntax(span_map.as_ref(), &parsed.syntax_node()); + let fixups = super::fixup_syntax( + span_map.as_ref(), + &parsed.syntax_node(), + span_map.span_for_range(TextRange::empty(0.into())), + ); let mut tt = mbe::syntax_node_to_token_tree_modified( &parsed.syntax_node(), span_map.as_ref(), fixups.append, fixups.remove, + span_map.span_for_range(TextRange::empty(0.into())), ); let actual = format!("{tt}\n"); @@ -422,8 +428,11 @@ mod tests { // the fixed-up + reversed version should be equivalent to the original input // modulo token IDs and `Punct`s' spacing. - let original_as_tt = - mbe::syntax_node_to_token_tree(&parsed.syntax_node(), span_map.as_ref()); + let original_as_tt = mbe::syntax_node_to_token_tree( + &parsed.syntax_node(), + span_map.as_ref(), + span_map.span_for_range(TextRange::empty(0.into())), + ); assert!( check_subtree_eq(&tt, &original_as_tt), "different token tree:\n{tt:?}\n\n{original_as_tt:?}" diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs index 7b03709aced00..57921543c4b4b 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs @@ -2,9 +2,12 @@ //! //! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at //! this moment, this is horribly incomplete and handles only `$crate`. + +// FIXME: Consider moving this into the span crate. 
+ use std::iter; -use base_db::span::{MacroCallId, SpanData, SyntaxContextId}; +use span::{MacroCallId, Span, SyntaxContextId}; use crate::db::ExpandDatabase; @@ -78,37 +81,29 @@ pub enum Transparency { Opaque, } -pub fn span_with_def_site_ctxt( - db: &dyn ExpandDatabase, - span: SpanData, - expn_id: MacroCallId, -) -> SpanData { +pub fn span_with_def_site_ctxt(db: &dyn ExpandDatabase, span: Span, expn_id: MacroCallId) -> Span { span_with_ctxt_from_mark(db, span, expn_id, Transparency::Opaque) } -pub fn span_with_call_site_ctxt( - db: &dyn ExpandDatabase, - span: SpanData, - expn_id: MacroCallId, -) -> SpanData { +pub fn span_with_call_site_ctxt(db: &dyn ExpandDatabase, span: Span, expn_id: MacroCallId) -> Span { span_with_ctxt_from_mark(db, span, expn_id, Transparency::Transparent) } pub fn span_with_mixed_site_ctxt( db: &dyn ExpandDatabase, - span: SpanData, + span: Span, expn_id: MacroCallId, -) -> SpanData { +) -> Span { span_with_ctxt_from_mark(db, span, expn_id, Transparency::SemiTransparent) } fn span_with_ctxt_from_mark( db: &dyn ExpandDatabase, - span: SpanData, + span: Span, expn_id: MacroCallId, transparency: Transparency, -) -> SpanData { - SpanData { ctx: apply_mark(db, SyntaxContextId::ROOT, expn_id, transparency), ..span } +) -> Span { + Span { ctx: apply_mark(db, SyntaxContextId::ROOT, expn_id, transparency), ..span } } pub(super) fn apply_mark( @@ -121,7 +116,7 @@ pub(super) fn apply_mark( return apply_mark_internal(db, ctxt, Some(call_id), transparency); } - let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site; + let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site.ctx; let mut call_site_ctxt = if transparency == Transparency::SemiTransparent { call_site_ctxt.normalize_to_macros_2_0(db) } else { @@ -154,15 +149,16 @@ fn apply_mark_internal( transparency: Transparency, ) -> SyntaxContextId { let syntax_context_data = db.lookup_intern_syntax_context(ctxt); - let mut opaque = syntax_context_data.opaque; - let mut opaque_and_semitransparent = syntax_context_data.opaque_and_semitransparent; + let mut opaque = handle_self_ref(ctxt, syntax_context_data.opaque); + let mut opaque_and_semitransparent = + handle_self_ref(ctxt, syntax_context_data.opaque_and_semitransparent); if transparency >= Transparency::Opaque { let parent = opaque; + // Unlike rustc, with salsa we can't prefetch the to be allocated ID to create cycles with + // salsa when interning, so we use a sentinel value that effectively means the current + // syntax context. let new_opaque = SyntaxContextId::SELF_REF; - // But we can't just grab the to be allocated ID either as that would not deduplicate - // things! - // So we need a new salsa store type here ... opaque = db.intern_syntax_context(SyntaxContextData { outer_expn: call_id, outer_transparency: transparency, @@ -174,6 +170,9 @@ fn apply_mark_internal( if transparency >= Transparency::SemiTransparent { let parent = opaque_and_semitransparent; + // Unlike rustc, with salsa we can't prefetch the to be allocated ID to create cycles with + // salsa when interning, so we use a sentinel value that effectively means the current + // syntax context. 
let new_opaque_and_semitransparent = SyntaxContextId::SELF_REF; opaque_and_semitransparent = db.intern_syntax_context(SyntaxContextData { outer_expn: call_id, diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs index d7819b315c494..b5197d4c25d22 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs @@ -6,20 +6,21 @@ #![warn(rust_2018_idioms, unused_lifetimes)] -pub mod db; pub mod ast_id_map; -pub mod name; -pub mod hygiene; +pub mod attrs; pub mod builtin_attr_macro; pub mod builtin_derive_macro; pub mod builtin_fn_macro; -pub mod proc_macro; -pub mod quote; +pub mod db; pub mod eager; -pub mod mod_path; -pub mod attrs; -pub mod span; pub mod files; +pub mod change; +pub mod hygiene; +pub mod mod_path; +pub mod name; +pub mod proc_macro; +pub mod quote; +pub mod span_map; mod fixup; use attrs::collect_attrs; @@ -27,11 +28,9 @@ use triomphe::Arc; use std::{fmt, hash::Hash}; -use base_db::{ - span::{HirFileIdRepr, SpanData, SyntaxContextId}, - CrateId, FileId, FileRange, ProcMacroKind, -}; +use base_db::{CrateId, Edition, FileId}; use either::Either; +use span::{FileRange, HirFileIdRepr, Span, SyntaxContextId}; use syntax::{ ast::{self, AstNode}, SyntaxNode, SyntaxToken, TextRange, TextSize, @@ -42,35 +41,86 @@ use crate::{ builtin_attr_macro::BuiltinAttrExpander, builtin_derive_macro::BuiltinDeriveExpander, builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, - db::TokenExpander, + db::{ExpandDatabase, TokenExpander}, fixup::SyntaxFixupUndoInfo, + hygiene::SyntaxContextData, mod_path::ModPath, - proc_macro::ProcMacroExpander, - span::{ExpansionSpanMap, SpanMap}, + proc_macro::{CustomProcMacroExpander, ProcMacroKind}, + span_map::{ExpansionSpanMap, SpanMap}, }; pub use crate::ast_id_map::{AstId, ErasedAstId, ErasedFileAstId}; pub use crate::files::{InFile, InMacroFile, InRealFile}; -pub use base_db::span::{HirFileId, MacroCallId, MacroFileId}; pub use mbe::ValueResult; +pub use span::{HirFileId, MacroCallId, MacroFileId}; -pub type DeclarativeMacro = ::mbe::DeclarativeMacro; +pub type DeclarativeMacro = ::mbe::DeclarativeMacro; pub mod tt { - pub use base_db::span::SpanData; - pub use tt::{DelimiterKind, Spacing, Span, SpanAnchor}; - - pub type Delimiter = ::tt::Delimiter; - pub type DelimSpan = ::tt::DelimSpan; - pub type Subtree = ::tt::Subtree; - pub type Leaf = ::tt::Leaf; - pub type Literal = ::tt::Literal; - pub type Punct = ::tt::Punct; - pub type Ident = ::tt::Ident; - pub type TokenTree = ::tt::TokenTree; + pub use span::Span; + pub use tt::{DelimiterKind, Spacing}; + + pub type Delimiter = ::tt::Delimiter; + pub type DelimSpan = ::tt::DelimSpan; + pub type Subtree = ::tt::Subtree; + pub type Leaf = ::tt::Leaf; + pub type Literal = ::tt::Literal; + pub type Punct = ::tt::Punct; + pub type Ident = ::tt::Ident; + pub type TokenTree = ::tt::TokenTree; +} + +#[macro_export] +macro_rules! 
impl_intern_lookup { + ($db:ident, $id:ident, $loc:ident, $intern:ident, $lookup:ident) => { + impl $crate::Intern for $loc { + type Database<'db> = dyn $db + 'db; + type ID = $id; + fn intern<'db>(self, db: &Self::Database<'db>) -> $id { + db.$intern(self) + } + } + + impl $crate::Lookup for $id { + type Database<'db> = dyn $db + 'db; + type Data = $loc; + fn lookup<'db>(&self, db: &Self::Database<'db>) -> $loc { + db.$lookup(*self) + } + } + }; +} + +// ideally these would be defined in base-db, but the orphan rule doesn't let us +pub trait Intern { + type Database<'db>: ?Sized; + type ID; + fn intern<'db>(self, db: &Self::Database<'db>) -> Self::ID; +} + +pub trait Lookup { + type Database<'db>: ?Sized; + type Data; + fn lookup<'db>(&self, db: &Self::Database<'db>) -> Self::Data; } +impl_intern_lookup!( + ExpandDatabase, + MacroCallId, + MacroCallLoc, + intern_macro_call, + lookup_intern_macro_call +); + +impl_intern_lookup!( + ExpandDatabase, + SyntaxContextId, + SyntaxContextData, + intern_syntax_context, + lookup_intern_syntax_context +); + pub type ExpandResult = ValueResult; #[derive(Debug, PartialEq, Eq, Clone, Hash)] @@ -117,18 +167,20 @@ pub struct MacroCallLoc { pub krate: CrateId, /// Some if this is a macro call for an eager macro. Note that this is `None` /// for the eager input macro file. + // FIXME: This seems bad to save in an interned structure eager: Option>, pub kind: MacroCallKind, - pub call_site: SyntaxContextId, + pub call_site: Span, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct MacroDefId { pub krate: CrateId, + pub edition: Edition, pub kind: MacroDefKind, pub local_inner: bool, pub allow_internal_unsafe: bool, - // pub def_site: SyntaxContextId, + pub span: Span, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -138,7 +190,7 @@ pub enum MacroDefKind { BuiltInAttr(BuiltinAttrExpander, AstId), BuiltInDerive(BuiltinDeriveExpander, AstId), BuiltInEager(EagerExpander, AstId), - ProcMacro(ProcMacroExpander, ProcMacroKind, AstId), + ProcMacro(CustomProcMacroExpander, ProcMacroKind, AstId), } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -179,40 +231,39 @@ pub enum MacroCallKind { pub trait HirFileIdExt { /// Returns the original file of this macro call hierarchy. - fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId; + fn original_file(self, db: &dyn ExpandDatabase) -> FileId; /// Returns the original file of this macro call hierarchy while going into the included file if /// one of the calls comes from an `include!``. - fn original_file_respecting_includes(self, db: &dyn db::ExpandDatabase) -> FileId; + fn original_file_respecting_includes(self, db: &dyn ExpandDatabase) -> FileId; /// If this is a macro call, returns the syntax node of the very first macro call this file resides in. 
- fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option>; + fn original_call_node(self, db: &dyn ExpandDatabase) -> Option>; /// Return expansion information if it is a macro-expansion file - fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option; + fn expansion_info(self, db: &dyn ExpandDatabase) -> Option; - fn as_builtin_derive_attr_node(&self, db: &dyn db::ExpandDatabase) - -> Option>; + fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option>; } impl HirFileIdExt for HirFileId { - fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId { + fn original_file(self, db: &dyn ExpandDatabase) -> FileId { let mut file_id = self; loop { match file_id.repr() { HirFileIdRepr::FileId(id) => break id, HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => { - file_id = db.lookup_intern_macro_call(macro_call_id).kind.file_id(); + file_id = macro_call_id.lookup(db).kind.file_id(); } } } } - fn original_file_respecting_includes(mut self, db: &dyn db::ExpandDatabase) -> FileId { + fn original_file_respecting_includes(mut self, db: &dyn ExpandDatabase) -> FileId { loop { match self.repr() { - base_db::span::HirFileIdRepr::FileId(id) => break id, - base_db::span::HirFileIdRepr::MacroFile(file) => { + HirFileIdRepr::FileId(id) => break id, + HirFileIdRepr::MacroFile(file) => { let loc = db.lookup_intern_macro_call(file.macro_call_id); if loc.def.is_include() { if let Some(eager) = &loc.eager { @@ -231,7 +282,7 @@ impl HirFileIdExt for HirFileId { } } - fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option> { + fn original_call_node(self, db: &dyn ExpandDatabase) -> Option> { let mut call = db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).to_node(db); loop { match call.file_id.repr() { @@ -246,14 +297,11 @@ impl HirFileIdExt for HirFileId { } /// Return expansion information if it is a macro-expansion file - fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option { + fn expansion_info(self, db: &dyn ExpandDatabase) -> Option { Some(ExpansionInfo::new(db, self.macro_file()?)) } - fn as_builtin_derive_attr_node( - &self, - db: &dyn db::ExpandDatabase, - ) -> Option> { + fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option> { let macro_file = self.macro_file()?; let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); let attr = match loc.def.kind { @@ -265,32 +313,32 @@ impl HirFileIdExt for HirFileId { } pub trait MacroFileIdExt { - fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32; + fn expansion_level(self, db: &dyn ExpandDatabase) -> u32; /// If this is a macro call, returns the syntax node of the call. 
- fn call_node(self, db: &dyn db::ExpandDatabase) -> InFile; + fn call_node(self, db: &dyn ExpandDatabase) -> InFile; - fn expansion_info(self, db: &dyn db::ExpandDatabase) -> ExpansionInfo; + fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo; - fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool; - fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool; + fn is_builtin_derive(&self, db: &dyn ExpandDatabase) -> bool; + fn is_custom_derive(&self, db: &dyn ExpandDatabase) -> bool; /// Return whether this file is an include macro - fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool; + fn is_include_macro(&self, db: &dyn ExpandDatabase) -> bool; - fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool; + fn is_eager(&self, db: &dyn ExpandDatabase) -> bool; /// Return whether this file is an attr macro - fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool; + fn is_attr_macro(&self, db: &dyn ExpandDatabase) -> bool; /// Return whether this file is the pseudo expansion of the derive attribute. /// See [`crate::builtin_attr_macro::derive_attr_expand`]. - fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool; + fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool; } impl MacroFileIdExt for MacroFileId { - fn call_node(self, db: &dyn db::ExpandDatabase) -> InFile { + fn call_node(self, db: &dyn ExpandDatabase) -> InFile { db.lookup_intern_macro_call(self.macro_call_id).to_node(db) } - fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 { + fn expansion_level(self, db: &dyn ExpandDatabase) -> u32 { let mut level = 0; let mut macro_file = self; loop { @@ -305,39 +353,39 @@ impl MacroFileIdExt for MacroFileId { } /// Return expansion information if it is a macro-expansion file - fn expansion_info(self, db: &dyn db::ExpandDatabase) -> ExpansionInfo { + fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo { ExpansionInfo::new(db, self) } - fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_custom_derive(&self, db: &dyn ExpandDatabase) -> bool { matches!( db.lookup_intern_macro_call(self.macro_call_id).def.kind, MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _) ) } - fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_builtin_derive(&self, db: &dyn ExpandDatabase) -> bool { matches!( db.lookup_intern_macro_call(self.macro_call_id).def.kind, MacroDefKind::BuiltInDerive(..) ) } - fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_include_macro(&self, db: &dyn ExpandDatabase) -> bool { db.lookup_intern_macro_call(self.macro_call_id).def.is_include() } - fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_eager(&self, db: &dyn ExpandDatabase) -> bool { let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id); matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) } - fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_attr_macro(&self, db: &dyn ExpandDatabase) -> bool { let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id); matches!(loc.kind, MacroCallKind::Attr { .. 
}) } - fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool { let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id); loc.def.is_attribute_derive() } @@ -346,15 +394,15 @@ impl MacroFileIdExt for MacroFileId { impl MacroDefId { pub fn as_lazy_macro( self, - db: &dyn db::ExpandDatabase, + db: &dyn ExpandDatabase, krate: CrateId, kind: MacroCallKind, - call_site: SyntaxContextId, + call_site: Span, ) -> MacroCallId { - db.intern_macro_call(MacroCallLoc { def: self, krate, eager: None, kind, call_site }) + MacroCallLoc { def: self, krate, eager: None, kind, call_site }.intern(db) } - pub fn definition_range(&self, db: &dyn db::ExpandDatabase) -> InFile { + pub fn definition_range(&self, db: &dyn ExpandDatabase) -> InFile { match self.kind { MacroDefKind::Declarative(id) | MacroDefKind::BuiltIn(_, id) @@ -419,19 +467,7 @@ impl MacroDefId { } impl MacroCallLoc { - pub fn span(&self, db: &dyn db::ExpandDatabase) -> SpanData { - let ast_id = self.kind.erased_ast_id(); - let file_id = self.kind.file_id(); - let range = db.ast_id_map(file_id).get_erased(ast_id).text_range(); - match file_id.repr() { - HirFileIdRepr::FileId(file_id) => db.real_span_map(file_id).span_for_range(range), - HirFileIdRepr::MacroFile(m) => { - db.parse_macro_expansion(m).value.1.span_at(range.start()) - } - } - } - - pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> InFile { + pub fn to_node(&self, db: &dyn ExpandDatabase) -> InFile { match self.kind { MacroCallKind::FnLike { ast_id, .. } => { ast_id.with_value(ast_id.to_node(db).syntax().clone()) @@ -498,7 +534,7 @@ impl MacroCallKind { } } - fn erased_ast_id(&self) -> ErasedFileAstId { + pub fn erased_ast_id(&self) -> ErasedFileAstId { match *self { MacroCallKind::FnLike { ast_id: InFile { value, .. }, .. } => value.erase(), MacroCallKind::Derive { ast_id: InFile { value, .. }, .. } => value.erase(), @@ -509,7 +545,7 @@ impl MacroCallKind { /// Returns the original file range that best describes the location of this macro call. /// /// Unlike `MacroCallKind::original_call_range`, this also spans the item of attributes and derives. - pub fn original_call_range_with_body(self, db: &dyn db::ExpandDatabase) -> FileRange { + pub fn original_call_range_with_body(self, db: &dyn ExpandDatabase) -> FileRange { let mut kind = self; let file_id = loop { match kind.file_id().repr() { @@ -534,7 +570,7 @@ impl MacroCallKind { /// Here we try to roughly match what rustc does to improve diagnostics: fn-like macros /// get the whole `ast::MacroCall`, attribute macros get the attribute's range, and derives /// get only the specific derive that is being referred to. - pub fn original_call_range(self, db: &dyn db::ExpandDatabase) -> FileRange { + pub fn original_call_range(self, db: &dyn ExpandDatabase) -> FileRange { let mut kind = self; let file_id = loop { match kind.file_id().repr() { @@ -573,7 +609,7 @@ impl MacroCallKind { FileRange { range, file_id } } - fn arg(&self, db: &dyn db::ExpandDatabase) -> InFile> { + fn arg(&self, db: &dyn ExpandDatabase) -> InFile> { match self { MacroCallKind::FnLike { ast_id, .. } => { ast_id.to_in_file_node(db).map(|it| Some(it.token_tree()?.syntax().clone())) @@ -617,7 +653,7 @@ impl ExpansionInfo { /// Maps the passed in file range down into a macro expansion if it is the input to a macro call. 
pub fn map_range_down<'a>( &'a self, - span: SpanData, + span: Span, ) -> Option + 'a>> { let tokens = self .exp_map @@ -630,7 +666,7 @@ impl ExpansionInfo { /// Looks up the span at the given offset. pub fn span_for_offset( &self, - db: &dyn db::ExpandDatabase, + db: &dyn ExpandDatabase, offset: TextSize, ) -> (FileRange, SyntaxContextId) { debug_assert!(self.expanded.value.text_range().contains(offset)); @@ -646,12 +682,12 @@ impl ExpansionInfo { /// Maps up the text range out of the expansion hierarchy back into the original file its from. pub fn map_node_range_up( &self, - db: &dyn db::ExpandDatabase, + db: &dyn ExpandDatabase, range: TextRange, ) -> Option<(FileRange, SyntaxContextId)> { debug_assert!(self.expanded.value.text_range().contains_range(range)); let mut spans = self.exp_map.spans_for_range(range); - let SpanData { range, anchor, ctx } = spans.next()?; + let Span { range, anchor, ctx } = spans.next()?; let mut start = range.start(); let mut end = range.end(); @@ -676,7 +712,7 @@ impl ExpansionInfo { /// Maps up the text range out of the expansion into is macro call. pub fn map_range_up_once( &self, - db: &dyn db::ExpandDatabase, + db: &dyn ExpandDatabase, token: TextRange, ) -> InFile> { debug_assert!(self.expanded.value.text_range().contains_range(token)); @@ -705,7 +741,7 @@ impl ExpansionInfo { } } - pub fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo { + pub fn new(db: &dyn ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); let arg_tt = loc.kind.arg(db); @@ -718,7 +754,7 @@ impl ExpansionInfo { let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| { ( Arc::new(tt::Subtree { - delimiter: tt::Delimiter::DUMMY_INVISIBLE, + delimiter: tt::Delimiter::invisible_spanned(loc.call_site), token_trees: Vec::new(), }), SyntaxFixupUndoInfo::NONE, diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs index 9534b5039f682..30b8c189f52dd 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs @@ -9,10 +9,11 @@ use crate::{ db::ExpandDatabase, hygiene::{marks_rev, SyntaxContextExt, Transparency}, name::{known, AsName, Name}, - span::SpanMapRef, + span_map::SpanMapRef, }; -use base_db::{span::SyntaxContextId, CrateId}; +use base_db::CrateId; use smallvec::SmallVec; +use span::SyntaxContextId; use syntax::{ast, AstNode}; #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs index a321f94cd7553..3d8d01e25566c 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs @@ -318,6 +318,10 @@ pub mod known { new_lower_hex, new_upper_hex, from_usize, + panic_2015, + panic_2021, + unreachable_2015, + unreachable_2021, // Components of known path (type name) Iterator, IntoIterator, @@ -384,6 +388,7 @@ pub mod known { log_syntax, module_path, option_env, + quote, std_panic, stringify, trace_macros, diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs index de577796831fd..25c78fade824f 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs @@ 
-1,18 +1,64 @@ //! Proc Macro Expander stub -use base_db::{span::SpanData, CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind}; +use core::fmt; +use std::{panic::RefUnwindSafe, sync}; + +use base_db::{CrateId, Env}; +use rustc_hash::FxHashMap; +use span::Span; use stdx::never; +use syntax::SmolStr; use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult}; +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct ProcMacroId(pub u32); + +#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] +pub enum ProcMacroKind { + CustomDerive, + FuncLike, + Attr, +} + +pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe { + fn expand( + &self, + subtree: &tt::Subtree, + attrs: Option<&tt::Subtree>, + env: &Env, + def_site: Span, + call_site: Span, + mixed_site: Span, + ) -> Result; +} + +#[derive(Debug)] +pub enum ProcMacroExpansionError { + Panic(String), + /// Things like "proc macro server was killed by OOM". + System(String), +} + +pub type ProcMacroLoadResult = Result, String>; + +pub type ProcMacros = FxHashMap; + +#[derive(Debug, Clone)] +pub struct ProcMacro { + pub name: SmolStr, + pub kind: ProcMacroKind, + pub expander: sync::Arc, +} + #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] -pub struct ProcMacroExpander { +pub struct CustomProcMacroExpander { proc_macro_id: ProcMacroId, } const DUMMY_ID: u32 = !0; -impl ProcMacroExpander { +impl CustomProcMacroExpander { pub fn new(proc_macro_id: ProcMacroId) -> Self { assert_ne!(proc_macro_id.0, DUMMY_ID); Self { proc_macro_id } @@ -33,9 +79,9 @@ impl ProcMacroExpander { calling_crate: CrateId, tt: &tt::Subtree, attr_arg: Option<&tt::Subtree>, - def_site: SpanData, - call_site: SpanData, - mixed_site: SpanData, + def_site: Span, + call_site: Span, + mixed_site: Span, ) -> ExpandResult { match self.proc_macro_id { ProcMacroId(DUMMY_ID) => ExpandResult::new( diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs b/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs index acbde26c8ddf0..9bdd75f9d2240 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs @@ -1,6 +1,8 @@ //! A simplified version of quote-crate like quasi quote macro -use base_db::span::SpanData; +use span::Span; + +use crate::name::Name; // A helper macro quote macro // FIXME: @@ -130,12 +132,12 @@ macro_rules! 
quote { } pub(crate) trait IntoTt { - fn to_subtree(self, span: SpanData) -> crate::tt::Subtree; + fn to_subtree(self, span: Span) -> crate::tt::Subtree; fn to_tokens(self) -> Vec; } impl IntoTt for Vec { - fn to_subtree(self, span: SpanData) -> crate::tt::Subtree { + fn to_subtree(self, span: Span) -> crate::tt::Subtree { crate::tt::Subtree { delimiter: crate::tt::Delimiter::invisible_spanned(span), token_trees: self, @@ -148,7 +150,7 @@ impl IntoTt for Vec { } impl IntoTt for crate::tt::Subtree { - fn to_subtree(self, _: SpanData) -> crate::tt::Subtree { + fn to_subtree(self, _: Span) -> crate::tt::Subtree { self } @@ -158,39 +160,39 @@ impl IntoTt for crate::tt::Subtree { } pub(crate) trait ToTokenTree { - fn to_token(self, span: SpanData) -> crate::tt::TokenTree; + fn to_token(self, span: Span) -> crate::tt::TokenTree; } impl ToTokenTree for crate::tt::TokenTree { - fn to_token(self, _: SpanData) -> crate::tt::TokenTree { + fn to_token(self, _: Span) -> crate::tt::TokenTree { self } } impl ToTokenTree for &crate::tt::TokenTree { - fn to_token(self, _: SpanData) -> crate::tt::TokenTree { + fn to_token(self, _: Span) -> crate::tt::TokenTree { self.clone() } } impl ToTokenTree for crate::tt::Subtree { - fn to_token(self, _: SpanData) -> crate::tt::TokenTree { + fn to_token(self, _: Span) -> crate::tt::TokenTree { self.into() } } macro_rules! impl_to_to_tokentrees { - ($($span:ident: $ty:ty => $this:ident $im:block);*) => { + ($($span:ident: $ty:ty => $this:ident $im:block;)*) => { $( impl ToTokenTree for $ty { - fn to_token($this, $span: SpanData) -> crate::tt::TokenTree { + fn to_token($this, $span: Span) -> crate::tt::TokenTree { let leaf: crate::tt::Leaf = $im.into(); leaf.into() } } impl ToTokenTree for &$ty { - fn to_token($this, $span: SpanData) -> crate::tt::TokenTree { + fn to_token($this, $span: Span) -> crate::tt::TokenTree { let leaf: crate::tt::Leaf = $im.clone().into(); leaf.into() } @@ -209,20 +211,19 @@ impl_to_to_tokentrees! { _span: crate::tt::Ident => self { self }; _span: crate::tt::Punct => self { self }; span: &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}}; - span: String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}} + span: String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}}; + span: Name => self { crate::tt::Ident{text: self.to_smol_str(), span}}; } #[cfg(test)] mod tests { use crate::tt; - use base_db::{ - span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, - FileId, - }; + use base_db::FileId; use expect_test::expect; + use span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}; use syntax::{TextRange, TextSize}; - const DUMMY: tt::SpanData = tt::SpanData { + const DUMMY: tt::Span = tt::Span { range: TextRange::empty(TextSize::new(0)), anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID }, ctx: SyntaxContextId::ROOT, diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/span.rs b/src/tools/rust-analyzer/crates/hir-expand/src/span.rs deleted file mode 100644 index fe476a40febf6..0000000000000 --- a/src/tools/rust-analyzer/crates/hir-expand/src/span.rs +++ /dev/null @@ -1,124 +0,0 @@ -//! Spanmaps allow turning absolute ranges into relative ranges for incrementality purposes as well -//! as associating spans with text ranges in a particular file. 
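The module documentation of the span-map file being removed here describes its core job: turning absolute text ranges into ranges relative to the nearest preceding item anchor, so edits elsewhere in a file do not invalidate spans for incremental reuse. Below is a minimal, self-contained sketch of that anchoring idea over a sorted list of (offset, anchor) pairs, using only the standard library; the u32 offsets and the AnchoredRange type are simplified stand-ins for illustration, not rust-analyzer's real SpanData.

// Rebase an absolute range onto the closest preceding anchor, mirroring the
// "sorted vec over TextSize" invariant documented on RealSpanMap below.
#[derive(Debug, PartialEq)]
struct AnchoredRange {
    anchor_idx: usize, // which (offset, item) pair the range was rebased to
    start: u32,        // range start relative to that anchor
    end: u32,          // range end relative to that anchor
}

fn anchor_range(pairs: &[(u32, usize)], start: u32, end: u32) -> AnchoredRange {
    // `pairs` is sorted by offset and always starts with the root at offset 0,
    // so the partition point is never 0 and the subtractions cannot underflow.
    let idx = pairs.partition_point(|&(offset, _)| offset <= start);
    let (offset, anchor_idx) = pairs[idx - 1];
    AnchoredRange { anchor_idx, start: start - offset, end: end - offset }
}

fn main() {
    // Three items starting at offsets 0, 40 and 100 of a file.
    let pairs: [(u32, usize); 3] = [(0, 0), (40, 1), (100, 2)];
    // A range inside the second item becomes relative to offset 40.
    let anchored = anchor_range(&pairs, 55, 60);
    assert_eq!(anchored, AnchoredRange { anchor_idx: 1, start: 15, end: 20 });
}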
-use base_db::{ - span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, - FileId, -}; -use syntax::{ast::HasModuleItem, AstNode, TextRange, TextSize}; -use triomphe::Arc; - -use crate::db::ExpandDatabase; - -pub type ExpansionSpanMap = mbe::SpanMap; - -/// Spanmap for a macro file or a real file -#[derive(Clone, Debug, PartialEq, Eq)] -pub enum SpanMap { - /// Spanmap for a macro file - ExpansionSpanMap(Arc), - /// Spanmap for a real file - RealSpanMap(Arc), -} - -#[derive(Copy, Clone)] -pub enum SpanMapRef<'a> { - /// Spanmap for a macro file - ExpansionSpanMap(&'a ExpansionSpanMap), - /// Spanmap for a real file - RealSpanMap(&'a RealSpanMap), -} - -impl mbe::SpanMapper for SpanMap { - fn span_for(&self, range: TextRange) -> SpanData { - self.span_for_range(range) - } -} -impl mbe::SpanMapper for SpanMapRef<'_> { - fn span_for(&self, range: TextRange) -> SpanData { - self.span_for_range(range) - } -} -impl mbe::SpanMapper for RealSpanMap { - fn span_for(&self, range: TextRange) -> SpanData { - self.span_for_range(range) - } -} - -impl SpanMap { - pub fn span_for_range(&self, range: TextRange) -> SpanData { - match self { - Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()), - Self::RealSpanMap(span_map) => span_map.span_for_range(range), - } - } - - pub fn as_ref(&self) -> SpanMapRef<'_> { - match self { - Self::ExpansionSpanMap(span_map) => SpanMapRef::ExpansionSpanMap(span_map), - Self::RealSpanMap(span_map) => SpanMapRef::RealSpanMap(span_map), - } - } -} - -impl SpanMapRef<'_> { - pub fn span_for_range(self, range: TextRange) -> SpanData { - match self { - Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()), - Self::RealSpanMap(span_map) => span_map.span_for_range(range), - } - } -} - -#[derive(PartialEq, Eq, Hash, Debug)] -pub struct RealSpanMap { - file_id: FileId, - /// Invariant: Sorted vec over TextSize - // FIXME: SortedVec<(TextSize, ErasedFileAstId)>? - pairs: Box<[(TextSize, ErasedFileAstId)]>, - end: TextSize, -} - -impl RealSpanMap { - /// Creates a real file span map that returns absolute ranges (relative ranges to the root ast id). 
- pub fn absolute(file_id: FileId) -> Self { - RealSpanMap { - file_id, - pairs: Box::from([(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]), - end: TextSize::new(!0), - } - } - - pub fn from_file(db: &dyn ExpandDatabase, file_id: FileId) -> Self { - let mut pairs = vec![(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]; - let ast_id_map = db.ast_id_map(file_id.into()); - let tree = db.parse(file_id).tree(); - pairs - .extend(tree.items().map(|item| { - (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase()) - })); - RealSpanMap { - file_id, - pairs: pairs.into_boxed_slice(), - end: tree.syntax().text_range().end(), - } - } - - pub fn span_for_range(&self, range: TextRange) -> SpanData { - assert!( - range.end() <= self.end, - "range {range:?} goes beyond the end of the file {:?}", - self.end - ); - let start = range.start(); - let idx = self - .pairs - .binary_search_by(|&(it, _)| it.cmp(&start).then(std::cmp::Ordering::Less)) - .unwrap_err(); - let (offset, ast_id) = self.pairs[idx - 1]; - SpanData { - range: range - offset, - anchor: SpanAnchor { file_id: self.file_id, ast_id }, - ctx: SyntaxContextId::ROOT, - } - } -} diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs b/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs new file mode 100644 index 0000000000000..4ec6e657f9ed9 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs @@ -0,0 +1,65 @@ +//! Span maps for real files and macro expansions. +use span::Span; +use syntax::TextRange; +use triomphe::Arc; + +pub use span::RealSpanMap; + +pub type ExpansionSpanMap = span::SpanMap; + +/// Spanmap for a macro file or a real file +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum SpanMap { + /// Spanmap for a macro file + ExpansionSpanMap(Arc), + /// Spanmap for a real file + RealSpanMap(Arc), +} + +#[derive(Copy, Clone)] +pub enum SpanMapRef<'a> { + /// Spanmap for a macro file + ExpansionSpanMap(&'a ExpansionSpanMap), + /// Spanmap for a real file + RealSpanMap(&'a RealSpanMap), +} + +impl mbe::SpanMapper for SpanMap { + fn span_for(&self, range: TextRange) -> Span { + self.span_for_range(range) + } +} +impl mbe::SpanMapper for SpanMapRef<'_> { + fn span_for(&self, range: TextRange) -> Span { + self.span_for_range(range) + } +} + +impl SpanMap { + pub fn span_for_range(&self, range: TextRange) -> Span { + match self { + // FIXME: Is it correct for us to only take the span at the start? This feels somewhat + // wrong. The context will be right, but the range could be considered wrong. 
See + // https://github.com/rust-lang/rust/issues/23480, we probably want to fetch the span at + // the start and end, then merge them like rustc does in `Span::to + Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()), + Self::RealSpanMap(span_map) => span_map.span_for_range(range), + } + } + + pub fn as_ref(&self) -> SpanMapRef<'_> { + match self { + Self::ExpansionSpanMap(span_map) => SpanMapRef::ExpansionSpanMap(span_map), + Self::RealSpanMap(span_map) => SpanMapRef::RealSpanMap(span_map), + } + } +} + +impl SpanMapRef<'_> { + pub fn span_for_range(self, range: TextRange) -> Span { + match self { + Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()), + Self::RealSpanMap(span_map) => span_map.span_for_range(range), + } + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml index bbcb76a43ffef..1873e7bfe6a5c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml @@ -14,14 +14,14 @@ doctest = false [dependencies] cov-mark = "2.0.0-pre.1" itertools.workspace = true -arrayvec = "0.7.2" +arrayvec.workspace = true bitflags.workspace = true smallvec.workspace = true ena = "0.14.0" either.workspace = true oorandom = "11.1.3" tracing.workspace = true -rustc-hash = "1.1.0" +rustc-hash.workspace = true scoped-tls = "1.0.0" chalk-solve = { version = "0.95.0", default-features = false } chalk-ir = "0.95.0" @@ -54,6 +54,10 @@ project-model = { path = "../project-model" } # local deps test-utils.workspace = true +test-fixture.workspace = true [features] in-rust-tree = ["rustc-dependencies/in-rust-tree"] + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs index b395e7f4a8135..ac82208708aea 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs @@ -1,6 +1,7 @@ -use base_db::{fixture::WithFixture, FileId}; +use base_db::FileId; use chalk_ir::Substitution; use hir_def::db::DefDatabase; +use test_fixture::WithFixture; use test_utils::skip_slow_tests; use crate::{ diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 6f724e4587440..8053300ad220c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -217,6 +217,10 @@ pub enum InferenceDiagnostic { name: Name, /// Contains the type the field resolves to field_with_same_name: Option, + assoc_func_with_same_name: Option, + }, + UnresolvedAssocItem { + id: ExprOrPatId, }, // FIXME: This should be emitted in body lowering BreakOutsideOfLoop { @@ -1200,6 +1204,12 @@ impl<'a> InferenceContext<'a> { path: &ModPath, ) -> (Ty, Option) { let remaining = unresolved.map(|it| path.segments()[it..].len()).filter(|it| it > &0); + let ty = match ty.kind(Interner) { + TyKind::Alias(AliasTy::Projection(proj_ty)) => { + self.db.normalize_projection(proj_ty.clone(), self.table.trait_env.clone()) + } + _ => ty, + }; match remaining { None => { let variant = ty.as_adt().and_then(|(adt_id, _)| match adt_id { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs index a5e77a12d8c50..84954ca7e9043 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs +++ 
b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs @@ -1575,11 +1575,30 @@ impl InferenceContext<'_> { } None => None, }; + + let assoc_func_with_same_name = method_resolution::iterate_method_candidates( + &canonicalized_receiver.value, + self.db, + self.table.trait_env.clone(), + self.get_traits_in_scope().as_ref().left_or_else(|&it| it), + VisibleFromModule::Filter(self.resolver.module()), + Some(method_name), + method_resolution::LookupMode::Path, + |_ty, item, visible| { + if visible { + Some(item) + } else { + None + } + }, + ); + self.result.diagnostics.push(InferenceDiagnostic::UnresolvedMethodCall { expr: tgt_expr, receiver: receiver_ty.clone(), name: method_name.clone(), field_with_same_name: field_with_same_name_exists, + assoc_func_with_same_name, }); ( receiver_ty, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs index 49fb78f67a656..e61a070265a4b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs @@ -340,6 +340,9 @@ impl InferenceContext<'_> { }, ); let res = res.or(not_visible); + if res.is_none() { + self.push_diagnostic(InferenceDiagnostic::UnresolvedAssocItem { id }); + } let (item, visible) = res?; let (def, container) = match item { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs index 5e3a86c80e3fc..9937113685ca0 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs @@ -1,9 +1,9 @@ use std::collections::HashMap; -use base_db::fixture::WithFixture; use chalk_ir::{AdtId, TyKind}; use either::Either; use hir_def::db::DefDatabase; +use test_fixture::WithFixture; use triomphe::Arc; use crate::{ diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs index ff30dc6dade55..b0f929279a5c7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs @@ -1,6 +1,7 @@ -use base_db::{fixture::WithFixture, FileId}; +use base_db::FileId; use hir_def::db::DefDatabase; use syntax::{TextRange, TextSize}; +use test_fixture::WithFixture; use crate::{db::HirDatabase, test_db::TestDB, Interner, Substitution}; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs index 1446e83fa8876..c8cc61cc21b48 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs @@ -12,7 +12,7 @@ mod diagnostics; use std::{collections::HashMap, env}; -use base_db::{fixture::WithFixture, FileRange, SourceDatabaseExt}; +use base_db::{FileRange, SourceDatabaseExt}; use expect_test::Expect; use hir_def::{ body::{Body, BodySourceMap, SyntheticSyntax}, @@ -30,6 +30,7 @@ use syntax::{ ast::{self, AstNode, HasName}, SyntaxNode, }; +use test_fixture::WithFixture; use tracing_subscriber::{layer::SubscriberExt, Registry}; use tracing_tree::HierarchicalLayer; use triomphe::Arc; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs index 28e84e480d775..82d934009f36e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs @@ -1,4 +1,5 @@ -use 
base_db::{fixture::WithFixture, SourceDatabaseExt}; +use base_db::SourceDatabaseExt; +use test_fixture::WithFixture; use triomphe::Arc; use crate::{db::HirDatabase, test_db::TestDB}; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs index 7234af2d68348..548f782f4f2eb 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs @@ -1154,6 +1154,40 @@ fn main() { ); } +#[test] +fn generic_alias_with_qualified_path() { + check_types( + r#" +type Wrap = T; + +struct S; + +trait Schematic { + type Props; +} + +impl Schematic for S { + type Props = X; +} + +enum X { + A { cool: u32, stuff: u32 }, + B, +} + +fn main() { + let wrapped = Wrap::<::Props>::A { + cool: 100, + stuff: 100, + }; + + if let Wrap::<::Props>::A { cool, ..} = &wrapped {} + //^^^^ &u32 +} +"#, + ); +} + #[test] fn type_mismatch_pat_const_reference() { check_no_mismatches( diff --git a/src/tools/rust-analyzer/crates/hir/Cargo.toml b/src/tools/rust-analyzer/crates/hir/Cargo.toml index 4c1dfbc294e5b..e4e4bcea6108d 100644 --- a/src/tools/rust-analyzer/crates/hir/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir/Cargo.toml @@ -12,9 +12,9 @@ rust-version.workspace = true doctest = false [dependencies] -rustc-hash = "1.1.0" +rustc-hash.workspace = true either.workspace = true -arrayvec = "0.7.2" +arrayvec.workspace = true itertools.workspace = true smallvec.workspace = true triomphe.workspace = true @@ -33,3 +33,6 @@ tt.workspace = true [features] in-rust-tree = [] + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/hir/src/attrs.rs b/src/tools/rust-analyzer/crates/hir/src/attrs.rs index 185853353181d..d60d20f5b7eeb 100644 --- a/src/tools/rust-analyzer/crates/hir/src/attrs.rs +++ b/src/tools/rust-analyzer/crates/hir/src/attrs.rs @@ -11,7 +11,7 @@ use hir_def::{ }; use hir_expand::{ name::Name, - span::{RealSpanMap, SpanMapRef}, + span_map::{RealSpanMap, SpanMapRef}, }; use hir_ty::db::HirDatabase; use syntax::{ast, AstNode}; diff --git a/src/tools/rust-analyzer/crates/hir/src/db.rs b/src/tools/rust-analyzer/crates/hir/src/db.rs index d98e3decd21ed..7204868464b30 100644 --- a/src/tools/rust-analyzer/crates/hir/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir/src/db.rs @@ -24,6 +24,6 @@ pub use hir_def::db::{ pub use hir_expand::db::{ AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage, ExpandProcMacroQuery, InternMacroCallQuery, InternSyntaxContextQuery, MacroArgQuery, - ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, RealSpanMapQuery, + ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, ProcMacrosQuery, RealSpanMapQuery, }; pub use hir_ty::db::*; diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs index 1cb36f9b021fe..bf29a53913d1d 100644 --- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs @@ -8,7 +8,7 @@ pub use hir_ty::diagnostics::{CaseType, IncorrectCase}; use base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; use either::Either; -use hir_def::path::ModPath; +use hir_def::{path::ModPath, AssocItemId}; use hir_expand::{name::Name, HirFileId, InFile}; use syntax::{ast, AstPtr, SyntaxError, SyntaxNodePtr, TextRange}; @@ -62,6 +62,7 @@ diagnostics![ UndeclaredLabel, UnimplementedBuiltinMacro, UnreachableLabel, + UnresolvedAssocItem, 
UnresolvedExternCrate, UnresolvedField, UnresolvedImport, @@ -215,6 +216,12 @@ pub struct UnresolvedMethodCall { pub receiver: Type, pub name: Name, pub field_with_same_name: Option, + pub assoc_func_with_same_name: Option, +} + +#[derive(Debug)] +pub struct UnresolvedAssocItem { + pub expr_or_pat: InFile>>>, } #[derive(Debug)] diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index e0230fa3761b2..09b56e1382419 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -37,7 +37,7 @@ mod display; use std::{iter, mem::discriminant, ops::ControlFlow}; use arrayvec::ArrayVec; -use base_db::{CrateDisplayName, CrateId, CrateOrigin, Edition, FileId, ProcMacroKind}; +use base_db::{CrateDisplayName, CrateId, CrateOrigin, Edition, FileId}; use either::Either; use hir_def::{ body::{BodyDiagnostic, SyntheticSyntax}, @@ -47,7 +47,6 @@ use hir_def::{ item_tree::ItemTreeNode, lang_item::LangItemTarget, layout::{self, ReprOptions, TargetDataLayout}, - macro_id_to_def_id, nameres::{self, diagnostics::DefDiagnostic}, path::ImportAlias, per_ns::PerNs, @@ -59,7 +58,7 @@ use hir_def::{ Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, }; -use hir_expand::{attrs::collect_attrs, name::name, MacroCallKind}; +use hir_expand::{attrs::collect_attrs, name::name, proc_macro::ProcMacroKind, MacroCallKind}; use hir_ty::{ all_super_traits, autoderef, check_orphan_rules, consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt}, @@ -125,8 +124,10 @@ pub use { }, hir_expand::{ attrs::{Attr, AttrId}, + change::Change, hygiene::{marks_rev, SyntaxContextExt}, name::{known, Name}, + proc_macro::ProcMacros, tt, ExpandResult, HirFileId, HirFileIdExt, InFile, InMacroFile, InRealFile, MacroFileId, MacroFileIdExt, }, @@ -146,7 +147,7 @@ use { hir_def::path::Path, hir_expand::{ name::AsName, - span::{ExpansionSpanMap, RealSpanMap, SpanMap, SpanMapRef}, + span_map::{ExpansionSpanMap, RealSpanMap, SpanMap, SpanMapRef}, }, }; @@ -808,7 +809,7 @@ impl Module { } fn emit_macro_def_diagnostics(db: &dyn HirDatabase, acc: &mut Vec, m: Macro) { - let id = macro_id_to_def_id(db.upcast(), m.id); + let id = db.macro_def(m.id); if let hir_expand::db::TokenExpander::DeclarativeMacro(expander) = db.macro_expander(id) { if let Some(e) = expander.mac.err() { let Some(ast) = id.ast_id().left() else { @@ -1679,6 +1680,7 @@ impl DefWithBody { receiver, name, field_with_same_name, + assoc_func_with_same_name, } => { let expr = expr_syntax(*expr); @@ -1690,10 +1692,18 @@ impl DefWithBody { field_with_same_name: field_with_same_name .clone() .map(|ty| Type::new(db, DefWithBodyId::from(self), ty)), + assoc_func_with_same_name: assoc_func_with_same_name.clone(), } .into(), ) } + &hir_ty::InferenceDiagnostic::UnresolvedAssocItem { id } => { + let expr_or_pat = match id { + ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left), + ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right), + }; + acc.push(UnresolvedAssocItem { expr_or_pat }.into()) + } &hir_ty::InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break, @@ -2784,9 +2794,13 @@ impl AsAssocItem for DefWithBody { } } -fn as_assoc_item(db: &dyn HirDatabase, ctor: CTOR, id: ID) -> Option +fn as_assoc_item<'db, ID, DEF, CTOR, AST>( + db: &(dyn HirDatabase + 'db), + ctor: CTOR, + id: ID, +) -> Option where - ID: Lookup>, + ID: Lookup = dyn DefDatabase + 
'db, Data = AssocItemLoc>, DEF: From, CTOR: FnOnce(DEF) -> AssocItem, AST: ItemTreeNode, @@ -3520,7 +3534,7 @@ impl Impl { let src = self.source(db)?; let macro_file = src.file_id.macro_file()?; - let loc = db.lookup_intern_macro_call(macro_file.macro_call_id); + let loc = macro_file.macro_call_id.lookup(db.upcast()); let (derive_attr, derive_index) = match loc.kind { MacroCallKind::Derive { ast_id, derive_attr_index, derive_index } => { let module_id = self.id.lookup(db.upcast()).container; @@ -4652,6 +4666,9 @@ impl Callable { pub fn return_type(&self) -> Type { self.ty.derived(self.sig.ret().clone()) } + pub fn sig(&self) -> &CallableSig { + &self.sig + } } fn closure_source(db: &dyn HirDatabase, closure: ClosureId) -> Option { diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index a03ff22074577..fdc604a006fc7 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -13,7 +13,6 @@ use either::Either; use hir_def::{ hir::Expr, lower::LowerCtx, - macro_id_to_def_id, nameres::MacroSubNs, resolver::{self, HasResolver, Resolver, TypeNs}, type_ref::Mutability, @@ -40,8 +39,8 @@ use crate::{ source_analyzer::{resolve_hir_path, SourceAnalyzer}, Access, Adjust, Adjustment, AutoBorrow, BindingMode, BuiltinAttr, Callable, ConstParam, Crate, DeriveHelper, Field, Function, HasSource, HirFileId, Impl, InFile, Label, LifetimeParam, Local, - Macro, Module, ModuleDef, Name, OverloadedDeref, Path, ScopeDef, ToolModule, Trait, Type, - TypeAlias, TypeParam, VariantDef, + Macro, Module, ModuleDef, Name, OverloadedDeref, Path, ScopeDef, Struct, ToolModule, Trait, + Type, TypeAlias, TypeParam, VariantDef, }; pub enum DescendPreference { @@ -229,6 +228,14 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { pub fn to_module_defs(&self, file: FileId) -> impl Iterator { self.imp.to_module_def(file) } + + pub fn to_struct_def(&self, s: &ast::Struct) -> Option { + self.imp.to_def(s).map(Struct::from) + } + + pub fn to_impl_def(&self, i: &ast::Impl) -> Option { + self.imp.to_def(i).map(Impl::from) + } } impl<'db> SemanticsImpl<'db> { @@ -341,9 +348,7 @@ impl<'db> SemanticsImpl<'db> { let macro_call = InFile::new(file_id, actual_macro_call); let krate = resolver.krate(); let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| { - resolver - .resolve_path_as_macro(self.db.upcast(), &path, Some(MacroSubNs::Bang)) - .map(|(it, _)| macro_id_to_def_id(self.db.upcast(), it)) + resolver.resolve_path_as_macro_def(self.db.upcast(), &path, Some(MacroSubNs::Bang)) })?; hir_expand::db::expand_speculative( self.db.upcast(), @@ -512,8 +517,7 @@ impl<'db> SemanticsImpl<'db> { } /// Descend the token into its macro call if it is part of one, returning the tokens in the - /// expansion that it is associated with. If `offset` points into the token's range, it will - /// be considered for the mapping in case of inline format args. + /// expansion that it is associated with. pub fn descend_into_macros( &self, mode: DescendPreference, @@ -674,7 +678,7 @@ impl<'db> SemanticsImpl<'db> { _ => 0, }; // FIXME: here, the attribute's text range is used to strip away all - // entries from the start of the attribute "list" up the the invoking + // entries from the start of the attribute "list" up the invoking // attribute. But in // ``` // mod foo { @@ -850,7 +854,7 @@ impl<'db> SemanticsImpl<'db> { /// Attempts to map the node out of macro expanded files. 
/// This only work for attribute expansions, as other ones do not have nodes as input. pub fn original_ast_node(&self, node: N) -> Option { - self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map( + self.wrap_node_infile(node).original_ast_node_rooted(self.db.upcast()).map( |InRealFile { file_id, value }| { self.cache(find_root(value.syntax()), file_id.into()); value diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs index d05118bbc28b4..54b4d81012f3e 100644 --- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs +++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs @@ -16,7 +16,6 @@ use hir_def::{ hir::{BindingId, ExprId, Pat, PatId}, lang_item::LangItem, lower::LowerCtx, - macro_id_to_def_id, nameres::MacroSubNs, path::{ModPath, Path, PathKind}, resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs}, @@ -771,9 +770,7 @@ impl SourceAnalyzer { ) -> Option { let krate = self.resolver.krate(); let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| { - self.resolver - .resolve_path_as_macro(db.upcast(), &path, Some(MacroSubNs::Bang)) - .map(|(it, _)| macro_id_to_def_id(db.upcast(), it)) + self.resolver.resolve_path_as_macro_def(db.upcast(), &path, Some(MacroSubNs::Bang)) })?; // why the 64? Some(macro_call_id.as_macro_file()).filter(|it| it.expansion_level(db.upcast()) < 64) @@ -1163,9 +1160,40 @@ fn resolve_hir_path_qualifier( resolver: &Resolver, path: &Path, ) -> Option { - resolver - .resolve_path_in_type_ns_fully(db.upcast(), &path) - .map(|ty| match ty { + (|| { + let (ty, unresolved) = match path.type_anchor() { + Some(type_ref) => { + let (_, res) = + TyLoweringContext::new_maybe_unowned(db, resolver, resolver.type_owner()) + .lower_ty_ext(type_ref); + res.map(|ty_ns| (ty_ns, path.segments().first())) + } + None => { + let (ty, remaining_idx, _) = resolver.resolve_path_in_type_ns(db.upcast(), path)?; + match remaining_idx { + Some(remaining_idx) => { + if remaining_idx + 1 == path.segments().len() { + Some((ty, path.segments().last())) + } else { + None + } + } + None => Some((ty, None)), + } + } + }?; + + // If we are in a TypeNs for a Trait, and we have an unresolved name, try to resolve it as a type + // within the trait's associated types. + if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) { + if let Some(type_alias_id) = + db.trait_data(trait_id).associated_type_by_name(unresolved.name) + { + return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into())); + } + } + + let res = match ty { TypeNs::SelfType(it) => PathResolution::SelfType(it.into()), TypeNs::GenericParam(id) => PathResolution::TypeParam(id.into()), TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => { @@ -1176,11 +1204,28 @@ fn resolve_hir_path_qualifier( TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()), TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()), TypeNs::TraitAliasId(it) => PathResolution::Def(TraitAlias::from(it).into()), - }) - .or_else(|| { - resolver - .resolve_module_path_in_items(db.upcast(), path.mod_path()?) 
- .take_types() - .map(|it| PathResolution::Def(it.into())) - }) + }; + match unresolved { + Some(unresolved) => resolver + .generic_def() + .and_then(|def| { + hir_ty::associated_type_shorthand_candidates( + db, + def, + res.in_type_ns()?, + |name, id| (name == unresolved.name).then_some(id), + ) + }) + .map(TypeAlias::from) + .map(Into::into) + .map(PathResolution::Def), + None => Some(res), + } + })() + .or_else(|| { + resolver + .resolve_module_path_in_items(db.upcast(), path.mod_path()?) + .take_types() + .map(|it| PathResolution::Def(it.into())) + }) } diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs index a2a30edeb0397..4da0dfba6755f 100644 --- a/src/tools/rust-analyzer/crates/hir/src/symbols.rs +++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs @@ -2,13 +2,14 @@ use base_db::FileRange; use hir_def::{ + db::DefDatabase, item_scope::ItemInNs, src::{HasChildSource, HasSource}, AdtId, AssocItemId, DefWithBodyId, HasModule, ImplId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId, }; use hir_expand::{HirFileId, InFile}; -use hir_ty::db::HirDatabase; +use hir_ty::{db::HirDatabase, display::HirDisplay}; use syntax::{ast::HasName, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr}; use crate::{Module, ModuleDef, Semantics}; @@ -230,9 +231,12 @@ impl<'a> SymbolCollector<'a> { fn collect_from_impl(&mut self, impl_id: ImplId) { let impl_data = self.db.impl_data(impl_id); - for &assoc_item_id in &impl_data.items { - self.push_assoc_item(assoc_item_id) - } + let impl_name = Some(SmolStr::new(impl_data.self_ty.display(self.db).to_string())); + self.with_container_name(impl_name, |s| { + for &assoc_item_id in &impl_data.items { + s.push_assoc_item(assoc_item_id) + } + }) } fn collect_from_trait(&mut self, trait_id: TraitId) { @@ -274,9 +278,9 @@ impl<'a> SymbolCollector<'a> { } } - fn push_decl(&mut self, id: L, is_assoc: bool) + fn push_decl<'db, L>(&mut self, id: L, is_assoc: bool) where - L: Lookup + Into, + L: Lookup = dyn DefDatabase + 'db> + Into, ::Data: HasSource, <::Data as HasSource>::Value: HasName, { diff --git a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml index a622ec1a9532e..4d4bac5fb9664 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml @@ -31,7 +31,11 @@ expect-test = "1.4.0" # local deps test-utils.workspace = true +test-fixture.workspace = true sourcegen.workspace = true [features] in-rust-tree = [] + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs index f508c42c53e4b..1f785b5d0a818 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs @@ -281,11 +281,8 @@ mod tests { use super::*; use hir::Semantics; - use ide_db::{ - assists::AssistResolveStrategy, - base_db::{fixture::WithFixture, FileRange}, - RootDatabase, - }; + use ide_db::{assists::AssistResolveStrategy, base_db::FileRange, RootDatabase}; + use test_fixture::WithFixture; use crate::tests::{ check_assist, check_assist_by_label, check_assist_not_applicable, check_assist_target, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs index 
0f2d1057c0a45..b7b00e7ed0688 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs @@ -16,11 +16,14 @@ use syntax::{ edit_in_place::{AttrsOwnerEdit, Indent}, make, HasName, }, - ted, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T, + AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T, }; use text_edit::TextRange; -use crate::assist_context::{AssistContext, Assists}; +use crate::{ + assist_context::{AssistContext, Assists}, + utils, +}; // Assist: bool_to_enum // @@ -73,7 +76,7 @@ pub(crate) fn bool_to_enum(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option let usages = definition.usages(&ctx.sema).all(); add_enum_def(edit, ctx, &usages, target_node, &target_module); - replace_usages(edit, ctx, &usages, definition, &target_module); + replace_usages(edit, ctx, usages, definition, &target_module); }, ) } @@ -169,8 +172,8 @@ fn replace_bool_expr(edit: &mut SourceChangeBuilder, expr: ast::Expr) { /// Converts an expression of type `bool` to one of the new enum type. fn bool_expr_to_enum_expr(expr: ast::Expr) -> ast::Expr { - let true_expr = make::expr_path(make::path_from_text("Bool::True")).clone_for_update(); - let false_expr = make::expr_path(make::path_from_text("Bool::False")).clone_for_update(); + let true_expr = make::expr_path(make::path_from_text("Bool::True")); + let false_expr = make::expr_path(make::path_from_text("Bool::False")); if let ast::Expr::Literal(literal) = &expr { match literal.kind() { @@ -184,7 +187,6 @@ fn bool_expr_to_enum_expr(expr: ast::Expr) -> ast::Expr { make::tail_only_block_expr(true_expr), Some(ast::ElseBranch::Block(make::tail_only_block_expr(false_expr))), ) - .clone_for_update() } } @@ -192,21 +194,19 @@ fn bool_expr_to_enum_expr(expr: ast::Expr) -> ast::Expr { fn replace_usages( edit: &mut SourceChangeBuilder, ctx: &AssistContext<'_>, - usages: &UsageSearchResult, + usages: UsageSearchResult, target_definition: Definition, target_module: &hir::Module, ) { - for (file_id, references) in usages.iter() { - edit.edit_file(*file_id); + for (file_id, references) in usages { + edit.edit_file(file_id); - let refs_with_imports = - augment_references_with_imports(edit, ctx, references, target_module); + let refs_with_imports = augment_references_with_imports(ctx, references, target_module); refs_with_imports.into_iter().rev().for_each( - |FileReferenceWithImport { range, old_name, new_name, import_data }| { + |FileReferenceWithImport { range, name, import_data }| { // replace the usages in patterns and expressions - if let Some(ident_pat) = old_name.syntax().ancestors().find_map(ast::IdentPat::cast) - { + if let Some(ident_pat) = name.syntax().ancestors().find_map(ast::IdentPat::cast) { cov_mark::hit!(replaces_record_pat_shorthand); let definition = ctx.sema.to_def(&ident_pat).map(Definition::Local); @@ -214,36 +214,35 @@ fn replace_usages( replace_usages( edit, ctx, - &def.usages(&ctx.sema).all(), + def.usages(&ctx.sema).all(), target_definition, target_module, ) } - } else if let Some(initializer) = find_assignment_usage(&new_name) { + } else if let Some(initializer) = find_assignment_usage(&name) { cov_mark::hit!(replaces_assignment); replace_bool_expr(edit, initializer); - } else if let Some((prefix_expr, inner_expr)) = find_negated_usage(&new_name) { + } else if let Some((prefix_expr, inner_expr)) = find_negated_usage(&name) { cov_mark::hit!(replaces_negation); edit.replace( prefix_expr.syntax().text_range(), format!("{} == Bool::False", inner_expr), ); 
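The branches of `replace_usages` encode this assist's rewrite rules: a negated usage `!x` becomes `x == Bool::False`, a method-call receiver becomes `(x == Bool::True)`, any other expression usage becomes `x == Bool::True`, and initializers go through `bool_expr_to_enum_expr`. The following is a small hand-written example of the resulting code shape, consistent with the test fixtures further down in this file; the `takes_bool` helper is illustrative only.

// Before the assist (cursor on `foo`):       After:
//     let foo = true;                        let foo = Bool::True;
//     if !foo { ... }                        if foo == Bool::False { ... }
//     takes_bool(foo)                        takes_bool(foo == Bool::True)
#[derive(PartialEq, Eq)]
enum Bool { True, False }

fn takes_bool(b: bool) -> bool { b }

fn main() {
    let foo = Bool::True;
    if foo == Bool::False {
        println!("foo is off");
    }
    assert!(takes_bool(foo == Bool::True));
}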
- } else if let Some((record_field, initializer)) = old_name + } else if let Some((record_field, initializer)) = name .as_name_ref() .and_then(ast::RecordExprField::for_field_name) .and_then(|record_field| ctx.sema.resolve_record_field(&record_field)) .and_then(|(got_field, _, _)| { - find_record_expr_usage(&new_name, got_field, target_definition) + find_record_expr_usage(&name, got_field, target_definition) }) { cov_mark::hit!(replaces_record_expr); - let record_field = edit.make_mut(record_field); let enum_expr = bool_expr_to_enum_expr(initializer); - record_field.replace_expr(enum_expr); - } else if let Some(pat) = find_record_pat_field_usage(&old_name) { + utils::replace_record_field_expr(ctx, edit, record_field, enum_expr); + } else if let Some(pat) = find_record_pat_field_usage(&name) { match pat { ast::Pat::IdentPat(ident_pat) => { cov_mark::hit!(replaces_record_pat); @@ -253,7 +252,7 @@ fn replace_usages( replace_usages( edit, ctx, - &def.usages(&ctx.sema).all(), + def.usages(&ctx.sema).all(), target_definition, target_module, ) @@ -270,40 +269,44 @@ fn replace_usages( } _ => (), } - } else if let Some((ty_annotation, initializer)) = find_assoc_const_usage(&new_name) - { + } else if let Some((ty_annotation, initializer)) = find_assoc_const_usage(&name) { edit.replace(ty_annotation.syntax().text_range(), "Bool"); replace_bool_expr(edit, initializer); - } else if let Some(receiver) = find_method_call_expr_usage(&new_name) { + } else if let Some(receiver) = find_method_call_expr_usage(&name) { edit.replace( receiver.syntax().text_range(), format!("({} == Bool::True)", receiver), ); - } else if new_name.syntax().ancestors().find_map(ast::UseTree::cast).is_none() { + } else if name.syntax().ancestors().find_map(ast::UseTree::cast).is_none() { // for any other usage in an expression, replace it with a check that it is the true variant - if let Some((record_field, expr)) = new_name - .as_name_ref() - .and_then(ast::RecordExprField::for_field_name) - .and_then(|record_field| { - record_field.expr().map(|expr| (record_field, expr)) - }) + if let Some((record_field, expr)) = + name.as_name_ref().and_then(ast::RecordExprField::for_field_name).and_then( + |record_field| record_field.expr().map(|expr| (record_field, expr)), + ) { - record_field.replace_expr( + utils::replace_record_field_expr( + ctx, + edit, + record_field, make::expr_bin_op( expr, ast::BinaryOp::CmpOp(ast::CmpOp::Eq { negated: false }), make::expr_path(make::path_from_text("Bool::True")), - ) - .clone_for_update(), + ), ); } else { - edit.replace(range, format!("{} == Bool::True", new_name.text())); + edit.replace(range, format!("{} == Bool::True", name.text())); } } // add imports across modules where needed if let Some((import_scope, path)) = import_data { - insert_use(&import_scope, path, &ctx.config.insert_use); + let scope = match import_scope.clone() { + ImportScope::File(it) => ImportScope::File(edit.make_mut(it)), + ImportScope::Module(it) => ImportScope::Module(edit.make_mut(it)), + ImportScope::Block(it) => ImportScope::Block(edit.make_mut(it)), + }; + insert_use(&scope, path, &ctx.config.insert_use); } }, ) @@ -312,37 +315,31 @@ fn replace_usages( struct FileReferenceWithImport { range: TextRange, - old_name: ast::NameLike, - new_name: ast::NameLike, + name: ast::NameLike, import_data: Option<(ImportScope, ast::Path)>, } fn augment_references_with_imports( - edit: &mut SourceChangeBuilder, ctx: &AssistContext<'_>, - references: &[FileReference], + references: Vec, target_module: &hir::Module, ) -> Vec { let mut 
visited_modules = FxHashSet::default(); references - .iter() + .into_iter() .filter_map(|FileReference { range, name, .. }| { let name = name.clone().into_name_like()?; - ctx.sema.scope(name.syntax()).map(|scope| (*range, name, scope.module())) + ctx.sema.scope(name.syntax()).map(|scope| (range, name, scope.module())) }) .map(|(range, name, ref_module)| { - let old_name = name.clone(); - let new_name = edit.make_mut(name.clone()); - // if the referenced module is not the same as the target one and has not been seen before, add an import let import_data = if ref_module.nearest_non_block_module(ctx.db()) != *target_module && !visited_modules.contains(&ref_module) { visited_modules.insert(ref_module); - let import_scope = - ImportScope::find_insert_use_container(new_name.syntax(), &ctx.sema); + let import_scope = ImportScope::find_insert_use_container(name.syntax(), &ctx.sema); let path = ref_module .find_use_path_prefixed( ctx.sema.db, @@ -360,7 +357,7 @@ fn augment_references_with_imports( None }; - FileReferenceWithImport { range, old_name, new_name, import_data } + FileReferenceWithImport { range, name, import_data } }) .collect() } @@ -405,13 +402,12 @@ fn find_record_expr_usage( let record_field = ast::RecordExprField::for_field_name(name_ref)?; let initializer = record_field.expr()?; - if let Definition::Field(expected_field) = target_definition { - if got_field != expected_field { - return None; + match target_definition { + Definition::Field(expected_field) if got_field == expected_field => { + Some((record_field, initializer)) } + _ => None, } - - Some((record_field, initializer)) } fn find_record_pat_field_usage(name: &ast::NameLike) -> Option { @@ -466,12 +462,9 @@ fn add_enum_def( let indent = IndentLevel::from_node(&insert_before); enum_def.reindent_to(indent); - ted::insert_all( - ted::Position::before(&edit.make_syntax_mut(insert_before)), - vec![ - enum_def.syntax().clone().into(), - make::tokens::whitespace(&format!("\n\n{indent}")).into(), - ], + edit.insert( + insert_before.text_range().start(), + format!("{}\n\n{indent}", enum_def.syntax().text()), ); } @@ -800,6 +793,78 @@ fn main() { ) } + #[test] + fn local_var_init_struct_usage() { + check_assist( + bool_to_enum, + r#" +struct Foo { + foo: bool, +} + +fn main() { + let $0foo = true; + let s = Foo { foo }; +} +"#, + r#" +struct Foo { + foo: bool, +} + +#[derive(PartialEq, Eq)] +enum Bool { True, False } + +fn main() { + let foo = Bool::True; + let s = Foo { foo: foo == Bool::True }; +} +"#, + ) + } + + #[test] + fn local_var_init_struct_usage_in_macro() { + check_assist( + bool_to_enum, + r#" +struct Struct { + boolean: bool, +} + +macro_rules! identity { + ($body:expr) => { + $body + } +} + +fn new() -> Struct { + let $0boolean = true; + identity![Struct { boolean }] +} +"#, + r#" +struct Struct { + boolean: bool, +} + +macro_rules! identity { + ($body:expr) => { + $body + } +} + +#[derive(PartialEq, Eq)] +enum Bool { True, False } + +fn new() -> Struct { + let boolean = Bool::True; + identity![Struct { boolean: boolean == Bool::True }] +} +"#, + ) + } + #[test] fn field_struct_basic() { cov_mark::check!(replaces_record_expr); @@ -1321,6 +1386,46 @@ fn main() { ) } + #[test] + fn field_in_macro() { + check_assist( + bool_to_enum, + r#" +struct Struct { + $0boolean: bool, +} + +fn boolean(x: Struct) { + let Struct { boolean } = x; +} + +macro_rules! 
identity { ($body:expr) => { $body } } + +fn new() -> Struct { + identity!(Struct { boolean: true }) +} +"#, + r#" +#[derive(PartialEq, Eq)] +enum Bool { True, False } + +struct Struct { + boolean: Bool, +} + +fn boolean(x: Struct) { + let Struct { boolean } = x; +} + +macro_rules! identity { ($body:expr) => { $body } } + +fn new() -> Struct { + identity!(Struct { boolean: Bool::True }) +} +"#, + ) + } + #[test] fn field_non_bool() { cov_mark::check!(not_applicable_non_bool_field); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs index e7c884dcb706c..874b81d3b637a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs @@ -1,12 +1,8 @@ use hir::TypeInfo; -use stdx::format_to; use syntax::{ - ast::{self, AstNode}, - NodeOrToken, - SyntaxKind::{ - BLOCK_EXPR, BREAK_EXPR, CLOSURE_EXPR, COMMENT, LOOP_EXPR, MATCH_ARM, MATCH_GUARD, - PATH_EXPR, RETURN_EXPR, - }, + ast::{self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasName}, + ted, NodeOrToken, + SyntaxKind::{BLOCK_EXPR, BREAK_EXPR, COMMENT, LOOP_EXPR, MATCH_GUARD, PATH_EXPR, RETURN_EXPR}, SyntaxNode, }; @@ -66,98 +62,140 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op .as_ref() .map_or(false, |it| matches!(it, ast::Expr::FieldExpr(_) | ast::Expr::MethodCallExpr(_))); - let reference_modifier = match ty.filter(|_| needs_adjust) { - Some(receiver_type) if receiver_type.is_mutable_reference() => "&mut ", - Some(receiver_type) if receiver_type.is_reference() => "&", - _ => "", - }; - - let var_modifier = match parent { - Some(ast::Expr::RefExpr(expr)) if expr.mut_token().is_some() => "mut ", - _ => "", - }; - let anchor = Anchor::from(&to_extract)?; - let indent = anchor.syntax().prev_sibling_or_token()?.as_token()?.clone(); let target = to_extract.syntax().text_range(); acc.add( AssistId("extract_variable", AssistKind::RefactorExtract), "Extract into variable", target, move |edit| { - let field_shorthand = - match to_extract.syntax().parent().and_then(ast::RecordExprField::cast) { - Some(field) => field.name_ref(), - None => None, - }; - - let mut buf = String::new(); - - let var_name = match &field_shorthand { - Some(it) => it.to_string(), - None => suggest_name::for_variable(&to_extract, &ctx.sema), + let field_shorthand = to_extract + .syntax() + .parent() + .and_then(ast::RecordExprField::cast) + .filter(|field| field.name_ref().is_some()); + + let (var_name, expr_replace) = match field_shorthand { + Some(field) => (field.to_string(), field.syntax().clone()), + None => ( + suggest_name::for_variable(&to_extract, &ctx.sema), + to_extract.syntax().clone(), + ), }; - let expr_range = match &field_shorthand { - Some(it) => it.syntax().text_range().cover(to_extract.syntax().text_range()), - None => to_extract.syntax().text_range(), + + let ident_pat = match parent { + Some(ast::Expr::RefExpr(expr)) if expr.mut_token().is_some() => { + make::ident_pat(false, true, make::name(&var_name)) + } + _ => make::ident_pat(false, false, make::name(&var_name)), }; - match anchor { - Anchor::Before(_) | Anchor::Replace(_) => { - format_to!(buf, "let {var_modifier}{var_name} = {reference_modifier}") + let to_extract = match ty.as_ref().filter(|_| needs_adjust) { + Some(receiver_type) if receiver_type.is_mutable_reference() => { + make::expr_ref(to_extract, true) } - 
Anchor::WrapInBlock(_) => { - format_to!(buf, "{{ let {var_name} = {reference_modifier}") + Some(receiver_type) if receiver_type.is_reference() => { + make::expr_ref(to_extract, false) } + _ => to_extract, }; - format_to!(buf, "{to_extract}"); - if let Anchor::Replace(stmt) = anchor { - cov_mark::hit!(test_extract_var_expr_stmt); - if stmt.semicolon_token().is_none() { - buf.push(';'); - } - match ctx.config.snippet_cap { - Some(cap) => { - let snip = buf.replace( - &format!("let {var_modifier}{var_name}"), - &format!("let {var_modifier}$0{var_name}"), - ); - edit.replace_snippet(cap, expr_range, snip) + let expr_replace = edit.make_syntax_mut(expr_replace); + let let_stmt = + make::let_stmt(ident_pat.into(), None, Some(to_extract)).clone_for_update(); + let name_expr = make::expr_path(make::ext::ident_path(&var_name)).clone_for_update(); + + match anchor { + Anchor::Before(place) => { + let prev_ws = place.prev_sibling_or_token().and_then(|it| it.into_token()); + let indent_to = IndentLevel::from_node(&place); + let insert_place = edit.make_syntax_mut(place); + + // Adjust ws to insert depending on if this is all inline or on separate lines + let trailing_ws = if prev_ws.is_some_and(|it| it.text().starts_with("\n")) { + format!("\n{indent_to}") + } else { + format!(" ") + }; + + ted::insert_all_raw( + ted::Position::before(insert_place), + vec![ + let_stmt.syntax().clone().into(), + make::tokens::whitespace(&trailing_ws).into(), + ], + ); + + ted::replace(expr_replace, name_expr.syntax()); + + if let Some(cap) = ctx.config.snippet_cap { + if let Some(ast::Pat::IdentPat(ident_pat)) = let_stmt.pat() { + if let Some(name) = ident_pat.name() { + edit.add_tabstop_before(cap, name); + } + } } - None => edit.replace(expr_range, buf), } - return; - } + Anchor::Replace(stmt) => { + cov_mark::hit!(test_extract_var_expr_stmt); - buf.push(';'); - - // We want to maintain the indent level, - // but we do not want to duplicate possible - // extra newlines in the indent block - let text = indent.text(); - if text.starts_with('\n') { - buf.push('\n'); - buf.push_str(text.trim_start_matches('\n')); - } else { - buf.push_str(text); - } + let stmt_replace = edit.make_mut(stmt); + ted::replace(stmt_replace.syntax(), let_stmt.syntax()); - edit.replace(expr_range, var_name.clone()); - let offset = anchor.syntax().text_range().start(); - match ctx.config.snippet_cap { - Some(cap) => { - let snip = buf.replace( - &format!("let {var_modifier}{var_name}"), - &format!("let {var_modifier}$0{var_name}"), - ); - edit.insert_snippet(cap, offset, snip) + if let Some(cap) = ctx.config.snippet_cap { + if let Some(ast::Pat::IdentPat(ident_pat)) = let_stmt.pat() { + if let Some(name) = ident_pat.name() { + edit.add_tabstop_before(cap, name); + } + } + } } - None => edit.insert(offset, buf), - } + Anchor::WrapInBlock(to_wrap) => { + let indent_to = to_wrap.indent_level(); + + let block = if to_wrap.syntax() == &expr_replace { + // Since `expr_replace` is the same that needs to be wrapped in a block, + // we can just directly replace it with a block + let block = + make::block_expr([let_stmt.into()], Some(name_expr)).clone_for_update(); + ted::replace(expr_replace, block.syntax()); + + block + } else { + // `expr_replace` is a descendant of `to_wrap`, so both steps need to be + // handled seperately, otherwise we wrap the wrong expression + let to_wrap = edit.make_mut(to_wrap); + + // Replace the target expr first so that we don't need to find where + // `expr_replace` is in the wrapped `to_wrap` + ted::replace(expr_replace, 
name_expr.syntax()); + + // Wrap `to_wrap` in a block + let block = make::block_expr([let_stmt.into()], Some(to_wrap.clone())) + .clone_for_update(); + ted::replace(to_wrap.syntax(), block.syntax()); + + block + }; + + if let Some(cap) = ctx.config.snippet_cap { + // Adding a tabstop to `name` requires finding the let stmt again, since + // the existing `let_stmt` is not actually added to the tree + let pat = block.statements().find_map(|stmt| { + let ast::Stmt::LetStmt(let_stmt) = stmt else { return None }; + let_stmt.pat() + }); + + if let Some(ast::Pat::IdentPat(ident_pat)) = pat { + if let Some(name) = ident_pat.name() { + edit.add_tabstop_before(cap, name); + } + } + } - if let Anchor::WrapInBlock(_) = anchor { - edit.insert(anchor.syntax().text_range().end(), " }"); + // fixup indentation of block + block.indent(indent_to); + } } }, ) @@ -181,7 +219,7 @@ fn valid_target_expr(node: SyntaxNode) -> Option { enum Anchor { Before(SyntaxNode), Replace(ast::ExprStmt), - WrapInBlock(SyntaxNode), + WrapInBlock(ast::Expr), } impl Anchor { @@ -204,16 +242,16 @@ impl Anchor { } if let Some(parent) = node.parent() { - if parent.kind() == CLOSURE_EXPR { + if let Some(parent) = ast::ClosureExpr::cast(parent.clone()) { cov_mark::hit!(test_extract_var_in_closure_no_block); - return Some(Anchor::WrapInBlock(node)); + return parent.body().map(Anchor::WrapInBlock); } - if parent.kind() == MATCH_ARM { + if let Some(parent) = ast::MatchArm::cast(parent) { if node.kind() == MATCH_GUARD { cov_mark::hit!(test_extract_var_in_match_guard); } else { cov_mark::hit!(test_extract_var_in_match_arm_no_block); - return Some(Anchor::WrapInBlock(node)); + return parent.expr().map(Anchor::WrapInBlock); } } } @@ -229,13 +267,6 @@ impl Anchor { None }) } - - fn syntax(&self) -> &SyntaxNode { - match self { - Anchor::Before(it) | Anchor::WrapInBlock(it) => it, - Anchor::Replace(stmt) => stmt.syntax(), - } - } } #[cfg(test)] @@ -502,7 +533,10 @@ fn main() { fn main() { let x = true; let tuple = match x { - true => { let $0var_name = 2 + 2; (var_name, true) } + true => { + let $0var_name = 2 + 2; + (var_name, true) + } _ => (0, false) }; } @@ -579,7 +613,10 @@ fn main() { "#, r#" fn main() { - let lambda = |x: u32| { let $0var_name = x * 2; var_name }; + let lambda = |x: u32| { + let $0var_name = x * 2; + var_name + }; } "#, ); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs index f4fa6a74c6b94..0d34502add941 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs @@ -2,22 +2,25 @@ use std::ops::Not; use crate::{ assist_context::{AssistContext, Assists}, - utils::convert_param_list_to_arg_list, + utils::{convert_param_list_to_arg_list, suggest_name}, }; use either::Either; use hir::{db::HirDatabase, HasVisibility}; use ide_db::{ assists::{AssistId, GroupLabel}, path_transform::PathTransform, + FxHashMap, FxHashSet, }; +use itertools::Itertools; use syntax::{ ast::{ self, edit::{self, AstNodeEdit}, - make, AssocItem, HasGenericParams, HasName, HasVisibility as astHasVisibility, Path, + make, AssocItem, GenericArgList, GenericParamList, HasGenericParams, HasName, + HasTypeBounds, HasVisibility as astHasVisibility, Path, }, ted::{self, Position}, - AstNode, NodeOrToken, SyntaxKind, + AstNode, NodeOrToken, SmolStr, SyntaxKind, }; // Assist: generate_delegate_trait @@ 
-77,7 +80,7 @@ use syntax::{ // } // // fn method_(&mut self) -> bool { -// ::method_( &mut self.a ) +// ::method_(&mut self.a) // } // } // ``` @@ -98,6 +101,7 @@ pub(crate) fn generate_delegate_trait(acc: &mut Assists, ctx: &AssistContext<'_> } /// A utility object that represents a struct's field. +#[derive(Debug)] struct Field { name: String, ty: ast::Type, @@ -111,44 +115,33 @@ impl Field { f: Either, ) -> Option { let db = ctx.sema.db; - let name: String; - let range: syntax::TextRange; - let ty: ast::Type; let module = ctx.sema.to_module_def(ctx.file_id())?; - match f { + let (name, range, ty) = match f { Either::Left(f) => { - name = f.name()?.to_string(); - ty = f.ty()?; - range = f.syntax().text_range(); + let name = f.name()?.to_string(); + (name, f.syntax().text_range(), f.ty()?) } Either::Right((f, l)) => { - name = l.fields().position(|it| it == f)?.to_string(); - ty = f.ty()?; - range = f.syntax().text_range(); + let name = l.fields().position(|it| it == f)?.to_string(); + (name, f.syntax().text_range(), f.ty()?) } }; let hir_ty = ctx.sema.resolve_type(&ty)?; let type_impls = hir::Impl::all_for_type(db, hir_ty.clone()); let mut impls = Vec::with_capacity(type_impls.len()); - let type_param = hir_ty.as_type_param(db); - if let Some(tp) = type_param { + if let Some(tp) = hir_ty.as_type_param(db) { for tb in tp.trait_bounds(db) { - impls.push(Delegee::Bound(BoundCase(tb))); + impls.push(Delegee::Bound(tb)); } }; for imp in type_impls { - match imp.trait_(db) { - Some(tr) => { - if tr.is_visible_from(db, module) { - impls.push(Delegee::Impls(ImplCase(tr, imp))) - } - } - None => (), + if let Some(tr) = imp.trait_(db).filter(|tr| tr.is_visible_from(db, module)) { + impls.push(Delegee::Impls(tr, imp)) } } @@ -161,19 +154,17 @@ impl Field { /// actually implements the trait and the second way is when the field /// has a bound type parameter. We handle these cases in different ways /// hence the enum. +#[derive(Debug)] enum Delegee { - Bound(BoundCase), - Impls(ImplCase), + Bound(hir::Trait), + Impls(hir::Trait, hir::Impl), } -struct BoundCase(hir::Trait); -struct ImplCase(hir::Trait, hir::Impl); - impl Delegee { fn signature(&self, db: &dyn HirDatabase) -> String { let mut s = String::new(); - let (Delegee::Bound(BoundCase(it)) | Delegee::Impls(ImplCase(it, _))) = self; + let (Delegee::Bound(it) | Delegee::Impls(it, _)) = self; for m in it.module(db).path_to_root(db).iter().rev() { if let Some(name) = m.name(db) { @@ -200,25 +191,33 @@ impl Struct { pub(crate) fn delegate(&self, field: Field, acc: &mut Assists, ctx: &AssistContext<'_>) { let db = ctx.db(); + for delegee in &field.impls { - // FIXME : We can omit already implemented impl_traits - // But we don't know what the &[hir::Type] argument should look like. + let trait_ = match delegee { + Delegee::Bound(b) => b, + Delegee::Impls(i, _) => i, + }; - // let trait_ = match delegee { - // Delegee::Bound(b) => b.0, - // Delegee::Impls(i) => i.1, - // }; + // Skip trait that has `Self` type, which cannot be delegated + // + // See [`test_self_ty`] + if has_self_type(*trait_, ctx).is_some() { + continue; + } + // FIXME : We can omit already implemented impl_traits + // But we don't know what the &[hir::Type] argument should look like. 
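The assist's doc-comment example in the hunk above shows the shape of the generated code: the wrapper type implements the trait by forwarding each method to the chosen field through a fully qualified `<FieldTy as Trait>::method(...)` path. Here is a minimal hand-written illustration of that delegation pattern in plain Rust; the `Greet`, `Inner` and `Wrapper` names are made up for the example.

trait Greet {
    fn greet(&self) -> String;
}

struct Inner;

impl Greet for Inner {
    fn greet(&self) -> String {
        "hello from Inner".to_owned()
    }
}

struct Wrapper {
    a: Inner,
}

// Roughly the impl the assist generates for field `a`: each method forwards
// to the field through a fully qualified path.
impl Greet for Wrapper {
    fn greet(&self) -> String {
        <Inner as Greet>::greet(&self.a)
    }
}

fn main() {
    let w = Wrapper { a: Inner };
    println!("{}", w.greet());
}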
// if self.hir_ty.impls_trait(db, trait_, &[]) { // continue; // } let signature = delegee.signature(db); + let Some(delegate) = generate_impl(ctx, self, &field.ty, &field.name, delegee) else { continue; }; acc.add_group( - &GroupLabel("Delegate trait impl for field...".to_owned()), + &GroupLabel(format!("Generate delegate impls for field `{}`", field.name)), AssistId("generate_delegate_trait", ide_db::assists::AssistKind::Generate), format!("Generate delegate impl `{}` for `{}`", signature, field.name), field.range, @@ -241,46 +240,40 @@ fn generate_impl( delegee: &Delegee, ) -> Option { let delegate: ast::Impl; - let source: ast::Impl; - let genpar: Option; let db = ctx.db(); - let base_path = make::path_from_text(&field_ty.to_string().as_str()); - let s_path = make::ext::ident_path(&strukt.name.to_string()); + let ast_strukt = &strukt.strukt; + let strukt_ty = make::ty_path(make::ext::ident_path(&strukt.name.to_string())); match delegee { Delegee::Bound(delegee) => { - let in_file = ctx.sema.source(delegee.0.to_owned())?; - let source: ast::Trait = in_file.value; + let bound_def = ctx.sema.source(delegee.to_owned())?.value; + let bound_params = bound_def.generic_param_list(); + let strukt_params = ast_strukt.generic_param_list(); delegate = make::impl_trait( - delegee.0.is_unsafe(db), - None, - None, - strukt.strukt.generic_param_list(), - None, - delegee.0.is_auto(db), - make::ty(&delegee.0.name(db).to_smol_str()), - make::ty_path(s_path), - source.where_clause(), - strukt.strukt.where_clause(), + delegee.is_unsafe(db), + bound_params.clone(), + bound_params.map(|params| params.to_generic_args()), + strukt_params.clone(), + strukt_params.map(|params| params.to_generic_args()), + delegee.is_auto(db), + make::ty(&delegee.name(db).to_smol_str()), + strukt_ty, + bound_def.where_clause(), + ast_strukt.where_clause(), None, ) .clone_for_update(); - genpar = source.generic_param_list(); - let delegate_assoc_items = delegate.get_or_create_assoc_item_list(); - let gen_args: String = - genpar.map_or_else(String::new, |params| params.to_generic_args().to_string()); - // Goto link : https://doc.rust-lang.org/reference/paths.html#qualified-paths let qualified_path_type = make::path_from_text(&format!( - "<{} as {}{}>", - base_path.to_string(), - delegee.0.name(db).to_smol_str(), - gen_args.to_string() + "<{} as {}>", + field_ty.to_string(), + delegate.trait_()?.to_string() )); - match source.assoc_item_list() { + let delegate_assoc_items = delegate.get_or_create_assoc_item_list(); + match bound_def.assoc_item_list() { Some(ai) => { ai.assoc_items() .filter(|item| matches!(item, AssocItem::MacroCall(_)).not()) @@ -295,66 +288,394 @@ fn generate_impl( None => {} }; - let target = ctx.sema.scope(strukt.strukt.syntax())?; - let source = ctx.sema.scope(source.syntax())?; - - let transform = - PathTransform::trait_impl(&target, &source, delegee.0, delegate.clone()); + let target_scope = ctx.sema.scope(strukt.strukt.syntax())?; + let source_scope = ctx.sema.scope(bound_def.syntax())?; + let transform = PathTransform::generic_transformation(&target_scope, &source_scope); transform.apply(&delegate.syntax()); } - Delegee::Impls(delegee) => { - let in_file = ctx.sema.source(delegee.1.to_owned())?; - source = in_file.value; + Delegee::Impls(trait_, old_impl) => { + let old_impl = ctx.sema.source(old_impl.to_owned())?.value; + + // `old_trait_args` contains names of generic args for trait in `old_impl` + let old_trait_args = old_impl + .trait_()? 
+ .generic_arg_list() + .map(|l| l.generic_args().map(|arg| arg.to_string())) + .map_or_else(|| FxHashSet::default(), |it| it.collect()); + + let old_impl_params = old_impl.generic_param_list(); + + // Resolve conflicts with generic parameters in strukt. + // These generics parameters will also be used in `field_ty` and `where_clauses`, + // so we should substitude arguments in them as well. + let (renamed_strukt_params, field_ty, ty_where_clause) = if let Some(strukt_params) = + resolve_conflicts_for_strukt(ast_strukt, old_impl_params.as_ref()) + { + let strukt_args = strukt_params.to_generic_args(); + let field_ty = + subst_name_in_strukt(ctx, ast_strukt, field_ty, strukt_args.clone())?; + let wc = ast_strukt + .where_clause() + .and_then(|wc| Some(subst_name_in_strukt(ctx, ast_strukt, &wc, strukt_args)?)); + (Some(strukt_params), field_ty, wc) + } else { + (None, field_ty.clone_for_update(), None) + }; + + // Some generics used in `field_ty` may be instantiated, so they are no longer + // `generics`. We should remove them from generics params, and use the rest params. + let trait_gen_params = + remove_instantiated_params(&old_impl.self_ty()?, old_impl_params, &old_trait_args); + + // Generate generic args that applied to current impl, this step will also remove unused params + let args_for_impl = + get_args_for_impl(&old_impl, &field_ty, &trait_gen_params, &old_trait_args); + + let mut trait_gen_args = old_impl.trait_()?.generic_arg_list(); + if let Some(arg_list) = &mut trait_gen_args { + *arg_list = arg_list.clone_for_update(); + transform_impl(ctx, ast_strukt, &old_impl, &args_for_impl, &arg_list.syntax())?; + } + + let mut type_gen_args = + renamed_strukt_params.clone().map(|params| params.to_generic_args()); + if let Some(type_args) = &mut type_gen_args { + *type_args = type_args.clone_for_update(); + transform_impl(ctx, ast_strukt, &old_impl, &args_for_impl, &type_args.syntax())?; + } + + let path_type = make::ty(&trait_.name(db).to_smol_str()).clone_for_update(); + transform_impl(ctx, ast_strukt, &old_impl, &args_for_impl, &path_type.syntax())?; + delegate = make::impl_trait( - delegee.0.is_unsafe(db), - source.generic_param_list(), - None, - None, - None, - delegee.0.is_auto(db), - make::ty(&delegee.0.name(db).to_smol_str()), - make::ty_path(s_path), - source.where_clause(), - strukt.strukt.where_clause(), + trait_.is_unsafe(db), + trait_gen_params, + trait_gen_args, + renamed_strukt_params, + type_gen_args, + trait_.is_auto(db), + path_type, + strukt_ty, + old_impl.where_clause().map(|wc| wc.clone_for_update()), + ty_where_clause, None, ) .clone_for_update(); - genpar = source.generic_param_list(); - let delegate_assoc_items = delegate.get_or_create_assoc_item_list(); - let gen_args: String = - genpar.map_or_else(String::new, |params| params.to_generic_args().to_string()); // Goto link : https://doc.rust-lang.org/reference/paths.html#qualified-paths let qualified_path_type = make::path_from_text(&format!( - "<{} as {}{}>", - base_path.to_string().as_str(), - delegee.0.name(db).to_smol_str(), - gen_args.to_string().as_str() + "<{} as {}>", + field_ty.to_string(), + delegate.trait_()?.to_string() )); - source + let delegate_assoc_items = delegate.get_or_create_assoc_item_list(); + for item in old_impl .get_or_create_assoc_item_list() .assoc_items() .filter(|item| matches!(item, AssocItem::MacroCall(_)).not()) - .for_each(|item| { - let assoc = process_assoc_item(item, qualified_path_type.clone(), &field_name); - if let Some(assoc) = assoc { - delegate_assoc_items.add_item(assoc); 
- } - }); - - let target = ctx.sema.scope(strukt.strukt.syntax())?; - let source = ctx.sema.scope(source.syntax())?; + { + let assoc = process_assoc_item( + transform_assoc_item(ctx, ast_strukt, &old_impl, &args_for_impl, item)?, + qualified_path_type.clone(), + &field_name, + )?; + + delegate_assoc_items.add_item(assoc); + } - let transform = - PathTransform::trait_impl(&target, &source, delegee.0, delegate.clone()); - transform.apply(&delegate.syntax()); + // Remove unused where clauses + if let Some(wc) = delegate.where_clause() { + remove_useless_where_clauses(&delegate, wc)?; + } } } Some(delegate) } +fn transform_assoc_item( + ctx: &AssistContext<'_>, + strukt: &ast::Struct, + old_impl: &ast::Impl, + args: &Option, + item: AssocItem, +) -> Option { + let source_scope = ctx.sema.scope(&item.syntax()).unwrap(); + let target_scope = ctx.sema.scope(&strukt.syntax())?; + let hir_old_impl = ctx.sema.to_impl_def(old_impl)?; + let item = item.clone_for_update(); + let transform = args.as_ref().map_or_else( + || PathTransform::generic_transformation(&target_scope, &source_scope), + |args| { + PathTransform::impl_transformation( + &target_scope, + &source_scope, + hir_old_impl, + args.clone(), + ) + }, + ); + transform.apply(&item.syntax()); + Some(item) +} + +fn transform_impl( + ctx: &AssistContext<'_>, + strukt: &ast::Struct, + old_impl: &ast::Impl, + args: &Option, + syntax: &syntax::SyntaxNode, +) -> Option<()> { + let source_scope = ctx.sema.scope(&old_impl.self_ty()?.syntax())?; + let target_scope = ctx.sema.scope(&strukt.syntax())?; + let hir_old_impl = ctx.sema.to_impl_def(old_impl)?; + + let transform = args.as_ref().map_or_else( + || PathTransform::generic_transformation(&target_scope, &source_scope), + |args| { + PathTransform::impl_transformation( + &target_scope, + &source_scope, + hir_old_impl, + args.clone(), + ) + }, + ); + + transform.apply(&syntax); + Some(()) +} + +fn remove_instantiated_params( + self_ty: &ast::Type, + old_impl_params: Option, + old_trait_args: &FxHashSet, +) -> Option { + match self_ty { + ast::Type::PathType(path_type) => { + old_impl_params.and_then(|gpl| { + // Remove generic parameters in field_ty (which is instantiated). + let new_gpl = gpl.clone_for_update(); + + path_type + .path()? + .segments() + .filter_map(|seg| seg.generic_arg_list()) + .flat_map(|it| it.generic_args()) + // However, if the param is also used in the trait arguments, it shouldn't be removed. 
+ .filter(|arg| !old_trait_args.contains(&arg.to_string())) + .for_each(|arg| { + new_gpl.remove_generic_arg(&arg); + }); + (new_gpl.generic_params().count() > 0).then_some(new_gpl) + }) + } + _ => old_impl_params, + } +} + +fn remove_useless_where_clauses(delegate: &ast::Impl, wc: ast::WhereClause) -> Option<()> { + let trait_args = + delegate.trait_()?.generic_arg_list().map(|trait_args| trait_args.generic_args()); + let strukt_args = + delegate.self_ty()?.generic_arg_list().map(|strukt_args| strukt_args.generic_args()); + let used_generic_names = match (trait_args, strukt_args) { + (None, None) => None, + (None, Some(y)) => Some(y.map(|arg| arg.to_string()).collect::>()), + (Some(x), None) => Some(x.map(|arg| arg.to_string()).collect::>()), + (Some(x), Some(y)) => Some(x.chain(y).map(|arg| arg.to_string()).collect::>()), + }; + + // Keep clauses that have generic clauses after substitution, and remove the rest + if let Some(used_generic_names) = used_generic_names { + wc.predicates() + .filter(|pred| { + pred.syntax() + .descendants_with_tokens() + .filter_map(|e| e.into_token()) + .find(|e| { + e.kind() == SyntaxKind::IDENT && used_generic_names.contains(&e.to_string()) + }) + .is_none() + }) + .for_each(|pred| { + wc.remove_predicate(pred); + }); + } else { + wc.predicates().for_each(|pred| wc.remove_predicate(pred)); + } + + if wc.predicates().count() == 0 { + // Remove useless whitespaces + wc.syntax() + .siblings_with_tokens(syntax::Direction::Prev) + .skip(1) + .take_while(|node_or_tok| node_or_tok.kind() == SyntaxKind::WHITESPACE) + .for_each(|ws| ted::remove(ws)); + wc.syntax() + .siblings_with_tokens(syntax::Direction::Next) + .skip(1) + .take_while(|node_or_tok| node_or_tok.kind() == SyntaxKind::WHITESPACE) + .for_each(|ws| ted::remove(ws)); + ted::insert( + ted::Position::after(wc.syntax()), + NodeOrToken::Token(make::token(SyntaxKind::WHITESPACE)), + ); + // Remove where clause + ted::remove(wc.syntax()); + } + + Some(()) +} + +fn get_args_for_impl( + old_impl: &ast::Impl, + field_ty: &ast::Type, + trait_params: &Option, + old_trait_args: &FxHashSet, +) -> Option { + // Generate generic args that should be apply to current impl + // + // For exmaple, if we have `impl Trait for B`, and `b: B` in `S`, + // then the generic `A` should be renamed to `T`. While the last two generic args + // doesn't change, it renames . So we apply `` as generic arguments + // to impl. 
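As a rough, hand-written sketch of the renaming described in the comment above (all trait, type, and parameter names here are hypothetical, not the assist's literal output): the generated delegate impl applies the field's generic arguments in place of the old impl's parameters, then forwards through a fully qualified path.

trait Trait<X, Y> {
    fn get(&self, x: X) -> Y;
}

struct Base<A, B, C> {
    a: A,
    b: B,
    c: C,
}

impl<A, B, C: Clone> Trait<B, C> for Base<A, B, C> {
    fn get(&self, _x: B) -> C {
        self.c.clone()
    }
}

// Delegating struct: the old impl's parameters `A, B, C` are renamed to the
// field's arguments `T, U, V` before being applied to the generated impl.
struct S<T, U, V> {
    base: Base<T, U, V>,
}

impl<T, U, V: Clone> Trait<U, V> for S<T, U, V> {
    fn get(&self, x: U) -> V {
        <Base<T, U, V> as Trait<U, V>>::get(&self.base, x)
    }
}

fn main() {
    let s = S { base: Base { a: 1u8, b: "b", c: 3.0f64 } };
    assert_eq!(s.get("x"), 3.0);
}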
+ let old_impl_params = old_impl.generic_param_list(); + let self_ty = old_impl.self_ty(); + + if let (Some(old_impl_gpl), Some(self_ty)) = (old_impl_params, self_ty) { + // Make pair of the arguments of `field_ty` and `old_strukt_args` to + // get the list for substitution + let mut arg_substs = FxHashMap::default(); + + match field_ty { + field_ty @ ast::Type::PathType(_) => { + let field_args = field_ty.generic_arg_list(); + if let (Some(field_args), Some(old_impl_args)) = + (field_args, self_ty.generic_arg_list()) + { + field_args.generic_args().zip(old_impl_args.generic_args()).for_each( + |(field_arg, impl_arg)| { + arg_substs.entry(impl_arg.to_string()).or_insert(field_arg); + }, + ) + } + } + _ => {} + } + + let args = old_impl_gpl + .to_generic_args() + .generic_args() + .map(|old_arg| { + arg_substs.get(&old_arg.to_string()).map_or_else( + || old_arg.clone(), + |replace_with| { + // The old_arg will be replaced, so it becomes redundant + let old_arg_name = old_arg.to_string(); + if old_trait_args.contains(&old_arg_name) { + // However, we should check type bounds and where clauses on old_arg, + // if it has type bound, we should keep the type bound. + // match trait_params.and_then(|params| params.remove_generic_arg(&old_arg)) { + // Some(ast::GenericParam::TypeParam(ty)) => { + // ty.type_bound_list().and_then(|bounds| ) + // } + // _ => {} + // } + if let Some(params) = trait_params { + params.remove_generic_arg(&old_arg); + } + } + replace_with.clone() + }, + ) + }) + .collect_vec(); + args.is_empty().not().then(|| make::generic_arg_list(args.into_iter())) + } else { + None + } +} + +fn subst_name_in_strukt( + ctx: &AssistContext<'_>, + strukt: &ast::Struct, + item: &N, + args: GenericArgList, +) -> Option +where + N: ast::AstNode, +{ + let hir_strukt = ctx.sema.to_struct_def(strukt)?; + let hir_adt = hir::Adt::from(hir_strukt); + + let item = item.clone_for_update(); + let item_scope = ctx.sema.scope(item.syntax())?; + let transform = PathTransform::adt_transformation(&item_scope, &item_scope, hir_adt, args); + transform.apply(&item.syntax()); + Some(item) +} + +fn has_self_type(trait_: hir::Trait, ctx: &AssistContext<'_>) -> Option<()> { + let trait_source = ctx.sema.source(trait_)?.value; + trait_source + .syntax() + .descendants_with_tokens() + .filter_map(|e| e.into_token()) + .find(|e| e.kind() == SyntaxKind::SELF_TYPE_KW) + .map(|_| ()) +} + +fn resolve_conflicts_for_strukt( + strukt: &ast::Struct, + old_impl_params: Option<&ast::GenericParamList>, +) -> Option { + match (strukt.generic_param_list(), old_impl_params) { + (Some(old_strukt_params), Some(old_impl_params)) => { + let params = make::generic_param_list(std::iter::empty()).clone_for_update(); + + for old_strukt_param in old_strukt_params.generic_params() { + // Get old name from `strukt`` + let mut name = SmolStr::from(match &old_strukt_param { + ast::GenericParam::ConstParam(c) => c.name()?.to_string(), + ast::GenericParam::LifetimeParam(l) => { + l.lifetime()?.lifetime_ident_token()?.to_string() + } + ast::GenericParam::TypeParam(t) => t.name()?.to_string(), + }); + + // The new name cannot be conflicted with generics in trait, and the renamed names. 
+ name = suggest_name::for_unique_generic_name(&name, old_impl_params); + name = suggest_name::for_unique_generic_name(&name, ¶ms); + match old_strukt_param { + ast::GenericParam::ConstParam(c) => { + if let Some(const_ty) = c.ty() { + let const_param = make::const_param(make::name(&name), const_ty); + params.add_generic_param(ast::GenericParam::ConstParam( + const_param.clone_for_update(), + )); + } + } + p @ ast::GenericParam::LifetimeParam(_) => { + params.add_generic_param(p.clone_for_update()); + } + ast::GenericParam::TypeParam(t) => { + let type_bounds = t.type_bound_list(); + let type_param = make::type_param(make::name(&name), type_bounds); + params.add_generic_param(ast::GenericParam::TypeParam( + type_param.clone_for_update(), + )); + } + } + } + Some(params) + } + (Some(old_strukt_gpl), None) => Some(old_strukt_gpl), + _ => None, + } +} + fn process_assoc_item( item: syntax::ast::AssocItem, qual_path_ty: ast::Path, @@ -381,10 +702,14 @@ fn const_assoc_item(item: syntax::ast::Const, qual_path_ty: ast::Path) -> Option // >::ConstName; // FIXME : We can't rely on `make::path_qualified` for now but it would be nice to replace the following with it. // make::path_qualified(qual_path_ty, path_expr_segment.as_single_segment().unwrap()); - let qualpath = qualpath(qual_path_ty, path_expr_segment); - let inner = - make::item_const(item.visibility(), item.name()?, item.ty()?, make::expr_path(qualpath)) - .clone_for_update(); + let qualified_path = qualified_path(qual_path_ty, path_expr_segment); + let inner = make::item_const( + item.visibility(), + item.name()?, + item.ty()?, + make::expr_path(qualified_path), + ) + .clone_for_update(); Some(AssocItem::Const(inner)) } @@ -395,7 +720,7 @@ fn func_assoc_item( base_name: &str, ) -> Option { let path_expr_segment = make::path_from_text(item.name()?.to_string().as_str()); - let qualpath = qualpath(qual_path_ty, path_expr_segment); + let qualified_path = qualified_path(qual_path_ty, path_expr_segment); let call = match item.param_list() { // Methods and funcs should be handled separately. 
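To illustrate the qualified-path forwarding that `process_assoc_item` and its helpers build (a minimal, self-contained sketch with hypothetical names; the real assist constructs the `<FieldTy as Trait>::item` paths via `make::path_from_text`): each associated const, type, and function is re-exposed on the wrapper by pointing back at the field's implementation.

trait Named {
    const NAME: &'static str;
    type Id;
    fn id(&self) -> Self::Id;
}

struct Inner;

impl Named for Inner {
    const NAME: &'static str = "inner";
    type Id = u32;
    fn id(&self) -> u32 {
        42
    }
}

struct Wrapper {
    field: Inner,
}

// Each associated item is forwarded through a fully qualified path,
// mirroring the `<Field as Trait>::item` form built by `qualified_path`.
impl Named for Wrapper {
    const NAME: &'static str = <Inner as Named>::NAME;
    type Id = <Inner as Named>::Id;
    fn id(&self) -> Self::Id {
        <Inner as Named>::id(&self.field)
    }
}

fn main() {
    let w = Wrapper { field: Inner };
    assert_eq!(<Wrapper as Named>::NAME, "inner");
    assert_eq!(w.id(), 42);
}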
@@ -413,31 +738,33 @@ fn func_assoc_item( let param_count = l.params().count(); let args = convert_param_list_to_arg_list(l).clone_for_update(); - + let pos_after_l_paren = Position::after(args.l_paren_token()?); if param_count > 0 { // Add SelfParam and a TOKEN::COMMA - ted::insert_all( - Position::after(args.l_paren_token()?), + ted::insert_all_raw( + pos_after_l_paren, vec![ NodeOrToken::Node(tail_expr_self.syntax().clone_for_update()), - NodeOrToken::Token(make::token(SyntaxKind::WHITESPACE)), NodeOrToken::Token(make::token(SyntaxKind::COMMA)), + NodeOrToken::Token(make::token(SyntaxKind::WHITESPACE)), ], ); } else { // Add SelfParam only - ted::insert( - Position::after(args.l_paren_token()?), + ted::insert_raw( + pos_after_l_paren, NodeOrToken::Node(tail_expr_self.syntax().clone_for_update()), ); } - make::expr_call(make::expr_path(qualpath), args) + make::expr_call(make::expr_path(qualified_path), args) + } + None => { + make::expr_call(make::expr_path(qualified_path), convert_param_list_to_arg_list(l)) } - None => make::expr_call(make::expr_path(qualpath), convert_param_list_to_arg_list(l)), }, None => make::expr_call( - make::expr_path(qualpath), + make::expr_path(qualified_path), convert_param_list_to_arg_list(make::param_list(None, Vec::new())), ), } @@ -463,8 +790,8 @@ fn func_assoc_item( fn ty_assoc_item(item: syntax::ast::TypeAlias, qual_path_ty: Path) -> Option { let path_expr_segment = make::path_from_text(item.name()?.to_string().as_str()); - let qualpath = qualpath(qual_path_ty, path_expr_segment); - let ty = make::ty_path(qualpath); + let qualified_path = qualified_path(qual_path_ty, path_expr_segment); + let ty = make::ty_path(qualified_path); let ident = item.name()?.to_string(); let alias = make::ty_alias( @@ -479,7 +806,7 @@ fn ty_assoc_item(item: syntax::ast::TypeAlias, qual_path_ty: Path) -> Option ast::Path { +fn qualified_path(qual_path_ty: ast::Path, path_expr_seg: ast::Path) -> ast::Path { make::path_from_text(&format!("{}::{}", qual_path_ty.to_string(), path_expr_seg.to_string())) } @@ -510,6 +837,29 @@ impl Trait for Base {} ); } + #[test] + fn test_self_ty() { + // trait whith `Self` type cannot be delegated + // + // See the function `fn f() -> Self`. 
+ // It should be `fn f() -> Base` in `Base`, and `fn f() -> S` in `S` + check_assist_not_applicable( + generate_delegate_trait, + r#" +struct Base(()); +struct S(B$0ase); +trait Trait { + fn f() -> Self; +} +impl Trait for Base { + fn f() -> Base { + Base(()) + } +} +"#, + ); + } + #[test] fn test_struct_struct_basic() { check_assist( @@ -628,7 +978,7 @@ unsafe impl Trait for S { } unsafe fn a_method(&self) { - ::a_method( &self.base ) + ::a_method(&self.base) } } @@ -672,6 +1022,245 @@ where ); } + #[test] + fn test_fields_with_generics() { + check_assist( + generate_delegate_trait, + r#" +struct B { + a: T +} + +trait Trait { + fn f(&self, a: T) -> T; +} + +impl Trait for B { + fn f(&self, a: T1) -> T1 { a } +} + +struct A {} +struct S { + b :$0 B, +} +"#, + r#" +struct B { + a: T +} + +trait Trait { + fn f(&self, a: T) -> T; +} + +impl Trait for B { + fn f(&self, a: T1) -> T1 { a } +} + +struct A {} +struct S { + b : B, +} + +impl Trait for S { + fn f(&self, a: T1) -> T1 { + as Trait>::f(&self.b, a) + } +} +"#, + ); + } + + #[test] + fn test_generics_with_conflict_names() { + check_assist( + generate_delegate_trait, + r#" +struct B { + a: T +} + +trait Trait { + fn f(&self, a: T) -> T; +} + +impl Trait for B { + fn f(&self, a: T) -> T { a } +} + +struct S { + b : $0B, +} +"#, + r#" +struct B { + a: T +} + +trait Trait { + fn f(&self, a: T) -> T; +} + +impl Trait for B { + fn f(&self, a: T) -> T { a } +} + +struct S { + b : B, +} + +impl Trait for S { + fn f(&self, a: T) -> T { + as Trait>::f(&self.b, a) + } +} +"#, + ); + } + + #[test] + fn test_lifetime_with_conflict_names() { + check_assist( + generate_delegate_trait, + r#" +struct B<'a, T> { + a: &'a T +} + +trait Trait { + fn f(&self, a: T) -> T; +} + +impl<'a, T, T0> Trait for B<'a, T0> { + fn f(&self, a: T) -> T { a } +} + +struct S<'a, T> { + b : $0B<'a, T>, +} +"#, + r#" +struct B<'a, T> { + a: &'a T +} + +trait Trait { + fn f(&self, a: T) -> T; +} + +impl<'a, T, T0> Trait for B<'a, T0> { + fn f(&self, a: T) -> T { a } +} + +struct S<'a, T> { + b : B<'a, T>, +} + +impl<'a, T, T1> Trait for S<'a, T1> { + fn f(&self, a: T) -> T { + as Trait>::f(&self.b, a) + } +} +"#, + ); + } + + #[test] + fn test_multiple_generics() { + check_assist( + generate_delegate_trait, + r#" +struct B { + a: T1, + b: T2 +} + +trait Trait { + fn f(&self, a: T) -> T; +} + +impl Trait for B { + fn f(&self, a: T) -> T { a } +} + +struct S { + b :$0 B, +} +"#, + r#" +struct B { + a: T1, + b: T2 +} + +trait Trait { + fn f(&self, a: T) -> T; +} + +impl Trait for B { + fn f(&self, a: T) -> T { a } +} + +struct S { + b : B, +} + +impl Trait for S { + fn f(&self, a: i32) -> i32 { + as Trait>::f(&self.b, a) + } +} +"#, + ); + } + + #[test] + fn test_generics_multiplex() { + check_assist( + generate_delegate_trait, + r#" +struct B { + a: T +} + +trait Trait { + fn f(&self, a: T) -> T; +} + +impl Trait for B { + fn f(&self, a: T) -> T { a } +} + +struct S { + b : $0B, +} +"#, + r#" +struct B { + a: T +} + +trait Trait { + fn f(&self, a: T) -> T; +} + +impl Trait for B { + fn f(&self, a: T) -> T { a } +} + +struct S { + b : B, +} + +impl Trait for S { + fn f(&self, a: T0) -> T0 { + as Trait>::f(&self.b, a) + } +} +"#, + ); + } + #[test] fn test_complex_without_where() { check_assist( @@ -719,7 +1308,7 @@ impl<'a, T, const C: usize> Trait<'a, T, C> for S { } fn assoc_method(&self, p: ()) { - >::assoc_method( &self.field , p) + >::assoc_method(&self.field, p) } } @@ -789,7 +1378,7 @@ where } fn assoc_method(&self, p: ()) { - >::assoc_method( &self.field , p) + 
>::assoc_method(&self.field, p) } } @@ -875,7 +1464,7 @@ where } fn assoc_method(&self, p: ()) { - >::assoc_method( &self.field , p) + >::assoc_method(&self.field, p) } } @@ -923,6 +1512,132 @@ where ); } + #[test] + fn test_type_bound_with_generics_1() { + check_assist( + generate_delegate_trait, + r#" +trait AnotherTrait {} +struct B +where + T1: AnotherTrait +{ + a: T, + b: T1 +} + +trait Trait { + fn f(&self, a: T) -> T; +} + +impl Trait for B { + fn f(&self, a: T) -> T { a } +} + +struct S +where + T1: AnotherTrait +{ + b : $0B, +}"#, + r#" +trait AnotherTrait {} +struct B +where + T1: AnotherTrait +{ + a: T, + b: T1 +} + +trait Trait { + fn f(&self, a: T) -> T; +} + +impl Trait for B { + fn f(&self, a: T) -> T { a } +} + +struct S +where + T1: AnotherTrait +{ + b : B, +} + +impl Trait for S +where + T10: AnotherTrait +{ + fn f(&self, a: T) -> T { + as Trait>::f(&self.b, a) + } +}"#, + ); + } + + #[test] + fn test_type_bound_with_generics_2() { + check_assist( + generate_delegate_trait, + r#" +trait AnotherTrait {} +struct B +where + T1: AnotherTrait +{ + b: T1 +} + +trait Trait { + fn f(&self, a: T1) -> T1; +} + +impl Trait for B { + fn f(&self, a: T) -> T { a } +} + +struct S +where + T: AnotherTrait +{ + b : $0B, +}"#, + r#" +trait AnotherTrait {} +struct B +where + T1: AnotherTrait +{ + b: T1 +} + +trait Trait { + fn f(&self, a: T1) -> T1; +} + +impl Trait for B { + fn f(&self, a: T) -> T { a } +} + +struct S +where + T: AnotherTrait +{ + b : B, +} + +impl Trait for S +where + T0: AnotherTrait +{ + fn f(&self, a: T) -> T { + as Trait>::f(&self.b, a) + } +}"#, + ); + } + #[test] fn test_docstring_example() { check_assist( @@ -975,7 +1690,7 @@ impl SomeTrait for B { } fn method_(&mut self) -> bool { - ::method_( &mut self.a ) + ::method_(&mut self.a) } } "#, @@ -1043,7 +1758,7 @@ impl some_module::SomeTrait for B { } fn method_(&mut self) -> bool { - ::method_( &mut self.a ) + ::method_(&mut self.a) } }"#, ) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs index 1a1e992e28a48..2aaf9d0679d34 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs @@ -114,7 +114,7 @@ fn add_variant_to_accumulator( parent: PathParent, ) -> Option<()> { let db = ctx.db(); - let InRealFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?; + let InRealFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node_rooted(db)?; acc.add( AssistId("generate_enum_variant", AssistKind::Generate), diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs index a113c817f7e94..5bb200e84a494 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs @@ -8,20 +8,21 @@ use ide_db::{ famous_defs::FamousDefs, helpers::is_editable_crate, path_transform::PathTransform, + source_change::SourceChangeBuilder, FxHashMap, FxHashSet, RootDatabase, SnippetCap, }; +use itertools::Itertools; use stdx::to_lower_snake_case; use syntax::{ ast::{ - self, - edit::{AstNodeEdit, IndentLevel}, - make, AstNode, CallExpr, HasArgList, HasGenericParams, HasModuleItem, HasTypeBounds, + self, edit::IndentLevel, edit_in_place::Indent, make, 
AstNode, CallExpr, HasArgList, + HasGenericParams, HasModuleItem, HasTypeBounds, }, - SyntaxKind, SyntaxNode, TextRange, TextSize, + ted, SyntaxKind, SyntaxNode, TextRange, T, }; use crate::{ - utils::{convert_reference_type, find_struct_impl, render_snippet, Cursor}, + utils::{convert_reference_type, find_struct_impl}, AssistContext, AssistId, AssistKind, Assists, }; @@ -65,7 +66,7 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { } let fn_name = &*name_ref.text(); - let TargetInfo { target_module, adt_name, target, file, insert_offset } = + let TargetInfo { target_module, adt_name, target, file } = fn_target_info(ctx, path, &call, fn_name)?; if let Some(m) = target_module { @@ -77,16 +78,7 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let function_builder = FunctionBuilder::from_call(ctx, &call, fn_name, target_module, target)?; let text_range = call.syntax().text_range(); let label = format!("Generate {} function", function_builder.fn_name); - add_func_to_accumulator( - acc, - ctx, - text_range, - function_builder, - insert_offset, - file, - adt_name, - label, - ) + add_func_to_accumulator(acc, ctx, text_range, function_builder, file, adt_name, label) } struct TargetInfo { @@ -94,7 +86,6 @@ struct TargetInfo { adt_name: Option, target: GeneratedFunctionTarget, file: FileId, - insert_offset: TextSize, } impl TargetInfo { @@ -103,9 +94,8 @@ impl TargetInfo { adt_name: Option, target: GeneratedFunctionTarget, file: FileId, - insert_offset: TextSize, ) -> Self { - Self { target_module, adt_name, target, file, insert_offset } + Self { target_module, adt_name, target, file } } } @@ -156,7 +146,7 @@ fn gen_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { } let (impl_, file) = get_adt_source(ctx, &adt, fn_name.text().as_str())?; - let (target, insert_offset) = get_method_target(ctx, &impl_, &adt)?; + let target = get_method_target(ctx, &impl_, &adt)?; let function_builder = FunctionBuilder::from_method_call( ctx, @@ -169,16 +159,7 @@ fn gen_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let text_range = call.syntax().text_range(); let adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None }; let label = format!("Generate {} method", function_builder.fn_name); - add_func_to_accumulator( - acc, - ctx, - text_range, - function_builder, - insert_offset, - file, - adt_name, - label, - ) + add_func_to_accumulator(acc, ctx, text_range, function_builder, file, adt_name, label) } fn add_func_to_accumulator( @@ -186,23 +167,28 @@ fn add_func_to_accumulator( ctx: &AssistContext<'_>, text_range: TextRange, function_builder: FunctionBuilder, - insert_offset: TextSize, file: FileId, adt_name: Option, label: String, ) -> Option<()> { - acc.add(AssistId("generate_function", AssistKind::Generate), label, text_range, |builder| { - let indent = IndentLevel::from_node(function_builder.target.syntax()); - let function_template = function_builder.render(adt_name.is_some()); - let mut func = function_template.to_string(ctx.config.snippet_cap); + acc.add(AssistId("generate_function", AssistKind::Generate), label, text_range, |edit| { + edit.edit_file(file); + + let target = function_builder.target.clone(); + let function_template = function_builder.render(); + let func = function_template.to_ast(ctx.config.snippet_cap, edit); + if let Some(name) = adt_name { + let name = make::ty_path(make::ext::ident_path(&format!("{}", name.display(ctx.db())))); + // FIXME: adt may have generic params. 
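For orientation, a minimal before/after sketch of what this path now produces (names hypothetical): triggering "Generate function" on a call to a missing associated function inserts a fresh `impl` block containing the stub, built as syntax nodes rather than spliced text.

struct Foo;

fn caller() -> Foo {
    // Invoking the assist on this call produces the `impl` block below.
    Foo::new()
}

// The stub is wrapped in a new `impl Foo` block; per the FIXME above,
// generic parameters of the ADT are not yet carried over.
impl Foo {
    fn new() -> Foo {
        todo!()
    }
}

fn main() {
    let _ = caller; // not invoked here, since the stub still panics via `todo!()`
}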
- func = format!("\n{indent}impl {} {{\n{func}\n{indent}}}", name.display(ctx.db())); - } - builder.edit_file(file); - match ctx.config.snippet_cap { - Some(cap) => builder.insert_snippet(cap, insert_offset, func), - None => builder.insert(insert_offset, func), + let impl_ = make::impl_(None, None, name, None, None).clone_for_update(); + + func.indent(IndentLevel(1)); + impl_.get_or_create_assoc_item_list().add_item(func.into()); + target.insert_impl_at(edit, impl_); + } else { + target.insert_fn_at(edit, func); } }) } @@ -220,36 +206,33 @@ fn get_adt_source( } struct FunctionTemplate { - leading_ws: String, fn_def: ast::Fn, ret_type: Option, should_focus_return_type: bool, - trailing_ws: String, tail_expr: ast::Expr, } impl FunctionTemplate { - fn to_string(&self, cap: Option) -> String { - let Self { leading_ws, fn_def, ret_type, should_focus_return_type, trailing_ws, tail_expr } = - self; - - let f = match cap { - Some(cap) => { - let cursor = if *should_focus_return_type { - // Focus the return type if there is one - match ret_type { - Some(ret_type) => ret_type.syntax(), - None => tail_expr.syntax(), + fn to_ast(&self, cap: Option, edit: &mut SourceChangeBuilder) -> ast::Fn { + let Self { fn_def, ret_type, should_focus_return_type, tail_expr } = self; + + if let Some(cap) = cap { + if *should_focus_return_type { + // Focus the return type if there is one + match ret_type { + Some(ret_type) => { + edit.add_placeholder_snippet(cap, ret_type.clone()); } - } else { - tail_expr.syntax() - }; - render_snippet(cap, fn_def.syntax(), Cursor::Replace(cursor)) + None => { + edit.add_placeholder_snippet(cap, tail_expr.clone()); + } + } + } else { + edit.add_placeholder_snippet(cap, tail_expr.clone()); } - None => fn_def.to_string(), - }; + } - format!("{leading_ws}{f}{trailing_ws}") + fn_def.clone() } } @@ -356,7 +339,7 @@ impl FunctionBuilder { }) } - fn render(self, is_method: bool) -> FunctionTemplate { + fn render(self) -> FunctionTemplate { let placeholder_expr = make::ext::expr_todo(); let fn_body = make::block_expr(vec![], Some(placeholder_expr)); let visibility = match self.visibility { @@ -364,7 +347,7 @@ impl FunctionBuilder { Visibility::Crate => Some(make::visibility_pub_crate()), Visibility::Pub => Some(make::visibility_pub()), }; - let mut fn_def = make::fn_( + let fn_def = make::fn_( visibility, self.fn_name, self.generic_param_list, @@ -375,34 +358,10 @@ impl FunctionBuilder { self.is_async, false, // FIXME : const and unsafe are not handled yet. 
false, - ); - let leading_ws; - let trailing_ws; - - match self.target { - GeneratedFunctionTarget::BehindItem(it) => { - let mut indent = IndentLevel::from_node(&it); - if is_method { - indent = indent + 1; - leading_ws = format!("{indent}"); - } else { - leading_ws = format!("\n\n{indent}"); - } - - fn_def = fn_def.indent(indent); - trailing_ws = String::new(); - } - GeneratedFunctionTarget::InEmptyItemList(it) => { - let indent = IndentLevel::from_node(&it); - let leading_indent = indent + 1; - leading_ws = format!("\n{leading_indent}"); - fn_def = fn_def.indent(leading_indent); - trailing_ws = format!("\n{indent}"); - } - }; + ) + .clone_for_update(); FunctionTemplate { - leading_ws, ret_type: fn_def.ret_type(), // PANIC: we guarantee we always create a function body with a tail expr tail_expr: fn_def @@ -412,7 +371,6 @@ impl FunctionBuilder { .expect("function body should have a tail expression"), should_focus_return_type: self.should_focus_return_type, fn_def, - trailing_ws, } } } @@ -456,40 +414,37 @@ fn get_fn_target_info( target_module: Option, call: CallExpr, ) -> Option { - let (target, file, insert_offset) = get_fn_target(ctx, target_module, call)?; - Some(TargetInfo::new(target_module, None, target, file, insert_offset)) + let (target, file) = get_fn_target(ctx, target_module, call)?; + Some(TargetInfo::new(target_module, None, target, file)) } fn get_fn_target( ctx: &AssistContext<'_>, target_module: Option, call: CallExpr, -) -> Option<(GeneratedFunctionTarget, FileId, TextSize)> { +) -> Option<(GeneratedFunctionTarget, FileId)> { let mut file = ctx.file_id(); let target = match target_module { Some(target_module) => { - let module_source = target_module.definition_source(ctx.db()); - let (in_file, target) = next_space_for_fn_in_module(ctx.sema.db, &module_source)?; + let (in_file, target) = next_space_for_fn_in_module(ctx.db(), target_module); file = in_file; target } None => next_space_for_fn_after_call_site(ast::CallableExpr::Call(call))?, }; - Some((target.clone(), file, get_insert_offset(&target))) + Some((target.clone(), file)) } fn get_method_target( ctx: &AssistContext<'_>, impl_: &Option, adt: &Adt, -) -> Option<(GeneratedFunctionTarget, TextSize)> { +) -> Option { let target = match impl_ { - Some(impl_) => next_space_for_fn_in_impl(impl_)?, - None => { - GeneratedFunctionTarget::BehindItem(adt.source(ctx.sema.db)?.syntax().value.clone()) - } + Some(impl_) => GeneratedFunctionTarget::InImpl(impl_.clone()), + None => GeneratedFunctionTarget::AfterItem(adt.source(ctx.sema.db)?.syntax().value.clone()), }; - Some((target.clone(), get_insert_offset(&target))) + Some(target) } fn assoc_fn_target_info( @@ -505,36 +460,120 @@ fn assoc_fn_target_info( return None; } let (impl_, file) = get_adt_source(ctx, &adt, fn_name)?; - let (target, insert_offset) = get_method_target(ctx, &impl_, &adt)?; + let target = get_method_target(ctx, &impl_, &adt)?; let adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None }; - Some(TargetInfo::new(target_module, adt_name, target, file, insert_offset)) -} - -fn get_insert_offset(target: &GeneratedFunctionTarget) -> TextSize { - match target { - GeneratedFunctionTarget::BehindItem(it) => it.text_range().end(), - GeneratedFunctionTarget::InEmptyItemList(it) => it.text_range().start() + TextSize::of('{'), - } + Some(TargetInfo::new(target_module, adt_name, target, file)) } #[derive(Clone)] enum GeneratedFunctionTarget { - BehindItem(SyntaxNode), + AfterItem(SyntaxNode), InEmptyItemList(SyntaxNode), + InImpl(ast::Impl), } impl 
GeneratedFunctionTarget { fn syntax(&self) -> &SyntaxNode { match self { - GeneratedFunctionTarget::BehindItem(it) => it, + GeneratedFunctionTarget::AfterItem(it) => it, GeneratedFunctionTarget::InEmptyItemList(it) => it, + GeneratedFunctionTarget::InImpl(it) => it.syntax(), } } fn parent(&self) -> SyntaxNode { match self { - GeneratedFunctionTarget::BehindItem(it) => it.parent().expect("item without parent"), + GeneratedFunctionTarget::AfterItem(it) => it.parent().expect("item without parent"), GeneratedFunctionTarget::InEmptyItemList(it) => it.clone(), + GeneratedFunctionTarget::InImpl(it) => it.syntax().clone(), + } + } + + fn insert_impl_at(&self, edit: &mut SourceChangeBuilder, impl_: ast::Impl) { + match self { + GeneratedFunctionTarget::AfterItem(item) => { + let item = edit.make_syntax_mut(item.clone()); + let position = if item.parent().is_some() { + ted::Position::after(&item) + } else { + ted::Position::first_child_of(&item) + }; + + let indent = IndentLevel::from_node(&item); + let leading_ws = make::tokens::whitespace(&format!("\n{indent}")); + impl_.indent(indent); + + ted::insert_all(position, vec![leading_ws.into(), impl_.syntax().clone().into()]); + } + GeneratedFunctionTarget::InEmptyItemList(item_list) => { + let item_list = edit.make_syntax_mut(item_list.clone()); + let insert_after = + item_list.children_with_tokens().find_or_first(|child| child.kind() == T!['{']); + let position = match insert_after { + Some(child) => ted::Position::after(child), + None => ted::Position::first_child_of(&item_list), + }; + + let indent = IndentLevel::from_node(&item_list); + let leading_indent = indent + 1; + let leading_ws = make::tokens::whitespace(&format!("\n{leading_indent}")); + impl_.indent(indent); + + ted::insert_all(position, vec![leading_ws.into(), impl_.syntax().clone().into()]); + } + GeneratedFunctionTarget::InImpl(_) => { + unreachable!("can't insert an impl inside an impl") + } + } + } + + fn insert_fn_at(&self, edit: &mut SourceChangeBuilder, func: ast::Fn) { + match self { + GeneratedFunctionTarget::AfterItem(item) => { + let item = edit.make_syntax_mut(item.clone()); + let position = if item.parent().is_some() { + ted::Position::after(&item) + } else { + ted::Position::first_child_of(&item) + }; + + let indent = IndentLevel::from_node(&item); + let leading_ws = make::tokens::whitespace(&format!("\n\n{indent}")); + func.indent(indent); + + ted::insert_all_raw( + position, + vec![leading_ws.into(), func.syntax().clone().into()], + ); + } + GeneratedFunctionTarget::InEmptyItemList(item_list) => { + let item_list = edit.make_syntax_mut(item_list.clone()); + let insert_after = + item_list.children_with_tokens().find_or_first(|child| child.kind() == T!['{']); + let position = match insert_after { + Some(child) => ted::Position::after(child), + None => ted::Position::first_child_of(&item_list), + }; + + let indent = IndentLevel::from_node(&item_list); + let leading_indent = indent + 1; + let leading_ws = make::tokens::whitespace(&format!("\n{leading_indent}")); + let trailing_ws = make::tokens::whitespace(&format!("\n{indent}")); + func.indent(leading_indent); + + ted::insert_all( + position, + vec![leading_ws.into(), func.syntax().clone().into(), trailing_ws.into()], + ); + } + GeneratedFunctionTarget::InImpl(impl_) => { + let impl_ = edit.make_mut(impl_.clone()); + + let leading_indent = impl_.indent_level() + 1; + func.indent(leading_indent); + + impl_.get_or_create_assoc_item_list().add_item(func.into()); + } } } } @@ -1026,43 +1065,40 @@ fn 
next_space_for_fn_after_call_site(expr: ast::CallableExpr) -> Option, -) -> Option<(FileId, GeneratedFunctionTarget)> { - let file = module_source.file_id.original_file(db); + db: &dyn hir::db::HirDatabase, + target_module: hir::Module, +) -> (FileId, GeneratedFunctionTarget) { + let module_source = target_module.definition_source(db); + let file = module_source.file_id.original_file(db.upcast()); let assist_item = match &module_source.value { hir::ModuleSource::SourceFile(it) => match it.items().last() { - Some(last_item) => GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()), - None => GeneratedFunctionTarget::BehindItem(it.syntax().clone()), + Some(last_item) => GeneratedFunctionTarget::AfterItem(last_item.syntax().clone()), + None => GeneratedFunctionTarget::AfterItem(it.syntax().clone()), }, hir::ModuleSource::Module(it) => match it.item_list().and_then(|it| it.items().last()) { - Some(last_item) => GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()), - None => GeneratedFunctionTarget::InEmptyItemList(it.item_list()?.syntax().clone()), + Some(last_item) => GeneratedFunctionTarget::AfterItem(last_item.syntax().clone()), + None => { + let item_list = + it.item_list().expect("module definition source should have an item list"); + GeneratedFunctionTarget::InEmptyItemList(item_list.syntax().clone()) + } }, hir::ModuleSource::BlockExpr(it) => { if let Some(last_item) = it.statements().take_while(|stmt| matches!(stmt, ast::Stmt::Item(_))).last() { - GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()) + GeneratedFunctionTarget::AfterItem(last_item.syntax().clone()) } else { GeneratedFunctionTarget::InEmptyItemList(it.syntax().clone()) } } }; - Some((file, assist_item)) -} -fn next_space_for_fn_in_impl(impl_: &ast::Impl) -> Option { - let assoc_item_list = impl_.assoc_item_list()?; - if let Some(last_item) = assoc_item_list.assoc_items().last() { - Some(GeneratedFunctionTarget::BehindItem(last_item.syntax().clone())) - } else { - Some(GeneratedFunctionTarget::InEmptyItemList(assoc_item_list.syntax().clone())) - } + (file, assist_item) } #[derive(Clone, Copy)] diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs index 5b9cc5f66cde1..2eb7089b7c38e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs @@ -315,17 +315,6 @@ fn inline( } else { fn_body.clone_for_update() }; - if let Some(imp) = body.syntax().ancestors().find_map(ast::Impl::cast) { - if !node.syntax().ancestors().any(|anc| &anc == imp.syntax()) { - if let Some(t) = imp.self_ty() { - body.syntax() - .descendants_with_tokens() - .filter_map(NodeOrToken::into_token) - .filter(|tok| tok.kind() == SyntaxKind::SELF_TYPE_KW) - .for_each(|tok| ted::replace(tok, t.syntax())); - } - } - } let usages_for_locals = |local| { Definition::Local(local) .usages(sema) @@ -381,6 +370,27 @@ fn inline( } } + // We should place the following code after last usage of `usages_for_locals` + // because `ted::replace` will change the offset in syntax tree, which makes + // `FileReference` incorrect + if let Some(imp) = + sema.ancestors_with_macros(fn_body.syntax().clone()).find_map(ast::Impl::cast) + { + if !node.syntax().ancestors().any(|anc| &anc == imp.syntax()) { + if let Some(t) = imp.self_ty() { + while let Some(self_tok) = body + .syntax() + .descendants_with_tokens() + .filter_map(NodeOrToken::into_token) + 
.find(|tok| tok.kind() == SyntaxKind::SELF_TYPE_KW) + { + let replace_with = t.clone_subtree().syntax().clone_for_update(); + ted::replace(self_tok, replace_with); + } + } + } + } + let mut func_let_vars: BTreeSet = BTreeSet::new(); // grab all of the local variable declarations in the function @@ -1510,4 +1520,106 @@ fn main() { "#, ); } + + #[test] + fn inline_call_with_multiple_self_types_eq() { + check_assist( + inline_call, + r#" +#[derive(PartialEq, Eq)] +enum Enum { + A, + B, +} + +impl Enum { + fn a_or_b_eq(&self) -> bool { + self == &Self::A || self == &Self::B + } +} + +fn a() -> bool { + Enum::A.$0a_or_b_eq() +} +"#, + r#" +#[derive(PartialEq, Eq)] +enum Enum { + A, + B, +} + +impl Enum { + fn a_or_b_eq(&self) -> bool { + self == &Self::A || self == &Self::B + } +} + +fn a() -> bool { + { + let ref this = Enum::A; + this == &Enum::A || this == &Enum::B + } +} +"#, + ) + } + + #[test] + fn inline_call_with_self_type_in_macros() { + check_assist( + inline_call, + r#" +trait Trait { + fn f(a: T1) -> Self; +} + +macro_rules! impl_from { + ($t: ty) => { + impl Trait<$t> for $t { + fn f(a: $t) -> Self { + a as Self + } + } + }; +} + +struct A {} + +impl_from!(A); + +fn main() { + let a: A = A{}; + let b = >::$0f(a); +} +"#, + r#" +trait Trait { + fn f(a: T1) -> Self; +} + +macro_rules! impl_from { + ($t: ty) => { + impl Trait<$t> for $t { + fn f(a: $t) -> Self { + a as Self + } + } + }; +} + +struct A {} + +impl_from!(A); + +fn main() { + let a: A = A{}; + let b = { + let a = a; + a as A + }; +} +"#, + ) + } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_generic.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_generic.rs index b0d35c02d67ba..b1daa7802ed89 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_generic.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_generic.rs @@ -18,7 +18,7 @@ use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists}; // ``` pub(crate) fn introduce_named_generic(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let impl_trait_type = ctx.find_node_at_offset::()?; - let param = impl_trait_type.syntax().parent().and_then(ast::Param::cast)?; + let param = impl_trait_type.syntax().ancestors().find_map(|node| ast::Param::cast(node))?; let fn_ = param.syntax().ancestors().find_map(ast::Fn::cast)?; let type_bound_list = impl_trait_type.type_bound_list()?; @@ -31,15 +31,16 @@ pub(crate) fn introduce_named_generic(acc: &mut Assists, ctx: &AssistContext<'_> |edit| { let impl_trait_type = edit.make_mut(impl_trait_type); let fn_ = edit.make_mut(fn_); - - let type_param_name = suggest_name::for_generic_parameter(&impl_trait_type); + let fn_generic_param_list = fn_.get_or_create_generic_param_list(); + let type_param_name = + suggest_name::for_impl_trait_as_generic(&impl_trait_type, &fn_generic_param_list); let type_param = make::type_param(make::name(&type_param_name), Some(type_bound_list)) .clone_for_update(); let new_ty = make::ty(&type_param_name).clone_for_update(); ted::replace(impl_trait_type.syntax(), new_ty.syntax()); - fn_.get_or_create_generic_param_list().add_generic_param(type_param.into()); + fn_generic_param_list.add_generic_param(type_param.into()); if let Some(cap) = ctx.config.snippet_cap { if let Some(generic_param) = @@ -111,12 +112,19 @@ fn foo<$0B: Bar #[test] fn replace_impl_trait_with_exist_generic_letter() { - // FIXME: This is wrong, we should pick a different name if the one we - 
// want is already bound. check_assist( introduce_named_generic, r#"fn foo(bar: $0impl Bar) {}"#, - r#"fn foo(bar: B) {}"#, + r#"fn foo(bar: B0) {}"#, + ); + } + + #[test] + fn replace_impl_trait_with_more_exist_generic_letter() { + check_assist( + introduce_named_generic, + r#"fn foo(bar: $0impl Bar) {}"#, + r#"fn foo(bar: B2) {}"#, ); } @@ -149,4 +157,22 @@ fn foo< r#"fn foo<$0F: Foo + Bar>(bar: F) {}"#, ); } + + #[test] + fn replace_impl_with_mut() { + check_assist( + introduce_named_generic, + r#"fn f(iter: &mut $0impl Iterator) {}"#, + r#"fn f<$0I: Iterator>(iter: &mut I) {}"#, + ); + } + + #[test] + fn replace_impl_inside() { + check_assist( + introduce_named_generic, + r#"fn f(x: &mut Vec<$0impl Iterator>) {}"#, + r#"fn f<$0I: Iterator>(x: &mut Vec) {}"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs index 6ed9bd85fcc9c..67fea772c795a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs @@ -11,7 +11,10 @@ use syntax::{ ted, AstNode, WalkEvent, }; -use crate::assist_context::{AssistContext, Assists}; +use crate::{ + assist_context::{AssistContext, Assists}, + utils, +}; // Assist: promote_local_to_const // @@ -79,15 +82,13 @@ pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext<'_>) let name_ref = make::name_ref(&name); for usage in usages { - let Some(usage) = usage.name.as_name_ref().cloned() else { continue }; - if let Some(record_field) = ast::RecordExprField::for_name_ref(&usage) { - let record_field = edit.make_mut(record_field); - let name_expr = - make::expr_path(make::path_from_text(&name)).clone_for_update(); - record_field.replace_expr(name_expr); + let Some(usage_name) = usage.name.as_name_ref().cloned() else { continue }; + if let Some(record_field) = ast::RecordExprField::for_name_ref(&usage_name) { + let name_expr = make::expr_path(make::path_from_text(&name)); + utils::replace_record_field_expr(ctx, edit, record_field, name_expr); } else { - let usage = edit.make_mut(usage); - ted::replace(usage.syntax(), name_ref.clone_for_update().syntax()); + let usage_range = usage.range; + edit.replace(usage_range, name_ref.syntax().text()); } } } @@ -212,6 +213,76 @@ fn main() { ) } + #[test] + fn usage_in_macro() { + check_assist( + promote_local_to_const, + r" +macro_rules! identity { + ($body:expr) => { + $body + } +} + +fn baz() -> usize { + let $0foo = 2; + identity![foo] +} +", + r" +macro_rules! identity { + ($body:expr) => { + $body + } +} + +fn baz() -> usize { + const $0FOO: usize = 2; + identity![FOO] +} +", + ) + } + + #[test] + fn usage_shorthand_in_macro() { + check_assist( + promote_local_to_const, + r" +struct Foo { + foo: usize, +} + +macro_rules! identity { + ($body:expr) => { + $body + }; +} + +fn baz() -> Foo { + let $0foo = 2; + identity![Foo { foo }] +} +", + r" +struct Foo { + foo: usize, +} + +macro_rules! 
identity { + ($body:expr) => { + $body + }; +} + +fn baz() -> Foo { + const $0FOO: usize = 2; + identity![Foo { foo: FOO }] +} +", + ) + } + #[test] fn not_applicable_non_const_meth_call() { cov_mark::check!(promote_local_non_const); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs index ee44064e7c5e7..859ed1476c457 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs @@ -423,7 +423,7 @@ mod z { struct X(); struct Y(); mod z { - use super::{X}; + use super::X; fn w() { let x = X(); @@ -495,7 +495,7 @@ struct X(); mod y { struct Y(); mod z { - use crate::{X}; + use crate::X; fn f() { let x = X(); } @@ -526,7 +526,7 @@ struct X(); mod y { struct Y(); mod z { - use crate::{y::Y}; + use crate::y::Y; fn f() { let y = Y(); } @@ -536,6 +536,184 @@ mod y { ); } + #[test] + fn remove_unused_auto_remove_brace_nested() { + check_assist( + remove_unused_imports, + r#" +mod a { + pub struct A(); +} +mod b { + struct F(); + mod c { + $0use {{super::{{ + {d::{{{{{{{S, U}}}}}}}}, + {{{{e::{H, L, {{{R}}}}}}}}, + F, super::a::A + }}}};$0 + fn f() { + let f = F(); + let l = L(); + let a = A(); + let s = S(); + let h = H(); + } + } + + mod d { + pub struct S(); + pub struct U(); + } + + mod e { + pub struct H(); + pub struct L(); + pub struct R(); + } +} +"#, + r#" +mod a { + pub struct A(); +} +mod b { + struct F(); + mod c { + use super::{ + d::S, + e::{H, L}, + F, super::a::A + }; + fn f() { + let f = F(); + let l = L(); + let a = A(); + let s = S(); + let h = H(); + } + } + + mod d { + pub struct S(); + pub struct U(); + } + + mod e { + pub struct H(); + pub struct L(); + pub struct R(); + } +} +"#, + ); + } + + #[test] + fn remove_comma_after_auto_remove_brace() { + check_assist( + remove_unused_imports, + r#" +mod m { + pub mod x { + pub struct A; + pub struct B; + } + pub mod y { + pub struct C; + } +} + +$0use m::{ + x::{A, B}, + y::C, +};$0 + +fn main() { + B; +} +"#, + r#" +mod m { + pub mod x { + pub struct A; + pub struct B; + } + pub mod y { + pub struct C; + } +} + +use m:: + x::B +; + +fn main() { + B; +} +"#, + ); + check_assist( + remove_unused_imports, + r#" +mod m { + pub mod x { + pub struct A; + pub struct B; + } + pub mod y { + pub struct C; + pub struct D; + } + pub mod z { + pub struct E; + pub struct F; + } +} + +$0use m::{ + x::{A, B}, + y::{C, D,}, + z::{E, F}, +};$0 + +fn main() { + B; + C; + F; +} +"#, + r#" +mod m { + pub mod x { + pub struct A; + pub struct B; + } + pub mod y { + pub struct C; + pub struct D; + } + pub mod z { + pub struct E; + pub struct F; + } +} + +use m::{ + x::B, + y::C, + z::F, +}; + +fn main() { + B; + C; + F; +} +"#, + ); + } + #[test] fn remove_nested_all_unused() { check_assist( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs index b1daaea1ed1b2..09759019baa7a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs @@ -1,4 +1,7 @@ -use syntax::ast::{self, AstNode}; +use syntax::{ + ast::{self, make, AstNode}, + ted, +}; use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, 
Assists}; @@ -42,19 +45,34 @@ pub(crate) fn replace_is_method_with_if_let_method( suggest_name::for_variable(&receiver, &ctx.sema) }; - let target = call_expr.syntax().text_range(); - let (assist_id, message, text) = if name_ref.text() == "is_some" { ("replace_is_some_with_if_let_some", "Replace `is_some` with `if let Some`", "Some") } else { ("replace_is_ok_with_if_let_ok", "Replace `is_ok` with `if let Ok`", "Ok") }; - acc.add(AssistId(assist_id, AssistKind::RefactorRewrite), message, target, |edit| { - let var_name = format!("${{0:{}}}", var_name); - let replacement = format!("let {}({}) = {}", text, var_name, receiver); - edit.replace(target, replacement); - }) + acc.add( + AssistId(assist_id, AssistKind::RefactorRewrite), + message, + call_expr.syntax().text_range(), + |edit| { + let call_expr = edit.make_mut(call_expr); + + let var_pat = make::ident_pat(false, false, make::name(&var_name)); + let pat = make::tuple_struct_pat(make::ext::ident_path(text), [var_pat.into()]); + let let_expr = make::expr_let(pat.into(), receiver).clone_for_update(); + + if let Some(cap) = ctx.config.snippet_cap { + if let Some(ast::Pat::TupleStructPat(pat)) = let_expr.pat() { + if let Some(first_var) = pat.fields().next() { + edit.add_placeholder_snippet(cap, first_var); + } + } + } + + ted::replace(call_expr.syntax(), let_expr.syntax()); + }, + ) } _ => return None, } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs index 25b3d6d9da91c..95b9eb52948f8 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs @@ -5,13 +5,14 @@ mod sourcegen; use expect_test::expect; use hir::Semantics; use ide_db::{ - base_db::{fixture::WithFixture, FileId, FileRange, SourceDatabaseExt}, + base_db::{FileId, FileRange, SourceDatabaseExt}, imports::insert_use::{ImportGranularity, InsertUseConfig}, source_change::FileSystemEdit, RootDatabase, SnippetCap, }; use stdx::{format_to, trim_indent}; use syntax::TextRange; +use test_fixture::WithFixture; use test_utils::{assert_eq_text, extract_offset}; use crate::{ @@ -504,16 +505,33 @@ pub fn test_some_range(a: int) -> bool { TextEdit { indels: [ Indel { - insert: "let $0var_name = 5;\n ", - delete: 45..45, + insert: "let", + delete: 45..47, }, Indel { insert: "var_name", - delete: 59..60, + delete: 48..60, + }, + Indel { + insert: "=", + delete: 61..81, + }, + Indel { + insert: "5;\n if let 2..6 = var_name {\n true\n } else {\n false\n }", + delete: 82..108, }, ], }, - None, + Some( + SnippetEdit( + [ + ( + 0, + 49..49, + ), + ], + ), + ), ), }, file_system_edits: [], @@ -566,16 +584,33 @@ pub fn test_some_range(a: int) -> bool { TextEdit { indels: [ Indel { - insert: "let $0var_name = 5;\n ", - delete: 45..45, + insert: "let", + delete: 45..47, }, Indel { insert: "var_name", - delete: 59..60, + delete: 48..60, + }, + Indel { + insert: "=", + delete: 61..81, + }, + Indel { + insert: "5;\n if let 2..6 = var_name {\n true\n } else {\n false\n }", + delete: 82..108, }, ], }, - None, + Some( + SnippetEdit( + [ + ( + 0, + 49..49, + ), + ], + ), + ), ), }, file_system_edits: [], diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs index da5822bba9c88..0c2331796f9ea 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs @@ -1153,7 +1153,7 @@ impl 
SomeTrait for B { } fn method_(&mut self) -> bool { - <A as SomeTrait>::method_( &mut self.a ) + <A as SomeTrait>::method_(&mut self.a) } } "#####, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs index f51e99a914e2b..927a8e3c19a18 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs @@ -813,3 +813,21 @@ fn test_required_hashes() { assert_eq!(3, required_hashes("#ab\"##c")); assert_eq!(5, required_hashes("#ab\"##\"####c")); } + +/// Replaces the record expression, handling field shorthands, including inside macros. +pub(crate) fn replace_record_field_expr( + ctx: &AssistContext<'_>, + edit: &mut SourceChangeBuilder, + record_field: ast::RecordExprField, + initializer: ast::Expr, +) { + if let Some(ast::Expr::PathExpr(path_expr)) = record_field.expr() { + // replace field shorthand + let file_range = ctx.sema.original_range(path_expr.syntax()); + edit.insert(file_range.range.end(), format!(": {}", initializer.syntax().text())) + } else if let Some(expr) = record_field.expr() { + // just replace expr + let file_range = ctx.sema.original_range(expr.syntax()); + edit.replace(file_range.range, initializer.syntax().text()); + } +} diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs index 16704d598ef4a..b4c6cbff2a4fa 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs @@ -1,5 +1,7 @@ //! This module contains functions to suggest names for expressions, functions and other items +use std::collections::HashSet; + use hir::Semantics; use ide_db::RootDatabase; use itertools::Itertools; @@ -58,12 +60,59 @@ const USELESS_METHODS: &[&str] = &[ "into_future", ]; -pub(crate) fn for_generic_parameter(ty: &ast::ImplTraitType) -> SmolStr { +/// Suggest a unique name for a generic parameter. +/// +/// `existing_params` is used to check whether the name conflicts with existing +/// generic parameters. +/// +/// If the name conflicts with an existing generic parameter, the conflict is +/// resolved by appending a number suffix, e.g. +/// `T`, `T0`, `T1`, ... +pub(crate) fn for_unique_generic_name( + name: &str, + existing_params: &ast::GenericParamList, +) -> SmolStr { + let param_names = existing_params + .generic_params() + .map(|param| match param { + ast::GenericParam::TypeParam(t) => t.name().unwrap().to_string(), + p => p.to_string(), + }) + .collect::<HashSet<_>>(); + let mut name = name.to_string(); + let base_len = name.len(); + let mut count = 0; + while param_names.contains(&name) { + name.truncate(base_len); + name.push_str(&count.to_string()); + count += 1; + } + + name.into() +} + +/// Suggest a name for an impl trait type. +/// +/// `existing_params` is used to check whether the name conflicts with existing +/// generic parameters. +/// +/// # Current implementation +/// +/// In the current implementation, the name is taken from the first character +/// of the first type bound's name. +/// +/// If the name conflicts with an existing generic parameter, the conflict is +/// resolved with `for_unique_generic_name`.
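+/// +/// # Example (illustrative) +/// +/// The sketch below is an editorial illustration of the behaviour described above and is not part of the original change; `impl_trait_ty` and `existing_params` are assumed to be the AST nodes for `impl Into<String>` and an existing parameter list `<I>`. +/// +/// ```ignore +/// // The first bound starts with 'I', which conflicts with the existing `I`, +/// // so `for_unique_generic_name` falls back to the suffixed name `I0`. +/// let name = for_impl_trait_as_generic(&impl_trait_ty, &existing_params); +/// assert_eq!(name, "I0"); +/// ```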
+pub(crate) fn for_impl_trait_as_generic( + ty: &ast::ImplTraitType, + existing_params: &ast::GenericParamList, +) -> SmolStr { let c = ty .type_bound_list() .and_then(|bounds| bounds.syntax().text().char_at(0.into())) .unwrap_or('T'); - c.encode_utf8(&mut [0; 4]).into() + + for_unique_generic_name(c.encode_utf8(&mut [0; 4]), existing_params) } /// Suggest name of variable for given expression @@ -275,7 +324,8 @@ fn from_field_name(expr: &ast::Expr) -> Option { #[cfg(test)] mod tests { - use ide_db::base_db::{fixture::WithFixture, FileRange}; + use ide_db::base_db::FileRange; + use test_fixture::WithFixture; use super::*; diff --git a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml index 60f90a41b962f..7fbcf3d19e0f1 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml @@ -35,3 +35,7 @@ expect-test = "1.4.0" # local deps test-utils.workspace = true +test-fixture.workspace = true + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs index 466f0b1fb7f9b..9155caa2e0b85 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs @@ -26,6 +26,7 @@ mod cfg; mod derive; mod lint; mod repr; +mod macro_use; pub(crate) use self::derive::complete_derive_path; @@ -35,6 +36,7 @@ pub(crate) fn complete_known_attribute_input( ctx: &CompletionContext<'_>, &colon_prefix: &bool, fake_attribute_under_caret: &ast::Attr, + extern_crate: Option<&ast::ExternCrate>, ) -> Option<()> { let attribute = fake_attribute_under_caret; let name_ref = match attribute.path() { @@ -66,6 +68,9 @@ pub(crate) fn complete_known_attribute_input( lint::complete_lint(acc, ctx, colon_prefix, &existing_lints, &lints); } "cfg" => cfg::complete_cfg(acc, ctx), + "macro_use" => { + macro_use::complete_macro_use(acc, ctx, extern_crate, &parse_tt_as_comma_sep_paths(tt)?) + } _ => (), } Some(()) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/macro_use.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/macro_use.rs new file mode 100644 index 0000000000000..f45f9cba258de --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/macro_use.rs @@ -0,0 +1,35 @@ +//! 
Completion for macros in `#[macro_use(...)]` +use hir::ModuleDef; +use ide_db::SymbolKind; +use syntax::ast; + +use crate::{context::CompletionContext, item::CompletionItem, Completions}; + +pub(super) fn complete_macro_use( + acc: &mut Completions, + ctx: &CompletionContext<'_>, + extern_crate: Option<&ast::ExternCrate>, + existing_imports: &[ast::Path], +) { + let Some(extern_crate) = extern_crate else { return }; + let Some(extern_crate) = ctx.sema.to_def(extern_crate) else { return }; + let Some(krate) = extern_crate.resolved_crate(ctx.db) else { return }; + + for mod_def in krate.root_module().declarations(ctx.db) { + if let ModuleDef::Macro(mac) = mod_def { + let mac_name = mac.name(ctx.db); + let Some(mac_name) = mac_name.as_str() else { continue }; + + let existing_import = existing_imports + .iter() + .filter_map(|p| p.as_single_name_ref()) + .find(|n| n.text() == mac_name); + if existing_import.is_some() { + continue; + } + + let item = CompletionItem::new(SymbolKind::Macro, ctx.source_range(), mac_name); + item.add_to(acc, ctx.db); + } + } +} diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs index 613a35dcb1088..53a1c8405c2c1 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs @@ -27,6 +27,8 @@ pub(crate) fn complete_dot( } let is_field_access = matches!(dot_access.kind, DotAccessKind::Field { .. }); + let is_method_acces_with_parens = + matches!(dot_access.kind, DotAccessKind::Method { has_parens: true }); complete_fields( acc, @@ -35,6 +37,7 @@ pub(crate) fn complete_dot( |acc, field, ty| acc.add_field(ctx, dot_access, None, field, &ty), |acc, field, ty| acc.add_tuple_field(ctx, None, field, &ty), is_field_access, + is_method_acces_with_parens, ); complete_methods(ctx, receiver_ty, |func| acc.add_method(ctx, dot_access, func, None, None)); @@ -83,6 +86,7 @@ pub(crate) fn complete_undotted_self( }, |acc, field, ty| acc.add_tuple_field(ctx, Some(hir::known::SELF_PARAM), field, &ty), true, + false, ); complete_methods(ctx, &ty, |func| { acc.add_method( @@ -106,12 +110,14 @@ fn complete_fields( mut named_field: impl FnMut(&mut Completions, hir::Field, hir::Type), mut tuple_index: impl FnMut(&mut Completions, usize, hir::Type), is_field_access: bool, + is_method_acess_with_parens: bool, ) { let mut seen_names = FxHashSet::default(); for receiver in receiver.autoderef(ctx.db) { for (field, ty) in receiver.fields(ctx.db) { if seen_names.insert(field.name(ctx.db)) - && (is_field_access || ty.is_fn() || ty.is_closure()) + && (is_field_access + || (is_method_acess_with_parens && (ty.is_fn() || ty.is_closure()))) { named_field(acc, field, ty); } @@ -120,7 +126,8 @@ fn complete_fields( // Tuples are always the last type in a deref chain, so just check if the name is // already seen without inserting into the hashset. if !seen_names.contains(&hir::Name::new_tuple_field(i)) - && (is_field_access || ty.is_fn() || ty.is_closure()) + && (is_field_access + || (is_method_acess_with_parens && (ty.is_fn() || ty.is_closure()))) { // Tuple fields are always public (tuple struct fields are handled above). 
tuple_index(acc, i, ty); @@ -1236,4 +1243,24 @@ fn foo() { "#, ) } + + #[test] + fn test_fn_field_dot_access_method_has_parens_false() { + check( + r#" +struct Foo { baz: fn() } +impl Foo { + fn bar(self, t: T): T { t } +} + +fn baz() { + let foo = Foo{ baz: || {} }; + foo.ba$0::<>; +} +"#, + expect![[r#" + me bar(…) fn(self, T) + "#]], + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs index 0da7ba6d0001a..108b040de6bad 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs @@ -371,6 +371,7 @@ pub(super) enum CompletionAnalysis { UnexpandedAttrTT { colon_prefix: bool, fake_attribute_under_caret: Option, + extern_crate: Option, }, } @@ -693,7 +694,7 @@ impl<'a> CompletionContext<'a> { let krate = scope.krate(); let module = scope.module(); - let toolchain = db.crate_graph()[krate.into()].channel; + let toolchain = db.crate_graph()[krate.into()].channel(); // `toolchain == None` means we're in some detached files. Since we have no information on // the toolchain being used, let's just allow unstable items to be listed. let is_nightly = matches!(toolchain, Some(base_db::ReleaseChannel::Nightly) | None); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs index 1e6b2f319aad7..7da6648365740 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs @@ -1,7 +1,7 @@ //! Module responsible for analyzing the code surrounding the cursor for completion. use std::iter; -use hir::{HasSource, Semantics, Type, TypeInfo, Variant}; +use hir::{Semantics, Type, TypeInfo, Variant}; use ide_db::{active_parameter::ActiveParameter, RootDatabase}; use syntax::{ algo::{find_node_at_offset, non_trivia_sibling}, @@ -254,11 +254,13 @@ fn analyze( { let colon_prefix = previous_non_trivia_token(self_token.clone()) .map_or(false, |it| T![:] == it.kind()); + CompletionAnalysis::UnexpandedAttrTT { fake_attribute_under_caret: fake_ident_token .parent_ancestors() .find_map(ast::Attr::cast), colon_prefix, + extern_crate: p.ancestors().find_map(ast::ExternCrate::cast), } } else { return None; @@ -359,7 +361,12 @@ fn expected_type_and_name( let ty = it.pat() .and_then(|pat| sema.type_of_pat(&pat)) .or_else(|| it.initializer().and_then(|it| sema.type_of_expr(&it))) - .map(TypeInfo::original); + .map(TypeInfo::original) + .filter(|ty| { + // don't infer the let type if the expr is a function, + // preventing parenthesis from vanishing + it.ty().is_some() || !ty.is_fn() + }); let name = match it.pat() { Some(ast::Pat::IdentPat(ident)) => ident.name().map(NameOrNameRef::Name), Some(_) | None => None, @@ -413,20 +420,16 @@ fn expected_type_and_name( })().unwrap_or((None, None)) }, ast::RecordExprField(it) => { + let field_ty = sema.resolve_record_field(&it).map(|(_, _, ty)| ty); + let field_name = it.field_name().map(NameOrNameRef::NameRef); if let Some(expr) = it.expr() { cov_mark::hit!(expected_type_struct_field_with_leading_char); - ( - sema.type_of_expr(&expr).map(TypeInfo::original), - it.field_name().map(NameOrNameRef::NameRef), - ) + let ty = field_ty + .or_else(|| sema.type_of_expr(&expr).map(TypeInfo::original)); + (ty, field_name) } else { cov_mark::hit!(expected_type_struct_field_followed_by_comma); - let ty = sema.resolve_record_field(&it) 
- .map(|(_, _, ty)| ty); - ( - ty, - it.field_name().map(NameOrNameRef::NameRef), - ) + (field_ty, field_name) } }, // match foo { $0 } @@ -740,13 +743,13 @@ fn classify_name_ref( match sema.resolve_path(&segment.parent_path().top_path())? { hir::PathResolution::Def(def) => match def { hir::ModuleDef::Function(func) => { - func.source(sema.db)?.value.generic_param_list() + sema.source(func)?.value.generic_param_list() } hir::ModuleDef::Adt(adt) => { - adt.source(sema.db)?.value.generic_param_list() + sema.source(adt)?.value.generic_param_list() } hir::ModuleDef::Variant(variant) => { - variant.parent_enum(sema.db).source(sema.db)?.value.generic_param_list() + sema.source(variant.parent_enum(sema.db))?.value.generic_param_list() } hir::ModuleDef::Trait(trait_) => { if let ast::GenericArg::AssocTypeArg(arg) = &arg { @@ -772,14 +775,14 @@ fn classify_name_ref( return None; } else { in_trait = Some(trait_); - trait_.source(sema.db)?.value.generic_param_list() + sema.source(trait_)?.value.generic_param_list() } } hir::ModuleDef::TraitAlias(trait_) => { - trait_.source(sema.db)?.value.generic_param_list() + sema.source(trait_)?.value.generic_param_list() } hir::ModuleDef::TypeAlias(ty_) => { - ty_.source(sema.db)?.value.generic_param_list() + sema.source(ty_)?.value.generic_param_list() } _ => None, }, @@ -788,7 +791,7 @@ fn classify_name_ref( }, ast::MethodCallExpr(call) => { let func = sema.resolve_method_call(&call)?; - func.source(sema.db)?.value.generic_param_list() + sema.source(func)?.value.generic_param_list() }, ast::AssocTypeArg(arg) => { let trait_ = ast::PathSegment::cast(arg.syntax().parent()?.parent()?)?; @@ -805,7 +808,7 @@ fn classify_name_ref( }, _ => None, })?; - assoc_ty.source(sema.db)?.value.generic_param_list() + sema.source(*assoc_ty)?.value.generic_param_list() } _ => None, }, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs index 37a2828e8dc8f..ff324e7a56d63 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs @@ -211,12 +211,14 @@ pub fn completions( CompletionAnalysis::UnexpandedAttrTT { colon_prefix, fake_attribute_under_caret: Some(attr), + extern_crate, } => { completions::attribute::complete_known_attribute_input( acc, ctx, colon_prefix, attr, + extern_crate.as_ref(), ); } CompletionAnalysis::UnexpandedAttrTT { .. } | CompletionAnalysis::String { .. 
} => (), diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs index 2ea3f74d18bce..581d557e831ad 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs @@ -837,11 +837,11 @@ fn main() { } "#, expect![[r#" - fn main [] - fn test [] + fn main() [] + fn test(…) [] md dep [] fn function (use dep::test_mod_a::function) [requires_import] - fn function (use dep::test_mod_b::function) [requires_import] + fn function(…) (use dep::test_mod_b::function) [requires_import] "#]], ); } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs index d23ed71fdcc65..b306bede653be 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs @@ -305,12 +305,15 @@ fn params( return None; } - // Don't add parentheses if the expected type is some function reference. - if let Some(ty) = &ctx.expected_type { - // FIXME: check signature matches? - if ty.is_fn() { - cov_mark::hit!(no_call_parens_if_fn_ptr_needed); - return None; + // Don't add parentheses if the expected type is a function reference with the same signature. + if let Some(expected) = ctx.expected_type.as_ref().filter(|e| e.is_fn()) { + if let Some(expected) = expected.as_callable(ctx.db) { + if let Some(completed) = func.ty(ctx.db).as_callable(ctx.db) { + if expected.sig() == completed.sig() { + cov_mark::hit!(no_call_parens_if_fn_ptr_needed); + return None; + } + } } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs index f28afacc586ff..f13754e2ded05 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs @@ -26,12 +26,13 @@ mod visibility; use expect_test::Expect; use hir::PrefixKind; use ide_db::{ - base_db::{fixture::ChangeFixture, FileLoader, FilePosition}, + base_db::{FileLoader, FilePosition}, imports::insert_use::{ImportGranularity, InsertUseConfig}, RootDatabase, SnippetCap, }; use itertools::Itertools; use stdx::{format_to, trim_indent}; +use test_fixture::ChangeFixture; use test_utils::assert_eq_text; use crate::{ diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs index d8c134c533b34..351abe9850b98 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs @@ -1067,3 +1067,82 @@ mod repr { ); } } + +mod macro_use { + use super::*; + + #[test] + fn completes_macros() { + check( + r#" +//- /dep.rs crate:dep +#[macro_export] +macro_rules! foo { + () => {}; +} + +#[macro_export] +macro_rules! bar { + () => {}; +} + +//- /main.rs crate:main deps:dep +#[macro_use($0)] +extern crate dep; +"#, + expect![[r#" + ma bar + ma foo + "#]], + ) + } + + #[test] + fn only_completes_exported_macros() { + check( + r#" +//- /dep.rs crate:dep +#[macro_export] +macro_rules! foo { + () => {}; +} + +macro_rules! 
bar { + () => {}; +} + +//- /main.rs crate:main deps:dep +#[macro_use($0)] +extern crate dep; +"#, + expect![[r#" + ma foo + "#]], + ) + } + + #[test] + fn does_not_completes_already_imported_macros() { + check( + r#" +//- /dep.rs crate:dep +#[macro_export] +macro_rules! foo { + () => {}; +} + +#[macro_export] +macro_rules! bar { + () => {}; +} + +//- /main.rs crate:main deps:dep +#[macro_use(foo, $0)] +extern crate dep; +"#, + expect![[r#" + ma bar + "#]], + ) + } +} diff --git a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml index 4a2e770f193a0..f14d9ed1b9334 100644 --- a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml @@ -16,11 +16,11 @@ cov-mark = "2.0.0-pre.1" tracing.workspace = true rayon.workspace = true fst = { version = "0.4.7", default-features = false } -rustc-hash = "1.1.0" +rustc-hash.workspace = true once_cell = "1.17.0" either.workspace = true itertools.workspace = true -arrayvec = "0.7.2" +arrayvec.workspace = true indexmap.workspace = true memchr = "2.6.4" triomphe.workspace = true @@ -34,6 +34,7 @@ profile.workspace = true stdx.workspace = true syntax.workspace = true text-edit.workspace = true +span.workspace = true # ide should depend only on the top-level `hir` package. if you need # something from some `hir-xxx` subpackage, reexport the API via `hir`. hir.workspace = true @@ -47,4 +48,8 @@ xshell.workspace = true # local deps test-utils.workspace = true +test-fixture.workspace = true sourcegen.workspace = true + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs index 343be870c9eea..db6cd128e83d3 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs @@ -5,13 +5,13 @@ use base_db::{ debug::{DebugQueryTable, TableEntry}, Database, Durability, Query, QueryTable, }, - Change, SourceRootId, + SourceRootId, }; use profile::{memory_usage, Bytes}; use rustc_hash::FxHashSet; use triomphe::Arc; -use crate::{symbol_index::SymbolsDatabase, RootDatabase}; +use crate::{symbol_index::SymbolsDatabase, Change, RootDatabase}; impl RootDatabase { pub fn request_cancellation(&mut self) { @@ -23,7 +23,7 @@ impl RootDatabase { let _p = profile::span("RootDatabase::apply_change"); self.request_cancellation(); tracing::trace!("apply_change {:?}", change); - if let Some(roots) = &change.roots { + if let Some(roots) = &change.source_change.roots { let mut local_roots = FxHashSet::default(); let mut library_roots = FxHashSet::default(); for (idx, root) in roots.iter().enumerate() { @@ -87,7 +87,6 @@ impl RootDatabase { // SourceDatabase base_db::ParseQuery base_db::CrateGraphQuery - base_db::ProcMacrosQuery // SourceDatabaseExt base_db::FileTextQuery @@ -104,6 +103,7 @@ impl RootDatabase { hir::db::MacroArgQuery hir::db::ParseMacroExpansionQuery hir::db::RealSpanMapQuery + hir::db::ProcMacrosQuery // DefDatabase hir::db::FileItemTreeQuery diff --git a/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs b/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs index 26f3cd28a276f..cc8e8431708ab 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs @@ -138,15 +138,13 @@ pub fn docs_from_attrs(attrs: &hir::Attrs) -> Option { for doc in docs { // str::lines doesn't yield anything for the 
empty string if !doc.is_empty() { - buf.extend(Itertools::intersperse( - doc.lines().map(|line| { - line.char_indices() - .nth(indent) - .map_or(line, |(offset, _)| &line[offset..]) - .trim_end() - }), - "\n", - )); + // We don't trim trailing whitespace from doc comments as multiple trailing spaces + // indicates a hard line break in Markdown. + let lines = doc.lines().map(|line| { + line.char_indices().nth(indent).map_or(line, |(offset, _)| &line[offset..]) + }); + + buf.extend(Itertools::intersperse(lines, "\n")); } buf.push('\n'); } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs index 01d2f1970c38e..a3abce8964247 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs @@ -1,6 +1,6 @@ -use base_db::fixture::WithFixture; use hir::PrefixKind; use stdx::trim_indent; +use test_fixture::WithFixture; use test_utils::{assert_eq_text, CURSOR_MARKER}; use super::*; diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs index fefc05e535505..128971994f64b 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs @@ -43,6 +43,8 @@ pub mod syntax_helpers { pub use parser::LexedStr; } +pub use hir::Change; + use std::{fmt, mem::ManuallyDrop}; use base_db::{ diff --git a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs index fb4c0c12691db..8c1a6e6e40b88 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs @@ -82,6 +82,34 @@ impl<'a> PathTransform<'a> { } } + pub fn impl_transformation( + target_scope: &'a SemanticsScope<'a>, + source_scope: &'a SemanticsScope<'a>, + impl_: hir::Impl, + generic_arg_list: ast::GenericArgList, + ) -> PathTransform<'a> { + PathTransform { + source_scope, + target_scope, + generic_def: Some(impl_.into()), + substs: get_type_args_from_arg_list(generic_arg_list).unwrap_or_default(), + } + } + + pub fn adt_transformation( + target_scope: &'a SemanticsScope<'a>, + source_scope: &'a SemanticsScope<'a>, + adt: hir::Adt, + generic_arg_list: ast::GenericArgList, + ) -> PathTransform<'a> { + PathTransform { + source_scope, + target_scope, + generic_def: Some(adt.into()), + substs: get_type_args_from_arg_list(generic_arg_list).unwrap_or_default(), + } + } + pub fn generic_transformation( target_scope: &'a SemanticsScope<'a>, source_scope: &'a SemanticsScope<'a>, diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs index d2b6a732689c1..7f28965885ad2 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs @@ -22,9 +22,10 @@ //! Our current behavior is ¯\_(ツ)_/¯. 
use std::fmt; -use base_db::{span::SyntaxContextId, AnchoredPathBuf, FileId, FileRange}; +use base_db::{AnchoredPathBuf, FileId, FileRange}; use either::Either; use hir::{FieldSource, HasSource, HirFileIdExt, InFile, ModuleSource, Semantics}; +use span::SyntaxContextId; use stdx::{never, TupleExt}; use syntax::{ ast::{self, HasName}, @@ -515,7 +516,7 @@ fn source_edit_from_def( if let Definition::Local(local) = def { let mut file_id = None; for source in local.sources(sema.db) { - let source = match source.source.clone().original_ast_node(sema.db) { + let source = match source.source.clone().original_ast_node_rooted(sema.db) { Some(source) => source, None => match source .source @@ -559,7 +560,7 @@ fn source_edit_from_def( } } else { // Foo { ref mut field } -> Foo { field: ref mut new_name } - // ^ insert `field: ` + // original_ast_node_rootedd: ` // ^^^^^ replace this with `new_name` edit.insert( pat.syntax().text_range().start(), diff --git a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs index be8566b759cf3..f5f0f0576f224 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs @@ -378,9 +378,9 @@ impl Query { #[cfg(test)] mod tests { - use base_db::fixture::WithFixture; use expect_test::expect_file; use hir::symbols::SymbolCollector; + use test_fixture::WithFixture; use super::*; @@ -414,6 +414,12 @@ impl Struct { fn impl_fn() {} } +struct StructT; + +impl StructT { + fn generic_impl_fn() {} +} + trait Trait { fn trait_fn(&self); } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt index c9875c7f8f29c..f0b97779c736d 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt +++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt @@ -23,12 +23,12 @@ ), ptr: SyntaxNodePtr { kind: TYPE_ALIAS, - range: 397..417, + range: 470..490, }, name_ptr: AstPtr( SyntaxNodePtr { kind: NAME, - range: 402..407, + range: 475..480, }, ), }, @@ -51,12 +51,12 @@ ), ptr: SyntaxNodePtr { kind: CONST, - range: 340..361, + range: 413..434, }, name_ptr: AstPtr( SyntaxNodePtr { kind: NAME, - range: 346..351, + range: 419..424, }, ), }, @@ -79,12 +79,12 @@ ), ptr: SyntaxNodePtr { kind: CONST, - range: 520..592, + range: 593..665, }, name_ptr: AstPtr( SyntaxNodePtr { kind: NAME, - range: 526..542, + range: 599..615, }, ), }, @@ -139,12 +139,12 @@ ), ptr: SyntaxNodePtr { kind: USE_TREE, - range: 654..676, + range: 727..749, }, name_ptr: AstPtr( SyntaxNodePtr { kind: NAME, - range: 663..676, + range: 736..749, }, ), }, @@ -197,12 +197,12 @@ ), ptr: SyntaxNodePtr { kind: STATIC, - range: 362..396, + range: 435..469, }, name_ptr: AstPtr( SyntaxNodePtr { kind: NAME, - range: 369..375, + range: 442..448, }, ), }, @@ -276,7 +276,7 @@ Struct( Struct { id: StructId( - 4, + 5, ), }, ), @@ -287,12 +287,12 @@ ), ptr: SyntaxNodePtr { kind: STRUCT, - range: 318..336, + range: 391..409, }, name_ptr: AstPtr( SyntaxNodePtr { kind: NAME, - range: 325..335, + range: 398..408, }, ), }, @@ -308,7 +308,7 @@ Struct( Struct { id: StructId( - 5, + 6, ), }, ), @@ -319,12 +319,12 @@ ), ptr: SyntaxNodePtr { kind: STRUCT, - range: 555..581, + range: 628..654, }, name_ptr: AstPtr( SyntaxNodePtr { kind: NAME, - range: 562..580, + range: 635..653, }, ), }, @@ -340,7 +340,37 @@ Struct( Struct { 
id: StructId( - 6, + 7, + ), + }, + ), + ), + loc: DeclarationLocation { + hir_file_id: FileId( + 0, + ), + ptr: SyntaxNodePtr { + kind: STRUCT, + range: 552..580, + }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 559..579, + }, + ), + }, + container_name: None, + is_alias: false, + is_assoc: false, + }, + FileSymbol { + name: "StructT", + def: Adt( + Struct( + Struct { + id: StructId( + 2, ), }, ), @@ -351,12 +381,12 @@ ), ptr: SyntaxNodePtr { kind: STRUCT, - range: 479..507, + range: 261..279, }, name_ptr: AstPtr( SyntaxNodePtr { kind: NAME, - range: 486..506, + range: 268..275, }, ), }, @@ -379,12 +409,12 @@ ), ptr: SyntaxNodePtr { kind: TRAIT, - range: 261..300, + range: 334..373, }, name_ptr: AstPtr( SyntaxNodePtr { kind: NAME, - range: 267..272, + range: 340..345, }, ), }, @@ -409,12 +439,12 @@ ), ptr: SyntaxNodePtr { kind: USE_TREE, - range: 682..696, + range: 755..769, }, name_ptr: AstPtr( SyntaxNodePtr { kind: NAME, - range: 691..696, + range: 764..769, }, ), }, @@ -469,12 +499,12 @@ ), ptr: SyntaxNodePtr { kind: MODULE, - range: 419..457, + range: 492..530, }, name_ptr: AstPtr( SyntaxNodePtr { kind: NAME, - range: 423..428, + range: 496..501, }, ), }, @@ -499,12 +529,12 @@ ), ptr: SyntaxNodePtr { kind: MODULE, - range: 594..604, + range: 667..677, }, name_ptr: AstPtr( SyntaxNodePtr { kind: NAME, - range: 598..603, + range: 671..676, }, ), }, @@ -542,6 +572,36 @@ is_alias: false, is_assoc: false, }, + FileSymbol { + name: "generic_impl_fn", + def: Function( + Function { + id: FunctionId( + 3, + ), + }, + ), + loc: DeclarationLocation { + hir_file_id: FileId( + 0, + ), + ptr: SyntaxNodePtr { + kind: FN, + range: 307..330, + }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 310..325, + }, + ), + }, + container_name: Some( + "StructT", + ), + is_alias: false, + is_assoc: true, + }, FileSymbol { name: "impl_fn", def: Function( @@ -566,7 +626,9 @@ }, ), }, - container_name: None, + container_name: Some( + "Struct", + ), is_alias: false, is_assoc: true, }, @@ -615,12 +677,12 @@ ), ptr: SyntaxNodePtr { kind: FN, - range: 302..338, + range: 375..411, }, name_ptr: AstPtr( SyntaxNodePtr { kind: NAME, - range: 305..309, + range: 378..382, }, ), }, @@ -645,12 +707,12 @@ ), ptr: SyntaxNodePtr { kind: USE_TREE, - range: 611..648, + range: 684..721, }, name_ptr: AstPtr( SyntaxNodePtr { kind: NAME, - range: 628..648, + range: 701..721, }, ), }, @@ -673,12 +735,12 @@ ), ptr: SyntaxNodePtr { kind: FN, - range: 279..298, + range: 352..371, }, name_ptr: AstPtr( SyntaxNodePtr { kind: NAME, - range: 282..290, + range: 355..363, }, ), }, @@ -705,7 +767,7 @@ Struct( Struct { id: StructId( - 2, + 3, ), }, ), @@ -716,12 +778,12 @@ ), ptr: SyntaxNodePtr { kind: STRUCT, - range: 435..455, + range: 508..528, }, name_ptr: AstPtr( SyntaxNodePtr { kind: NAME, - range: 442..454, + range: 515..527, }, ), }, @@ -776,7 +838,7 @@ Struct( Struct { id: StructId( - 3, + 4, ), }, ), @@ -836,7 +898,7 @@ Struct( Struct { id: StructId( - 3, + 4, ), }, ), @@ -866,7 +928,7 @@ Struct( Struct { id: StructId( - 3, + 4, ), }, ), diff --git a/src/tools/rust-analyzer/crates/ide-db/src/traits.rs b/src/tools/rust-analyzer/crates/ide-db/src/traits.rs index 9abbc34414291..bbdfd81d653f6 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/traits.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/traits.rs @@ -113,10 +113,11 @@ fn assoc_item_of_trait( #[cfg(test)] mod tests { - use base_db::{fixture::ChangeFixture, FilePosition}; + use base_db::FilePosition; use expect_test::{expect, Expect}; use 
hir::Semantics; use syntax::ast::{self, AstNode}; + use test_fixture::ChangeFixture; use crate::RootDatabase; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml index f4055024cc32c..3ed48457a2842 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml @@ -32,7 +32,11 @@ expect-test = "1.4.0" # local deps test-utils.workspace = true +test-fixture.workspace = true sourcegen.workspace = true [features] in-rust-tree = [] + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs index 820014391467e..c202264bb566a 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs @@ -76,4 +76,24 @@ impl Marker for Foo { "#, ) } + + #[test] + fn dont_work_for_negative_impl() { + check_diagnostics( + r#" +trait Marker { + const FLAG: bool = false; + fn boo(); + fn foo () {} +} +struct Foo; +impl !Marker for Foo { + type T = i32; + const FLAG: bool = true; + fn bar() {} + fn boo() {} +} + "#, + ) + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_assoc_item.rs new file mode 100644 index 0000000000000..f1c95993c843e --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_assoc_item.rs @@ -0,0 +1,52 @@ +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; + +// Diagnostic: unresolved-assoc-item +// +// This diagnostic is triggered if the referenced associated item does not exist. 
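+// +// For example (an illustrative snippet added here for documentation purposes, not part of the original change): +// +// ```rust +// struct S; +// fn main() { +// let _ = S::Assoc; // error: no such associated item +// } +// ```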
+pub(crate) fn unresolved_assoc_item( + ctx: &DiagnosticsContext<'_>, + d: &hir::UnresolvedAssocItem, +) -> Diagnostic { + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0599"), + "no such associated item", + d.expr_or_pat.clone().map(Into::into), + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::check_diagnostics; + + #[test] + fn bare() { + check_diagnostics( + r#" +struct S; + +fn main() { + let _ = S::Assoc; + //^^^^^^^^ error: no such associated item +} +"#, + ); + } + + #[test] + fn unimplemented_trait() { + check_diagnostics( + r#" +struct S; +trait Foo { + const X: u32; +} + +fn main() { + let _ = S::X; + //^^^^ error: no such associated item +} +"#, + ); + } +} diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs index 464b0a710ea7b..60a45a05a4a12 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs @@ -1,11 +1,14 @@ -use hir::{db::ExpandDatabase, HirDisplay}; +use hir::{db::ExpandDatabase, AssocItem, HirDisplay, InFile}; use ide_db::{ assists::{Assist, AssistId, AssistKind}, base_db::FileRange, label::Label, source_change::SourceChange, }; -use syntax::{ast, AstNode, TextRange}; +use syntax::{ + ast::{self, make, HasArgList}, + AstNode, SmolStr, TextRange, +}; use text_edit::TextEdit; use crate::{adjusted_display_range_new, Diagnostic, DiagnosticCode, DiagnosticsContext}; @@ -17,15 +20,17 @@ pub(crate) fn unresolved_method( ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall, ) -> Diagnostic { - let field_suffix = if d.field_with_same_name.is_some() { + let suffix = if d.field_with_same_name.is_some() { ", but a field with a similar name exists" + } else if d.assoc_func_with_same_name.is_some() { + ", but an associated function with a similar name exists" } else { "" }; Diagnostic::new( DiagnosticCode::RustcHardError("E0599"), format!( - "no method `{}` on type `{}`{field_suffix}", + "no method `{}` on type `{}`{suffix}", d.name.display(ctx.sema.db), d.receiver.display(ctx.sema.db) ), @@ -46,11 +51,27 @@ pub(crate) fn unresolved_method( } fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -> Option<Vec<Assist>> { - if let Some(ty) = &d.field_with_same_name { + let field_fix = if let Some(ty) = &d.field_with_same_name { field_fix(ctx, d, ty) } else { // FIXME: add quickfix None + }; + + let assoc_func_fix = assoc_func_fix(ctx, d); + + let mut fixes = vec![]; + if let Some(field_fix) = field_fix { + fixes.push(field_fix); + } + if let Some(assoc_func_fix) = assoc_func_fix { + fixes.push(assoc_func_fix); + } + + if fixes.is_empty() { + None + } else { + Some(fixes) } } @@ -58,7 +79,7 @@ fn field_fix( ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall, ty: &hir::Type, -) -> Option<Vec<Assist>> { +) -> Option<Assist> { if !ty.impls_fnonce(ctx.sema.db) { return None; } @@ -78,7 +99,7 @@ fn field_fix( } _ => return None, }; - Some(vec![Assist { + Some(Assist { id: AssistId("expected-method-found-field-fix", AssistKind::QuickFix), label: Label::new("Use parentheses to call the value of the field".to_string()), group: None, @@ -88,13 +109,180 @@ fn field_fix( (file_id, TextEdit::insert(range.end(), ")".to_owned())), ])), trigger_signature_help: false, - }]) + }) +} + +fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -> Option<Assist> { + if let Some(assoc_item_id) =
d.assoc_func_with_same_name { + let db = ctx.sema.db; + + let expr_ptr = &d.expr; + let root = db.parse_or_expand(expr_ptr.file_id); + let expr: ast::Expr = expr_ptr.value.to_node(&root); + + let call = ast::MethodCallExpr::cast(expr.syntax().clone())?; + let range = InFile::new(expr_ptr.file_id, call.syntax().text_range()) + .original_node_file_range_rooted(db) + .range; + + let receiver = call.receiver()?; + let receiver_type = &ctx.sema.type_of_expr(&receiver)?.original; + + let need_to_take_receiver_as_first_arg = match hir::AssocItem::from(assoc_item_id) { + AssocItem::Function(f) => { + let assoc_fn_params = f.assoc_fn_params(db); + if assoc_fn_params.is_empty() { + false + } else { + assoc_fn_params + .first() + .map(|first_arg| { + // For generic type, say `Box`, take `Box::into_raw(b: Self)` as example, + // type of `b` is `Self`, which is `Box`, containing unspecified generics. + // However, type of `receiver` is specified, it could be `Box` or something like that, + // so `first_arg.ty() == receiver_type` evaluate to `false` here. + // Here add `first_arg.ty().as_adt() == receiver_type.as_adt()` as guard, + // apply `.as_adt()` over `Box` or `Box` gets `Box`, so we get `true` here. + + // FIXME: it fails when type of `b` is `Box` with other generic param different from `receiver` + first_arg.ty() == receiver_type + || first_arg.ty().as_adt() == receiver_type.as_adt() + }) + .unwrap_or(false) + } + } + _ => false, + }; + + let mut receiver_type_adt_name = receiver_type.as_adt()?.name(db).to_smol_str().to_string(); + + let generic_parameters: Vec = receiver_type.generic_parameters(db).collect(); + // if receiver should be pass as first arg in the assoc func, + // we could omit generic parameters cause compiler can deduce it automatically + if !need_to_take_receiver_as_first_arg && !generic_parameters.is_empty() { + let generic_parameters = generic_parameters.join(", ").to_string(); + receiver_type_adt_name = + format!("{}::<{}>", receiver_type_adt_name, generic_parameters); + } + + let method_name = call.name_ref()?; + let assoc_func_call = format!("{}::{}()", receiver_type_adt_name, method_name); + + let assoc_func_call = make::expr_path(make::path_from_text(&assoc_func_call)); + + let args: Vec<_> = if need_to_take_receiver_as_first_arg { + std::iter::once(receiver).chain(call.arg_list()?.args()).collect() + } else { + call.arg_list()?.args().collect() + }; + let args = make::arg_list(args); + + let assoc_func_call_expr_string = make::expr_call(assoc_func_call, args).to_string(); + + let file_id = ctx.sema.original_range_opt(call.receiver()?.syntax())?.file_id; + + Some(Assist { + id: AssistId("method_call_to_assoc_func_call_fix", AssistKind::QuickFix), + label: Label::new(format!( + "Use associated func call instead: `{}`", + assoc_func_call_expr_string + )), + group: None, + target: range, + source_change: Some(SourceChange::from_text_edit( + file_id, + TextEdit::replace(range, assoc_func_call_expr_string), + )), + trigger_signature_help: false, + }) + } else { + None + } } #[cfg(test)] mod tests { use crate::tests::{check_diagnostics, check_fix}; + #[test] + fn test_assoc_func_fix() { + check_fix( + r#" +struct A {} + +impl A { + fn hello() {} +} +fn main() { + let a = A{}; + a.hello$0(); +} +"#, + r#" +struct A {} + +impl A { + fn hello() {} +} +fn main() { + let a = A{}; + A::hello(); +} +"#, + ); + } + + #[test] + fn test_assoc_func_diagnostic() { + check_diagnostics( + r#" +struct A {} +impl A { + fn hello() {} +} +fn main() { + let a = A{}; + a.hello(); + // ^^^^^ 💡 
error: no method `hello` on type `A`, but an associated function with a similar name exists +} +"#, + ); + } + + #[test] + fn test_assoc_func_fix_with_generic() { + check_fix( + r#" +struct A { + a: T, + b: U +} + +impl A { + fn foo() {} +} +fn main() { + let a = A {a: 0, b: ""}; + a.foo()$0; +} +"#, + r#" +struct A { + a: T, + b: U +} + +impl A { + fn foo() {} +} +fn main() { + let a = A {a: 0, b: ""}; + A::::foo(); +} +"#, + ); + } + #[test] fn smoke_test() { check_diagnostics( diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs index 579386c72ef4d..c7ad09e7ebdd9 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs @@ -51,6 +51,7 @@ mod handlers { pub(crate) mod typed_hole; pub(crate) mod type_mismatch; pub(crate) mod unimplemented_builtin_macro; + pub(crate) mod unresolved_assoc_item; pub(crate) mod unresolved_extern_crate; pub(crate) mod unresolved_field; pub(crate) mod unresolved_method; @@ -371,7 +372,8 @@ pub fn diagnostics( AnyDiagnostic::TypeMismatch(d) => handlers::type_mismatch::type_mismatch(&ctx, &d), AnyDiagnostic::UndeclaredLabel(d) => handlers::undeclared_label::undeclared_label(&ctx, &d), AnyDiagnostic::UnimplementedBuiltinMacro(d) => handlers::unimplemented_builtin_macro::unimplemented_builtin_macro(&ctx, &d), - AnyDiagnostic::UnreachableLabel(d) => handlers::unreachable_label:: unreachable_label(&ctx, &d), + AnyDiagnostic::UnreachableLabel(d) => handlers::unreachable_label::unreachable_label(&ctx, &d), + AnyDiagnostic::UnresolvedAssocItem(d) => handlers::unresolved_assoc_item::unresolved_assoc_item(&ctx, &d), AnyDiagnostic::UnresolvedExternCrate(d) => handlers::unresolved_extern_crate::unresolved_extern_crate(&ctx, &d), AnyDiagnostic::UnresolvedField(d) => handlers::unresolved_field::unresolved_field(&ctx, &d), AnyDiagnostic::UnresolvedImport(d) => handlers::unresolved_import::unresolved_import(&ctx, &d), diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs index 48e0363c9ca8d..67912a3a03eb0 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs @@ -3,12 +3,11 @@ mod sourcegen; use expect_test::Expect; use ide_db::{ - assists::AssistResolveStrategy, - base_db::{fixture::WithFixture, SourceDatabaseExt}, - LineIndexDatabase, RootDatabase, + assists::AssistResolveStrategy, base_db::SourceDatabaseExt, LineIndexDatabase, RootDatabase, }; use itertools::Itertools; use stdx::trim_indent; +use test_fixture::WithFixture; use test_utils::{assert_eq_text, extract_annotations, MiniCore}; use crate::{DiagnosticsConfig, ExprFillDefaultMode, Severity}; diff --git a/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml index 56b29f92b8293..57b1f9465ad3e 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml @@ -31,3 +31,7 @@ expect-test = "1.4.0" # local deps test-utils.workspace = true +test-fixture.workspace = true + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs index 424ba3d7fd506..7c7d146cb4a87 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs @@ -65,8 
+65,8 @@ fn parser_undefined_placeholder_in_replacement() { /// `code` may optionally contain a cursor marker `$0`. If it doesn't, then the position will be /// the start of the file. If there's a second cursor marker, then we'll return a single range. pub(crate) fn single_file(code: &str) -> (ide_db::RootDatabase, FilePosition, Vec) { - use ide_db::base_db::fixture::WithFixture; use ide_db::symbol_index::SymbolsDatabase; + use test_fixture::{WithFixture, WORKSPACE}; let (mut db, file_id, range_or_offset) = if code.contains(test_utils::CURSOR_MARKER) { ide_db::RootDatabase::with_range_or_offset(code) } else { @@ -86,7 +86,7 @@ pub(crate) fn single_file(code: &str) -> (ide_db::RootDatabase, FilePosition, Ve } } let mut local_roots = FxHashSet::default(); - local_roots.insert(ide_db::base_db::fixture::WORKSPACE); + local_roots.insert(WORKSPACE); db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); (db, position, selections) } diff --git a/src/tools/rust-analyzer/crates/ide/Cargo.toml b/src/tools/rust-analyzer/crates/ide/Cargo.toml index 0943574ec1b5b..9f0a2f30f658a 100644 --- a/src/tools/rust-analyzer/crates/ide/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide/Cargo.toml @@ -14,7 +14,7 @@ doctest = false [dependencies] cov-mark = "2.0.0-pre.1" crossbeam-channel = "0.5.5" -arrayvec = "0.7.4" +arrayvec.workspace = true either.workspace = true itertools.workspace = true tracing.workspace = true @@ -50,6 +50,10 @@ expect-test = "1.4.0" # local deps test-utils.workspace = true +test-fixture.workspace = true [features] in-rust-tree = ["ide-assists/in-rust-tree", "ide-diagnostics/in-rust-tree"] + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/ide/src/annotations.rs b/src/tools/rust-analyzer/crates/ide/src/annotations.rs index d7f82b4af3e10..f49c5af0af1f1 100644 --- a/src/tools/rust-analyzer/crates/ide/src/annotations.rs +++ b/src/tools/rust-analyzer/crates/ide/src/annotations.rs @@ -3,8 +3,9 @@ use ide_db::{ base_db::{FileId, FilePosition, FileRange}, defs::Definition, helpers::visit_file_defs, - RootDatabase, + FxHashSet, RootDatabase, }; +use itertools::Itertools; use syntax::{ast::HasName, AstNode, TextRange}; use crate::{ @@ -23,13 +24,13 @@ mod fn_references; // and running/debugging binaries. 
// // image::https://user-images.githubusercontent.com/48062697/113020672-b7c34f00-917a-11eb-8f6e-858735660a0e.png[] -#[derive(Debug)] +#[derive(Debug, Hash, PartialEq, Eq)] pub struct Annotation { pub range: TextRange, pub kind: AnnotationKind, } -#[derive(Debug)] +#[derive(Debug, Hash, PartialEq, Eq)] pub enum AnnotationKind { Runnable(Runnable), HasImpls { pos: FilePosition, data: Option> }, @@ -56,7 +57,7 @@ pub(crate) fn annotations( config: &AnnotationConfig, file_id: FileId, ) -> Vec { - let mut annotations = Vec::default(); + let mut annotations = FxHashSet::default(); if config.annotate_runnables { for runnable in runnables(db, file_id) { @@ -66,7 +67,7 @@ pub(crate) fn annotations( let range = runnable.nav.focus_or_full_range(); - annotations.push(Annotation { range, kind: AnnotationKind::Runnable(runnable) }); + annotations.insert(Annotation { range, kind: AnnotationKind::Runnable(runnable) }); } } @@ -99,13 +100,13 @@ pub(crate) fn annotations( }) .for_each(|range| { let (annotation_range, target_position) = mk_ranges(range); - annotations.push(Annotation { + annotations.insert(Annotation { range: annotation_range, kind: AnnotationKind::HasReferences { pos: target_position, data: None, }, - }) + }); }) } if config.annotate_references || config.annotate_impls { @@ -131,14 +132,14 @@ pub(crate) fn annotations( }; let (annotation_range, target_pos) = mk_ranges(range); if config.annotate_impls && !matches!(def, Definition::Const(_)) { - annotations.push(Annotation { + annotations.insert(Annotation { range: annotation_range, kind: AnnotationKind::HasImpls { pos: target_pos, data: None }, }); } if config.annotate_references { - annotations.push(Annotation { + annotations.insert(Annotation { range: annotation_range, kind: AnnotationKind::HasReferences { pos: target_pos, data: None }, }); @@ -149,7 +150,7 @@ pub(crate) fn annotations( node: InFile, source_file_id: FileId, ) -> Option<(TextRange, Option)> { - if let Some(InRealFile { file_id, value }) = node.original_ast_node(db) { + if let Some(InRealFile { file_id, value }) = node.original_ast_node_rooted(db) { if file_id == source_file_id { return Some(( value.syntax().text_range(), @@ -171,7 +172,7 @@ pub(crate) fn annotations( })); } - annotations + annotations.into_iter().sorted_by_key(|a| (a.range.start(), a.range.end())).collect() } pub(crate) fn resolve_annotation(db: &RootDatabase, mut annotation: Annotation) -> Annotation { @@ -252,25 +253,6 @@ fn main() { "#, expect![[r#" [ - Annotation { - range: 53..57, - kind: Runnable( - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 50..85, - focus_range: 53..57, - name: "main", - kind: Function, - }, - kind: Bin, - cfg: None, - }, - ), - }, Annotation { range: 6..10, kind: HasReferences { @@ -306,6 +288,25 @@ fn main() { ), }, }, + Annotation { + range: 53..57, + kind: Runnable( + Runnable { + use_name_in_title: false, + nav: NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 50..85, + focus_range: 53..57, + name: "main", + kind: Function, + }, + kind: Bin, + cfg: None, + }, + ), + }, Annotation { range: 53..57, kind: HasReferences { @@ -337,28 +338,9 @@ fn main() { "#, expect![[r#" [ - Annotation { - range: 17..21, - kind: Runnable( - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 14..48, - focus_range: 17..21, - name: "main", - kind: Function, - }, - kind: Bin, - cfg: None, - }, - ), - }, Annotation { range: 7..11, - kind: HasImpls { + kind: 
HasReferences { pos: FilePosition { file_id: FileId( 0, @@ -366,13 +348,20 @@ fn main() { offset: 7, }, data: Some( - [], + [ + FileRange { + file_id: FileId( + 0, + ), + range: 41..45, + }, + ], ), }, }, Annotation { range: 7..11, - kind: HasReferences { + kind: HasImpls { pos: FilePosition { file_id: FileId( 0, @@ -380,17 +369,29 @@ fn main() { offset: 7, }, data: Some( - [ - FileRange { - file_id: FileId( - 0, - ), - range: 41..45, - }, - ], + [], ), }, }, + Annotation { + range: 17..21, + kind: Runnable( + Runnable { + use_name_in_title: false, + nav: NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 14..48, + focus_range: 17..21, + name: "main", + kind: Function, + }, + kind: Bin, + cfg: None, + }, + ), + }, Annotation { range: 17..21, kind: HasReferences { @@ -426,28 +427,9 @@ fn main() { "#, expect![[r#" [ - Annotation { - range: 69..73, - kind: Runnable( - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 66..100, - focus_range: 69..73, - name: "main", - kind: Function, - }, - kind: Bin, - cfg: None, - }, - ), - }, Annotation { range: 7..11, - kind: HasImpls { + kind: HasReferences { pos: FilePosition { file_id: FileId( 0, @@ -456,14 +438,17 @@ fn main() { }, data: Some( [ - NavigationTarget { + FileRange { file_id: FileId( 0, ), - full_range: 36..64, - focus_range: 57..61, - name: "impl", - kind: Impl, + range: 57..61, + }, + FileRange { + file_id: FileId( + 0, + ), + range: 93..97, }, ], ), @@ -471,7 +456,7 @@ fn main() { }, Annotation { range: 7..11, - kind: HasReferences { + kind: HasImpls { pos: FilePosition { file_id: FileId( 0, @@ -480,17 +465,14 @@ fn main() { }, data: Some( [ - FileRange { - file_id: FileId( - 0, - ), - range: 57..61, - }, - FileRange { + NavigationTarget { file_id: FileId( 0, ), - range: 93..97, + full_range: 36..64, + focus_range: 57..61, + name: "impl", + kind: Impl, }, ], ), @@ -555,6 +537,25 @@ fn main() { ), }, }, + Annotation { + range: 69..73, + kind: Runnable( + Runnable { + use_name_in_title: false, + nav: NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 66..100, + focus_range: 69..73, + name: "main", + kind: Function, + }, + kind: Bin, + cfg: None, + }, + ), + }, ] "#]], ); @@ -622,28 +623,9 @@ fn main() { "#, expect![[r#" [ - Annotation { - range: 61..65, - kind: Runnable( - Runnable { - use_name_in_title: false, - nav: NavigationTarget { - file_id: FileId( - 0, - ), - full_range: 58..95, - focus_range: 61..65, - name: "main", - kind: Function, - }, - kind: Bin, - cfg: None, - }, - ), - }, Annotation { range: 7..11, - kind: HasImpls { + kind: HasReferences { pos: FilePosition { file_id: FileId( 0, @@ -652,14 +634,17 @@ fn main() { }, data: Some( [ - NavigationTarget { + FileRange { file_id: FileId( 0, ), - full_range: 14..56, - focus_range: 19..23, - name: "impl", - kind: Impl, + range: 19..23, + }, + FileRange { + file_id: FileId( + 0, + ), + range: 74..78, }, ], ), @@ -667,7 +652,7 @@ fn main() { }, Annotation { range: 7..11, - kind: HasReferences { + kind: HasImpls { pos: FilePosition { file_id: FileId( 0, @@ -676,17 +661,14 @@ fn main() { }, data: Some( [ - FileRange { - file_id: FileId( - 0, - ), - range: 19..23, - }, - FileRange { + NavigationTarget { file_id: FileId( 0, ), - range: 74..78, + full_range: 14..56, + focus_range: 19..23, + name: "impl", + kind: Impl, }, ], ), @@ -727,6 +709,25 @@ fn main() { ), }, }, + Annotation { + range: 61..65, + kind: Runnable( + Runnable { + use_name_in_title: false, + nav: NavigationTarget { + file_id: FileId( + 0, + ), 
+ full_range: 58..95, + focus_range: 61..65, + name: "main", + kind: Function, + }, + kind: Bin, + cfg: None, + }, + ), + }, ] "#]], ); @@ -745,6 +746,20 @@ mod tests { "#, expect![[r#" [ + Annotation { + range: 3..7, + kind: HasReferences { + pos: FilePosition { + file_id: FileId( + 0, + ), + offset: 3, + }, + data: Some( + [], + ), + }, + }, Annotation { range: 3..7, kind: Runnable( @@ -812,20 +827,6 @@ mod tests { }, ), }, - Annotation { - range: 3..7, - kind: HasReferences { - pos: FilePosition { - file_id: FileId( - 0, - ), - offset: 3, - }, - data: Some( - [], - ), - }, - }, ] "#]], ); @@ -877,7 +878,7 @@ struct Foo; [ Annotation { range: 0..71, - kind: HasImpls { + kind: HasReferences { pos: FilePosition { file_id: FileId( 0, @@ -891,7 +892,7 @@ struct Foo; }, Annotation { range: 0..71, - kind: HasReferences { + kind: HasImpls { pos: FilePosition { file_id: FileId( 0, diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs index 9760f9daf0a39..a36082bafcf26 100644 --- a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs +++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs @@ -492,7 +492,7 @@ fn get_doc_base_urls( let Some(krate) = def.krate(db) else { return Default::default() }; let Some(display_name) = krate.display_name(db) else { return Default::default() }; let crate_data = &db.crate_graph()[krate.into()]; - let channel = crate_data.channel.map_or("nightly", ReleaseChannel::as_str); + let channel = crate_data.channel().unwrap_or(ReleaseChannel::Nightly).as_str(); let (web_base, local_base) = match &crate_data.origin { // std and co do not specify `html_root_url` any longer so we gotta handwrite this ourself. diff --git a/src/tools/rust-analyzer/crates/ide/src/fixture.rs b/src/tools/rust-analyzer/crates/ide/src/fixture.rs index 2e5903c0602e3..3b19b85c4bc12 100644 --- a/src/tools/rust-analyzer/crates/ide/src/fixture.rs +++ b/src/tools/rust-analyzer/crates/ide/src/fixture.rs @@ -1,5 +1,5 @@ //! Utilities for creating `Analysis` instances for tests. -use ide_db::base_db::fixture::ChangeFixture; +use test_fixture::ChangeFixture; use test_utils::{extract_annotations, RangeOrOffset}; use crate::{Analysis, AnalysisHost, FileId, FilePosition, FileRange}; diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs index 7491879a67fb4..e0beba8fb380f 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs @@ -4,7 +4,7 @@ use crate::{ doc_links::token_as_doc_comment, navigation_target::ToNav, FilePosition, NavigationTarget, RangeInfo, TryToNav, }; -use hir::{AsAssocItem, AssocItem, DescendPreference, Semantics}; +use hir::{AsAssocItem, AssocItem, DescendPreference, ModuleDef, Semantics}; use ide_db::{ base_db::{AnchoredPath, FileId, FileLoader}, defs::{Definition, IdentClass}, @@ -73,10 +73,15 @@ pub(crate) fn goto_definition( .into_iter() .filter_map(|token| { let parent = token.parent()?; + if let Some(tt) = ast::TokenTree::cast(parent.clone()) { if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), file_id) { return Some(vec![x]); } + + if let Some(x) = try_lookup_macro_def_in_macro_use(sema, token.clone()) { + return Some(vec![x]); + } } Some( IdentClass::classify_node(sema, &parent)? 
@@ -140,6 +145,27 @@ fn try_lookup_include_path( }) } +fn try_lookup_macro_def_in_macro_use( + sema: &Semantics<'_, RootDatabase>, + token: SyntaxToken, +) -> Option { + let extern_crate = token.parent()?.ancestors().find_map(ast::ExternCrate::cast)?; + let extern_crate = sema.to_def(&extern_crate)?; + let krate = extern_crate.resolved_crate(sema.db)?; + + for mod_def in krate.root_module().declarations(sema.db) { + if let ModuleDef::Macro(mac) = mod_def { + if mac.name(sema.db).as_str() == Some(token.text()) { + if let Some(nav) = mac.try_to_nav(sema.db) { + return Some(nav.call_site); + } + } + } + } + + None +} + /// finds the trait definition of an impl'd item, except function /// e.g. /// ```rust @@ -2081,4 +2107,47 @@ fn test() { "#, ); } + + #[test] + fn goto_macro_def_from_macro_use() { + check( + r#" +//- /main.rs crate:main deps:mac +#[macro_use(foo$0)] +extern crate mac; + +//- /mac.rs crate:mac +#[macro_export] +macro_rules! foo { + //^^^ + () => {}; +} + "#, + ); + + check( + r#" +//- /main.rs crate:main deps:mac +#[macro_use(foo, bar$0, baz)] +extern crate mac; + +//- /mac.rs crate:mac +#[macro_export] +macro_rules! foo { + () => {}; +} + +#[macro_export] +macro_rules! bar { + //^^^ + () => {}; +} + +#[macro_export] +macro_rules! baz { + () => {}; +} + "#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs index 6384db39d7c62..c1a4a7b1fc794 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs @@ -4,7 +4,6 @@ use ide_db::{ helpers::pick_best_token, RootDatabase, }; -use itertools::Itertools; use syntax::{ast, AstNode, SyntaxKind::*, T}; use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav}; @@ -34,10 +33,10 @@ pub(crate) fn goto_implementation( })?; let range = original_token.text_range(); let navs = - sema.descend_into_macros(DescendPreference::None, original_token) - .into_iter() - .filter_map(|token| token.parent().and_then(ast::NameLike::cast)) - .filter_map(|node| match &node { + sema.descend_into_macros_single(DescendPreference::SameText, original_token) + .parent() + .and_then(ast::NameLike::cast) + .and_then(|node| match &node { ast::NameLike::Name(name) => { NameClass::classify(&sema, name).and_then(|class| match class { NameClass::Definition(it) | NameClass::ConstReference(it) => Some(it), @@ -52,8 +51,7 @@ pub(crate) fn goto_implementation( }), ast::NameLike::Lifetime(_) => None, }) - .unique() - .filter_map(|def| { + .and_then(|def| { let navs = match def { Definition::Trait(trait_) => impls_for_trait(&sema, trait_), Definition::Adt(adt) => impls_for_ty(&sema, adt.ty(sema.db)), @@ -75,8 +73,7 @@ pub(crate) fn goto_implementation( }; Some(navs) }) - .flatten() - .collect(); + .unwrap_or_default(); Some(RangeInfo { range, info: navs }) } diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs index a19952e4cae97..6ff16b9e2f713 100644 --- a/src/tools/rust-analyzer/crates/ide/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs @@ -67,6 +67,7 @@ use std::ffi::OsStr; use cfg::CfgOptions; use fetch_crates::CrateInfo; +use hir::Change; use ide_db::{ base_db::{ salsa::{self, ParallelDatabase}, @@ -122,7 +123,7 @@ pub use ide_completion::{ }; pub use ide_db::{ base_db::{ - Cancelled, Change, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange, + Cancelled, CrateGraph, CrateId, Edition, FileChange, FileId, 
FilePosition, FileRange, SourceRoot, SourceRootId, }, documentation::Documentation, @@ -183,7 +184,7 @@ impl AnalysisHost { /// Applies changes to the current state of the world. If there are /// outstanding snapshots, they will be canceled. pub fn apply_change(&mut self, change: Change) { - self.db.apply_change(change) + self.db.apply_change(change); } /// NB: this clears the database diff --git a/src/tools/rust-analyzer/crates/ide/src/markdown_remove.rs b/src/tools/rust-analyzer/crates/ide/src/markdown_remove.rs index 718868c8747b1..fa01875e20486 100644 --- a/src/tools/rust-analyzer/crates/ide/src/markdown_remove.rs +++ b/src/tools/rust-analyzer/crates/ide/src/markdown_remove.rs @@ -6,6 +6,7 @@ use pulldown_cmark::{Event, Parser, Tag}; /// Currently limited in styling, i.e. no ascii tables or lists pub(crate) fn remove_markdown(markdown: &str) -> String { let mut out = String::new(); + out.reserve_exact(markdown.len()); let parser = Parser::new(markdown); for event in parser { @@ -13,10 +14,7 @@ pub(crate) fn remove_markdown(markdown: &str) -> String { Event::Text(text) | Event::Code(text) => out.push_str(&text), Event::SoftBreak => out.push(' '), Event::HardBreak | Event::Rule | Event::End(Tag::CodeBlock(_)) => out.push('\n'), - Event::End(Tag::Paragraph) => { - out.push('\n'); - out.push('\n'); - } + Event::End(Tag::Paragraph) => out.push_str("\n\n"), Event::Start(_) | Event::End(_) | Event::Html(_) @@ -25,7 +23,10 @@ pub(crate) fn remove_markdown(markdown: &str) -> String { } } - if let Some(p) = out.rfind(|c| c != '\n') { + if let Some(mut p) = out.rfind(|c| c != '\n') { + while !out.is_char_boundary(p + 1) { + p += 1; + } out.drain(p + 1..); } @@ -153,4 +154,10 @@ book] or the [Reference]. For more information on the various types of functions and how they're used, consult the Rust book or the Reference."#]].assert_eq(&res); } + + #[test] + fn on_char_boundary() { + expect!["a┘"].assert_eq(&remove_markdown("```text\na┘\n```")); + expect!["وقار"].assert_eq(&remove_markdown("```\nوقار\n```\n")); + } } diff --git a/src/tools/rust-analyzer/crates/ide/src/shuffle_crate_graph.rs b/src/tools/rust-analyzer/crates/ide/src/shuffle_crate_graph.rs index f85700daf1f78..bf6ad47a49527 100644 --- a/src/tools/rust-analyzer/crates/ide/src/shuffle_crate_graph.rs +++ b/src/tools/rust-analyzer/crates/ide/src/shuffle_crate_graph.rs @@ -1,5 +1,6 @@ +use hir::{db::ExpandDatabase, ProcMacros}; use ide_db::{ - base_db::{salsa::Durability, CrateGraph, ProcMacros, SourceDatabase}, + base_db::{salsa::Durability, CrateGraph, SourceDatabase}, FxHashMap, RootDatabase, }; use triomphe::Arc; @@ -39,7 +40,7 @@ pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) { data.is_proc_macro, data.origin.clone(), data.target_layout.clone(), - data.channel, + data.toolchain.clone(), ); new_proc_macros.insert(new_id, proc_macros[&old_id].clone()); map.insert(old_id, new_id); diff --git a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs index 990376a49659d..483fb76d91cf2 100644 --- a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs +++ b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs @@ -646,8 +646,9 @@ mod tests { use std::iter; use expect_test::{expect, Expect}; - use ide_db::base_db::{fixture::ChangeFixture, FilePosition}; + use ide_db::base_db::FilePosition; use stdx::format_to; + use test_fixture::ChangeFixture; use crate::RootDatabase; diff --git a/src/tools/rust-analyzer/crates/ide/src/ssr.rs 
b/src/tools/rust-analyzer/crates/ide/src/ssr.rs index d8d81869a2f81..f0d18fdefa712 100644 --- a/src/tools/rust-analyzer/crates/ide/src/ssr.rs +++ b/src/tools/rust-analyzer/crates/ide/src/ssr.rs @@ -59,10 +59,11 @@ mod tests { use expect_test::expect; use ide_assists::{Assist, AssistResolveStrategy}; use ide_db::{ - base_db::{fixture::WithFixture, salsa::Durability, FileRange}, + base_db::{salsa::Durability, FileRange}, symbol_index::SymbolsDatabase, FxHashSet, RootDatabase, }; + use test_fixture::WithFixture; use triomphe::Arc; use super::ssr_assists; @@ -70,7 +71,7 @@ mod tests { fn get_assists(ra_fixture: &str, resolve: AssistResolveStrategy) -> Vec { let (mut db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(ra_fixture); let mut local_roots = FxHashSet::default(); - local_roots.insert(ide_db::base_db::fixture::WORKSPACE); + local_roots.insert(test_fixture::WORKSPACE); db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); ssr_assists(&db, &resolve, FileRange { file_id, range: range_or_offset.into() }) } diff --git a/src/tools/rust-analyzer/crates/ide/src/status.rs b/src/tools/rust-analyzer/crates/ide/src/status.rs index e7f97ebe6f7bd..b2b305c1d3805 100644 --- a/src/tools/rust-analyzer/crates/ide/src/status.rs +++ b/src/tools/rust-analyzer/crates/ide/src/status.rs @@ -10,7 +10,7 @@ use ide_db::{ debug::{DebugQueryTable, TableEntry}, Query, QueryTable, }, - CrateId, FileId, FileTextQuery, ParseQuery, SourceDatabase, SourceRootId, + CrateData, FileId, FileTextQuery, ParseQuery, SourceDatabase, SourceRootId, }, symbol_index::ModuleSymbolsQuery, }; @@ -54,25 +54,54 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option) -> String { format_to!(buf, "{} block def maps\n", collect_query_count(BlockDefMapQuery.in_db(db))); if let Some(file_id) = file_id { - format_to!(buf, "\nFile info:\n"); + format_to!(buf, "\nCrates for file {}:\n", file_id.index()); let crates = crate::parent_module::crates_for(db, file_id); if crates.is_empty() { format_to!(buf, "Does not belong to any crate"); } let crate_graph = db.crate_graph(); - for krate in crates { - let display_crate = |krate: CrateId| match &crate_graph[krate].display_name { - Some(it) => format!("{it}({})", krate.into_raw()), - None => format!("{}", krate.into_raw()), - }; - format_to!(buf, "Crate: {}\n", display_crate(krate)); - format_to!(buf, "Enabled cfgs: {:?}\n", crate_graph[krate].cfg_options); - let deps = crate_graph[krate] - .dependencies + for crate_id in crates { + let CrateData { + root_file_id, + edition, + version, + display_name, + cfg_options, + potential_cfg_options, + env, + dependencies, + origin, + is_proc_macro, + target_layout, + toolchain, + } = &crate_graph[crate_id]; + format_to!( + buf, + "Crate: {}\n", + match display_name { + Some(it) => format!("{it}({})", crate_id.into_raw()), + None => format!("{}", crate_id.into_raw()), + } + ); + format_to!(buf, " Root module file id: {}\n", root_file_id.index()); + format_to!(buf, " Edition: {}\n", edition); + format_to!(buf, " Version: {}\n", version.as_deref().unwrap_or("n/a")); + format_to!(buf, " Enabled cfgs: {:?}\n", cfg_options); + format_to!(buf, " Potential cfgs: {:?}\n", potential_cfg_options); + format_to!(buf, " Env: {:?}\n", env); + format_to!(buf, " Origin: {:?}\n", origin); + format_to!(buf, " Is a proc macro crate: {}\n", is_proc_macro); + format_to!(buf, " Workspace Target Layout: {:?}\n", target_layout); + format_to!( + buf, + " Workspace Toolchain: {}\n", + toolchain.as_ref().map_or_else(|| "n/a".into(), |v| 
v.to_string()) + ); + let deps = dependencies .iter() - .map(|dep| format!("{}={:?}", dep.name, dep.crate_id)) + .map(|dep| format!("{}={}", dep.name, dep.crate_id.into_raw())) .format(", "); - format_to!(buf, "Dependencies: {}\n", deps); + format_to!(buf, " Dependencies: {}\n", deps); } } diff --git a/src/tools/rust-analyzer/crates/intern/Cargo.toml b/src/tools/rust-analyzer/crates/intern/Cargo.toml index d9184b0fb6fe5..67b4164ce1fe0 100644 --- a/src/tools/rust-analyzer/crates/intern/Cargo.toml +++ b/src/tools/rust-analyzer/crates/intern/Cargo.toml @@ -16,5 +16,8 @@ doctest = false # We need to freeze the version of the crate, as the raw-api feature is considered unstable dashmap.workspace = true hashbrown.workspace = true -rustc-hash = "1.1.0" +rustc-hash.workspace = true triomphe.workspace = true + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/limit/Cargo.toml b/src/tools/rust-analyzer/crates/limit/Cargo.toml index c088869099227..c89722cc40dee 100644 --- a/src/tools/rust-analyzer/crates/limit/Cargo.toml +++ b/src/tools/rust-analyzer/crates/limit/Cargo.toml @@ -11,3 +11,6 @@ rust-version.workspace = true [features] tracking = [] default = ["tracking"] + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml b/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml index 31b9f6c76d067..dcab6328a4e89 100644 --- a/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml +++ b/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml @@ -12,7 +12,7 @@ authors.workspace = true [dependencies] anyhow.workspace = true -crossbeam-channel = "0.5.5" +crossbeam-channel.workspace = true itertools.workspace = true tracing.workspace = true @@ -23,3 +23,9 @@ project-model.workspace = true tt.workspace = true vfs.workspace = true vfs-notify.workspace = true +span.workspace = true + +hir-expand.workspace = true + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs index db9654220dd74..556ed73a04c2c 100644 --- a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs +++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs @@ -5,17 +5,19 @@ use std::{collections::hash_map::Entry, mem, path::Path, sync}; use crossbeam_channel::{unbounded, Receiver}; -use ide::{AnalysisHost, Change, SourceRoot}; +use hir_expand::proc_macro::{ + ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, ProcMacroLoadResult, + ProcMacros, +}; +use ide::{AnalysisHost, SourceRoot}; use ide_db::{ - base_db::{ - span::SpanData, CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, - ProcMacroKind, ProcMacroLoadResult, ProcMacros, - }, - FxHashMap, + base_db::{CrateGraph, Env}, + Change, FxHashMap, }; use itertools::Itertools; use proc_macro_api::{MacroDylib, ProcMacroServer}; use project_model::{CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace}; +use span::Span; use tt::DelimSpan; use vfs::{file_set::FileSetConfig, loader::Handle, AbsPath, AbsPathBuf, VfsPath}; @@ -374,13 +376,13 @@ struct Expander(proc_macro_api::ProcMacro); impl ProcMacroExpander for Expander { fn expand( &self, - subtree: &tt::Subtree, - attrs: Option<&tt::Subtree>, + subtree: &tt::Subtree, + attrs: Option<&tt::Subtree>, env: &Env, - def_site: SpanData, - call_site: SpanData, - mixed_site: SpanData, - ) -> Result, ProcMacroExpansionError> { + def_site: Span, + call_site: Span, + mixed_site: Span, 
+ ) -> Result, ProcMacroExpansionError> { let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect(); match self.0.expand(subtree, attrs, env, def_site, call_site, mixed_site) { Ok(Ok(subtree)) => Ok(subtree), @@ -397,13 +399,13 @@ struct IdentityExpander; impl ProcMacroExpander for IdentityExpander { fn expand( &self, - subtree: &tt::Subtree, - _: Option<&tt::Subtree>, + subtree: &tt::Subtree, + _: Option<&tt::Subtree>, _: &Env, - _: SpanData, - _: SpanData, - _: SpanData, - ) -> Result, ProcMacroExpansionError> { + _: Span, + _: Span, + _: Span, + ) -> Result, ProcMacroExpansionError> { Ok(subtree.clone()) } } @@ -415,13 +417,13 @@ struct EmptyExpander; impl ProcMacroExpander for EmptyExpander { fn expand( &self, - _: &tt::Subtree, - _: Option<&tt::Subtree>, + _: &tt::Subtree, + _: Option<&tt::Subtree>, _: &Env, - call_site: SpanData, - _: SpanData, - _: SpanData, - ) -> Result, ProcMacroExpansionError> { + call_site: Span, + _: Span, + _: Span, + ) -> Result, ProcMacroExpansionError> { Ok(tt::Subtree::empty(DelimSpan { open: call_site, close: call_site })) } } diff --git a/src/tools/rust-analyzer/crates/mbe/Cargo.toml b/src/tools/rust-analyzer/crates/mbe/Cargo.toml index adab1003d103b..2046fa943a8a1 100644 --- a/src/tools/rust-analyzer/crates/mbe/Cargo.toml +++ b/src/tools/rust-analyzer/crates/mbe/Cargo.toml @@ -13,7 +13,7 @@ doctest = false [dependencies] cov-mark = "2.0.0-pre.1" -rustc-hash = "1.1.0" +rustc-hash.workspace = true smallvec.workspace = true tracing.workspace = true @@ -22,6 +22,13 @@ syntax.workspace = true parser.workspace = true tt.workspace = true stdx.workspace = true +span.workspace = true [dev-dependencies] test-utils.workspace = true + +[features] +in-rust-tree = ["parser/in-rust-tree", "syntax/in-rust-tree"] + +[lints] +workspace = true diff --git a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs index f503aecce2c2f..6c3917b37f1fc 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs @@ -20,7 +20,10 @@ fn benchmark_parse_macro_rules() { let rules = macro_rules_fixtures_tt(); let hash: usize = { let _pt = bench("mbe parse macro rules"); - rules.values().map(|it| DeclarativeMacro::parse_macro_rules(it, true).rules.len()).sum() + rules + .values() + .map(|it| DeclarativeMacro::parse_macro_rules(it, true, true).rules.len()) + .sum() }; assert_eq!(hash, 1144); } @@ -38,7 +41,7 @@ fn benchmark_expand_macro_rules() { invocations .into_iter() .map(|(id, tt)| { - let res = rules[&id].expand(&tt, |_| ()); + let res = rules[&id].expand(&tt, |_| (), true, DUMMY); assert!(res.err.is_none()); res.value.token_trees.len() }) @@ -50,7 +53,7 @@ fn benchmark_expand_macro_rules() { fn macro_rules_fixtures() -> FxHashMap> { macro_rules_fixtures_tt() .into_iter() - .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true))) + .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true, true))) .collect() } @@ -64,8 +67,11 @@ fn macro_rules_fixtures_tt() -> FxHashMap .filter_map(ast::MacroRules::cast) .map(|rule| { let id = rule.name().unwrap().to_string(); - let def_tt = - syntax_node_to_token_tree(rule.token_tree().unwrap().syntax(), DummyTestSpanMap); + let def_tt = syntax_node_to_token_tree( + rule.token_tree().unwrap().syntax(), + DummyTestSpanMap, + DUMMY, + ); (id, def_tt) }) .collect() @@ -105,7 +111,7 @@ fn invocation_fixtures( for op in rule.lhs.iter() { collect_from_op(op, &mut subtree, &mut seed); } - if 
it.expand(&subtree, |_| ()).err.is_none() { + if it.expand(&subtree, |_| (), true, DUMMY).err.is_none() { res.push((name.clone(), subtree)); break; } @@ -199,7 +205,7 @@ fn invocation_fixtures( }); parent.token_trees.push(subtree.into()); } - Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. } => {} + Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. } | Op::Length { .. } => {} }; // Simple linear congruential generator for deterministic result diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander.rs b/src/tools/rust-analyzer/crates/mbe/src/expander.rs index 0e755f69bf7d3..60483809dc18d 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/expander.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/expander.rs @@ -16,6 +16,8 @@ pub(crate) fn expand_rules( input: &tt::Subtree, marker: impl Fn(&mut S) + Copy, is_2021: bool, + new_meta_vars: bool, + call_site: S, ) -> ExpandResult> { let mut match_: Option<(matcher::Match, &crate::Rule)> = None; for rule in rules { @@ -25,8 +27,13 @@ pub(crate) fn expand_rules( // If we find a rule that applies without errors, we're done. // Unconditionally returning the transcription here makes the // `test_repeat_bad_var` test fail. - let ExpandResult { value, err: transcribe_err } = - transcriber::transcribe(&rule.rhs, &new_match.bindings, marker); + let ExpandResult { value, err: transcribe_err } = transcriber::transcribe( + &rule.rhs, + &new_match.bindings, + marker, + new_meta_vars, + call_site, + ); if transcribe_err.is_none() { return ExpandResult::ok(value); } @@ -45,11 +52,14 @@ pub(crate) fn expand_rules( if let Some((match_, rule)) = match_ { // if we got here, there was no match without errors let ExpandResult { value, err: transcribe_err } = - transcriber::transcribe(&rule.rhs, &match_.bindings, marker); + transcriber::transcribe(&rule.rhs, &match_.bindings, marker, new_meta_vars, call_site); ExpandResult { value, err: match_.err.or(transcribe_err) } } else { ExpandResult::new( - tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] }, + tt::Subtree { + delimiter: tt::Delimiter::invisible_spanned(call_site), + token_trees: vec![], + }, ExpandError::NoMatchingRule, ) } @@ -121,6 +131,7 @@ enum Binding { #[derive(Debug, Clone, PartialEq, Eq)] enum Fragment { + Empty, /// token fragments are just copy-pasted into the output Tokens(tt::TokenTree), /// Expr ast fragments are surrounded with `()` on insertion to preserve diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs index 012b02a3f87ab..40b4c7cdd656b 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs @@ -63,7 +63,7 @@ use std::rc::Rc; use smallvec::{smallvec, SmallVec}; use syntax::SmolStr; -use tt::Span; +use tt::{DelimSpan, Span}; use crate::{ expander::{Binding, Bindings, ExpandResult, Fragment}, @@ -74,11 +74,7 @@ use crate::{ impl Bindings { fn push_optional(&mut self, name: &SmolStr) { - // FIXME: Do we have a better way to represent an empty token ? 
- // Insert an empty subtree for empty token - let tt = - tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] }.into(); - self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt))); + self.inner.insert(name.clone(), Binding::Fragment(Fragment::Empty)); } fn push_empty(&mut self, name: &SmolStr) { @@ -387,6 +383,7 @@ fn match_loop_inner<'t, S: Span>( eof_items: &mut SmallVec<[MatchState<'t, S>; 1]>, error_items: &mut SmallVec<[MatchState<'t, S>; 1]>, is_2021: bool, + delim_span: tt::DelimSpan, ) { macro_rules! try_push { ($items: expr, $it:expr) => { @@ -474,7 +471,7 @@ fn match_loop_inner<'t, S: Span>( cur_items.push(new_item); } cur_items.push(MatchState { - dot: tokens.iter_delimited(None), + dot: tokens.iter_delimited(delim_span), stack: Default::default(), up: Some(Box::new(item)), sep: separator.clone(), @@ -489,7 +486,7 @@ fn match_loop_inner<'t, S: Span>( if let Ok(subtree) = src.clone().expect_subtree() { if subtree.delimiter.kind == delimiter.kind { item.stack.push(item.dot); - item.dot = tokens.iter_delimited(Some(*delimiter)); + item.dot = tokens.iter_delimited_with(*delimiter); cur_items.push(item); } } @@ -497,7 +494,7 @@ fn match_loop_inner<'t, S: Span>( OpDelimited::Op(Op::Var { kind, name, .. }) => { if let &Some(kind) = kind { let mut fork = src.clone(); - let match_res = match_meta_var(kind, &mut fork, is_2021); + let match_res = match_meta_var(kind, &mut fork, is_2021, delim_span); match match_res.err { None => { // Some meta variables are optional (e.g. vis) @@ -588,7 +585,9 @@ fn match_loop_inner<'t, S: Span>( item.is_error = true; error_items.push(item); } - OpDelimited::Op(Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. }) => { + OpDelimited::Op( + Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. } | Op::Length { .. 
}, + ) => { stdx::never!("metavariable expression in lhs found"); } OpDelimited::Open => { @@ -609,6 +608,7 @@ fn match_loop_inner<'t, S: Span>( } fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, is_2021: bool) -> Match { + let span = src.delimiter.delim_span(); let mut src = TtIter::new(src); let mut stack: SmallVec<[TtIter<'_, S>; 1]> = SmallVec::new(); let mut res = Match::default(); @@ -617,7 +617,7 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, is_2021: let mut bindings_builder = BindingsBuilder::default(); let mut cur_items = smallvec![MatchState { - dot: pattern.iter_delimited(None), + dot: pattern.iter_delimited(span), stack: Default::default(), up: None, sep: None, @@ -648,6 +648,7 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, is_2021: &mut eof_items, &mut error_items, is_2021, + span, ); stdx::always!(cur_items.is_empty()); @@ -761,12 +762,13 @@ fn match_meta_var( kind: MetaVarKind, input: &mut TtIter<'_, S>, is_2021: bool, + delim_span: DelimSpan, ) -> ExpandResult>> { let fragment = match kind { MetaVarKind::Path => { - return input - .expect_fragment(parser::PrefixEntryPoint::Path) - .map(|it| it.map(tt::TokenTree::subtree_or_wrap).map(Fragment::Path)); + return input.expect_fragment(parser::PrefixEntryPoint::Path).map(|it| { + it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path) + }); } MetaVarKind::Ty => parser::PrefixEntryPoint::Ty, MetaVarKind::Pat if is_2021 => parser::PrefixEntryPoint::PatTop, @@ -795,7 +797,7 @@ fn match_meta_var( return input.expect_fragment(parser::PrefixEntryPoint::Expr).map(|tt| { tt.map(|tt| match tt { tt::TokenTree::Leaf(leaf) => tt::Subtree { - delimiter: tt::Delimiter::dummy_invisible(), + delimiter: tt::Delimiter::invisible_spanned(*leaf.span()), token_trees: vec![leaf.into()], }, tt::TokenTree::Subtree(mut s) => { @@ -829,7 +831,7 @@ fn match_meta_var( match neg { None => lit.into(), Some(neg) => tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter::dummy_invisible(), + delimiter: tt::Delimiter::invisible_spanned(*literal.span()), token_trees: vec![neg, lit.into()], }), } @@ -851,18 +853,21 @@ fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &Meta Op::Subtree { tokens, .. } => collect_vars(collector_fun, tokens), Op::Repeat { tokens, .. } => collect_vars(collector_fun, tokens), Op::Literal(_) | Op::Ident(_) | Op::Punct(_) => {} - Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. } => { + Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. } | Op::Length { .. 
} => { stdx::never!("metavariable expression in lhs found"); } } } } impl MetaTemplate { - fn iter_delimited(&self, delimited: Option>) -> OpDelimitedIter<'_, S> { + fn iter_delimited_with(&self, delimiter: tt::Delimiter) -> OpDelimitedIter<'_, S> { + OpDelimitedIter { inner: &self.0, idx: 0, delimited: delimiter } + } + fn iter_delimited(&self, span: tt::DelimSpan) -> OpDelimitedIter<'_, S> { OpDelimitedIter { inner: &self.0, idx: 0, - delimited: delimited.unwrap_or(tt::Delimiter::DUMMY_INVISIBLE), + delimited: tt::Delimiter::invisible_delim_spanned(span), } } } @@ -958,11 +963,13 @@ impl TtIter<'_, S> { self.expect_lifetime() } else { let puncts = self.expect_glued_punct()?; + let delimiter = tt::Delimiter { + open: puncts.first().unwrap().span, + close: puncts.last().unwrap().span, + kind: tt::DelimiterKind::Invisible, + }; let token_trees = puncts.into_iter().map(|p| tt::Leaf::Punct(p).into()).collect(); - Ok(tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter::dummy_invisible(), - token_trees, - })) + Ok(tt::TokenTree::Subtree(tt::Subtree { delimiter, token_trees })) } } else { self.next().ok_or(()).cloned() @@ -977,7 +984,11 @@ impl TtIter<'_, S> { let ident = self.expect_ident_or_underscore()?; Ok(tt::Subtree { - delimiter: tt::Delimiter::dummy_invisible(), + delimiter: tt::Delimiter { + open: punct.span, + close: ident.span, + kind: tt::DelimiterKind::Invisible, + }, token_trees: vec![ tt::Leaf::Punct(*punct).into(), tt::Leaf::Ident(ident.clone()).into(), diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs index 7a3e8653c28ff..6e79cdaa0b9d0 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs @@ -59,12 +59,12 @@ impl Bindings { token_trees: token_trees.clone(), }; Ok(match f { - Fragment::Tokens(_) => unreachable!(), + Fragment::Tokens(_) | Fragment::Empty => unreachable!(), Fragment::Expr(_) => Fragment::Expr, Fragment::Path(_) => Fragment::Path, }(subtree)) } - Binding::Fragment(it @ Fragment::Tokens(_)) => Ok(it.clone()), + Binding::Fragment(it @ (Fragment::Tokens(_) | Fragment::Empty)) => Ok(it.clone()), // emit some reasonable default expansion for missing bindings, // this gives better recovery than emitting the `$fragment-name` verbatim Binding::Missing(it) => Ok({ @@ -87,10 +87,7 @@ impl Bindings { })), // FIXME: Meta and Item should get proper defaults MetaVarKind::Meta | MetaVarKind::Item | MetaVarKind::Tt | MetaVarKind::Vis => { - Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter::DUMMY_INVISIBLE, - token_trees: vec![], - })) + Fragment::Empty } MetaVarKind::Path | MetaVarKind::Ty @@ -131,8 +128,10 @@ pub(super) fn transcribe( template: &MetaTemplate, bindings: &Bindings, marker: impl Fn(&mut S) + Copy, + new_meta_vars: bool, + call_site: S, ) -> ExpandResult> { - let mut ctx = ExpandCtx { bindings, nesting: Vec::new() }; + let mut ctx = ExpandCtx { bindings, nesting: Vec::new(), new_meta_vars, call_site }; let mut arena: Vec> = Vec::new(); expand_subtree(&mut ctx, template, None, &mut arena, marker) } @@ -152,6 +151,8 @@ struct NestingState { struct ExpandCtx<'a, S> { bindings: &'a Bindings, nesting: Vec, + new_meta_vars: bool, + call_site: S, } fn expand_subtree( @@ -206,13 +207,13 @@ fn expand_subtree( Op::Var { name, id, .. 
} => { let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id, marker); err = err.or(e); - push_fragment(arena, fragment); + push_fragment(ctx, arena, fragment); } Op::Repeat { tokens: subtree, kind, separator } => { let ExpandResult { value: fragment, err: e } = expand_repeat(ctx, subtree, *kind, separator, arena, marker); err = err.or(e); - push_fragment(arena, fragment) + push_fragment(ctx, arena, fragment) } Op::Ignore { name, id } => { // Expand the variable, but ignore the result. This registers the repetition count. @@ -225,8 +226,20 @@ fn expand_subtree( arena.push( tt::Leaf::Literal(tt::Literal { text: index.to_string().into(), - // FIXME - span: S::DUMMY, + span: ctx.call_site, + }) + .into(), + ); + } + Op::Length { depth } => { + let length = ctx.nesting.get(ctx.nesting.len() - 1 - depth).map_or(0, |_nest| { + // FIXME: to be implemented + 0 + }); + arena.push( + tt::Leaf::Literal(tt::Literal { + text: length.to_string().into(), + span: ctx.call_site, }) .into(), ); @@ -268,7 +281,13 @@ fn expand_subtree( } } - let c = match count(ctx, binding, 0, *depth) { + let res = if ctx.new_meta_vars { + count(ctx, binding, 0, depth.unwrap_or(0)) + } else { + count_old(ctx, binding, 0, *depth) + }; + + let c = match res { Ok(c) => c, Err(e) => { // XXX: It *might* make sense to emit a dummy integer value like `0` here. @@ -285,8 +304,7 @@ fn expand_subtree( arena.push( tt::Leaf::Literal(tt::Literal { text: c.to_string().into(), - // FIXME - span: S::DUMMY, + span: ctx.call_site, }) .into(), ); @@ -297,7 +315,7 @@ fn expand_subtree( let tts = arena.drain(start_elements..).collect(); ExpandResult { value: tt::Subtree { - delimiter: delimiter.unwrap_or_else(tt::Delimiter::dummy_invisible), + delimiter: delimiter.unwrap_or_else(|| tt::Delimiter::invisible_spanned(ctx.call_site)), token_trees: tts, }, err, @@ -330,7 +348,7 @@ fn expand_var( // ``` // We just treat it a normal tokens let tt = tt::Subtree { - delimiter: tt::Delimiter::DUMMY_INVISIBLE, + delimiter: tt::Delimiter::invisible_spanned(id), token_trees: vec![ tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id }) .into(), @@ -342,10 +360,8 @@ fn expand_var( } Err(e) => ExpandResult { value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree::empty(tt::DelimSpan { - // FIXME - open: S::DUMMY, - // FIXME - close: S::DUMMY, + open: ctx.call_site, + close: ctx.call_site, }))), err: Some(e), }, @@ -389,7 +405,7 @@ fn expand_repeat( return ExpandResult { value: Fragment::Tokens( tt::Subtree { - delimiter: tt::Delimiter::dummy_invisible(), + delimiter: tt::Delimiter::invisible_spanned(ctx.call_site), token_trees: vec![], } .into(), @@ -403,7 +419,7 @@ fn expand_repeat( continue; } - t.delimiter = tt::Delimiter::DUMMY_INVISIBLE; + t.delimiter.kind = tt::DelimiterKind::Invisible; push_subtree(&mut buf, t); if let Some(sep) = separator { @@ -437,7 +453,11 @@ fn expand_repeat( // Check if it is a single token subtree without any delimiter // e.g {Delimiter:None> ['>'] /Delimiter:None>} - let tt = tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: buf }.into(); + let tt = tt::Subtree { + delimiter: tt::Delimiter::invisible_spanned(ctx.call_site), + token_trees: buf, + } + .into(); if RepeatKind::OneOrMore == kind && counter == 0 { return ExpandResult { @@ -448,14 +468,19 @@ fn expand_repeat( ExpandResult { value: Fragment::Tokens(tt), err } } -fn push_fragment(buf: &mut Vec>, fragment: Fragment) { +fn push_fragment( + ctx: &ExpandCtx<'_, S>, + buf: &mut Vec>, + fragment: Fragment, +) { 
match fragment { Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt), Fragment::Expr(sub) => { push_subtree(buf, sub); } - Fragment::Path(tt) => fix_up_and_push_path_tt(buf, tt), + Fragment::Path(tt) => fix_up_and_push_path_tt(ctx, buf, tt), Fragment::Tokens(tt) => buf.push(tt), + Fragment::Empty => (), } } @@ -469,7 +494,11 @@ fn push_subtree(buf: &mut Vec>, tt: tt::Subtree) { /// Inserts the path separator `::` between an identifier and its following generic /// argument list, and then pushes into the buffer. See [`Fragment::Path`] for why /// we need this fixup. -fn fix_up_and_push_path_tt(buf: &mut Vec>, subtree: tt::Subtree) { +fn fix_up_and_push_path_tt( + ctx: &ExpandCtx<'_, S>, + buf: &mut Vec>, + subtree: tt::Subtree, +) { stdx::always!(matches!(subtree.delimiter.kind, tt::DelimiterKind::Invisible)); let mut prev_was_ident = false; // Note that we only need to fix up the top-level `TokenTree`s because the @@ -486,8 +515,7 @@ fn fix_up_and_push_path_tt(buf: &mut Vec>, subtree: tt tt::Leaf::Punct(tt::Punct { char: ':', spacing: tt::Spacing::Joint, - // FIXME - span: S::DUMMY, + span: ctx.call_site, }) .into(), ); @@ -495,8 +523,7 @@ fn fix_up_and_push_path_tt(buf: &mut Vec>, subtree: tt tt::Leaf::Punct(tt::Punct { char: ':', spacing: tt::Spacing::Alone, - // FIXME - span: S::DUMMY, + span: ctx.call_site, }) .into(), ); @@ -510,6 +537,25 @@ fn fix_up_and_push_path_tt(buf: &mut Vec>, subtree: tt /// Handles `${count(t, depth)}`. `our_depth` is the recursion depth and `count_depth` is the depth /// defined by the metavar expression. fn count( + ctx: &ExpandCtx<'_, S>, + binding: &Binding, + depth_curr: usize, + depth_max: usize, +) -> Result { + match binding { + Binding::Nested(bs) => { + if depth_curr == depth_max { + Ok(bs.len()) + } else { + bs.iter().map(|b| count(ctx, b, depth_curr + 1, depth_max)).sum() + } + } + Binding::Empty => Ok(0), + Binding::Fragment(_) | Binding::Missing(_) => Ok(1), + } +} + +fn count_old( ctx: &ExpandCtx<'_, S>, binding: &Binding, our_depth: usize, @@ -517,9 +563,9 @@ fn count( ) -> Result { match binding { Binding::Nested(bs) => match count_depth { - None => bs.iter().map(|b| count(ctx, b, our_depth + 1, None)).sum(), + None => bs.iter().map(|b| count_old(ctx, b, our_depth + 1, None)).sum(), Some(0) => Ok(bs.len()), - Some(d) => bs.iter().map(|b| count(ctx, b, our_depth + 1, Some(d - 1))).sum(), + Some(d) => bs.iter().map(|b| count_old(ctx, b, our_depth + 1, Some(d - 1))).sum(), }, Binding::Empty => Ok(0), Binding::Fragment(_) | Binding::Missing(_) => { diff --git a/src/tools/rust-analyzer/crates/mbe/src/lib.rs b/src/tools/rust-analyzer/crates/mbe/src/lib.rs index 9331798589fcc..2622d7eac10ee 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/lib.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/lib.rs @@ -16,7 +16,6 @@ mod to_parser_input; #[cfg(test)] mod benchmark; -mod token_map; use stdx::impl_from; use tt::Span; @@ -30,15 +29,12 @@ use crate::{ // FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces pub use ::parser::TopEntryPoint; -pub use tt::{Delimiter, DelimiterKind, Punct, SyntaxContext}; - -pub use crate::{ - syntax_bridge::{ - parse_exprs_with_sep, parse_to_token_tree, parse_to_token_tree_static_span, - syntax_node_to_token_tree, syntax_node_to_token_tree_modified, token_tree_to_syntax_node, - SpanMapper, - }, - token_map::SpanMap, +pub use tt::{Delimiter, DelimiterKind, Punct}; + +pub use crate::syntax_bridge::{ + parse_exprs_with_sep, parse_to_token_tree, parse_to_token_tree_static_span, + 
syntax_node_to_token_tree, syntax_node_to_token_tree_modified, token_tree_to_syntax_node, + SpanMapper, }; pub use crate::syntax_bridge::dummy_test_span_utils::*; @@ -151,7 +147,12 @@ impl DeclarativeMacro { } /// The old, `macro_rules! m {}` flavor. - pub fn parse_macro_rules(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro { + pub fn parse_macro_rules( + tt: &tt::Subtree, + is_2021: bool, + // FIXME: Remove this once we drop support for rust 1.76 (defaults to true then) + new_meta_vars: bool, + ) -> DeclarativeMacro { // Note: this parsing can be implemented using mbe machinery itself, by // matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing // manually seems easier. @@ -160,7 +161,7 @@ impl DeclarativeMacro { let mut err = None; while src.len() > 0 { - let rule = match Rule::parse(&mut src, true) { + let rule = match Rule::parse(&mut src, true, new_meta_vars) { Ok(it) => it, Err(e) => { err = Some(Box::new(e)); @@ -187,7 +188,12 @@ impl DeclarativeMacro { } /// The new, unstable `macro m {}` flavor. - pub fn parse_macro2(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro { + pub fn parse_macro2( + tt: &tt::Subtree, + is_2021: bool, + // FIXME: Remove this once we drop support for rust 1.76 (defaults to true then) + new_meta_vars: bool, + ) -> DeclarativeMacro { let mut src = TtIter::new(tt); let mut rules = Vec::new(); let mut err = None; @@ -195,7 +201,7 @@ impl DeclarativeMacro { if tt::DelimiterKind::Brace == tt.delimiter.kind { cov_mark::hit!(parse_macro_def_rules); while src.len() > 0 { - let rule = match Rule::parse(&mut src, true) { + let rule = match Rule::parse(&mut src, true, new_meta_vars) { Ok(it) => it, Err(e) => { err = Some(Box::new(e)); @@ -214,7 +220,7 @@ impl DeclarativeMacro { } } else { cov_mark::hit!(parse_macro_def_simple); - match Rule::parse(&mut src, false) { + match Rule::parse(&mut src, false, new_meta_vars) { Ok(rule) => { if src.len() != 0 { err = Some(Box::new(ParseError::expected("remaining tokens in macro def"))); @@ -245,13 +251,19 @@ impl DeclarativeMacro { &self, tt: &tt::Subtree, marker: impl Fn(&mut S) + Copy, + new_meta_vars: bool, + call_site: S, ) -> ExpandResult> { - expander::expand_rules(&self.rules, &tt, marker, self.is_2021) + expander::expand_rules(&self.rules, &tt, marker, self.is_2021, new_meta_vars, call_site) } } impl Rule { - fn parse(src: &mut TtIter<'_, S>, expect_arrow: bool) -> Result { + fn parse( + src: &mut TtIter<'_, S>, + expect_arrow: bool, + new_meta_vars: bool, + ) -> Result { let lhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?; if expect_arrow { src.expect_char('=').map_err(|()| ParseError::expected("expected `=`"))?; @@ -260,7 +272,7 @@ impl Rule { let rhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?; let lhs = MetaTemplate::parse_pattern(lhs)?; - let rhs = MetaTemplate::parse_template(rhs)?; + let rhs = MetaTemplate::parse_template(rhs, new_meta_vars)?; Ok(crate::Rule { lhs, rhs }) } diff --git a/src/tools/rust-analyzer/crates/mbe/src/parser.rs b/src/tools/rust-analyzer/crates/mbe/src/parser.rs index 00ba35377a427..afdbbef231476 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/parser.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/parser.rs @@ -25,23 +25,26 @@ pub(crate) struct MetaTemplate(pub(crate) Box<[Op]>); impl MetaTemplate { pub(crate) fn parse_pattern(pattern: &tt::Subtree) -> Result { - MetaTemplate::parse(pattern, Mode::Pattern) + MetaTemplate::parse(pattern, Mode::Pattern, false) } - pub(crate) fn 
parse_template(template: &tt::Subtree) -> Result { - MetaTemplate::parse(template, Mode::Template) + pub(crate) fn parse_template( + template: &tt::Subtree, + new_meta_vars: bool, + ) -> Result { + MetaTemplate::parse(template, Mode::Template, new_meta_vars) } pub(crate) fn iter(&self) -> impl Iterator> { self.0.iter() } - fn parse(tt: &tt::Subtree, mode: Mode) -> Result { + fn parse(tt: &tt::Subtree, mode: Mode, new_meta_vars: bool) -> Result { let mut src = TtIter::new(tt); let mut res = Vec::new(); while let Some(first) = src.peek_n(0) { - let op = next_op(first, &mut src, mode)?; + let op = next_op(first, &mut src, mode, new_meta_vars)?; res.push(op); } @@ -51,12 +54,35 @@ impl MetaTemplate { #[derive(Clone, Debug, PartialEq, Eq)] pub(crate) enum Op { - Var { name: SmolStr, kind: Option, id: S }, - Ignore { name: SmolStr, id: S }, - Index { depth: usize }, - Count { name: SmolStr, depth: Option }, - Repeat { tokens: MetaTemplate, kind: RepeatKind, separator: Option> }, - Subtree { tokens: MetaTemplate, delimiter: tt::Delimiter }, + Var { + name: SmolStr, + kind: Option, + id: S, + }, + Ignore { + name: SmolStr, + id: S, + }, + Index { + depth: usize, + }, + Length { + depth: usize, + }, + Count { + name: SmolStr, + // FIXME: `usize`` once we drop support for 1.76 + depth: Option, + }, + Repeat { + tokens: MetaTemplate, + kind: RepeatKind, + separator: Option>, + }, + Subtree { + tokens: MetaTemplate, + delimiter: tt::Delimiter, + }, Literal(tt::Literal), Punct(SmallVec<[tt::Punct; 3]>), Ident(tt::Ident), @@ -122,6 +148,7 @@ fn next_op( first_peeked: &tt::TokenTree, src: &mut TtIter<'_, S>, mode: Mode, + new_meta_vars: bool, ) -> Result, ParseError> { let res = match first_peeked { tt::TokenTree::Leaf(tt::Leaf::Punct(p @ tt::Punct { char: '$', .. })) => { @@ -135,14 +162,14 @@ fn next_op( tt::TokenTree::Subtree(subtree) => match subtree.delimiter.kind { tt::DelimiterKind::Parenthesis => { let (separator, kind) = parse_repeat(src)?; - let tokens = MetaTemplate::parse(subtree, mode)?; + let tokens = MetaTemplate::parse(subtree, mode, new_meta_vars)?; Op::Repeat { tokens, separator, kind } } tt::DelimiterKind::Brace => match mode { Mode::Template => { - parse_metavar_expr(&mut TtIter::new(subtree)).map_err(|()| { - ParseError::unexpected("invalid metavariable expression") - })? + parse_metavar_expr(new_meta_vars, &mut TtIter::new(subtree)).map_err( + |()| ParseError::unexpected("invalid metavariable expression"), + )? } Mode::Pattern => { return Err(ParseError::unexpected( @@ -206,7 +233,7 @@ fn next_op( tt::TokenTree::Subtree(subtree) => { src.next().expect("first token already peeked"); - let tokens = MetaTemplate::parse(subtree, mode)?; + let tokens = MetaTemplate::parse(subtree, mode, new_meta_vars)?; Op::Subtree { tokens, delimiter: subtree.delimiter } } }; @@ -287,7 +314,7 @@ fn parse_repeat( Err(ParseError::InvalidRepeat) } -fn parse_metavar_expr(src: &mut TtIter<'_, S>) -> Result, ()> { +fn parse_metavar_expr(new_meta_vars: bool, src: &mut TtIter<'_, S>) -> Result, ()> { let func = src.expect_ident()?; let args = src.expect_subtree()?; @@ -299,14 +326,19 @@ fn parse_metavar_expr(src: &mut TtIter<'_, S>) -> Result, ()> { let op = match &*func.text { "ignore" => { + if new_meta_vars { + args.expect_dollar()?; + } let ident = args.expect_ident()?; Op::Ignore { name: ident.text.clone(), id: ident.span } } "index" => Op::Index { depth: parse_depth(&mut args)? }, + "length" => Op::Length { depth: parse_depth(&mut args)? 
}, "count" => { + if new_meta_vars { + args.expect_dollar()?; + } let ident = args.expect_ident()?; - // `${count(t)}` and `${count(t,)}` have different meanings. Not sure if this is a bug - // but that's how it's implemented in rustc as of this writing. See rust-lang/rust#111904. let depth = if try_eat_comma(&mut args) { Some(parse_depth(&mut args)?) } else { None }; Op::Count { name: ident.text.clone(), depth } } diff --git a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs index b89bfd74a6e04..8fa04ab983f06 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs @@ -1,6 +1,7 @@ //! Conversions between [`SyntaxNode`] and [`tt::TokenTree`]. use rustc_hash::{FxHashMap, FxHashSet}; +use span::{SpanAnchor, SpanData, SpanMap}; use stdx::{never, non_empty_vec::NonEmptyVec}; use syntax::{ ast::{self, make::tokens::doc_comment}, @@ -10,10 +11,10 @@ use syntax::{ }; use tt::{ buffer::{Cursor, TokenBuffer}, - Span, SpanData, SyntaxContext, + Span, }; -use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, SpanMap}; +use crate::{to_parser_input::to_parser_input, tt_iter::TtIter}; #[cfg(test)] mod tests; @@ -36,66 +37,70 @@ impl> SpanMapper for &SM { /// Dummy things for testing where spans don't matter. pub(crate) mod dummy_test_span_utils { + use super::*; - pub type DummyTestSpanData = tt::SpanData; - pub const DUMMY: DummyTestSpanData = DummyTestSpanData::DUMMY; + pub type DummyTestSpanData = span::SpanData; + pub const DUMMY: DummyTestSpanData = span::SpanData { + range: TextRange::empty(TextSize::new(0)), + anchor: span::SpanAnchor { + file_id: span::FileId::BOGUS, + ast_id: span::ROOT_ERASED_FILE_AST_ID, + }, + ctx: DummyTestSyntaxContext, + }; - #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] - pub struct DummyTestSpanAnchor; - impl tt::SpanAnchor for DummyTestSpanAnchor { - const DUMMY: Self = DummyTestSpanAnchor; - } #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub struct DummyTestSyntaxContext; - impl SyntaxContext for DummyTestSyntaxContext { - const DUMMY: Self = DummyTestSyntaxContext; - } pub struct DummyTestSpanMap; - impl SpanMapper> for DummyTestSpanMap { - fn span_for( - &self, - range: syntax::TextRange, - ) -> tt::SpanData { - tt::SpanData { range, anchor: DummyTestSpanAnchor, ctx: DummyTestSyntaxContext } + impl SpanMapper> for DummyTestSpanMap { + fn span_for(&self, range: syntax::TextRange) -> span::SpanData { + span::SpanData { + range, + anchor: span::SpanAnchor { + file_id: span::FileId::BOGUS, + ast_id: span::ROOT_ERASED_FILE_AST_ID, + }, + ctx: DummyTestSyntaxContext, + } } } } /// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the /// subtree's spans. -pub fn syntax_node_to_token_tree( +pub fn syntax_node_to_token_tree( node: &SyntaxNode, map: SpanMap, -) -> tt::Subtree> + span: SpanData, +) -> tt::Subtree> where - SpanData: Span, - Anchor: Copy, - Ctx: SyntaxContext, - SpanMap: SpanMapper>, + SpanData: Span, + Ctx: Copy, + SpanMap: SpanMapper>, { - let mut c = Converter::new(node, map, Default::default(), Default::default()); + let mut c = Converter::new(node, map, Default::default(), Default::default(), span); convert_tokens(&mut c) } /// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the /// subtree's spans. Additionally using the append and remove parameters, the additional tokens can /// be injected or hidden from the output. 
-pub fn syntax_node_to_token_tree_modified( +pub fn syntax_node_to_token_tree_modified( node: &SyntaxNode, map: SpanMap, - append: FxHashMap>>>, + append: FxHashMap>>>, remove: FxHashSet, -) -> tt::Subtree> + call_site: SpanData, +) -> tt::Subtree> where - SpanMap: SpanMapper>, - SpanData: Span, - Anchor: Copy, - Ctx: SyntaxContext, + SpanMap: SpanMapper>, + SpanData: Span, + Ctx: Copy, { - let mut c = Converter::new(node, map, append, remove); + let mut c = Converter::new(node, map, append, remove, call_site); convert_tokens(&mut c) } @@ -113,14 +118,13 @@ where /// Converts a [`tt::Subtree`] back to a [`SyntaxNode`]. /// The produced `SpanMap` contains a mapping from the syntax nodes offsets to the subtree's spans. -pub fn token_tree_to_syntax_node( - tt: &tt::Subtree>, +pub fn token_tree_to_syntax_node( + tt: &tt::Subtree>, entry_point: parser::TopEntryPoint, -) -> (Parse, SpanMap>) +) -> (Parse, SpanMap>) where - SpanData: Span, - Anchor: Copy, - Ctx: SyntaxContext, + SpanData: Span, + Ctx: Copy, { let buffer = match tt { tt::Subtree { @@ -150,21 +154,20 @@ where /// Convert a string to a `TokenTree`. The spans of the subtree will be anchored to the provided /// anchor with the given context. -pub fn parse_to_token_tree( - anchor: Anchor, +pub fn parse_to_token_tree( + anchor: SpanAnchor, ctx: Ctx, text: &str, -) -> Option>> +) -> Option>> where - SpanData: Span, - Anchor: Copy, - Ctx: SyntaxContext, + SpanData: Span, + Ctx: Copy, { let lexed = parser::LexedStr::new(text); if lexed.errors().next().is_some() { return None; } - let mut conv = RawConverter { lexed, pos: 0, anchor, ctx }; + let mut conv = RawConverter { lexed, anchor, pos: 0, ctx }; Some(convert_tokens(&mut conv)) } @@ -182,7 +185,11 @@ where } /// Split token tree with separate expr: $($e:expr)SEP* -pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec> { +pub fn parse_exprs_with_sep( + tt: &tt::Subtree, + sep: char, + span: S, +) -> Vec> { if tt.token_trees.is_empty() { return Vec::new(); } @@ -195,7 +202,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec break, - Some(tt) => tt.subtree_or_wrap(), + Some(tt) => tt.subtree_or_wrap(tt::DelimSpan { open: span, close: span }), }); let mut fork = iter.clone(); @@ -207,7 +214,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec, S: Span, { - let entry = tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] }; + let entry = tt::Subtree { + delimiter: tt::Delimiter::invisible_spanned(conv.call_site()), + token_trees: vec![], + }; let mut stack = NonEmptyVec::new(entry); while let Some((token, abs_range)) = conv.bump() { @@ -401,9 +411,20 @@ fn doc_comment_text(comment: &ast::Comment) -> SmolStr { text = &text[0..text.len() - 2]; } - // Quote the string + let mut num_of_hashes = 0; + let mut count = 0; + for ch in text.chars() { + count = match ch { + '"' => 1, + '#' if count > 0 => count + 1, + _ => 0, + }; + num_of_hashes = num_of_hashes.max(count); + } + + // Quote raw string with delimiters // Note that `tt::Literal` expect an escaped string - let text = format!("\"{}\"", text.escape_debug()); + let text = format!("r{delim}\"{text}\"{delim}", delim = "#".repeat(num_of_hashes)); text.into() } @@ -450,10 +471,10 @@ fn convert_doc_comment( } /// A raw token (straight from lexer) converter -struct RawConverter<'a, Anchor, Ctx> { +struct RawConverter<'a, Ctx> { lexed: parser::LexedStr<'a>, pos: usize, - anchor: Anchor, + anchor: SpanAnchor, ctx: Ctx, } /// A raw token (straight from lexer) converter that gives 
every token the same span. @@ -485,18 +506,20 @@ trait TokenConverter: Sized { fn peek(&self) -> Option; fn span_for(&self, range: TextRange) -> S; + + fn call_site(&self) -> S; } -impl SrcToken, S> for usize { - fn kind(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> SyntaxKind { +impl SrcToken, S> for usize { + fn kind(&self, ctx: &RawConverter<'_, Ctx>) -> SyntaxKind { ctx.lexed.kind(*self) } - fn to_char(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> Option { + fn to_char(&self, ctx: &RawConverter<'_, Ctx>) -> Option { ctx.lexed.text(*self).chars().next() } - fn to_text(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> SmolStr { + fn to_text(&self, ctx: &RawConverter<'_, Ctx>) -> SmolStr { ctx.lexed.text(*self).into() } } @@ -515,18 +538,17 @@ impl SrcToken, S> for usize { } } -impl TokenConverter> - for RawConverter<'_, Anchor, Ctx> +impl TokenConverter> for RawConverter<'_, Ctx> where - SpanData: Span, + SpanData: Span, { type Token = usize; fn convert_doc_comment( &self, &token: &usize, - span: SpanData, - ) -> Option>>> { + span: SpanData, + ) -> Option>>> { let text = self.lexed.text(token); convert_doc_comment(&doc_comment(text), span) } @@ -550,9 +572,13 @@ where Some(self.pos) } - fn span_for(&self, range: TextRange) -> SpanData { + fn span_for(&self, range: TextRange) -> SpanData { SpanData { range, anchor: self.anchor, ctx: self.ctx } } + + fn call_site(&self) -> SpanData { + SpanData { range: TextRange::empty(0.into()), anchor: self.anchor, ctx: self.ctx } + } } impl TokenConverter for StaticRawConverter<'_, S> @@ -588,6 +614,10 @@ where fn span_for(&self, _: TextRange) -> S { self.span } + + fn call_site(&self) -> S { + self.span + } } struct Converter { @@ -600,6 +630,7 @@ struct Converter { map: SpanMap, append: FxHashMap>>, remove: FxHashSet, + call_site: S, } impl Converter { @@ -608,6 +639,7 @@ impl Converter { map: SpanMap, append: FxHashMap>>, remove: FxHashSet, + call_site: S, ) -> Self { let mut this = Converter { current: None, @@ -617,6 +649,7 @@ impl Converter { map, append, remove, + call_site, current_leafs: vec![], }; let first = this.next_token(); @@ -776,24 +809,27 @@ where fn span_for(&self, range: TextRange) -> S { self.map.span_for(range) } + fn call_site(&self) -> S { + self.call_site + } } -struct TtTreeSink<'a, Anchor, Ctx> +struct TtTreeSink<'a, Ctx> where - SpanData: Span, + SpanData: Span, { buf: String, - cursor: Cursor<'a, SpanData>, + cursor: Cursor<'a, SpanData>, text_pos: TextSize, inner: SyntaxTreeBuilder, - token_map: SpanMap>, + token_map: SpanMap>, } -impl<'a, Anchor, Ctx> TtTreeSink<'a, Anchor, Ctx> +impl<'a, Ctx> TtTreeSink<'a, Ctx> where - SpanData: Span, + SpanData: Span, { - fn new(cursor: Cursor<'a, SpanData>) -> Self { + fn new(cursor: Cursor<'a, SpanData>) -> Self { TtTreeSink { buf: String::new(), cursor, @@ -803,7 +839,7 @@ where } } - fn finish(mut self) -> (Parse, SpanMap>) { + fn finish(mut self) -> (Parse, SpanMap>) { self.token_map.finish(); (self.inner.finish(), self.token_map) } @@ -821,9 +857,9 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> { Some(&texts[idx..texts.len() - (1 - idx)]) } -impl TtTreeSink<'_, Anchor, Ctx> +impl TtTreeSink<'_, Ctx> where - SpanData: Span, + SpanData: Span, { /// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween. /// This occurs when a float literal is used as a field access. 
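
Aside on the doc-comment change in syntax_bridge.rs above: instead of escaping the comment text with `escape_debug`, the text is now emitted as a raw string literal, and the added loop counts how many `#` characters the delimiter needs so that no `"`-plus-`#` run inside the text can terminate the literal early. Below is a minimal, self-contained sketch of that counting logic; `raw_quote` and the values in `main` are illustrative names chosen here, not part of the patch.

```rust
// Sketch of the raw-string quoting used for doc-comment text in the hunk above.
// `raw_quote` is a hypothetical helper name; the counting loop mirrors the patch.
fn raw_quote(text: &str) -> String {
    // Find the longest `"` followed by a run of `#` inside the text; the raw
    // string delimiter needs strictly more `#` than that run to stay unambiguous.
    let mut num_of_hashes = 0;
    let mut count = 0;
    for ch in text.chars() {
        count = match ch {
            '"' => 1,
            '#' if count > 0 => count + 1,
            _ => 0,
        };
        num_of_hashes = num_of_hashes.max(count);
    }
    let delim = "#".repeat(num_of_hashes);
    format!("r{delim}\"{text}\"{delim}")
}

fn main() {
    // No quotes in the text: a plain `r"..."` literal suffices.
    assert_eq!(raw_quote("plain"), "r\"plain\"");
    // The text contains `"#`, so the delimiter is bumped to two hashes.
    assert_eq!(raw_quote("say \"#hi"), "r##\"say \"#hi\"##");
}
```
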
diff --git a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs index bd8187a148a5e..e5569138dbf2f 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs @@ -7,11 +7,11 @@ use tt::{ Leaf, Punct, Spacing, }; -use crate::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap}; +use crate::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap, DUMMY}; fn check_punct_spacing(fixture: &str) { let source_file = ast::SourceFile::parse(fixture).ok().unwrap(); - let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap); + let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap, DUMMY); let mut annotations: HashMap<_, _> = extract_annotations(fixture) .into_iter() .map(|(range, annotation)| { diff --git a/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs b/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs index 40e8a2385f461..71513ef439175 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs @@ -51,6 +51,13 @@ impl<'a, S: Span> TtIter<'a, S> { } } + pub(crate) fn expect_dollar(&mut self) -> Result<(), ()> { + match self.expect_leaf()? { + tt::Leaf::Punct(tt::Punct { char: '$', .. }) => Ok(()), + _ => Err(()), + } + } + pub(crate) fn expect_ident(&mut self) -> Result<&'a tt::Ident, ()> { match self.expect_leaf()? { tt::Leaf::Ident(it) if it.text != "_" => Ok(it), @@ -169,10 +176,10 @@ impl<'a, S: Span> TtIter<'a, S> { } self.inner = self.inner.as_slice()[res.len()..].iter(); - let res = match res.len() { - 0 | 1 => res.pop(), - _ => Some(tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter::DUMMY_INVISIBLE, + let res = match &*res { + [] | [_] => res.pop(), + [first, ..] => Some(tt::TokenTree::Subtree(tt::Subtree { + delimiter: tt::Delimiter::invisible_spanned(first.first_span()), token_trees: res, })), }; diff --git a/src/tools/rust-analyzer/crates/parser/Cargo.toml b/src/tools/rust-analyzer/crates/parser/Cargo.toml index efb326323f915..0c63484634be9 100644 --- a/src/tools/rust-analyzer/crates/parser/Cargo.toml +++ b/src/tools/rust-analyzer/crates/parser/Cargo.toml @@ -25,3 +25,6 @@ sourcegen.workspace = true [features] in-rust-tree = ["rustc-dependencies/in-rust-tree"] + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/paths/Cargo.toml b/src/tools/rust-analyzer/crates/paths/Cargo.toml index 28b54be5212f4..3d8752b5a829d 100644 --- a/src/tools/rust-analyzer/crates/paths/Cargo.toml +++ b/src/tools/rust-analyzer/crates/paths/Cargo.toml @@ -16,3 +16,6 @@ doctest = false # serde-derive crate. Even though we don't activate the derive feature here, # someone else in the crate graph certainly does! 
# serde.workspace = true + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml index 2cbbc9489a294..49a0979f4f5c6 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml @@ -33,7 +33,11 @@ tt.workspace = true stdx.workspace = true profile.workspace = true text-size.workspace = true +span.workspace = true # Ideally this crate would not depend on salsa things, but we need span information here which wraps # InternIds for the syntax context base-db.workspace = true la-arena.workspace = true + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs index f697ecd3518f1..a87becd63e288 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs @@ -11,16 +11,19 @@ pub mod msg; mod process; mod version; -use base_db::span::SpanData; use indexmap::IndexSet; use paths::AbsPathBuf; +use span::Span; use std::{fmt, io, sync::Mutex}; use triomphe::Arc; use serde::{Deserialize, Serialize}; use crate::{ - msg::{ExpandMacro, ExpnGlobals, FlatTree, PanicMessage, HAS_GLOBAL_SPANS}, + msg::{ + deserialize_span_data_index_map, flat::serialize_span_data_index_map, ExpandMacro, + ExpnGlobals, FlatTree, PanicMessage, HAS_GLOBAL_SPANS, RUST_ANALYZER_SPAN_SUPPORT, + }, process::ProcMacroProcessSrv, }; @@ -136,13 +139,13 @@ impl ProcMacro { pub fn expand( &self, - subtree: &tt::Subtree, - attr: Option<&tt::Subtree>, + subtree: &tt::Subtree, + attr: Option<&tt::Subtree>, env: Vec<(String, String)>, - def_site: SpanData, - call_site: SpanData, - mixed_site: SpanData, - ) -> Result, PanicMessage>, ServerError> { + def_site: Span, + call_site: Span, + mixed_site: Span, + ) -> Result, PanicMessage>, ServerError> { let version = self.process.lock().unwrap_or_else(|e| e.into_inner()).version(); let current_dir = env .iter() @@ -166,6 +169,11 @@ impl ProcMacro { call_site, mixed_site, }, + span_data_table: if version >= RUST_ANALYZER_SPAN_SUPPORT { + serialize_span_data_index_map(&span_data_table) + } else { + Vec::new() + }, }; let response = self @@ -178,9 +186,14 @@ impl ProcMacro { msg::Response::ExpandMacro(it) => { Ok(it.map(|tree| FlatTree::to_subtree_resolved(tree, version, &span_data_table))) } - msg::Response::ListMacros(..) | msg::Response::ApiVersionCheck(..) 
=> { - Err(ServerError { message: "unexpected response".to_string(), io: None }) - } + msg::Response::ExpandMacroExtended(it) => Ok(it.map(|resp| { + FlatTree::to_subtree_resolved( + resp.tree, + version, + &deserialize_span_data_index_map(&resp.span_data_table), + ) + })), + _ => Err(ServerError { message: "unexpected response".to_string(), io: None }), } } } diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs index 1d3e45aff385e..557ddba5c78fe 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs @@ -10,28 +10,63 @@ use serde::{de::DeserializeOwned, Deserialize, Serialize}; use crate::ProcMacroKind; -pub use crate::msg::flat::{FlatTree, TokenId}; +pub use crate::msg::flat::{ + deserialize_span_data_index_map, serialize_span_data_index_map, FlatTree, SpanDataIndexMap, + TokenId, +}; // The versions of the server protocol pub const NO_VERSION_CHECK_VERSION: u32 = 0; pub const VERSION_CHECK_VERSION: u32 = 1; pub const ENCODE_CLOSE_SPAN_VERSION: u32 = 2; pub const HAS_GLOBAL_SPANS: u32 = 3; +pub const RUST_ANALYZER_SPAN_SUPPORT: u32 = 4; -pub const CURRENT_API_VERSION: u32 = HAS_GLOBAL_SPANS; +pub const CURRENT_API_VERSION: u32 = RUST_ANALYZER_SPAN_SUPPORT; #[derive(Debug, Serialize, Deserialize)] pub enum Request { + /// Since [`NO_VERSION_CHECK_VERSION`] ListMacros { dylib_path: PathBuf }, + /// Since [`NO_VERSION_CHECK_VERSION`] ExpandMacro(ExpandMacro), + /// Since [`VERSION_CHECK_VERSION`] ApiVersionCheck {}, + /// Since [`RUST_ANALYZER_SPAN_SUPPORT`] + SetConfig(ServerConfig), +} + +#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize)] +pub enum SpanMode { + #[default] + Id, + RustAnalyzer, } #[derive(Debug, Serialize, Deserialize)] pub enum Response { + /// Since [`NO_VERSION_CHECK_VERSION`] ListMacros(Result, String>), + /// Since [`NO_VERSION_CHECK_VERSION`] ExpandMacro(Result), + /// Since [`NO_VERSION_CHECK_VERSION`] ApiVersionCheck(u32), + /// Since [`RUST_ANALYZER_SPAN_SUPPORT`] + SetConfig(ServerConfig), + /// Since [`RUST_ANALYZER_SPAN_SUPPORT`] + ExpandMacroExtended(Result), +} + +#[derive(Debug, Serialize, Deserialize, Default)] +#[serde(default)] +pub struct ServerConfig { + pub span_mode: SpanMode, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct ExpandMacroExtended { + pub tree: FlatTree, + pub span_data_table: Vec, } #[derive(Debug, Serialize, Deserialize)] @@ -64,9 +99,12 @@ pub struct ExpandMacro { #[serde(skip_serializing_if = "ExpnGlobals::skip_serializing_if")] #[serde(default)] pub has_global_spans: ExpnGlobals, + #[serde(skip_serializing_if = "Vec::is_empty")] + #[serde(default)] + pub span_data_table: Vec, } -#[derive(Default, Debug, Serialize, Deserialize)] +#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize)] pub struct ExpnGlobals { #[serde(skip_serializing)] #[serde(default)] @@ -136,29 +174,27 @@ fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> { #[cfg(test)] mod tests { - use base_db::{ - span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId}, - FileId, - }; + use base_db::FileId; use la_arena::RawIdx; + use span::{ErasedFileAstId, Span, SpanAnchor, SyntaxContextId}; use text_size::{TextRange, TextSize}; use tt::{Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, Subtree, TokenTree}; use super::*; - fn fixture_token_tree() -> Subtree { + fn fixture_token_tree() -> Subtree { let anchor = SpanAnchor { file_id: FileId::from_raw(0), ast_id: 
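// Editor's note (illustrative sketch, not part of the patch): the serde attributes on the new
// `span_data_table` field of `ExpandMacro` above keep the message wire-compatible — the field is
// omitted when empty and defaulted when absent, so peers speaking an older protocol version still
// round-trip cleanly. `Msg` is a hypothetical stand-in; serde and serde_json are assumed deps.
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug)]
struct Msg {
    macro_name: String,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    #[serde(default)]
    span_data_table: Vec<u32>,
}

fn main() {
    // A message from a peer that has never heard of the field still deserializes,
    // with the table falling back to an empty Vec.
    let old = r#"{"macro_name":"derive_foo"}"#;
    let msg: Msg = serde_json::from_str(old).unwrap();
    assert!(msg.span_data_table.is_empty());

    // Serializing with an empty table reproduces the old shape on the wire.
    assert_eq!(serde_json::to_string(&msg).unwrap(), old);
}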
ErasedFileAstId::from_raw(RawIdx::from(0)), }; let mut subtree = Subtree { delimiter: Delimiter { - open: SpanData { + open: Span { range: TextRange::empty(TextSize::new(0)), anchor, ctx: SyntaxContextId::ROOT, }, - close: SpanData { + close: Span { range: TextRange::empty(TextSize::new(13)), anchor, ctx: SyntaxContextId::ROOT, @@ -170,7 +206,7 @@ mod tests { subtree.token_trees.push(TokenTree::Leaf( Ident { text: "struct".into(), - span: SpanData { + span: Span { range: TextRange::at(TextSize::new(0), TextSize::of("struct")), anchor, ctx: SyntaxContextId::ROOT, @@ -181,7 +217,7 @@ mod tests { subtree.token_trees.push(TokenTree::Leaf( Ident { text: "Foo".into(), - span: SpanData { + span: Span { range: TextRange::at(TextSize::new(5), TextSize::of("Foo")), anchor, ctx: SyntaxContextId::ROOT, @@ -192,7 +228,7 @@ mod tests { subtree.token_trees.push(TokenTree::Leaf(Leaf::Literal(Literal { text: "Foo".into(), - span: SpanData { + span: Span { range: TextRange::at(TextSize::new(8), TextSize::of("Foo")), anchor, ctx: SyntaxContextId::ROOT, @@ -200,7 +236,7 @@ mod tests { }))); subtree.token_trees.push(TokenTree::Leaf(Leaf::Punct(Punct { char: '@', - span: SpanData { + span: Span { range: TextRange::at(TextSize::new(11), TextSize::of('@')), anchor, ctx: SyntaxContextId::ROOT, @@ -209,12 +245,12 @@ mod tests { }))); subtree.token_trees.push(TokenTree::Subtree(Subtree { delimiter: Delimiter { - open: SpanData { + open: Span { range: TextRange::at(TextSize::new(12), TextSize::of('{')), anchor, ctx: SyntaxContextId::ROOT, }, - close: SpanData { + close: Span { range: TextRange::at(TextSize::new(13), TextSize::of('}')), anchor, ctx: SyntaxContextId::ROOT, @@ -243,6 +279,7 @@ mod tests { call_site: 0, mixed_site: 0, }, + span_data_table: Vec::new(), }; let json = serde_json::to_string(&task).unwrap(); diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs index 5835718628e55..8dfaba52625d0 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs @@ -37,13 +37,46 @@ use std::collections::{HashMap, VecDeque}; -use base_db::span::SpanData; use indexmap::IndexSet; +use la_arena::RawIdx; use serde::{Deserialize, Serialize}; +use span::{ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContextId}; +use text_size::TextRange; use crate::msg::ENCODE_CLOSE_SPAN_VERSION; -type SpanDataIndexMap = IndexSet; +pub type SpanDataIndexMap = IndexSet; + +pub fn serialize_span_data_index_map(map: &SpanDataIndexMap) -> Vec { + map.iter() + .flat_map(|span| { + [ + span.anchor.file_id.index(), + span.anchor.ast_id.into_raw().into_u32(), + span.range.start().into(), + span.range.end().into(), + span.ctx.into_u32(), + ] + }) + .collect() +} + +pub fn deserialize_span_data_index_map(map: &[u32]) -> SpanDataIndexMap { + debug_assert!(map.len() % 5 == 0); + map.chunks_exact(5) + .map(|span| { + let &[file_id, ast_id, start, end, e] = span else { unreachable!() }; + Span { + anchor: SpanAnchor { + file_id: FileId::from_raw(file_id), + ast_id: ErasedFileAstId::from_raw(RawIdx::from_u32(ast_id)), + }, + range: TextRange::new(start.into(), end.into()), + ctx: SyntaxContextId::from_u32(e), + } + }) + .collect() +} #[derive(Clone, Copy, PartialEq, Eq, Hash)] pub struct TokenId(pub u32); @@ -54,9 +87,7 @@ impl std::fmt::Debug for TokenId { } } -impl tt::Span for TokenId { - const DUMMY: Self = TokenId(!0); -} +impl tt::Span for TokenId {} #[derive(Serialize, 
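// Editor's note (minimal sketch, not part of the patch): the wire layout used by
// `serialize_span_data_index_map`/`deserialize_span_data_index_map` above — each span is
// flattened to five u32 words: file id, erased AST id, range start, range end, syntax context.
// `DemoSpan` is a hypothetical stand-in for rust-analyzer's `Span` so the sketch is self-contained.
#[derive(Clone, Copy, PartialEq, Debug)]
struct DemoSpan {
    file_id: u32,
    ast_id: u32,
    start: u32,
    end: u32,
    ctx: u32,
}

fn encode(spans: &[DemoSpan]) -> Vec<u32> {
    spans.iter().flat_map(|s| [s.file_id, s.ast_id, s.start, s.end, s.ctx]).collect()
}

fn decode(words: &[u32]) -> Vec<DemoSpan> {
    debug_assert!(words.len() % 5 == 0);
    words
        .chunks_exact(5)
        .map(|w| DemoSpan { file_id: w[0], ast_id: w[1], start: w[2], end: w[3], ctx: w[4] })
        .collect()
}

fn main() {
    let spans = vec![DemoSpan { file_id: 0, ast_id: 1, start: 0, end: 6, ctx: 0 }];
    assert_eq!(decode(&encode(&spans)), spans);
}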
Deserialize, Debug)] pub struct FlatTree { @@ -93,7 +124,7 @@ struct IdentRepr { impl FlatTree { pub fn new( - subtree: &tt::Subtree, + subtree: &tt::Subtree, version: u32, span_data_table: &mut SpanDataIndexMap, ) -> FlatTree { @@ -158,7 +189,7 @@ impl FlatTree { self, version: u32, span_data_table: &SpanDataIndexMap, - ) -> tt::Subtree { + ) -> tt::Subtree { Reader { subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { read_vec(self.subtree, SubtreeRepr::read_with_close_span) @@ -281,13 +312,13 @@ impl IdentRepr { } } -trait Span: Copy { +trait InternableSpan: Copy { type Table; fn token_id_of(table: &mut Self::Table, s: Self) -> TokenId; fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self; } -impl Span for TokenId { +impl InternableSpan for TokenId { type Table = (); fn token_id_of((): &mut Self::Table, token_id: Self) -> TokenId { token_id @@ -297,8 +328,8 @@ impl Span for TokenId { id } } -impl Span for SpanData { - type Table = IndexSet; +impl InternableSpan for Span { + type Table = IndexSet; fn token_id_of(table: &mut Self::Table, span: Self) -> TokenId { TokenId(table.insert_full(span).0 as u32) } @@ -307,7 +338,7 @@ impl Span for SpanData { } } -struct Writer<'a, 'span, S: Span> { +struct Writer<'a, 'span, S: InternableSpan> { work: VecDeque<(usize, &'a tt::Subtree)>, string_table: HashMap<&'a str, u32>, span_data_table: &'span mut S::Table, @@ -320,7 +351,7 @@ struct Writer<'a, 'span, S: Span> { text: Vec, } -impl<'a, 'span, S: Span> Writer<'a, 'span, S> { +impl<'a, 'span, S: InternableSpan> Writer<'a, 'span, S> { fn write(&mut self, root: &'a tt::Subtree) { self.enqueue(root); while let Some((idx, subtree)) = self.work.pop_front() { @@ -393,7 +424,7 @@ impl<'a, 'span, S: Span> Writer<'a, 'span, S> { } } -struct Reader<'span, S: Span> { +struct Reader<'span, S: InternableSpan> { subtree: Vec, literal: Vec, punct: Vec, @@ -403,7 +434,7 @@ struct Reader<'span, S: Span> { span_data_table: &'span S::Table, } -impl<'span, S: Span> Reader<'span, S> { +impl<'span, S: InternableSpan> Reader<'span, S> { pub(crate) fn read(self) -> tt::Subtree { let mut res: Vec>> = vec![None; self.subtree.len()]; let read_span = |id| S::span_for_token_id(self.span_data_table, id); diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs index 9a20fa63ed700..3494164c0675c 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs @@ -9,7 +9,7 @@ use paths::{AbsPath, AbsPathBuf}; use stdx::JodChild; use crate::{ - msg::{Message, Request, Response, CURRENT_API_VERSION}, + msg::{Message, Request, Response, SpanMode, CURRENT_API_VERSION, RUST_ANALYZER_SPAN_SUPPORT}, ProcMacroKind, ServerError, }; @@ -19,6 +19,7 @@ pub(crate) struct ProcMacroProcessSrv { stdin: ChildStdin, stdout: BufReader, version: u32, + mode: SpanMode, } impl ProcMacroProcessSrv { @@ -27,7 +28,13 @@ impl ProcMacroProcessSrv { let mut process = Process::run(process_path.clone(), null_stderr)?; let (stdin, stdout) = process.stdio().expect("couldn't access child stdio"); - io::Result::Ok(ProcMacroProcessSrv { _process: process, stdin, stdout, version: 0 }) + io::Result::Ok(ProcMacroProcessSrv { + _process: process, + stdin, + stdout, + version: 0, + mode: SpanMode::Id, + }) }; let mut srv = create_srv(true)?; tracing::info!("sending version check"); @@ -43,6 +50,11 @@ impl ProcMacroProcessSrv { tracing::info!("got version {v}"); srv = create_srv(false)?; srv.version = v; + 
if srv.version > RUST_ANALYZER_SPAN_SUPPORT { + if let Ok(mode) = srv.enable_rust_analyzer_spans() { + srv.mode = mode; + } + } Ok(srv) } Err(e) => { @@ -62,9 +74,19 @@ impl ProcMacroProcessSrv { match response { Response::ApiVersionCheck(version) => Ok(version), - Response::ExpandMacro { .. } | Response::ListMacros { .. } => { - Err(ServerError { message: "unexpected response".to_string(), io: None }) - } + _ => Err(ServerError { message: "unexpected response".to_string(), io: None }), + } + } + + fn enable_rust_analyzer_spans(&mut self) -> Result { + let request = Request::SetConfig(crate::msg::ServerConfig { + span_mode: crate::msg::SpanMode::RustAnalyzer, + }); + let response = self.send_task(request)?; + + match response { + Response::SetConfig(crate::msg::ServerConfig { span_mode }) => Ok(span_mode), + _ => Err(ServerError { message: "unexpected response".to_string(), io: None }), } } @@ -78,9 +100,7 @@ impl ProcMacroProcessSrv { match response { Response::ListMacros(it) => Ok(it), - Response::ExpandMacro { .. } | Response::ApiVersionCheck { .. } => { - Err(ServerError { message: "unexpected response".to_string(), io: None }) - } + _ => Err(ServerError { message: "unexpected response".to_string(), io: None }), } } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml index 8f03c6ec7b572..a559ba0175565 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml @@ -14,7 +14,12 @@ proc-macro-api.workspace = true [features] sysroot-abi = ["proc-macro-srv/sysroot-abi"] +in-rust-tree = ["proc-macro-srv/in-rust-tree", "sysroot-abi"] + [[bin]] name = "rust-analyzer-proc-macro-srv" path = "src/main.rs" + +[lints] +workspace = true diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs index 50ce586fc429b..87f7555b02ce4 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs @@ -1,5 +1,9 @@ //! A standalone binary for `proc-macro-srv`. //! Driver for proc macro server +#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] +#[cfg(feature = "in-rust-tree")] +extern crate rustc_driver as _; + use std::io; fn main() -> std::io::Result<()> { @@ -39,10 +43,22 @@ fn run() -> io::Result<()> { msg::Request::ListMacros { dylib_path } => { msg::Response::ListMacros(srv.list_macros(&dylib_path)) } - msg::Request::ExpandMacro(task) => msg::Response::ExpandMacro(srv.expand(task)), + msg::Request::ExpandMacro(task) => match srv.span_mode() { + msg::SpanMode::Id => msg::Response::ExpandMacro(srv.expand(task).map(|(it, _)| it)), + msg::SpanMode::RustAnalyzer => msg::Response::ExpandMacroExtended( + srv.expand(task).map(|(tree, span_data_table)| msg::ExpandMacroExtended { + tree, + span_data_table, + }), + ), + }, msg::Request::ApiVersionCheck {} => { msg::Response::ApiVersionCheck(proc_macro_api::msg::CURRENT_API_VERSION) } + msg::Request::SetConfig(config) => { + srv.set_span_mode(config.span_mode); + msg::Response::SetConfig(config) + } }; write_response(res)? 
} diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml index 99993f16e2767..9c4375559c11b 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml @@ -26,13 +26,19 @@ stdx.workspace = true tt.workspace = true mbe.workspace = true paths.workspace = true +base-db.workspace = true +span.workspace = true proc-macro-api.workspace = true [dev-dependencies] expect-test = "1.4.0" # used as proc macro test targets -proc-macro-test.workspace = true +proc-macro-test.path = "./proc-macro-test" [features] -sysroot-abi = [] +sysroot-abi = ["proc-macro-test/sysroot-abi"] +in-rust-tree = ["mbe/in-rust-tree", "sysroot-abi"] + +[lints] +workspace = true diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/Cargo.toml new file mode 100644 index 0000000000000..55be6bc23bbba --- /dev/null +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "proc-macro-test" +version = "0.0.0" +publish = false + +edition = "2021" +license = "MIT OR Apache-2.0" + +[lib] +doctest = false + +[build-dependencies] +cargo_metadata = "0.18.1" + +# local deps +toolchain = { path = "../../toolchain", version = "0.0.0" } + +[features] +sysroot-abi = [] diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/build.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/build.rs similarity index 97% rename from src/tools/rust-analyzer/crates/proc-macro-test/build.rs rename to src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/build.rs index 7827157865a99..7299147686df7 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-test/build.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/build.rs @@ -70,6 +70,9 @@ fn main() { // instance to use the same target directory. 
.arg("--target-dir") .arg(&target_dir); + if cfg!(feature = "sysroot-abi") { + cmd.args(["--features", "sysroot-abi"]); + } if let Ok(target) = std::env::var("TARGET") { cmd.args(["--target", &target]); diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/imp/.gitignore b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/.gitignore similarity index 100% rename from src/tools/rust-analyzer/crates/proc-macro-test/imp/.gitignore rename to src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/.gitignore diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/imp/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml similarity index 91% rename from src/tools/rust-analyzer/crates/proc-macro-test/imp/Cargo.toml rename to src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml index 2a36737cef058..dc94fcd61a4f7 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-test/imp/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/Cargo.toml @@ -9,8 +9,11 @@ publish = false doctest = false proc-macro = true -[workspace] - [dependencies] # this crate should not have any dependencies, since it uses its own workspace, # and its own `Cargo.lock` + +[features] +sysroot-abi = [] + +[workspace] diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/imp/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs similarity index 79% rename from src/tools/rust-analyzer/crates/proc-macro-test/imp/src/lib.rs rename to src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs index 32510fba2f8ca..b8aad4acefcf0 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-test/imp/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs @@ -1,6 +1,10 @@ //! Exports a few trivial procedural macros for testing. 
+#![allow(unexpected_cfgs)] +#![cfg(feature = "sysroot-abi")] +#![cfg(any(feature = "sysroot-abi", rust_analyzer))] #![warn(rust_2018_idioms, unused_lifetimes)] +#![feature(proc_macro_span, proc_macro_def_site)] use proc_macro::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree}; @@ -49,6 +53,29 @@ pub fn fn_like_mk_idents(_args: TokenStream) -> TokenStream { TokenStream::from_iter(trees) } +#[proc_macro] +pub fn fn_like_span_join(args: TokenStream) -> TokenStream { + let args = &mut args.into_iter(); + let first = args.next().unwrap(); + let second = args.next().unwrap(); + TokenStream::from(TokenTree::from(Ident::new_raw( + "joined", + first.span().join(second.span()).unwrap(), + ))) +} + +#[proc_macro] +pub fn fn_like_span_ops(args: TokenStream) -> TokenStream { + let args = &mut args.into_iter(); + let mut first = args.next().unwrap(); + first.set_span(Span::def_site()); + let mut second = args.next().unwrap(); + second.set_span(second.span().resolved_at(Span::def_site())); + let mut third = args.next().unwrap(); + third.set_span(third.span().start()); + TokenStream::from_iter(vec![first, second, third]) +} + #[proc_macro_attribute] pub fn attr_noop(_args: TokenStream, item: TokenStream) -> TokenStream { item diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/src/lib.rs similarity index 100% rename from src/tools/rust-analyzer/crates/proc-macro-test/src/lib.rs rename to src/tools/rust-analyzer/crates/proc-macro-srv/proc-macro-test/src/lib.rs diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs index f20e6832f6e98..52b4cced5f584 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs @@ -11,7 +11,10 @@ use libloading::Library; use memmap2::Mmap; use object::Object; use paths::AbsPath; -use proc_macro_api::{msg::TokenId, read_dylib_info, ProcMacroKind}; +use proc_macro::bridge; +use proc_macro_api::{read_dylib_info, ProcMacroKind}; + +use crate::ProcMacroSrvSpan; const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_"; @@ -147,15 +150,18 @@ impl Expander { Ok(Expander { inner: library }) } - pub fn expand( + pub fn expand( &self, macro_name: &str, - macro_body: &crate::tt::Subtree, - attributes: Option<&crate::tt::Subtree>, - def_site: TokenId, - call_site: TokenId, - mixed_site: TokenId, - ) -> Result { + macro_body: tt::Subtree, + attributes: Option>, + def_site: S, + call_site: S, + mixed_site: S, + ) -> Result, String> + where + ::TokenStream: Default, + { let result = self .inner .proc_macros diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs index 56529f71d855e..f1575a5b0bd83 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs @@ -11,11 +11,12 @@ //! rustc rather than `unstable`. 
(Although in general ABI compatibility is still an issue)… #![cfg(any(feature = "sysroot-abi", rust_analyzer))] -#![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)] +#![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span, rustc_private)] #![warn(rust_2018_idioms, unused_lifetimes)] #![allow(unreachable_pub, internal_features)] extern crate proc_macro; +extern crate rustc_driver as _; mod dylib; mod server; @@ -32,36 +33,67 @@ use std::{ }; use proc_macro_api::{ - msg::{self, ExpnGlobals, TokenId, CURRENT_API_VERSION}, + msg::{ + self, deserialize_span_data_index_map, serialize_span_data_index_map, ExpnGlobals, + SpanMode, TokenId, CURRENT_API_VERSION, + }, ProcMacroKind, }; +use span::Span; -mod tt { - pub use proc_macro_api::msg::TokenId; +use crate::server::TokenStream; - pub use ::tt::*; +// see `build.rs` +include!(concat!(env!("OUT_DIR"), "/rustc_version.rs")); - pub type Subtree = ::tt::Subtree; - pub type TokenTree = ::tt::TokenTree; - pub type Delimiter = ::tt::Delimiter; - pub type Leaf = ::tt::Leaf; - pub type Literal = ::tt::Literal; - pub type Punct = ::tt::Punct; - pub type Ident = ::tt::Ident; +trait ProcMacroSrvSpan: tt::Span { + type Server: proc_macro::bridge::server::Server>; + fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server; } -// see `build.rs` -include!(concat!(env!("OUT_DIR"), "/rustc_version.rs")); +impl ProcMacroSrvSpan for TokenId { + type Server = server::token_id::TokenIdServer; + + fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server { + Self::Server { interner: &server::SYMBOL_INTERNER, call_site, def_site, mixed_site } + } +} +impl ProcMacroSrvSpan for Span { + type Server = server::rust_analyzer_span::RaSpanServer; + fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server { + Self::Server { + interner: &server::SYMBOL_INTERNER, + call_site, + def_site, + mixed_site, + tracked_env_vars: Default::default(), + tracked_paths: Default::default(), + } + } +} #[derive(Default)] pub struct ProcMacroSrv { expanders: HashMap<(PathBuf, SystemTime), dylib::Expander>, + span_mode: SpanMode, } const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024; impl ProcMacroSrv { - pub fn expand(&mut self, task: msg::ExpandMacro) -> Result { + pub fn set_span_mode(&mut self, span_mode: SpanMode) { + self.span_mode = span_mode; + } + + pub fn span_mode(&self) -> SpanMode { + self.span_mode + } + + pub fn expand( + &mut self, + task: msg::ExpandMacro, + ) -> Result<(msg::FlatTree, Vec), msg::PanicMessage> { + let span_mode = self.span_mode; let expander = self.expander(task.lib.as_ref()).map_err(|err| { debug_assert!(false, "should list macros before asking to expand"); msg::PanicMessage(format!("failed to load macro: {err}")) @@ -71,10 +103,10 @@ impl ProcMacroSrv { for (k, v) in &task.env { env::set_var(k, v); } - let prev_working_dir = match task.current_dir { + let prev_working_dir = match &task.current_dir { Some(dir) => { let prev_working_dir = std::env::current_dir().ok(); - if let Err(err) = std::env::set_current_dir(&dir) { + if let Err(err) = std::env::set_current_dir(dir) { eprintln!("Failed to set the current working dir to {dir}. Error: {err:?}") } prev_working_dir @@ -83,38 +115,15 @@ impl ProcMacroSrv { }; let ExpnGlobals { def_site, call_site, mixed_site, .. 
} = task.has_global_spans; - let def_site = TokenId(def_site as u32); - let call_site = TokenId(call_site as u32); - let mixed_site = TokenId(mixed_site as u32); - - let macro_body = task.macro_body.to_subtree_unresolved(CURRENT_API_VERSION); - let attributes = task.attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION)); - let result = thread::scope(|s| { - let thread = thread::Builder::new() - .stack_size(EXPANDER_STACK_SIZE) - .name(task.macro_name.clone()) - .spawn_scoped(s, || { - expander - .expand( - &task.macro_name, - ¯o_body, - attributes.as_ref(), - def_site, - call_site, - mixed_site, - ) - .map(|it| msg::FlatTree::new_raw(&it, CURRENT_API_VERSION)) - }); - let res = match thread { - Ok(handle) => handle.join(), - Err(e) => std::panic::resume_unwind(Box::new(e)), - }; - - match res { - Ok(res) => res, - Err(e) => std::panic::resume_unwind(e), + + let result = match span_mode { + SpanMode::Id => { + expand_id(task, expander, def_site, call_site, mixed_site).map(|it| (it, vec![])) } - }); + SpanMode::RustAnalyzer => { + expand_ra_span(task, expander, def_site, call_site, mixed_site) + } + }; prev_env.rollback(); @@ -155,6 +164,98 @@ impl ProcMacroSrv { } } +fn expand_id( + task: msg::ExpandMacro, + expander: &dylib::Expander, + def_site: usize, + call_site: usize, + mixed_site: usize, +) -> Result { + let def_site = TokenId(def_site as u32); + let call_site = TokenId(call_site as u32); + let mixed_site = TokenId(mixed_site as u32); + + let macro_body = task.macro_body.to_subtree_unresolved(CURRENT_API_VERSION); + let attributes = task.attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION)); + let result = thread::scope(|s| { + let thread = thread::Builder::new() + .stack_size(EXPANDER_STACK_SIZE) + .name(task.macro_name.clone()) + .spawn_scoped(s, || { + expander + .expand( + &task.macro_name, + macro_body, + attributes, + def_site, + call_site, + mixed_site, + ) + .map(|it| msg::FlatTree::new_raw(&it, CURRENT_API_VERSION)) + }); + let res = match thread { + Ok(handle) => handle.join(), + Err(e) => std::panic::resume_unwind(Box::new(e)), + }; + + match res { + Ok(res) => res, + Err(e) => std::panic::resume_unwind(e), + } + }); + result +} + +fn expand_ra_span( + task: msg::ExpandMacro, + expander: &dylib::Expander, + def_site: usize, + call_site: usize, + mixed_site: usize, +) -> Result<(msg::FlatTree, Vec), String> { + let mut span_data_table = deserialize_span_data_index_map(&task.span_data_table); + + let def_site = span_data_table[def_site]; + let call_site = span_data_table[call_site]; + let mixed_site = span_data_table[mixed_site]; + + let macro_body = task.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table); + let attributes = + task.attributes.map(|it| it.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table)); + let result = thread::scope(|s| { + let thread = thread::Builder::new() + .stack_size(EXPANDER_STACK_SIZE) + .name(task.macro_name.clone()) + .spawn_scoped(s, || { + expander + .expand( + &task.macro_name, + macro_body, + attributes, + def_site, + call_site, + mixed_site, + ) + .map(|it| { + ( + msg::FlatTree::new(&it, CURRENT_API_VERSION, &mut span_data_table), + serialize_span_data_index_map(&span_data_table), + ) + }) + }); + let res = match thread { + Ok(handle) => handle.join(), + Err(e) => std::panic::resume_unwind(Box::new(e)), + }; + + match res { + Ok(res) => res, + Err(e) => std::panic::resume_unwind(e), + } + }); + result +} + pub struct PanicMessage { message: Option, } diff --git 
a/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs index 716b85d096d07..3fe968c81ca12 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs @@ -2,9 +2,9 @@ use libloading::Library; use proc_macro::bridge; -use proc_macro_api::{msg::TokenId, ProcMacroKind, RustCInfo}; +use proc_macro_api::{ProcMacroKind, RustCInfo}; -use crate::{dylib::LoadProcMacroDylibError, server::SYMBOL_INTERNER, tt}; +use crate::{dylib::LoadProcMacroDylibError, ProcMacroSrvSpan}; pub(crate) struct ProcMacros { exported_macros: Vec, @@ -40,19 +40,19 @@ impl ProcMacros { Err(LoadProcMacroDylibError::AbiMismatch(info.version_string)) } - pub(crate) fn expand( + pub(crate) fn expand( &self, macro_name: &str, - macro_body: &tt::Subtree, - attributes: Option<&tt::Subtree>, - def_site: TokenId, - call_site: TokenId, - mixed_site: TokenId, - ) -> Result { - let parsed_body = crate::server::TokenStream::with_subtree(macro_body.clone()); + macro_body: tt::Subtree, + attributes: Option>, + def_site: S, + call_site: S, + mixed_site: S, + ) -> Result, crate::PanicMessage> { + let parsed_body = crate::server::TokenStream::with_subtree(macro_body); - let parsed_attributes = attributes.map_or(crate::server::TokenStream::new(), |attr| { - crate::server::TokenStream::with_subtree(attr.clone()) + let parsed_attributes = attributes.map_or_else(crate::server::TokenStream::new, |attr| { + crate::server::TokenStream::with_subtree(attr) }); for proc_macro in &self.exported_macros { @@ -62,12 +62,7 @@ impl ProcMacros { { let res = client.run( &bridge::server::SameThread, - crate::server::RustAnalyzer { - interner: &SYMBOL_INTERNER, - call_site, - def_site, - mixed_site, - }, + S::make_server(call_site, def_site, mixed_site), parsed_body, false, ); @@ -78,12 +73,7 @@ impl ProcMacros { bridge::client::ProcMacro::Bang { name, client } if *name == macro_name => { let res = client.run( &bridge::server::SameThread, - crate::server::RustAnalyzer { - interner: &SYMBOL_INTERNER, - call_site, - def_site, - mixed_site, - }, + S::make_server(call_site, def_site, mixed_site), parsed_body, false, ); @@ -94,13 +84,7 @@ impl ProcMacros { bridge::client::ProcMacro::Attr { name, client } if *name == macro_name => { let res = client.run( &bridge::server::SameThread, - crate::server::RustAnalyzer { - interner: &SYMBOL_INTERNER, - - call_site, - def_site, - mixed_site, - }, + S::make_server(call_site, def_site, mixed_site), parsed_attributes, parsed_body, false, @@ -113,7 +97,7 @@ impl ProcMacros { } } - Err(bridge::PanicMessage::String("Nothing to expand".to_string()).into()) + Err(bridge::PanicMessage::String(format!("proc-macro `{macro_name}` is missing")).into()) } pub(crate) fn list_macros(&self) -> Vec<(String, ProcMacroKind)> { diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs index 917d8a6e26af3..1854322ddb5c3 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs @@ -8,226 +8,18 @@ //! //! 
FIXME: No span and source file information is implemented yet -use proc_macro::bridge::{self, server}; +use proc_macro::bridge; mod token_stream; -use proc_macro_api::msg::TokenId; pub use token_stream::TokenStream; -use token_stream::TokenStreamBuilder; +pub mod token_id; +pub mod rust_analyzer_span; mod symbol; pub use symbol::*; +use tt::Spacing; -use std::{ - iter, - ops::{Bound, Range}, -}; - -use crate::tt; - -type Group = tt::Subtree; -type TokenTree = tt::TokenTree; -#[allow(unused)] -type Punct = tt::Punct; -type Spacing = tt::Spacing; -#[allow(unused)] -type Literal = tt::Literal; -type Span = tt::TokenId; - -#[derive(Clone)] -pub struct SourceFile { - // FIXME stub -} - -pub struct FreeFunctions; - -pub struct RustAnalyzer { - // FIXME: store span information here. - pub(crate) interner: SymbolInternerRef, - pub call_site: TokenId, - pub def_site: TokenId, - pub mixed_site: TokenId, -} - -impl server::Types for RustAnalyzer { - type FreeFunctions = FreeFunctions; - type TokenStream = TokenStream; - type SourceFile = SourceFile; - type Span = Span; - type Symbol = Symbol; -} - -impl server::FreeFunctions for RustAnalyzer { - fn injected_env_var(&mut self, _var: &str) -> Option { - None - } - - fn track_env_var(&mut self, _var: &str, _value: Option<&str>) { - // FIXME: track env var accesses - // https://github.com/rust-lang/rust/pull/71858 - } - fn track_path(&mut self, _path: &str) {} - - fn literal_from_str( - &mut self, - s: &str, - ) -> Result, ()> { - // FIXME: keep track of LitKind and Suffix - Ok(bridge::Literal { - kind: bridge::LitKind::Err, - symbol: Symbol::intern(self.interner, s), - suffix: None, - span: self.call_site, - }) - } - - fn emit_diagnostic(&mut self, _: bridge::Diagnostic) { - // FIXME handle diagnostic - } -} - -impl server::TokenStream for RustAnalyzer { - fn is_empty(&mut self, stream: &Self::TokenStream) -> bool { - stream.is_empty() - } - fn from_str(&mut self, src: &str) -> Self::TokenStream { - Self::TokenStream::from_str(src, self.call_site).expect("cannot parse string") - } - fn to_string(&mut self, stream: &Self::TokenStream) -> String { - stream.to_string() - } - fn from_token_tree( - &mut self, - tree: bridge::TokenTree, - ) -> Self::TokenStream { - match tree { - bridge::TokenTree::Group(group) => { - let group = Group { - delimiter: delim_to_internal(group.delimiter, group.span), - token_trees: match group.stream { - Some(stream) => stream.into_iter().collect(), - None => Vec::new(), - }, - }; - let tree = TokenTree::from(group); - Self::TokenStream::from_iter(iter::once(tree)) - } - - bridge::TokenTree::Ident(ident) => { - let text = ident.sym.text(self.interner); - let text = - if ident.is_raw { ::tt::SmolStr::from_iter(["r#", &text]) } else { text }; - let ident: tt::Ident = tt::Ident { text, span: ident.span }; - let leaf = tt::Leaf::from(ident); - let tree = TokenTree::from(leaf); - Self::TokenStream::from_iter(iter::once(tree)) - } - - bridge::TokenTree::Literal(literal) => { - let literal = LiteralFormatter(literal); - let text = literal.with_stringify_parts(self.interner, |parts| { - ::tt::SmolStr::from_iter(parts.iter().copied()) - }); - - let literal = tt::Literal { text, span: literal.0.span }; - let leaf = tt::Leaf::from(literal); - let tree = TokenTree::from(leaf); - Self::TokenStream::from_iter(iter::once(tree)) - } - - bridge::TokenTree::Punct(p) => { - let punct = tt::Punct { - char: p.ch as char, - spacing: if p.joint { Spacing::Joint } else { Spacing::Alone }, - span: p.span, - }; - let leaf = tt::Leaf::from(punct); - let 
tree = TokenTree::from(leaf); - Self::TokenStream::from_iter(iter::once(tree)) - } - } - } - - fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result { - Ok(self_.clone()) - } - - fn concat_trees( - &mut self, - base: Option, - trees: Vec>, - ) -> Self::TokenStream { - let mut builder = TokenStreamBuilder::new(); - if let Some(base) = base { - builder.push(base); - } - for tree in trees { - builder.push(self.from_token_tree(tree)); - } - builder.build() - } - - fn concat_streams( - &mut self, - base: Option, - streams: Vec, - ) -> Self::TokenStream { - let mut builder = TokenStreamBuilder::new(); - if let Some(base) = base { - builder.push(base); - } - for stream in streams { - builder.push(stream); - } - builder.build() - } - - fn into_trees( - &mut self, - stream: Self::TokenStream, - ) -> Vec> { - stream - .into_iter() - .map(|tree| match tree { - tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { - bridge::TokenTree::Ident(bridge::Ident { - sym: Symbol::intern(self.interner, ident.text.trim_start_matches("r#")), - is_raw: ident.text.starts_with("r#"), - span: ident.span, - }) - } - tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { - bridge::TokenTree::Literal(bridge::Literal { - // FIXME: handle literal kinds - kind: bridge::LitKind::Err, - symbol: Symbol::intern(self.interner, &lit.text), - // FIXME: handle suffixes - suffix: None, - span: lit.span, - }) - } - tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { - bridge::TokenTree::Punct(bridge::Punct { - ch: punct.char as u8, - joint: punct.spacing == Spacing::Joint, - span: punct.span, - }) - } - tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group { - delimiter: delim_to_external(subtree.delimiter), - stream: if subtree.token_trees.is_empty() { - None - } else { - Some(subtree.token_trees.into_iter().collect()) - }, - span: bridge::DelimSpan::from_single(subtree.delimiter.open), - }), - }) - .collect() - } -} - -fn delim_to_internal(d: proc_macro::Delimiter, span: bridge::DelimSpan) -> tt::Delimiter { +fn delim_to_internal(d: proc_macro::Delimiter, span: bridge::DelimSpan) -> tt::Delimiter { let kind = match d { proc_macro::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis, proc_macro::Delimiter::Brace => tt::DelimiterKind::Brace, @@ -237,7 +29,7 @@ fn delim_to_internal(d: proc_macro::Delimiter, span: bridge::DelimSpan) -> tt::Delimiter { open: span.open, close: span.close, kind } } -fn delim_to_external(d: tt::Delimiter) -> proc_macro::Delimiter { +fn delim_to_external(d: tt::Delimiter) -> proc_macro::Delimiter { match d.kind { tt::DelimiterKind::Parenthesis => proc_macro::Delimiter::Parenthesis, tt::DelimiterKind::Brace => proc_macro::Delimiter::Brace, @@ -262,121 +54,9 @@ fn spacing_to_external(spacing: Spacing) -> proc_macro::Spacing { } } -impl server::SourceFile for RustAnalyzer { - // FIXME these are all stubs - fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool { - true - } - fn path(&mut self, _file: &Self::SourceFile) -> String { - String::new() - } - fn is_real(&mut self, _file: &Self::SourceFile) -> bool { - true - } -} - -impl server::Span for RustAnalyzer { - fn debug(&mut self, span: Self::Span) -> String { - format!("{:?}", span.0) - } - fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile { - SourceFile {} - } - fn save_span(&mut self, _span: Self::Span) -> usize { - // FIXME stub - 0 - } - fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span { - // FIXME stub - self.call_site - } - /// Recent feature, not yet in the proc_macro - /// - 
/// See PR: - /// https://github.com/rust-lang/rust/pull/55780 - fn source_text(&mut self, _span: Self::Span) -> Option { - None - } - - fn parent(&mut self, _span: Self::Span) -> Option { - // FIXME handle span - None - } - fn source(&mut self, span: Self::Span) -> Self::Span { - // FIXME handle span - span - } - fn byte_range(&mut self, _span: Self::Span) -> Range { - // FIXME handle span - Range { start: 0, end: 0 } - } - fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option { - // Just return the first span again, because some macros will unwrap the result. - Some(first) - } - fn subspan( - &mut self, - span: Self::Span, - _start: Bound, - _end: Bound, - ) -> Option { - // Just return the span again, because some macros will unwrap the result. - Some(span) - } - fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span { - // FIXME handle span - self.call_site - } - - fn end(&mut self, _self_: Self::Span) -> Self::Span { - self.call_site - } - - fn start(&mut self, _self_: Self::Span) -> Self::Span { - self.call_site - } - - fn line(&mut self, _span: Self::Span) -> usize { - // FIXME handle line - 0 - } - - fn column(&mut self, _span: Self::Span) -> usize { - // FIXME handle column - 0 - } -} - -impl server::Symbol for RustAnalyzer { - fn normalize_and_validate_ident(&mut self, string: &str) -> Result { - // FIXME: nfc-normalize and validate idents - Ok(::intern_symbol(string)) - } -} - -impl server::Server for RustAnalyzer { - fn globals(&mut self) -> bridge::ExpnGlobals { - bridge::ExpnGlobals { - def_site: self.def_site, - call_site: self.call_site, - mixed_site: self.mixed_site, - } - } - - fn intern_symbol(ident: &str) -> Self::Symbol { - // FIXME: should be `self.interner` once the proc-macro api allows it. - Symbol::intern(&SYMBOL_INTERNER, &::tt::SmolStr::from(ident)) - } - - fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) { - // FIXME: should be `self.interner` once the proc-macro api allows it. - f(symbol.text(&SYMBOL_INTERNER).as_str()) - } -} - -struct LiteralFormatter(bridge::Literal); +struct LiteralFormatter(bridge::Literal); -impl LiteralFormatter { +impl LiteralFormatter { /// Invokes the callback with a `&[&str]` consisting of each part of the /// literal's representation. 
This is done to allow the `ToString` and /// `Display` implementations to borrow references to symbol values, and @@ -427,66 +107,3 @@ impl LiteralFormatter { f(symbol.as_str(), suffix.as_str()) } } - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_ra_server_to_string() { - let s = TokenStream { - token_trees: vec![ - tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: "struct".into(), - span: tt::TokenId(0), - })), - tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: "T".into(), - span: tt::TokenId(0), - })), - tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter { - open: tt::TokenId(0), - close: tt::TokenId(0), - kind: tt::DelimiterKind::Brace, - }, - token_trees: vec![], - }), - ], - }; - - assert_eq!(s.to_string(), "struct T {}"); - } - - #[test] - fn test_ra_server_from_str() { - let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter { - open: tt::TokenId(0), - close: tt::TokenId(0), - kind: tt::DelimiterKind::Parenthesis, - }, - token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: "a".into(), - span: tt::TokenId(0), - }))], - }); - - let t1 = TokenStream::from_str("(a)", tt::TokenId(0)).unwrap(); - assert_eq!(t1.token_trees.len(), 1); - assert_eq!(t1.token_trees[0], subtree_paren_a); - - let t2 = TokenStream::from_str("(a);", tt::TokenId(0)).unwrap(); - assert_eq!(t2.token_trees.len(), 2); - assert_eq!(t2.token_trees[0], subtree_paren_a); - - let underscore = TokenStream::from_str("_", tt::TokenId(0)).unwrap(); - assert_eq!( - underscore.token_trees[0], - tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: "_".into(), - span: tt::TokenId(0), - })) - ); - } -} diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/rust_analyzer_span.rs new file mode 100644 index 0000000000000..bcf3600d27366 --- /dev/null +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/rust_analyzer_span.rs @@ -0,0 +1,411 @@ +//! proc-macro server backend based on rust-analyzer's internal span represention +//! This backend is used solely by rust-analyzer as it ties into rust-analyzer internals. +//! +//! It is an unfortunate result of how the proc-macro API works that we need to look into the +//! concrete representation of the spans, and as such, RustRover cannot make use of this unless they +//! change their representation to be compatible with rust-analyzer's. 
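// Editor's note (simplified sketch, not part of the patch): the shape of the backend selection
// introduced in lib.rs above — a span type picks its proc-macro server through an associated
// type, so one expansion driver can serve either TokenId spans or full rust-analyzer spans.
// All names below are stand-ins; the real trait is `ProcMacroSrvSpan`.
trait DemoSrvSpan: Copy {
    type Server;
    fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server;
}

#[derive(Clone, Copy)]
struct DemoTokenId(u32);

struct DemoTokenIdServer {
    call_site: DemoTokenId,
    def_site: DemoTokenId,
    mixed_site: DemoTokenId,
}

impl DemoSrvSpan for DemoTokenId {
    type Server = DemoTokenIdServer;
    fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server {
        DemoTokenIdServer { call_site, def_site, mixed_site }
    }
}

fn main() {
    let srv = DemoTokenId::make_server(DemoTokenId(0), DemoTokenId(1), DemoTokenId(2));
    assert_eq!((srv.call_site.0, srv.def_site.0, srv.mixed_site.0), (0, 1, 2));
}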
+use std::{ + collections::{HashMap, HashSet}, + iter, + ops::{Bound, Range}, +}; + +use ::tt::{TextRange, TextSize}; +use proc_macro::bridge::{self, server}; +use span::{Span, FIXUP_ERASED_FILE_AST_ID_MARKER}; + +use crate::server::{ + delim_to_external, delim_to_internal, token_stream::TokenStreamBuilder, LiteralFormatter, + Symbol, SymbolInternerRef, SYMBOL_INTERNER, +}; +mod tt { + pub use ::tt::*; + + pub type Subtree = ::tt::Subtree; + pub type TokenTree = ::tt::TokenTree; + pub type Leaf = ::tt::Leaf; + pub type Literal = ::tt::Literal; + pub type Punct = ::tt::Punct; + pub type Ident = ::tt::Ident; +} + +type TokenStream = crate::server::TokenStream; + +#[derive(Clone)] +pub struct SourceFile; +pub struct FreeFunctions; + +pub struct RaSpanServer { + pub(crate) interner: SymbolInternerRef, + // FIXME: Report this back to the caller to track as dependencies + pub tracked_env_vars: HashMap, Option>>, + // FIXME: Report this back to the caller to track as dependencies + pub tracked_paths: HashSet>, + pub call_site: Span, + pub def_site: Span, + pub mixed_site: Span, +} + +impl server::Types for RaSpanServer { + type FreeFunctions = FreeFunctions; + type TokenStream = TokenStream; + type SourceFile = SourceFile; + type Span = Span; + type Symbol = Symbol; +} + +impl server::FreeFunctions for RaSpanServer { + fn injected_env_var(&mut self, _: &str) -> Option { + None + } + + fn track_env_var(&mut self, var: &str, value: Option<&str>) { + self.tracked_env_vars.insert(var.into(), value.map(Into::into)); + } + fn track_path(&mut self, path: &str) { + self.tracked_paths.insert(path.into()); + } + + fn literal_from_str( + &mut self, + s: &str, + ) -> Result, ()> { + // FIXME: keep track of LitKind and Suffix + Ok(bridge::Literal { + kind: bridge::LitKind::Err, + symbol: Symbol::intern(self.interner, s), + suffix: None, + span: self.call_site, + }) + } + + fn emit_diagnostic(&mut self, _: bridge::Diagnostic) { + // FIXME handle diagnostic + } +} + +impl server::TokenStream for RaSpanServer { + fn is_empty(&mut self, stream: &Self::TokenStream) -> bool { + stream.is_empty() + } + fn from_str(&mut self, src: &str) -> Self::TokenStream { + Self::TokenStream::from_str(src, self.call_site).expect("cannot parse string") + } + fn to_string(&mut self, stream: &Self::TokenStream) -> String { + stream.to_string() + } + fn from_token_tree( + &mut self, + tree: bridge::TokenTree, + ) -> Self::TokenStream { + match tree { + bridge::TokenTree::Group(group) => { + let group = tt::Subtree { + delimiter: delim_to_internal(group.delimiter, group.span), + token_trees: match group.stream { + Some(stream) => stream.into_iter().collect(), + None => Vec::new(), + }, + }; + let tree = tt::TokenTree::from(group); + Self::TokenStream::from_iter(iter::once(tree)) + } + + bridge::TokenTree::Ident(ident) => { + let text = ident.sym.text(self.interner); + let text = + if ident.is_raw { ::tt::SmolStr::from_iter(["r#", &text]) } else { text }; + let ident: tt::Ident = tt::Ident { text, span: ident.span }; + let leaf = tt::Leaf::from(ident); + let tree = tt::TokenTree::from(leaf); + Self::TokenStream::from_iter(iter::once(tree)) + } + + bridge::TokenTree::Literal(literal) => { + let literal = LiteralFormatter(literal); + let text = literal.with_stringify_parts(self.interner, |parts| { + ::tt::SmolStr::from_iter(parts.iter().copied()) + }); + + let literal = tt::Literal { text, span: literal.0.span }; + let leaf: tt::Leaf = tt::Leaf::from(literal); + let tree = tt::TokenTree::from(leaf); + 
Self::TokenStream::from_iter(iter::once(tree)) + } + + bridge::TokenTree::Punct(p) => { + let punct = tt::Punct { + char: p.ch as char, + spacing: if p.joint { tt::Spacing::Joint } else { tt::Spacing::Alone }, + span: p.span, + }; + let leaf = tt::Leaf::from(punct); + let tree = tt::TokenTree::from(leaf); + Self::TokenStream::from_iter(iter::once(tree)) + } + } + } + + fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result { + // FIXME: requires db, more importantly this requires name resolution so we would need to + // eagerly expand this proc-macro, but we can't know that this proc-macro is eager until we + // expand it ... + // This calls for some kind of marker that a proc-macro wants to access this eager API, + // otherwise we need to treat every proc-macro eagerly / or not support this. + Ok(self_.clone()) + } + + fn concat_trees( + &mut self, + base: Option, + trees: Vec>, + ) -> Self::TokenStream { + let mut builder = TokenStreamBuilder::new(); + if let Some(base) = base { + builder.push(base); + } + for tree in trees { + builder.push(self.from_token_tree(tree)); + } + builder.build() + } + + fn concat_streams( + &mut self, + base: Option, + streams: Vec, + ) -> Self::TokenStream { + let mut builder = TokenStreamBuilder::new(); + if let Some(base) = base { + builder.push(base); + } + for stream in streams { + builder.push(stream); + } + builder.build() + } + + fn into_trees( + &mut self, + stream: Self::TokenStream, + ) -> Vec> { + stream + .into_iter() + .map(|tree| match tree { + tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { + bridge::TokenTree::Ident(bridge::Ident { + sym: Symbol::intern(self.interner, ident.text.trim_start_matches("r#")), + is_raw: ident.text.starts_with("r#"), + span: ident.span, + }) + } + tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { + bridge::TokenTree::Literal(bridge::Literal { + // FIXME: handle literal kinds + kind: bridge::LitKind::Err, + symbol: Symbol::intern(self.interner, &lit.text), + // FIXME: handle suffixes + suffix: None, + span: lit.span, + }) + } + tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { + bridge::TokenTree::Punct(bridge::Punct { + ch: punct.char as u8, + joint: punct.spacing == tt::Spacing::Joint, + span: punct.span, + }) + } + tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group { + delimiter: delim_to_external(subtree.delimiter), + stream: if subtree.token_trees.is_empty() { + None + } else { + Some(subtree.token_trees.into_iter().collect()) + }, + span: bridge::DelimSpan::from_single(subtree.delimiter.open), + }), + }) + .collect() + } +} + +impl server::SourceFile for RaSpanServer { + // FIXME these are all stubs + fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool { + true + } + fn path(&mut self, _file: &Self::SourceFile) -> String { + String::new() + } + fn is_real(&mut self, _file: &Self::SourceFile) -> bool { + true + } +} + +impl server::Span for RaSpanServer { + fn debug(&mut self, span: Self::Span) -> String { + format!("{:?}", span) + } + fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile { + // FIXME stub, requires db + SourceFile {} + } + fn save_span(&mut self, _span: Self::Span) -> usize { + // FIXME stub, requires builtin quote! implementation + 0 + } + fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span { + // FIXME stub, requires builtin quote! 
implementation + self.call_site + } + /// Recent feature, not yet in the proc_macro + /// + /// See PR: + /// https://github.com/rust-lang/rust/pull/55780 + fn source_text(&mut self, _span: Self::Span) -> Option { + // FIXME requires db, needs special handling wrt fixup spans + None + } + + fn parent(&mut self, _span: Self::Span) -> Option { + // FIXME requires db, looks up the parent call site + None + } + fn source(&mut self, span: Self::Span) -> Self::Span { + // FIXME requires db, returns the top level call site + span + } + fn byte_range(&mut self, span: Self::Span) -> Range { + // FIXME requires db to resolve the ast id, THIS IS NOT INCREMENTAL + Range { start: span.range.start().into(), end: span.range.end().into() } + } + fn join(&mut self, first: Self::Span, second: Self::Span) -> Option { + // We can't modify the span range for fixup spans, those are meaningful to fixup, so just + // prefer the non-fixup span. + if first.anchor.ast_id == FIXUP_ERASED_FILE_AST_ID_MARKER { + return Some(second); + } + if second.anchor.ast_id == FIXUP_ERASED_FILE_AST_ID_MARKER { + return Some(first); + } + // FIXME: Once we can talk back to the client, implement a "long join" request for anchors + // that differ in [AstId]s as joining those spans requires resolving the AstIds. + if first.anchor != second.anchor { + return None; + } + // Differing context, we can't merge these so prefer the one that's root + if first.ctx != second.ctx { + if first.ctx.is_root() { + return Some(second); + } else if second.ctx.is_root() { + return Some(first); + } + } + Some(Span { + range: first.range.cover(second.range), + anchor: second.anchor, + ctx: second.ctx, + }) + } + fn subspan( + &mut self, + span: Self::Span, + start: Bound, + end: Bound, + ) -> Option { + // We can't modify the span range for fixup spans, those are meaningful to fixup. + if span.anchor.ast_id == FIXUP_ERASED_FILE_AST_ID_MARKER { + return Some(span); + } + let length = span.range.len().into(); + + let start: u32 = match start { + Bound::Included(lo) => lo, + Bound::Excluded(lo) => lo.checked_add(1)?, + Bound::Unbounded => 0, + } + .try_into() + .ok()?; + + let end: u32 = match end { + Bound::Included(hi) => hi.checked_add(1)?, + Bound::Excluded(hi) => hi, + Bound::Unbounded => span.range.len().into(), + } + .try_into() + .ok()?; + + // Bounds check the values, preventing addition overflow and OOB spans. + let span_start = span.range.start().into(); + if (u32::MAX - start) < span_start + || (u32::MAX - end) < span_start + || start >= end + || end > length + { + return None; + } + + Some(Span { + range: TextRange::new(TextSize::from(start), TextSize::from(end)) + span.range.start(), + ..span + }) + } + + fn resolved_at(&mut self, span: Self::Span, at: Self::Span) -> Self::Span { + Span { ctx: at.ctx, ..span } + } + + fn end(&mut self, span: Self::Span) -> Self::Span { + // We can't modify the span range for fixup spans, those are meaningful to fixup. + if span.anchor.ast_id == FIXUP_ERASED_FILE_AST_ID_MARKER { + return span; + } + Span { range: TextRange::empty(span.range.end()), ..span } + } + + fn start(&mut self, span: Self::Span) -> Self::Span { + // We can't modify the span range for fixup spans, those are meaningful to fixup. 
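// Editor's note (illustration only, not part of the patch): the happy path of `join` above — when
// two spans share an anchor and syntax context, the joined span covers both ranges. Uses the
// `text-size` crate the patch itself imports; the fixup-span and differing-anchor guards are elided.
use text_size::{TextRange, TextSize};

fn join_ranges(first: TextRange, second: TextRange) -> TextRange {
    first.cover(second)
}

fn main() {
    let first = TextRange::new(TextSize::from(0), TextSize::from(6)); // e.g. `struct`
    let second = TextRange::new(TextSize::from(7), TextSize::from(10)); // e.g. `Foo`
    assert_eq!(join_ranges(first, second), TextRange::new(TextSize::from(0), TextSize::from(10)));
}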
+ if span.anchor.ast_id == FIXUP_ERASED_FILE_AST_ID_MARKER { + return span; + } + Span { range: TextRange::empty(span.range.start()), ..span } + } + + fn line(&mut self, _span: Self::Span) -> usize { + // FIXME requires db to resolve line index, THIS IS NOT INCREMENTAL + 0 + } + + fn column(&mut self, _span: Self::Span) -> usize { + // FIXME requires db to resolve line index, THIS IS NOT INCREMENTAL + 0 + } +} + +impl server::Symbol for RaSpanServer { + fn normalize_and_validate_ident(&mut self, string: &str) -> Result { + // FIXME: nfc-normalize and validate idents + Ok(::intern_symbol(string)) + } +} + +impl server::Server for RaSpanServer { + fn globals(&mut self) -> bridge::ExpnGlobals { + bridge::ExpnGlobals { + def_site: self.def_site, + call_site: self.call_site, + mixed_site: self.mixed_site, + } + } + + fn intern_symbol(ident: &str) -> Self::Symbol { + // FIXME: should be `self.interner` once the proc-macro api allows it. + Symbol::intern(&SYMBOL_INTERNER, &::tt::SmolStr::from(ident)) + } + + fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) { + // FIXME: should be `self.interner` once the proc-macro api allows it. + f(symbol.text(&SYMBOL_INTERNER).as_str()) + } +} diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/token_id.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/token_id.rs new file mode 100644 index 0000000000000..12526ad4f3ae6 --- /dev/null +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/token_id.rs @@ -0,0 +1,380 @@ +//! proc-macro server backend based on [`proc_macro_api::msg::TokenId`] as the backing span. +//! This backend is rather inflexible, used by RustRover and older rust-analyzer versions. +use std::{ + iter, + ops::{Bound, Range}, +}; + +use proc_macro::bridge::{self, server}; + +use crate::server::{ + delim_to_external, delim_to_internal, token_stream::TokenStreamBuilder, LiteralFormatter, + Symbol, SymbolInternerRef, SYMBOL_INTERNER, +}; +mod tt { + pub use proc_macro_api::msg::TokenId; + + pub use ::tt::*; + + pub type Subtree = ::tt::Subtree; + pub type TokenTree = ::tt::TokenTree; + pub type Leaf = ::tt::Leaf; + pub type Literal = ::tt::Literal; + pub type Punct = ::tt::Punct; + pub type Ident = ::tt::Ident; +} +type Group = tt::Subtree; +type TokenTree = tt::TokenTree; +#[allow(unused)] +type Punct = tt::Punct; +type Spacing = tt::Spacing; +#[allow(unused)] +type Literal = tt::Literal; +type Span = tt::TokenId; +type TokenStream = crate::server::TokenStream; + +#[derive(Clone)] +pub struct SourceFile; +pub struct FreeFunctions; + +pub struct TokenIdServer { + pub(crate) interner: SymbolInternerRef, + pub call_site: Span, + pub def_site: Span, + pub mixed_site: Span, +} + +impl server::Types for TokenIdServer { + type FreeFunctions = FreeFunctions; + type TokenStream = TokenStream; + type SourceFile = SourceFile; + type Span = Span; + type Symbol = Symbol; +} + +impl server::FreeFunctions for TokenIdServer { + fn injected_env_var(&mut self, _: &str) -> Option { + None + } + fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {} + fn track_path(&mut self, _path: &str) {} + fn literal_from_str( + &mut self, + s: &str, + ) -> Result, ()> { + // FIXME: keep track of LitKind and Suffix + Ok(bridge::Literal { + kind: bridge::LitKind::Err, + symbol: Symbol::intern(self.interner, s), + suffix: None, + span: self.call_site, + }) + } + + fn emit_diagnostic(&mut self, _: bridge::Diagnostic) {} +} + +impl server::TokenStream for TokenIdServer { + fn is_empty(&mut self, stream: 
&Self::TokenStream) -> bool { + stream.is_empty() + } + fn from_str(&mut self, src: &str) -> Self::TokenStream { + Self::TokenStream::from_str(src, self.call_site).expect("cannot parse string") + } + fn to_string(&mut self, stream: &Self::TokenStream) -> String { + stream.to_string() + } + fn from_token_tree( + &mut self, + tree: bridge::TokenTree, + ) -> Self::TokenStream { + match tree { + bridge::TokenTree::Group(group) => { + let group = Group { + delimiter: delim_to_internal(group.delimiter, group.span), + token_trees: match group.stream { + Some(stream) => stream.into_iter().collect(), + None => Vec::new(), + }, + }; + let tree = TokenTree::from(group); + Self::TokenStream::from_iter(iter::once(tree)) + } + + bridge::TokenTree::Ident(ident) => { + let text = ident.sym.text(self.interner); + let text = + if ident.is_raw { ::tt::SmolStr::from_iter(["r#", &text]) } else { text }; + let ident: tt::Ident = tt::Ident { text, span: ident.span }; + let leaf = tt::Leaf::from(ident); + let tree = TokenTree::from(leaf); + Self::TokenStream::from_iter(iter::once(tree)) + } + + bridge::TokenTree::Literal(literal) => { + let literal = LiteralFormatter(literal); + let text = literal.with_stringify_parts(self.interner, |parts| { + ::tt::SmolStr::from_iter(parts.iter().copied()) + }); + + let literal = tt::Literal { text, span: literal.0.span }; + let leaf = tt::Leaf::from(literal); + let tree = TokenTree::from(leaf); + Self::TokenStream::from_iter(iter::once(tree)) + } + + bridge::TokenTree::Punct(p) => { + let punct = tt::Punct { + char: p.ch as char, + spacing: if p.joint { Spacing::Joint } else { Spacing::Alone }, + span: p.span, + }; + let leaf = tt::Leaf::from(punct); + let tree = TokenTree::from(leaf); + Self::TokenStream::from_iter(iter::once(tree)) + } + } + } + + fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result { + Ok(self_.clone()) + } + + fn concat_trees( + &mut self, + base: Option, + trees: Vec>, + ) -> Self::TokenStream { + let mut builder = TokenStreamBuilder::new(); + if let Some(base) = base { + builder.push(base); + } + for tree in trees { + builder.push(self.from_token_tree(tree)); + } + builder.build() + } + + fn concat_streams( + &mut self, + base: Option, + streams: Vec, + ) -> Self::TokenStream { + let mut builder = TokenStreamBuilder::new(); + if let Some(base) = base { + builder.push(base); + } + for stream in streams { + builder.push(stream); + } + builder.build() + } + + fn into_trees( + &mut self, + stream: Self::TokenStream, + ) -> Vec> { + stream + .into_iter() + .map(|tree| match tree { + tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { + bridge::TokenTree::Ident(bridge::Ident { + sym: Symbol::intern(self.interner, ident.text.trim_start_matches("r#")), + is_raw: ident.text.starts_with("r#"), + span: ident.span, + }) + } + tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { + bridge::TokenTree::Literal(bridge::Literal { + // FIXME: handle literal kinds + kind: bridge::LitKind::Err, + symbol: Symbol::intern(self.interner, &lit.text), + // FIXME: handle suffixes + suffix: None, + span: lit.span, + }) + } + tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { + bridge::TokenTree::Punct(bridge::Punct { + ch: punct.char as u8, + joint: punct.spacing == Spacing::Joint, + span: punct.span, + }) + } + tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group { + delimiter: delim_to_external(subtree.delimiter), + stream: if subtree.token_trees.is_empty() { + None + } else { + Some(subtree.token_trees.into_iter().collect()) + }, + span: 
bridge::DelimSpan::from_single(subtree.delimiter.open), + }), + }) + .collect() + } +} + +impl server::SourceFile for TokenIdServer { + fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool { + true + } + fn path(&mut self, _file: &Self::SourceFile) -> String { + String::new() + } + fn is_real(&mut self, _file: &Self::SourceFile) -> bool { + true + } +} + +impl server::Span for TokenIdServer { + fn debug(&mut self, span: Self::Span) -> String { + format!("{:?}", span.0) + } + fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile { + SourceFile {} + } + fn save_span(&mut self, _span: Self::Span) -> usize { + 0 + } + fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span { + self.call_site + } + /// Recent feature, not yet in the proc_macro + /// + /// See PR: + /// https://github.com/rust-lang/rust/pull/55780 + fn source_text(&mut self, _span: Self::Span) -> Option { + None + } + + fn parent(&mut self, _span: Self::Span) -> Option { + None + } + fn source(&mut self, span: Self::Span) -> Self::Span { + span + } + fn byte_range(&mut self, _span: Self::Span) -> Range { + Range { start: 0, end: 0 } + } + fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option { + // Just return the first span again, because some macros will unwrap the result. + Some(first) + } + fn subspan( + &mut self, + span: Self::Span, + _start: Bound, + _end: Bound, + ) -> Option { + // Just return the span again, because some macros will unwrap the result. + Some(span) + } + fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span { + self.call_site + } + + fn end(&mut self, _self_: Self::Span) -> Self::Span { + self.call_site + } + + fn start(&mut self, _self_: Self::Span) -> Self::Span { + self.call_site + } + + fn line(&mut self, _span: Self::Span) -> usize { + 0 + } + + fn column(&mut self, _span: Self::Span) -> usize { + 0 + } +} + +impl server::Symbol for TokenIdServer { + fn normalize_and_validate_ident(&mut self, string: &str) -> Result { + // FIXME: nfc-normalize and validate idents + Ok(::intern_symbol(string)) + } +} + +impl server::Server for TokenIdServer { + fn globals(&mut self) -> bridge::ExpnGlobals { + bridge::ExpnGlobals { + def_site: self.def_site, + call_site: self.call_site, + mixed_site: self.mixed_site, + } + } + + fn intern_symbol(ident: &str) -> Self::Symbol { + Symbol::intern(&SYMBOL_INTERNER, &::tt::SmolStr::from(ident)) + } + + fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) { + f(symbol.text(&SYMBOL_INTERNER).as_str()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_ra_server_to_string() { + let s = TokenStream { + token_trees: vec![ + tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: "struct".into(), + span: tt::TokenId(0), + })), + tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: "T".into(), + span: tt::TokenId(0), + })), + tt::TokenTree::Subtree(tt::Subtree { + delimiter: tt::Delimiter { + open: tt::TokenId(0), + close: tt::TokenId(0), + kind: tt::DelimiterKind::Brace, + }, + token_trees: vec![], + }), + ], + }; + + assert_eq!(s.to_string(), "struct T {}"); + } + + #[test] + fn test_ra_server_from_str() { + let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree { + delimiter: tt::Delimiter { + open: tt::TokenId(0), + close: tt::TokenId(0), + kind: tt::DelimiterKind::Parenthesis, + }, + token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: "a".into(), + span: tt::TokenId(0), + }))], + }); + + let t1 = TokenStream::from_str("(a)", 
tt::TokenId(0)).unwrap(); + assert_eq!(t1.token_trees.len(), 1); + assert_eq!(t1.token_trees[0], subtree_paren_a); + + let t2 = TokenStream::from_str("(a);", tt::TokenId(0)).unwrap(); + assert_eq!(t2.token_trees.len(), 2); + assert_eq!(t2.token_trees[0], subtree_paren_a); + + let underscore = TokenStream::from_str("_", tt::TokenId(0)).unwrap(); + assert_eq!( + underscore.token_trees[0], + tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: "_".into(), + span: tt::TokenId(0), + })) + ); + } +} diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/token_stream.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/token_stream.rs index 36be88250388d..8f669a30494ba 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/token_stream.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/token_stream.rs @@ -1,20 +1,24 @@ //! TokenStream implementation used by sysroot ABI -use proc_macro_api::msg::TokenId; +use tt::TokenTree; -use crate::tt::{self, TokenTree}; +#[derive(Debug, Clone)] +pub struct TokenStream { + pub(super) token_trees: Vec>, +} -#[derive(Debug, Default, Clone)] -pub struct TokenStream { - pub(super) token_trees: Vec, +impl Default for TokenStream { + fn default() -> Self { + Self { token_trees: vec![] } + } } -impl TokenStream { +impl TokenStream { pub(crate) fn new() -> Self { - TokenStream::default() + TokenStream { token_trees: vec![] } } - pub(crate) fn with_subtree(subtree: tt::Subtree) -> Self { + pub(crate) fn with_subtree(subtree: tt::Subtree) -> Self { if subtree.delimiter.kind != tt::DelimiterKind::Invisible { TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] } } else { @@ -22,7 +26,10 @@ impl TokenStream { } } - pub(crate) fn into_subtree(self, call_site: TokenId) -> tt::Subtree { + pub(crate) fn into_subtree(self, call_site: S) -> tt::Subtree + where + S: Copy, + { tt::Subtree { delimiter: tt::Delimiter { open: call_site, @@ -39,37 +46,37 @@ impl TokenStream { } /// Creates a token stream containing a single token tree. -impl From for TokenStream { - fn from(tree: TokenTree) -> TokenStream { +impl From> for TokenStream { + fn from(tree: TokenTree) -> TokenStream { TokenStream { token_trees: vec![tree] } } } /// Collects a number of token trees into a single stream. -impl FromIterator for TokenStream { - fn from_iter>(trees: I) -> Self { +impl FromIterator> for TokenStream { + fn from_iter>>(trees: I) -> Self { trees.into_iter().map(TokenStream::from).collect() } } /// A "flattening" operation on token streams, collects token trees /// from multiple token streams into a single stream. -impl FromIterator for TokenStream { - fn from_iter>(streams: I) -> Self { +impl FromIterator> for TokenStream { + fn from_iter>>(streams: I) -> Self { let mut builder = TokenStreamBuilder::new(); streams.into_iter().for_each(|stream| builder.push(stream)); builder.build() } } -impl Extend for TokenStream { - fn extend>(&mut self, trees: I) { +impl Extend> for TokenStream { + fn extend>>(&mut self, trees: I) { self.extend(trees.into_iter().map(TokenStream::from)); } } -impl Extend for TokenStream { - fn extend>(&mut self, streams: I) { +impl Extend> for TokenStream { + fn extend>>(&mut self, streams: I) { for item in streams { for tkn in item { match tkn { @@ -87,22 +94,21 @@ impl Extend for TokenStream { } } -pub(super) struct TokenStreamBuilder { - acc: TokenStream, +pub(super) struct TokenStreamBuilder { + acc: TokenStream, } /// pub(super)lic implementation details for the `TokenStream` type, such as iterators. 
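The token_stream.rs hunk above makes the proc-macro server's TokenStream generic over its span type S, so the same stream machinery serves both the legacy TokenId backend and the new SpanData-based one. The stand-alone sketch below uses simplified stand-in types (not the real tt / proc_macro_api items) to show why `S: Copy` is the only bound the patch needs for `into_subtree(call_site)`: a single call-site span is duplicated into every position that needs one.

    // Sketch with simplified stand-in types (not the real `tt` / `proc_macro_api` items):
    // a token stream parameterized over its span type. `S: Copy` is enough because a
    // single call-site span is copied into every position that needs one.
    #[derive(Debug, Clone, Copy)]
    struct Ident<S> {
        text: &'static str,
        span: S,
    }

    #[derive(Debug)]
    struct Stream<S> {
        trees: Vec<Ident<S>>,
    }

    impl<S: Copy> Stream<S> {
        fn new() -> Self {
            Stream { trees: Vec::new() }
        }
        // Analogous to filling the delimiter spans from `call_site` in `into_subtree`.
        fn push_with_span(&mut self, text: &'static str, call_site: S) {
            self.trees.push(Ident { text, span: call_site });
        }
    }

    fn main() {
        // A token-id-like span is just an integer ...
        let mut a: Stream<u32> = Stream::new();
        a.push_with_span("foo", 1);
        // ... while a richer span type works with the identical code.
        #[derive(Debug, Clone, Copy)]
        struct FakeSpan { start: u32, end: u32 }
        let mut b: Stream<FakeSpan> = Stream::new();
        b.push_with_span("foo", FakeSpan { start: 0, end: 3 });
        println!("{a:?} / {b:?}");
    }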
pub(super) mod token_stream { - use proc_macro_api::msg::TokenId; - use super::{tt, TokenStream, TokenTree}; + use super::{TokenStream, TokenTree}; /// An iterator over `TokenStream`'s `TokenTree`s. /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups, /// and returns whole groups as token trees. - impl IntoIterator for TokenStream { - type Item = TokenTree; - type IntoIter = std::vec::IntoIter; + impl IntoIterator for TokenStream { + type Item = TokenTree; + type IntoIter = std::vec::IntoIter>; fn into_iter(self) -> Self::IntoIter { self.token_trees.into_iter() @@ -119,71 +125,34 @@ pub(super) mod token_stream { /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to /// change these errors into `LexError`s later. #[rustfmt::skip] - impl /*FromStr for*/ TokenStream { + impl /*FromStr for*/ TokenStream { // type Err = LexError; - pub(crate) fn from_str(src: &str, call_site: TokenId) -> Result { + pub(crate) fn from_str(src: &str, call_site: S) -> Result, LexError> { let subtree = mbe::parse_to_token_tree_static_span(call_site, src).ok_or("Failed to parse from mbe")?; - let subtree = subtree_replace_token_ids_with_call_site(subtree,call_site); Ok(TokenStream::with_subtree(subtree)) } } - impl ToString for TokenStream { + impl ToString for TokenStream { fn to_string(&self) -> String { ::tt::pretty(&self.token_trees) } } - - fn subtree_replace_token_ids_with_call_site( - subtree: tt::Subtree, - call_site: TokenId, - ) -> tt::Subtree { - tt::Subtree { - delimiter: tt::Delimiter { open: call_site, close: call_site, ..subtree.delimiter }, - token_trees: subtree - .token_trees - .into_iter() - .map(|it| token_tree_replace_token_ids_with_call_site(it, call_site)) - .collect(), - } - } - - fn token_tree_replace_token_ids_with_call_site( - tt: tt::TokenTree, - call_site: TokenId, - ) -> tt::TokenTree { - match tt { - tt::TokenTree::Leaf(leaf) => { - tt::TokenTree::Leaf(leaf_replace_token_ids_with_call_site(leaf, call_site)) - } - tt::TokenTree::Subtree(subtree) => { - tt::TokenTree::Subtree(subtree_replace_token_ids_with_call_site(subtree, call_site)) - } - } - } - - fn leaf_replace_token_ids_with_call_site(leaf: tt::Leaf, call_site: TokenId) -> tt::Leaf { - match leaf { - tt::Leaf::Literal(lit) => tt::Leaf::Literal(tt::Literal { span: call_site, ..lit }), - tt::Leaf::Punct(punct) => tt::Leaf::Punct(tt::Punct { span: call_site, ..punct }), - tt::Leaf::Ident(ident) => tt::Leaf::Ident(tt::Ident { span: call_site, ..ident }), - } - } } -impl TokenStreamBuilder { - pub(super) fn new() -> TokenStreamBuilder { +impl TokenStreamBuilder { + pub(super) fn new() -> TokenStreamBuilder { TokenStreamBuilder { acc: TokenStream::new() } } - pub(super) fn push(&mut self, stream: TokenStream) { + pub(super) fn push(&mut self, stream: TokenStream) { self.acc.extend(stream.into_iter()) } - pub(super) fn build(self) -> TokenStream { + pub(super) fn build(self) -> TokenStream { self.acc } } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs index b04e3ca19ac1b..87d832cc76fa0 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs @@ -8,7 +8,7 @@ use expect_test::expect; #[test] fn test_derive_empty() { - assert_expand("DeriveEmpty", r#"struct S;"#, expect!["SUBTREE $$ 1 1"]); + assert_expand("DeriveEmpty", r#"struct S;"#, expect!["SUBTREE $$ 1 1"], expect!["SUBTREE $$ 
SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"]); } #[test] @@ -23,6 +23,13 @@ fn test_derive_error() { SUBTREE () 1 1 LITERAL "#[derive(DeriveError)] struct S ;" 1 PUNCH ; [alone] 1"##]], + expect![[r##" + SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT compile_error SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH ! [alone] SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + SUBTREE () SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL "#[derive(DeriveError)] struct S ;" SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH ; [alone] SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"##]], ); } @@ -40,6 +47,15 @@ fn test_fn_like_macro_noop() { LITERAL 1 1 PUNCH , [alone] 1 SUBTREE [] 1 1"#]], + expect![[r#" + SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT ident SpanData { range: 0..5, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 5..6, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 0 SpanData { range: 7..8, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 8..9, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 1 SpanData { range: 10..11, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 11..12, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + SUBTREE [] SpanData { range: 13..14, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 14..15, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]], ); } @@ -53,6 +69,11 @@ fn test_fn_like_macro_clone_ident_subtree() { IDENT ident 1 PUNCH , [alone] 1 SUBTREE [] 1 1"#]], + expect![[r#" + SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT ident SpanData { range: 0..5, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 5..6, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + SUBTREE [] SpanData { range: 7..8, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 7..8, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]], ); } @@ -64,6 +85,41 @@ fn test_fn_like_macro_clone_raw_ident() { expect![[r#" SUBTREE $$ 1 1 IDENT r#async 1"#]], + expect![[r#" + SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT r#async SpanData { range: 0..7, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]], + ); +} + +#[test] +fn test_fn_like_fn_like_span_join() { + assert_expand( + "fn_like_span_join", + "foo bar", + expect![[r#" + SUBTREE $$ 1 1 + 
IDENT r#joined 1"#]], + expect![[r#" + SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT r#joined SpanData { range: 0..11, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]], + ); +} + +#[test] +fn test_fn_like_fn_like_span_ops() { + assert_expand( + "fn_like_span_ops", + "set_def_site resolved_at_def_site start_span", + expect![[r#" + SUBTREE $$ 1 1 + IDENT set_def_site 0 + IDENT resolved_at_def_site 1 + IDENT start_span 1"#]], + expect![[r#" + SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT set_def_site SpanData { range: 0..150, anchor: SpanAnchor(FileId(41), 1), ctx: SyntaxContextId(0) } + IDENT resolved_at_def_site SpanData { range: 13..33, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT start_span SpanData { range: 34..34, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]], ); } @@ -81,6 +137,15 @@ fn test_fn_like_mk_literals() { LITERAL 3.14 1 LITERAL 123i64 1 LITERAL 123 1"#]], + expect![[r#" + SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL b"byte_string" SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 'c' SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL "string" SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 3.14f64 SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 3.14 SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 123i64 SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 123 SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]], ); } @@ -93,6 +158,10 @@ fn test_fn_like_mk_idents() { SUBTREE $$ 1 1 IDENT standard 1 IDENT r#raw 1"#]], + expect![[r#" + SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT standard SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT r#raw SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]], ); } @@ -113,6 +182,18 @@ fn test_fn_like_macro_clone_literals() { LITERAL 3.14f32 1 PUNCH , [alone] 1 LITERAL "hello bridge" 1"#]], + expect![[r#" + SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 1u16 SpanData { range: 0..4, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 4..5, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 2_u32 SpanData { range: 6..11, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 11..12, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH - [alone] SpanData { range: 13..14, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 
4i64 SpanData { range: 14..18, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 18..19, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 3.14f32 SpanData { range: 20..27, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 27..28, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL "hello bridge" SpanData { range: 29..43, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]], ); } @@ -132,6 +213,13 @@ fn test_attr_macro() { SUBTREE () 1 1 LITERAL "#[attr_error(some arguments)] mod m {}" 1 PUNCH ; [alone] 1"##]], + expect![[r##" + SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT compile_error SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH ! [alone] SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + SUBTREE () SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL "#[attr_error(some arguments)] mod m {}" SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH ; [alone] SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"##]], ); } @@ -147,6 +235,8 @@ fn list_test_macros() { fn_like_clone_tokens [FuncLike] fn_like_mk_literals [FuncLike] fn_like_mk_idents [FuncLike] + fn_like_span_join [FuncLike] + fn_like_span_ops [FuncLike] attr_noop [Attr] attr_panic [Attr] attr_error [Attr] diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs index c12096d140c98..9a1311d9550a2 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs @@ -2,47 +2,96 @@ use expect_test::Expect; use proc_macro_api::msg::TokenId; +use span::{ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContextId}; +use tt::TextRange; use crate::{dylib, proc_macro_test_dylib_path, ProcMacroSrv}; -fn parse_string(code: &str, call_site: TokenId) -> Option { - // This is a bit strange. We need to parse a string into a token stream into - // order to create a tt::SubTree from it in fixtures. `into_subtree` is - // implemented by all the ABIs we have so we arbitrarily choose one ABI to - // write a `parse_string` function for and use that. The tests don't really - // care which ABI we're using as the `into_subtree` function isn't part of - // the ABI and shouldn't change between ABI versions. 
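The updated expectations above print full `SpanData { range, anchor, ctx }` values instead of bare token ids. The real types come from the new span crate (together with vfs file ids and text-size ranges); the stand-ins below only approximate their Debug output, but they show what the fixture spans encode.

    // Stand-in types that mimic the Debug shape asserted in the expect-tests above.
    // The real `SpanData`, `SpanAnchor` and `SyntaxContextId` live in the new `span`
    // crate; this is only an illustration.
    #[derive(Debug, Clone, Copy)]
    struct SpanAnchor {
        file_id: u32,
        ast_id: u32,
    }

    #[derive(Debug, Clone, Copy)]
    struct SyntaxContextId(u32);

    #[derive(Debug, Clone, Copy)]
    struct SpanData {
        range: (u32, u32),
        anchor: SpanAnchor,
        ctx: SyntaxContextId,
    }

    fn main() {
        // The test fixture anchors macro input to FileId(42) / ast id 2 and gives the
        // whole call site the byte range 0..100, which is why that span appears on
        // every token that has no more precise source location.
        let call_site = SpanData {
            range: (0, 100),
            anchor: SpanAnchor { file_id: 42, ast_id: 2 },
            ctx: SyntaxContextId(0),
        };
        println!("{call_site:?}");
    }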
- crate::server::TokenStream::from_str(code, call_site).ok() +fn parse_string(call_site: TokenId, src: &str) -> crate::server::TokenStream { + crate::server::TokenStream::with_subtree( + mbe::parse_to_token_tree_static_span(call_site, src).unwrap(), + ) } -pub fn assert_expand(macro_name: &str, ra_fixture: &str, expect: Expect) { - assert_expand_impl(macro_name, ra_fixture, None, expect); +fn parse_string_spanned( + anchor: SpanAnchor, + call_site: SyntaxContextId, + src: &str, +) -> crate::server::TokenStream { + crate::server::TokenStream::with_subtree( + mbe::parse_to_token_tree(anchor, call_site, src).unwrap(), + ) } -pub fn assert_expand_attr(macro_name: &str, ra_fixture: &str, attr_args: &str, expect: Expect) { - assert_expand_impl(macro_name, ra_fixture, Some(attr_args), expect); +pub fn assert_expand(macro_name: &str, ra_fixture: &str, expect: Expect, expect_s: Expect) { + assert_expand_impl(macro_name, ra_fixture, None, expect, expect_s); } -fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect: Expect) { +pub fn assert_expand_attr( + macro_name: &str, + ra_fixture: &str, + attr_args: &str, + expect: Expect, + expect_s: Expect, +) { + assert_expand_impl(macro_name, ra_fixture, Some(attr_args), expect, expect_s); +} + +fn assert_expand_impl( + macro_name: &str, + input: &str, + attr: Option<&str>, + expect: Expect, + expect_s: Expect, +) { + let path = proc_macro_test_dylib_path(); + let expander = dylib::Expander::new(&path).unwrap(); + let def_site = TokenId(0); let call_site = TokenId(1); let mixed_site = TokenId(2); - let path = proc_macro_test_dylib_path(); - let expander = dylib::Expander::new(&path).unwrap(); - let fixture = parse_string(input, call_site).unwrap(); - let attr = attr.map(|attr| parse_string(attr, call_site).unwrap().into_subtree(call_site)); + let input_ts = parse_string(call_site, input); + let attr_ts = attr.map(|attr| parse_string(call_site, attr).into_subtree(call_site)); let res = expander .expand( macro_name, - &fixture.into_subtree(call_site), - attr.as_ref(), + input_ts.into_subtree(call_site), + attr_ts, def_site, call_site, mixed_site, ) .unwrap(); expect.assert_eq(&format!("{res:?}")); + + let def_site = Span { + range: TextRange::new(0.into(), 150.into()), + anchor: SpanAnchor { + file_id: FileId::from_raw(41), + ast_id: ErasedFileAstId::from_raw(From::from(1)), + }, + ctx: SyntaxContextId::ROOT, + }; + let call_site = Span { + range: TextRange::new(0.into(), 100.into()), + anchor: SpanAnchor { + file_id: FileId::from_raw(42), + ast_id: ErasedFileAstId::from_raw(From::from(2)), + }, + ctx: SyntaxContextId::ROOT, + }; + let mixed_site = call_site; + + let fixture = parse_string_spanned(call_site.anchor, call_site.ctx, input); + let attr = attr.map(|attr| { + parse_string_spanned(call_site.anchor, call_site.ctx, attr).into_subtree(call_site) + }); + + let res = expander + .expand(macro_name, fixture.into_subtree(call_site), attr, def_site, call_site, mixed_site) + .unwrap(); + expect_s.assert_eq(&format!("{res:?}")); } pub(crate) fn list() -> Vec { diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml deleted file mode 100644 index 12d7c07d3ed9e..0000000000000 --- a/src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml +++ /dev/null @@ -1,20 +0,0 @@ -[package] -name = "proc-macro-test" -version = "0.0.0" -publish = false - -authors.workspace = true -edition.workspace = true -license.workspace = true -rust-version.workspace = true - -[lib] 
-doctest = false - -[build-dependencies] -cargo_metadata.workspace = true - -proc-macro-test-impl = { path = "imp", version = "0.0.0" } - -# local deps -toolchain.workspace = true diff --git a/src/tools/rust-analyzer/crates/profile/Cargo.toml b/src/tools/rust-analyzer/crates/profile/Cargo.toml index 56ce9d11c085f..5350023c88fac 100644 --- a/src/tools/rust-analyzer/crates/profile/Cargo.toml +++ b/src/tools/rust-analyzer/crates/profile/Cargo.toml @@ -31,3 +31,6 @@ jemalloc = ["jemalloc-ctl"] # Uncomment to enable for the whole crate graph # default = [ "cpu_profiler" ] + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/project-model/Cargo.toml b/src/tools/rust-analyzer/crates/project-model/Cargo.toml index 3e48de6456b0a..3552ed191628e 100644 --- a/src/tools/rust-analyzer/crates/project-model/Cargo.toml +++ b/src/tools/rust-analyzer/crates/project-model/Cargo.toml @@ -14,8 +14,8 @@ doctest = false [dependencies] anyhow.workspace = true cargo_metadata.workspace = true -rustc-hash = "1.1.0" -semver = "1.0.14" +rustc-hash.workspace = true +semver.workspace = true serde_json.workspace = true serde.workspace = true tracing.workspace = true @@ -33,3 +33,6 @@ toolchain.workspace = true [dev-dependencies] expect-test = "1.4.0" + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs index ca3d6e0596ca4..d89c4598afc71 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs @@ -330,6 +330,7 @@ impl CargoWorkspace { cargo_metadata::Edition::E2015 => Edition::Edition2015, cargo_metadata::Edition::E2018 => Edition::Edition2018, cargo_metadata::Edition::E2021 => Edition::Edition2021, + cargo_metadata::Edition::_E2024 => Edition::Edition2024, _ => { tracing::error!("Unsupported edition `{:?}`", edition); Edition::CURRENT diff --git a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs index 931eba1157663..cf3231498f3e7 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs @@ -213,6 +213,8 @@ enum EditionData { Edition2018, #[serde(rename = "2021")] Edition2021, + #[serde(rename = "2024")] + Edition2024, } impl From for Edition { @@ -221,6 +223,7 @@ impl From for Edition { EditionData::Edition2015 => Edition::Edition2015, EditionData::Edition2018 => Edition::Edition2018, EditionData::Edition2021 => Edition::Edition2021, + EditionData::Edition2024 => Edition::Edition2024, } } } diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index 9333570354a0c..4057493fa3a65 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -7,7 +7,7 @@ use std::{collections::VecDeque, fmt, fs, iter, process::Command, str::FromStr, use anyhow::{format_err, Context}; use base_db::{ CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, DependencyKind, - Edition, Env, FileId, LangCrateOrigin, ProcMacroPaths, ReleaseChannel, TargetLayoutLoadResult, + Edition, Env, FileId, LangCrateOrigin, ProcMacroPaths, TargetLayoutLoadResult, }; use cfg::{CfgDiff, CfgOptions}; use 
paths::{AbsPath, AbsPathBuf}; @@ -619,7 +619,7 @@ impl ProjectWorkspace { sysroot.as_ref().ok(), extra_env, Err("rust-project.json projects have no target layout set".into()), - toolchain.as_ref().and_then(|it| ReleaseChannel::from_str(it.pre.as_str())), + toolchain.clone(), ) } ProjectWorkspace::Cargo { @@ -644,7 +644,7 @@ impl ProjectWorkspace { Ok(it) => Ok(Arc::from(it.as_str())), Err(it) => Err(Arc::from(it.as_str())), }, - toolchain.as_ref().and_then(|it| ReleaseChannel::from_str(it.pre.as_str())), + toolchain.as_ref(), ), ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => { detached_files_to_crate_graph( @@ -733,7 +733,7 @@ fn project_json_to_crate_graph( sysroot: Option<&Sysroot>, extra_env: &FxHashMap, target_layout: TargetLayoutLoadResult, - channel: Option, + toolchain: Option, ) -> (CrateGraph, ProcMacroPaths) { let mut res = (CrateGraph::default(), ProcMacroPaths::default()); let (crate_graph, proc_macros) = &mut res; @@ -744,7 +744,7 @@ fn project_json_to_crate_graph( rustc_cfg.clone(), target_layout.clone(), load, - channel, + toolchain.as_ref(), ) }); @@ -807,7 +807,7 @@ fn project_json_to_crate_graph( CrateOrigin::Local { repo: None, name: None } }, target_layout.clone(), - channel, + toolchain.clone(), ); if *is_proc_macro { if let Some(path) = proc_macro_dylib_path.clone() { @@ -853,7 +853,7 @@ fn cargo_to_crate_graph( forced_cfg: Option, build_scripts: &WorkspaceBuildScripts, target_layout: TargetLayoutLoadResult, - channel: Option, + toolchain: Option<&Version>, ) -> (CrateGraph, ProcMacroPaths) { let _p = profile::span("cargo_to_crate_graph"); let mut res = (CrateGraph::default(), ProcMacroPaths::default()); @@ -866,7 +866,7 @@ fn cargo_to_crate_graph( rustc_cfg.clone(), target_layout.clone(), load, - channel, + toolchain, ), None => (SysrootPublicDeps::default(), None), }; @@ -950,7 +950,7 @@ fn cargo_to_crate_graph( is_proc_macro, target_layout.clone(), false, - channel, + toolchain.cloned(), ); if kind == TargetKind::Lib { lib_tgt = Some((crate_id, name.clone())); @@ -1038,7 +1038,7 @@ fn cargo_to_crate_graph( rustc_build_scripts }, target_layout, - channel, + toolchain, ); } } @@ -1117,7 +1117,7 @@ fn handle_rustc_crates( override_cfg: &CfgOverrides, build_scripts: &WorkspaceBuildScripts, target_layout: TargetLayoutLoadResult, - channel: Option, + toolchain: Option<&Version>, ) { let mut rustc_pkg_crates = FxHashMap::default(); // The root package of the rustc-dev component is rustc_driver, so we match that @@ -1172,7 +1172,7 @@ fn handle_rustc_crates( rustc_workspace[tgt].is_proc_macro, target_layout.clone(), true, - channel, + toolchain.cloned(), ); pkg_to_lib_crate.insert(pkg, crate_id); // Add dependencies on core / std / alloc for this crate @@ -1248,7 +1248,7 @@ fn add_target_crate_root( is_proc_macro: bool, target_layout: TargetLayoutLoadResult, rustc_crate: bool, - channel: Option, + toolchain: Option, ) -> CrateId { let edition = pkg.edition; let potential_cfg_options = if pkg.features.is_empty() { @@ -1304,7 +1304,7 @@ fn add_target_crate_root( CrateOrigin::Library { repo: pkg.repository.clone(), name: pkg.name.clone() } }, target_layout, - channel, + toolchain, ); if is_proc_macro { let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) { @@ -1346,7 +1346,7 @@ fn sysroot_to_crate_graph( rustc_cfg: Vec, target_layout: TargetLayoutLoadResult, load: &mut dyn FnMut(&AbsPath) -> Option, - channel: Option, + toolchain: Option<&Version>, ) -> (SysrootPublicDeps, Option) { let _p = 
profile::span("sysroot_to_crate_graph"); let cfg_options = create_cfg_options(rustc_cfg.clone()); @@ -1357,7 +1357,7 @@ fn sysroot_to_crate_graph( rustc_cfg, cfg_options, target_layout, - channel, + toolchain, crate_graph, sysroot, ), @@ -1380,7 +1380,7 @@ fn sysroot_to_crate_graph( false, CrateOrigin::Lang(LangCrateOrigin::from(&*sysroot[krate].name)), target_layout.clone(), - channel, + toolchain.cloned(), ); Some((krate, crate_id)) }) @@ -1412,7 +1412,7 @@ fn handle_hack_cargo_workspace( rustc_cfg: Vec, cfg_options: CfgOptions, target_layout: Result, Arc>, - channel: Option, + toolchain: Option<&Version>, crate_graph: &mut CrateGraph, sysroot: &Sysroot, ) -> FxHashMap { @@ -1426,7 +1426,7 @@ fn handle_hack_cargo_workspace( Some(cfg_options), &WorkspaceBuildScripts::default(), target_layout, - channel, + toolchain, ); crate_graph.extend(cg, &mut pm); for crate_name in ["std", "alloc", "core"] { diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt index e98f016ca7df3..d8d9e559e5c1d 100644 --- a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt +++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt @@ -62,7 +62,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 1: CrateData { root_file_id: FileId( @@ -135,7 +135,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 2: CrateData { root_file_id: FileId( @@ -208,7 +208,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 3: CrateData { root_file_id: FileId( @@ -281,7 +281,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 4: CrateData { root_file_id: FileId( @@ -350,6 +350,6 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, } \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt index e98f016ca7df3..d8d9e559e5c1d 100644 --- a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt +++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt @@ -62,7 +62,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 1: CrateData { root_file_id: FileId( @@ -135,7 +135,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 2: CrateData { root_file_id: FileId( @@ -208,7 +208,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 3: CrateData { root_file_id: FileId( @@ -281,7 +281,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 4: CrateData { root_file_id: FileId( @@ -350,6 +350,6 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, } \ No newline at end of file diff --git 
a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt index 7ecd53572e2f3..e0ba5ed498fa8 100644 --- a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt +++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt @@ -61,7 +61,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 1: CrateData { root_file_id: FileId( @@ -133,7 +133,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 2: CrateData { root_file_id: FileId( @@ -205,7 +205,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 3: CrateData { root_file_id: FileId( @@ -277,7 +277,7 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, 4: CrateData { root_file_id: FileId( @@ -346,6 +346,6 @@ target_layout: Err( "target_data_layout not loaded", ), - channel: None, + toolchain: None, }, } \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt index 581a6afc148fa..e35f0fc7327e2 100644 --- a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt +++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt @@ -39,7 +39,7 @@ target_layout: Err( "rust-project.json projects have no target layout set", ), - channel: None, + toolchain: None, }, 1: CrateData { root_file_id: FileId( @@ -72,7 +72,7 @@ target_layout: Err( "rust-project.json projects have no target layout set", ), - channel: None, + toolchain: None, }, 2: CrateData { root_file_id: FileId( @@ -105,7 +105,7 @@ target_layout: Err( "rust-project.json projects have no target layout set", ), - channel: None, + toolchain: None, }, 3: CrateData { root_file_id: FileId( @@ -138,7 +138,7 @@ target_layout: Err( "rust-project.json projects have no target layout set", ), - channel: None, + toolchain: None, }, 4: CrateData { root_file_id: FileId( @@ -188,7 +188,7 @@ target_layout: Err( "rust-project.json projects have no target layout set", ), - channel: None, + toolchain: None, }, 5: CrateData { root_file_id: FileId( @@ -221,7 +221,7 @@ target_layout: Err( "rust-project.json projects have no target layout set", ), - channel: None, + toolchain: None, }, 6: CrateData { root_file_id: FileId( @@ -319,7 +319,7 @@ target_layout: Err( "rust-project.json projects have no target layout set", ), - channel: None, + toolchain: None, }, 7: CrateData { root_file_id: FileId( @@ -352,7 +352,7 @@ target_layout: Err( "rust-project.json projects have no target layout set", ), - channel: None, + toolchain: None, }, 8: CrateData { root_file_id: FileId( @@ -385,7 +385,7 @@ target_layout: Err( "rust-project.json projects have no target layout set", ), - channel: None, + toolchain: None, }, 9: CrateData { root_file_id: FileId( @@ -418,7 +418,7 @@ target_layout: Err( "rust-project.json projects have no target layout set", ), - channel: None, + toolchain: None, }, 10: CrateData { root_file_id: FileId( @@ -495,6 +495,6 @@ 
target_layout: Err( "rust-project.json projects have no target layout set", ), - channel: None, + toolchain: None, }, } \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml index 39ac338aa1a92..ad24d6d28cd7a 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml @@ -24,12 +24,12 @@ crossbeam-channel = "0.5.5" dissimilar.workspace = true itertools.workspace = true scip = "0.3.1" -lsp-types = { version = "=0.94.0", features = ["proposed"] } +lsp-types = { version = "=0.95.0", features = ["proposed"] } parking_lot = "0.12.1" xflags = "0.3.0" oorandom = "11.1.3" rayon.workspace = true -rustc-hash = "1.1.0" +rustc-hash.workspace = true serde_json = { workspace = true, features = ["preserve_order"] } serde.workspace = true num_cpus = "1.15.0" @@ -76,6 +76,7 @@ expect-test = "1.4.0" xshell.workspace = true test-utils.workspace = true +test-fixture.workspace = true sourcegen.workspace = true mbe.workspace = true @@ -93,3 +94,6 @@ in-rust-tree = [ "hir-def/in-rust-tree", "hir-ty/in-rust-tree", ] + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs index 8472e49de9838..7432f0f7a7ce1 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs @@ -5,8 +5,7 @@ #![warn(rust_2018_idioms, unused_lifetimes)] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #[cfg(feature = "in-rust-tree")] -#[allow(unused_extern_crates)] -extern crate rustc_driver; +extern crate rustc_driver as _; mod logger; mod rustc_wrapper; @@ -172,7 +171,15 @@ fn run_server() -> anyhow::Result<()> { let (connection, io_threads) = Connection::stdio(); - let (initialize_id, initialize_params) = connection.initialize_start()?; + let (initialize_id, initialize_params) = match connection.initialize_start() { + Ok(it) => it, + Err(e) => { + if e.channel_is_disconnected() { + io_threads.join()?; + } + return Err(e.into()); + } + }; tracing::info!("InitializeParams: {}", initialize_params); let lsp_types::InitializeParams { root_uri, @@ -240,7 +247,12 @@ fn run_server() -> anyhow::Result<()> { let initialize_result = serde_json::to_value(initialize_result).unwrap(); - connection.initialize_finish(initialize_id, initialize_result)?; + if let Err(e) = connection.initialize_finish(initialize_id, initialize_result) { + if e.channel_is_disconnected() { + io_threads.join()?; + } + return Err(e.into()); + } if !config.has_linked_projects() && config.detached_files().is_empty() { config.rediscover_workspaces(); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/caps.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/caps.rs index 8c9261ab05ee6..94eab97e8fcd2 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/caps.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/caps.rs @@ -157,6 +157,8 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities { "ssr": true, "workspaceSymbolScopeKindFiltering": true, })), + diagnostic_provider: None, + inline_completion_provider: None, } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs index 728bade0d0a5f..0190ca3cab858 100644 --- 
a/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs @@ -209,7 +209,7 @@ mod tests { use super::*; use cfg::CfgExpr; - use mbe::{syntax_node_to_token_tree, DummyTestSpanMap}; + use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY}; use syntax::{ ast::{self, AstNode}, SmolStr, @@ -219,7 +219,7 @@ mod tests { let cfg_expr = { let source_file = ast::SourceFile::parse(cfg).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = syntax_node_to_token_tree(tt.syntax(), &DummyTestSpanMap); + let tt = syntax_node_to_token_tree(tt.syntax(), &DummyTestSpanMap, DUMMY); CfgExpr::parse(&tt) }; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs index c89b88ac0f9e6..b8f6138161e5c 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs @@ -4,8 +4,8 @@ use std::{ cell::RefCell, collections::HashMap, fs::read_to_string, panic::AssertUnwindSafe, path::PathBuf, }; -use hir::Crate; -use ide::{AnalysisHost, Change, DiagnosticCode, DiagnosticsConfig}; +use hir::{Change, Crate}; +use ide::{AnalysisHost, DiagnosticCode, DiagnosticsConfig}; use profile::StopWatch; use project_model::{CargoConfig, ProjectWorkspace, RustLibSource, Sysroot}; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs index 30e11402cd8d0..95c8798d43c80 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs @@ -278,8 +278,8 @@ fn token_to_symbol(token: &TokenStaticData) -> Option { mod test { use super::*; use ide::{AnalysisHost, FilePosition, StaticIndex, TextSize}; - use ide_db::base_db::fixture::ChangeFixture; use scip::symbol::format_symbol; + use test_fixture::ChangeFixture; fn position(ra_fixture: &str) -> (AnalysisHost, FilePosition) { let mut host = AnalysisHost::default(); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index 258f74106395d..88fb3708449f7 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -7,7 +7,11 @@ //! configure the server itself, feature flags are passed into analysis, and //! tweak things like automatic insertion of `()` in completions. -use std::{fmt, iter, ops::Not, path::PathBuf}; +use std::{ + fmt, iter, + ops::Not, + path::{Path, PathBuf}, +}; use cfg::{CfgAtom, CfgDiff}; use flycheck::FlycheckConfig; @@ -105,6 +109,9 @@ config_data! { /// ``` /// . cargo_buildScripts_overrideCommand: Option> = "null", + /// Rerun proc-macros building/build-scripts running when proc-macro + /// or build-script sources change and are saved. + cargo_buildScripts_rebuildOnSave: bool = "false", /// Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to /// avoid checking unnecessary things. cargo_buildScripts_useRustcWrapper: bool = "true", @@ -164,15 +171,15 @@ config_data! { /// Specifies the working directory for running checks. /// - "workspace": run checks for workspaces in the corresponding workspaces' root directories. 
// FIXME: Ideally we would support this in some way - /// This falls back to "root" if `#rust-analyzer.cargo.check.invocationStrategy#` is set to `once`. + /// This falls back to "root" if `#rust-analyzer.check.invocationStrategy#` is set to `once`. /// - "root": run checks in the project's root directory. - /// This config only has an effect when `#rust-analyzer.cargo.check.overrideCommand#` + /// This config only has an effect when `#rust-analyzer.check.overrideCommand#` /// is set. check_invocationLocation | checkOnSave_invocationLocation: InvocationLocation = "\"workspace\"", /// Specifies the invocation strategy to use when running the check command. /// If `per_workspace` is set, the command will be executed for each workspace. /// If `once` is set, the command will be executed once. - /// This config only has an effect when `#rust-analyzer.cargo.check.overrideCommand#` + /// This config only has an effect when `#rust-analyzer.check.overrideCommand#` /// is set. check_invocationStrategy | checkOnSave_invocationStrategy: InvocationStrategy = "\"per_workspace\"", /// Whether to pass `--no-default-features` to Cargo. Defaults to @@ -191,8 +198,8 @@ config_data! { /// If there are multiple linked projects/workspaces, this command is invoked for /// each of them, with the working directory being the workspace root /// (i.e., the folder containing the `Cargo.toml`). This can be overwritten - /// by changing `#rust-analyzer.cargo.check.invocationStrategy#` and - /// `#rust-analyzer.cargo.check.invocationLocation#`. + /// by changing `#rust-analyzer.check.invocationStrategy#` and + /// `#rust-analyzer.check.invocationLocation#`. /// /// An example command would be: /// @@ -917,7 +924,19 @@ impl Config { pub fn has_linked_projects(&self) -> bool { !self.data.linkedProjects.is_empty() } - pub fn linked_projects(&self) -> Vec { + pub fn linked_manifests(&self) -> impl Iterator + '_ { + self.data.linkedProjects.iter().filter_map(|it| match it { + ManifestOrProjectJson::Manifest(p) => Some(&**p), + ManifestOrProjectJson::ProjectJson(_) => None, + }) + } + pub fn has_linked_project_jsons(&self) -> bool { + self.data + .linkedProjects + .iter() + .any(|it| matches!(it, ManifestOrProjectJson::ProjectJson(_))) + } + pub fn linked_or_discovered_projects(&self) -> Vec { match self.data.linkedProjects.as_slice() { [] => { let exclude_dirs: Vec<_> = @@ -952,15 +971,6 @@ impl Config { } } - pub fn add_linked_projects(&mut self, linked_projects: Vec) { - let mut linked_projects = linked_projects - .into_iter() - .map(ManifestOrProjectJson::ProjectJson) - .collect::>(); - - self.data.linkedProjects.append(&mut linked_projects); - } - pub fn did_save_text_document_dynamic_registration(&self) -> bool { let caps = try_or_def!(self.caps.text_document.as_ref()?.synchronization.clone()?); caps.did_save == Some(true) && caps.dynamic_registration == Some(true) @@ -1369,6 +1379,10 @@ impl Config { self.data.checkOnSave } + pub fn script_rebuild_on_save(&self) -> bool { + self.data.cargo_buildScripts_rebuildOnSave + } + pub fn runnables(&self) -> RunnablesConfig { RunnablesConfig { override_cargo: self.data.runnables_command.clone(), diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index 0f31fe16054a5..f57a27305f03c 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -7,7 +7,8 @@ use std::time::Instant; use 
crossbeam_channel::{unbounded, Receiver, Sender}; use flycheck::FlycheckHandle; -use ide::{Analysis, AnalysisHost, Cancellable, Change, FileId}; +use hir::Change; +use ide::{Analysis, AnalysisHost, Cancellable, FileId}; use ide_db::base_db::{CrateId, FileLoader, ProcMacroPaths, SourceDatabase}; use load_cargo::SourceRootConfig; use lsp_types::{SemanticTokens, Url}; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs index f9070d2735396..7e6219991b665 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs @@ -130,6 +130,13 @@ pub(crate) fn handle_did_save_text_document( state: &mut GlobalState, params: DidSaveTextDocumentParams, ) -> anyhow::Result<()> { + if state.config.script_rebuild_on_save() && state.proc_macro_changed { + // reset the flag + state.proc_macro_changed = false; + // rebuild the proc macros + state.fetch_build_data_queue.request_op("ScriptRebuildOnSave".to_owned(), ()); + } + if let Ok(vfs_path) = from_proto::vfs_path(¶ms.text_document.uri) { // Re-fetch workspaces if a workspace related file has changed if let Some(abs_path) = vfs_path.as_path() { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs index d8a590c808849..f1317ce2b4011 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs @@ -453,7 +453,7 @@ pub(crate) fn handle_document_symbol( pub(crate) fn handle_workspace_symbol( snap: GlobalStateSnapshot, params: WorkspaceSymbolParams, -) -> anyhow::Result>> { +) -> anyhow::Result> { let _p = profile::span("handle_workspace_symbol"); let config = snap.config.workspace_symbol(); @@ -479,7 +479,7 @@ pub(crate) fn handle_workspace_symbol( res = exec_query(&snap, query)?; } - return Ok(Some(res)); + return Ok(Some(lsp_types::WorkspaceSymbolResponse::Nested(res))); fn decide_search_scope_and_kind( params: &WorkspaceSymbolParams, @@ -519,13 +519,12 @@ pub(crate) fn handle_workspace_symbol( fn exec_query( snap: &GlobalStateSnapshot, query: Query, - ) -> anyhow::Result> { + ) -> anyhow::Result> { let mut res = Vec::new(); for nav in snap.analysis.symbol_search(query)? 
{ let container_name = nav.container_name.as_ref().map(|v| v.to_string()); - #[allow(deprecated)] - let info = SymbolInformation { + let info = lsp_types::WorkspaceSymbol { name: match &nav.alias { Some(alias) => format!("{} (alias for {})", alias, nav.name), None => format!("{}", nav.name), @@ -534,10 +533,11 @@ pub(crate) fn handle_workspace_symbol( .kind .map(to_proto::symbol_kind) .unwrap_or(lsp_types::SymbolKind::VARIABLE), + // FIXME: Set deprecation tags: None, - location: to_proto::location_from_nav(snap, nav)?, container_name, - deprecated: None, + location: lsp_types::OneOf::Left(to_proto::location_from_nav(snap, nav)?), + data: None, }; res.push(info); } @@ -801,7 +801,7 @@ pub(crate) fn handle_runnables( } } None => { - if !snap.config.linked_projects().is_empty() { + if !snap.config.linked_or_discovered_projects().is_empty() { res.push(lsp_ext::Runnable { label: "cargo check --workspace".to_string(), location: None, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs index 41ff17f5e4386..d94f7cefa60ee 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -10,7 +10,8 @@ //! in release mode in VS Code. There's however "rust-analyzer: Copy Run Command Line" //! which you can use to paste the command in terminal and add `--release` manually. -use ide::{CallableSnippets, Change, CompletionConfig, FilePosition, TextSize}; +use hir::Change; +use ide::{CallableSnippets, CompletionConfig, FilePosition, TextSize}; use ide_db::{ imports::insert_use::{ImportGranularity, InsertUseConfig}, SnippetCap, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs index ad56899163d3c..35c8fad37415f 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs @@ -627,7 +627,7 @@ pub enum WorkspaceSymbol {} impl Request for WorkspaceSymbol { type Params = WorkspaceSymbolParams; - type Result = Option>; + type Result = Option; const METHOD: &'static str = "workspace/symbol"; } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs index dae560c5de12b..7f3c3aa7a15ba 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs @@ -857,7 +857,7 @@ pub(crate) fn location_from_nav( ) -> Cancellable { let url = url(snap, nav.file_id); let line_index = snap.file_line_index(nav.file_id)?; - let range = range(&line_index, nav.full_range); + let range = range(&line_index, nav.focus_or_full_range()); let loc = lsp_types::Location::new(url, range); Ok(loc) } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/utils.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/utils.rs index b388b317599a3..a4417e4d4a143 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/utils.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/utils.rs @@ -171,30 +171,19 @@ pub(crate) fn apply_document_changes( file_contents: impl FnOnce() -> String, mut content_changes: Vec, ) -> String { - // Skip to the last full document change, as it invalidates all previous changes anyways. 
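For the `apply_document_changes` hunk that starts here (the replacement body continues below): the new version finds the last full-document change with `rposition` and discards everything before it, since a whole-document replacement invalidates earlier edits. A simplified sketch of that control flow, with an illustrative `Change` type standing in for the lsp-types event rather than the real API:

    // Simplified sketch of the rewritten control flow; `Change` is an illustrative
    // stand-in for `lsp_types::TextDocumentContentChangeEvent`, not the real type.
    struct Change {
        full: Option<String>,                  // Some(_) == whole-document replacement
        patch: Option<(usize, usize, String)>, // byte range + replacement text
    }

    fn apply(initial: impl FnOnce() -> String, mut changes: Vec<Change>) -> String {
        // Start from the last full-document change, if any; earlier changes are dead work.
        let (mut text, rest) = match changes.iter().rposition(|c| c.full.is_some()) {
            Some(idx) => {
                let text = changes[idx].full.take().unwrap();
                (text, &changes[idx + 1..])
            }
            None => (initial(), &changes[..]),
        };
        // Apply whatever incremental edits follow the chosen starting point.
        for c in rest {
            if let Some((start, end, new)) = &c.patch {
                text.replace_range(*start..*end, new.as_str());
            }
        }
        text
    }

    fn main() {
        let out = apply(
            || "on-disk contents".to_string(),
            vec![
                Change { full: None, patch: Some((0, 7, "ignored".into())) },
                Change { full: Some("hello world".into()), patch: None },
                Change { full: None, patch: Some((0, 5, "goodbye".into())) },
            ],
        );
        assert_eq!(out, "goodbye world");
        println!("{out}");
    }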
- let mut start = content_changes - .iter() - .rev() - .position(|change| change.range.is_none()) - .map(|idx| content_changes.len() - idx - 1) - .unwrap_or(0); - - let mut text: String = match content_changes.get_mut(start) { - // peek at the first content change as an optimization - Some(lsp_types::TextDocumentContentChangeEvent { range: None, text, .. }) => { - let text = mem::take(text); - start += 1; - - // The only change is a full document update - if start == content_changes.len() { - return text; + // If at least one of the changes is a full document change, use the last + // of them as the starting point and ignore all previous changes. + let (mut text, content_changes) = + match content_changes.iter().rposition(|change| change.range.is_none()) { + Some(idx) => { + let text = mem::take(&mut content_changes[idx].text); + (text, &content_changes[idx + 1..]) } - text - } - Some(_) => file_contents(), - // we received no content changes - None => return file_contents(), - }; + None => (file_contents(), &content_changes[..]), + }; + if content_changes.is_empty() { + return text; + } let mut line_index = LineIndex { // the index will be overwritten in the bottom loop's first iteration diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index 7ab528f497511..91dc6c2e4b411 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -16,10 +16,9 @@ use std::{iter, mem}; use flycheck::{FlycheckConfig, FlycheckHandle}; -use hir::db::DefDatabase; -use ide::Change; +use hir::{db::DefDatabase, Change, ProcMacros}; use ide_db::{ - base_db::{salsa::Durability, CrateGraph, ProcMacroPaths, ProcMacros}, + base_db::{salsa::Durability, CrateGraph, ProcMacroPaths}, FxHashMap, }; use itertools::Itertools; @@ -81,7 +80,8 @@ impl GlobalState { &self.config.lru_query_capacities().cloned().unwrap_or_default(), ); } - if self.config.linked_projects() != old_config.linked_projects() { + if self.config.linked_or_discovered_projects() != old_config.linked_or_discovered_projects() + { self.fetch_workspaces_queue.request_op("linked projects changed".to_string(), false) } else if self.config.flycheck() != old_config.flycheck() { self.reload_flycheck(); @@ -129,7 +129,7 @@ impl GlobalState { status.health = lsp_ext::Health::Warning; message.push_str("Auto-reloading is disabled and the workspace has changed, a manual workspace reload is required.\n\n"); } - if self.config.linked_projects().is_empty() + if self.config.linked_or_discovered_projects().is_empty() && self.config.detached_files().is_empty() && self.config.notifications().cargo_toml_not_found { @@ -175,7 +175,21 @@ impl GlobalState { if let Err(_) = self.fetch_workspace_error() { status.health = lsp_ext::Health::Error; - message.push_str("Failed to load workspaces.\n\n"); + message.push_str("Failed to load workspaces."); + + if self.config.has_linked_projects() { + message.push_str( + "`rust-analyzer.linkedProjects` have been specified, which may be incorrect. 
Specified project paths:\n", + ); + message.push_str(&format!( + " {}", + self.config.linked_manifests().map(|it| it.display()).format("\n ") + )); + if self.config.has_linked_project_jsons() { + message.push_str("\nAdditionally, one or more project jsons are specified") + } + } + message.push_str("\n\n"); } if !message.is_empty() { @@ -188,7 +202,7 @@ impl GlobalState { tracing::info!(%cause, "will fetch workspaces"); self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, { - let linked_projects = self.config.linked_projects(); + let linked_projects = self.config.linked_or_discovered_projects(); let detached_files = self.config.detached_files().to_vec(); let cargo_config = self.config.cargo(); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs index ec8e5c6dd9681..78411e2d58d09 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs @@ -832,7 +832,7 @@ fn main() { } #[test] -#[cfg(feature = "sysroot-abi")] +#[cfg(any(feature = "sysroot-abi", rust_analyzer))] fn resolve_proc_macro() { use expect_test::expect; if skip_slow_tests() { diff --git a/src/tools/rust-analyzer/crates/rustc-dependencies/Cargo.toml b/src/tools/rust-analyzer/crates/rustc-dependencies/Cargo.toml index 1b3b6ec735e75..0bf04301df169 100644 --- a/src/tools/rust-analyzer/crates/rustc-dependencies/Cargo.toml +++ b/src/tools/rust-analyzer/crates/rustc-dependencies/Cargo.toml @@ -18,3 +18,6 @@ ra-ap-rustc_abi = { version = "0.21.0", default-features = false } [features] in-rust-tree = [] + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml b/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml index 0514af8e7839f..d5ea4c39aa175 100644 --- a/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml +++ b/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml @@ -2,6 +2,7 @@ name = "sourcegen" version = "0.0.0" description = "TBD" +publish = false authors.workspace = true edition.workspace = true @@ -13,3 +14,6 @@ doctest = false [dependencies] xshell.workspace = true + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/span/Cargo.toml b/src/tools/rust-analyzer/crates/span/Cargo.toml new file mode 100644 index 0000000000000..69b88b5a17b53 --- /dev/null +++ b/src/tools/rust-analyzer/crates/span/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "span" +version = "0.0.0" +rust-version.workspace = true +edition.workspace = true +license.workspace = true +authors.workspace = true + + +[dependencies] +la-arena.workspace = true +rust-analyzer-salsa.workspace = true + + +# local deps +vfs.workspace = true +syntax.workspace = true +stdx.workspace = true + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/base-db/src/span.rs b/src/tools/rust-analyzer/crates/span/src/lib.rs similarity index 72% rename from src/tools/rust-analyzer/crates/base-db/src/span.rs rename to src/tools/rust-analyzer/crates/span/src/lib.rs index d8990eb7cae0d..7617acde64a27 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/span.rs +++ b/src/tools/rust-analyzer/crates/span/src/lib.rs @@ -1,10 +1,28 @@ //! File and span related types. -// FIXME: This should probably be moved into its own crate. 
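The `SpanData` type introduced below stores its text range relative to a `SpanAnchor` rather than as an absolute file offset, so edits elsewhere in a file do not force recomputation of every span. A minimal numeric sketch of that anchoring, using plain `u32` offsets in place of `TextSize`/`TextRange` and a single offset in place of the real `(FileId, ErasedFileAstId)` anchor:

```rust
// Sketch: store a range relative to an enclosing item (the anchor) so that
// only the anchor's own offset needs to move when earlier text changes.
struct AnchoredRange {
    anchor_offset: u32, // absolute offset of the anchor item in the file
    start: u32,         // start of the span, relative to the anchor
    end: u32,           // end of the span, relative to the anchor
}

fn anchor(abs_start: u32, abs_end: u32, anchor_offset: u32) -> AnchoredRange {
    AnchoredRange { anchor_offset, start: abs_start - anchor_offset, end: abs_end - anchor_offset }
}

fn to_absolute(r: &AnchoredRange) -> (u32, u32) {
    (r.anchor_offset + r.start, r.anchor_offset + r.end)
}

fn main() {
    // A token at absolute 120..125, anchored to an item that starts at 100.
    let span = anchor(120, 125, 100);
    assert_eq!((span.start, span.end), (20, 25));
    // Inserting text before the item only moves the anchor; the span itself is unchanged.
    let moved = AnchoredRange { anchor_offset: 110, ..span };
    assert_eq!(to_absolute(&moved), (130, 135));
}
```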
+// FIXME: This should be moved into its own crate to get rid of the dependency inversion, base-db +// has business depending on tt, tt should depend on a span crate only (which unforunately will have +// to depend on salsa) use std::fmt; use salsa::InternId; -use tt::SyntaxContext; -use vfs::FileId; + +mod map; + +pub use crate::map::{RealSpanMap, SpanMap}; +pub use syntax::{TextRange, TextSize}; +pub use vfs::FileId; + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub struct FilePosition { + pub file_id: FileId, + pub offset: TextSize, +} + +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct FileRange { + pub file_id: FileId, + pub range: TextRange, +} pub type ErasedFileAstId = la_arena::Idx; @@ -12,7 +30,33 @@ pub type ErasedFileAstId = la_arena::Idx; pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId = la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(0)); -pub type SpanData = tt::SpanData; +/// FileId used as the span for syntax node fixups. Any Span containing this file id is to be +/// considered fake. +pub const FIXUP_ERASED_FILE_AST_ID_MARKER: ErasedFileAstId = + // we pick the second to last for this in case we every consider making this a NonMaxU32, this + // is required to be stable for the proc-macro-server + la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(!0 - 1)); + +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +pub struct SpanData { + /// The text range of this span, relative to the anchor. + /// We need the anchor for incrementality, as storing absolute ranges will require + /// recomputation on every change in a file at all times. + pub range: TextRange, + pub anchor: SpanAnchor, + /// The syntax context of the span. + pub ctx: Ctx, +} +impl Span { + #[deprecated = "dummy spans will panic if surfaced incorrectly, as such they should be replaced appropriately"] + pub const DUMMY: Self = SpanData { + range: TextRange::empty(TextSize::new(0)), + anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID }, + ctx: SyntaxContextId::ROOT, + }; +} + +pub type Span = SpanData; #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct SyntaxContextId(InternId); @@ -33,7 +77,15 @@ impl fmt::Debug for SyntaxContextId { } } } -crate::impl_intern_key!(SyntaxContextId); + +impl salsa::InternKey for SyntaxContextId { + fn from_intern_id(v: salsa::InternId) -> Self { + SyntaxContextId(v) + } + fn as_intern_id(&self) -> salsa::InternId { + self.0 + } +} impl fmt::Display for SyntaxContextId { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -41,9 +93,6 @@ impl fmt::Display for SyntaxContextId { } } -impl SyntaxContext for SyntaxContextId { - const DUMMY: Self = Self::ROOT; -} // inherent trait impls please tyvm impl SyntaxContextId { pub const ROOT: Self = SyntaxContextId(unsafe { InternId::new_unchecked(0) }); @@ -55,6 +104,14 @@ impl SyntaxContextId { pub fn is_root(self) -> bool { self == Self::ROOT } + + pub fn into_u32(self) -> u32 { + self.0.as_u32() + } + + pub fn from_u32(u32: u32) -> Self { + Self(InternId::from(u32)) + } } #[derive(Copy, Clone, PartialEq, Eq, Hash)] @@ -69,10 +126,6 @@ impl fmt::Debug for SpanAnchor { } } -impl tt::SpanAnchor for SpanAnchor { - const DUMMY: Self = SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID }; -} - /// Input to the analyzer is a set of files, where each file is identified by /// `FileId` and contains source code. 
However, another source of source code in /// Rust are macros: each macro can be thought of as producing a "temporary @@ -90,6 +143,7 @@ impl tt::SpanAnchor for SpanAnchor { /// The two variants are encoded in a single u32 which are differentiated by the MSB. /// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a /// `MacroCallId`. +// FIXME: Give this a better fitting name #[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct HirFileId(u32); @@ -120,7 +174,15 @@ pub struct MacroFileId { /// `println!("Hello, {}", world)`. #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct MacroCallId(salsa::InternId); -crate::impl_intern_key!(MacroCallId); + +impl salsa::InternKey for MacroCallId { + fn from_intern_id(v: salsa::InternId) -> Self { + MacroCallId(v) + } + fn as_intern_id(&self) -> salsa::InternId { + self.0 + } +} impl MacroCallId { pub fn as_file(self) -> HirFileId { diff --git a/src/tools/rust-analyzer/crates/mbe/src/token_map.rs b/src/tools/rust-analyzer/crates/span/src/map.rs similarity index 59% rename from src/tools/rust-analyzer/crates/mbe/src/token_map.rs rename to src/tools/rust-analyzer/crates/span/src/map.rs index 7d15812f8cb60..d69df91b63ef4 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/token_map.rs +++ b/src/tools/rust-analyzer/crates/span/src/map.rs @@ -1,18 +1,23 @@ -//! Mapping between `TokenId`s and the token's position in macro definitions or inputs. +//! A map that maps a span to every position in a file. Usually maps a span to some range of positions. +//! Allows bidirectional lookup. use std::hash::Hash; use stdx::{always, itertools::Itertools}; use syntax::{TextRange, TextSize}; -use tt::Span; +use vfs::FileId; + +use crate::{ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}; /// Maps absolute text ranges for the corresponding file to the relevant span data. #[derive(Debug, PartialEq, Eq, Clone, Hash)] -pub struct SpanMap { +pub struct SpanMap { spans: Vec<(TextSize, S)>, + // FIXME: Should be + // spans: Vec<(TextSize, crate::SyntaxContextId)>, } -impl SpanMap { +impl SpanMap { /// Creates a new empty [`SpanMap`]. pub fn empty() -> Self { Self { spans: Vec::new() } @@ -44,7 +49,10 @@ impl SpanMap { /// Returns all [`TextRange`]s that correspond to the given span. /// /// Note this does a linear search through the entire backing vector. - pub fn ranges_with_span(&self, span: S) -> impl Iterator + '_ { + pub fn ranges_with_span(&self, span: S) -> impl Iterator + '_ + where + S: Eq, + { // FIXME: This should ignore the syntax context! self.spans.iter().enumerate().filter_map(move |(idx, &(end, s))| { if s != span { @@ -74,3 +82,50 @@ impl SpanMap { self.spans.iter().copied() } } + +#[derive(PartialEq, Eq, Hash, Debug)] +pub struct RealSpanMap { + file_id: FileId, + /// Invariant: Sorted vec over TextSize + // FIXME: SortedVec<(TextSize, ErasedFileAstId)>? + pairs: Box<[(TextSize, ErasedFileAstId)]>, + end: TextSize, +} + +impl RealSpanMap { + /// Creates a real file span map that returns absolute ranges (relative ranges to the root ast id). 
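The `span_for_range` lookup below keeps `pairs` sorted by offset and binary-searches for the last anchor that starts at or before the queried range, then rebases the range onto that anchor. A sketch of the same lookup with plain `u32`s, using `partition_point` in place of the `binary_search_by(..).unwrap_err()` trick in the real code:

```rust
// Sketch: find the anchor for a position in a sorted (offset, ast_id) list.
fn anchor_for(pairs: &[(u32, u32)], pos: u32) -> (u32, u32) {
    // Entries are sorted by offset; the anchor is the last entry whose offset
    // does not lie past `pos`.
    let idx = pairs.partition_point(|&(offset, _)| offset <= pos);
    pairs[idx - 1]
}

fn main() {
    // (offset, erased ast id) pairs for three items starting at 0, 40 and 90.
    let pairs = [(0, 0), (40, 1), (90, 2)];
    let (offset, ast_id) = anchor_for(&pairs, 55);
    assert_eq!((offset, ast_id), (40, 1));
    // An absolute range 55..60 becomes 15..20 relative to item 1.
    assert_eq!((55 - offset, 60 - offset), (15, 20));
}
```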
+ pub fn absolute(file_id: FileId) -> Self { + RealSpanMap { + file_id, + pairs: Box::from([(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]), + end: TextSize::new(!0), + } + } + + pub fn from_file( + file_id: FileId, + pairs: Box<[(TextSize, ErasedFileAstId)]>, + end: TextSize, + ) -> Self { + Self { file_id, pairs, end } + } + + pub fn span_for_range(&self, range: TextRange) -> Span { + assert!( + range.end() <= self.end, + "range {range:?} goes beyond the end of the file {:?}", + self.end + ); + let start = range.start(); + let idx = self + .pairs + .binary_search_by(|&(it, _)| it.cmp(&start).then(std::cmp::Ordering::Less)) + .unwrap_err(); + let (offset, ast_id) = self.pairs[idx - 1]; + Span { + range: range - offset, + anchor: SpanAnchor { file_id: self.file_id, ast_id }, + ctx: SyntaxContextId::ROOT, + } + } +} diff --git a/src/tools/rust-analyzer/crates/stdx/Cargo.toml b/src/tools/rust-analyzer/crates/stdx/Cargo.toml index c914ae2144b59..e6014cf812e56 100644 --- a/src/tools/rust-analyzer/crates/stdx/Cargo.toml +++ b/src/tools/rust-analyzer/crates/stdx/Cargo.toml @@ -27,3 +27,6 @@ winapi = { version = "0.3.9", features = ["winerror"] } [features] # Uncomment to enable for the whole crate graph # default = [ "backtrace" ] + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/syntax/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/Cargo.toml index 7a7c0d267fede..40a93fec2cec2 100644 --- a/src/tools/rust-analyzer/crates/syntax/Cargo.toml +++ b/src/tools/rust-analyzer/crates/syntax/Cargo.toml @@ -17,7 +17,7 @@ cov-mark = "2.0.0-pre.1" either.workspace = true itertools.workspace = true rowan = "0.15.15" -rustc-hash = "1.1.0" +rustc-hash.workspace = true once_cell = "1.17.0" indexmap.workspace = true smol_str.workspace = true @@ -42,3 +42,6 @@ sourcegen.workspace = true [features] in-rust-tree = ["rustc-dependencies/in-rust-tree"] + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml index 6070222f1f192..ebf538aa24718 100644 --- a/src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml +++ b/src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml @@ -24,3 +24,6 @@ path = "fuzz_targets/parser.rs" [[bin]] name = "reparse" path = "fuzz_targets/reparse.rs" + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs index 37d8212042da0..4c2878f49f0e2 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs @@ -13,7 +13,7 @@ use crate::{ SyntaxNode, SyntaxToken, }; -use super::{HasArgList, HasName}; +use super::{GenericParam, HasArgList, HasName}; pub trait GenericParamsOwnerEdit: ast::HasGenericParams { fn get_or_create_generic_param_list(&self) -> ast::GenericParamList; @@ -272,6 +272,36 @@ impl ast::GenericParamList { } } + /// Find the params corresponded to generic arg + pub fn find_generic_arg(&self, generic_arg: &ast::GenericArg) -> Option { + self.generic_params().find_map(move |param| match (¶m, &generic_arg) { + (ast::GenericParam::LifetimeParam(a), ast::GenericArg::LifetimeArg(b)) => { + (a.lifetime()?.lifetime_ident_token()?.text() + == b.lifetime()?.lifetime_ident_token()?.text()) + .then_some(param) + } + (ast::GenericParam::TypeParam(a), ast::GenericArg::TypeArg(b)) => { + 
debug_assert_eq!(b.syntax().first_token(), b.syntax().last_token()); + (a.name()?.text() == b.syntax().first_token()?.text()).then_some(param) + } + (ast::GenericParam::ConstParam(a), ast::GenericArg::TypeArg(b)) => { + debug_assert_eq!(b.syntax().first_token(), b.syntax().last_token()); + (a.name()?.text() == b.syntax().first_token()?.text()).then_some(param) + } + _ => None, + }) + } + + /// Removes the corresponding generic arg + pub fn remove_generic_arg(&self, generic_arg: &ast::GenericArg) -> Option { + let param_to_remove = self.find_generic_arg(generic_arg); + + if let Some(param) = ¶m_to_remove { + self.remove_generic_param(param.clone()); + } + param_to_remove + } + /// Constructs a matching [`ast::GenericArgList`] pub fn to_generic_args(&self) -> ast::GenericArgList { let args = self.generic_params().filter_map(|param| match param { @@ -300,6 +330,20 @@ impl ast::WhereClause { } ted::append_child(self.syntax(), predicate.syntax()); } + + pub fn remove_predicate(&self, predicate: ast::WherePred) { + if let Some(previous) = predicate.syntax().prev_sibling() { + if let Some(next_token) = previous.next_sibling_or_token() { + ted::remove_all(next_token..=predicate.syntax().clone().into()); + } + } else if let Some(next) = predicate.syntax().next_sibling() { + if let Some(next_token) = next.prev_sibling_or_token() { + ted::remove_all(predicate.syntax().clone().into()..=next_token); + } + } else { + ted::remove(predicate.syntax()); + } + } } impl ast::TypeParam { @@ -414,6 +458,7 @@ impl ast::UseTree { u.remove_recursive(); } } + u.remove_unnecessary_braces(); } } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs index ad63cc558629f..2abbfc81f675f 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs @@ -207,10 +207,28 @@ fn merge_gen_params( (None, Some(bs)) => Some(bs), (Some(ps), None) => Some(ps), (Some(ps), Some(bs)) => { - for b in bs.generic_params() { - ps.add_generic_param(b); - } - Some(ps) + // make sure lifetime is placed before other generic params + let generic_params = ps.generic_params().merge_by(bs.generic_params(), |_, b| { + !matches!(b, ast::GenericParam::LifetimeParam(_)) + }); + Some(generic_param_list(generic_params)) + } + } +} + +fn merge_where_clause( + ps: Option, + bs: Option, +) -> Option { + match (ps, bs) { + (None, None) => None, + (None, Some(bs)) => Some(bs), + (Some(ps), None) => Some(ps), + (Some(ps), Some(bs)) => { + let preds = where_clause(std::iter::empty()).clone_for_update(); + ps.predicates().for_each(|p| preds.add_predicate(p)); + bs.predicates().for_each(|p| preds.add_predicate(p)); + Some(preds) } } } @@ -251,9 +269,9 @@ pub fn impl_( pub fn impl_trait( is_unsafe: bool, trait_gen_params: Option, - trait_gen_args: Option, + trait_gen_args: Option, type_gen_params: Option, - type_gen_args: Option, + type_gen_args: Option, is_negative: bool, path_type: ast::Type, ty: ast::Type, @@ -262,15 +280,9 @@ pub fn impl_trait( body: Option>>, ) -> ast::Impl { let is_unsafe = if is_unsafe { "unsafe " } else { "" }; - let ty_gen_args = match merge_gen_params(type_gen_params.clone(), type_gen_args) { - Some(pars) => pars.to_generic_args().to_string(), - None => String::new(), - }; - let tr_gen_args = match merge_gen_params(trait_gen_params.clone(), trait_gen_args) { - Some(pars) => pars.to_generic_args().to_string(), - None => String::new(), - }; + let trait_gen_args = trait_gen_args.map(|args| 
args.to_string()).unwrap_or_default(); + let type_gen_args = type_gen_args.map(|args| args.to_string()).unwrap_or_default(); let gen_params = match merge_gen_params(trait_gen_params, type_gen_params) { Some(pars) => pars.to_string(), @@ -279,25 +291,15 @@ pub fn impl_trait( let is_negative = if is_negative { "! " } else { "" }; - let where_clause = match (ty_where_clause, trait_where_clause) { - (None, None) => " ".to_string(), - (None, Some(tr)) => format!("\n{}\n", tr).to_string(), - (Some(ty), None) => format!("\n{}\n", ty).to_string(), - (Some(ty), Some(tr)) => { - let updated = ty.clone_for_update(); - tr.predicates().for_each(|p| { - ty.add_predicate(p); - }); - format!("\n{}\n", updated).to_string() - } - }; + let where_clause = merge_where_clause(ty_where_clause, trait_where_clause) + .map_or_else(|| " ".to_string(), |wc| format!("\n{}\n", wc)); let body = match body { Some(bd) => bd.iter().map(|elem| elem.to_string()).join(""), None => String::new(), }; - ast_from_text(&format!("{is_unsafe}impl{gen_params} {is_negative}{path_type}{tr_gen_args} for {ty}{ty_gen_args}{where_clause}{{{}}}" , body)) + ast_from_text(&format!("{is_unsafe}impl{gen_params} {is_negative}{path_type}{trait_gen_args} for {ty}{type_gen_args}{where_clause}{{{}}}" , body)) } pub fn impl_trait_type(bounds: ast::TypeBoundList) -> ast::ImplTraitType { @@ -922,6 +924,10 @@ pub fn type_param(name: ast::Name, bounds: Option) -> ast::T ast_from_text(&format!("fn f<{name}{bounds}>() {{ }}")) } +pub fn const_param(name: ast::Name, ty: ast::Type) -> ast::ConstParam { + ast_from_text(&format!("fn f() {{ }}")) +} + pub fn lifetime_param(lifetime: ast::Lifetime) -> ast::LifetimeParam { ast_from_text(&format!("fn f<{lifetime}>() {{ }}")) } @@ -948,9 +954,7 @@ pub fn turbofish_generic_arg_list( ast_from_text(&format!("const S: T::<{args}> = ();")) } -pub(crate) fn generic_arg_list( - args: impl IntoIterator, -) -> ast::GenericArgList { +pub fn generic_arg_list(args: impl IntoIterator) -> ast::GenericArgList { let args = args.into_iter().join(", "); ast_from_text(&format!("const S: T<{args}> = ();")) } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs index f81dff8840ccc..a7e4899fb7eeb 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs @@ -11,7 +11,7 @@ use rowan::{GreenNodeData, GreenTokenData}; use crate::{ ast::{self, support, AstNode, AstToken, HasAttrs, HasGenericParams, HasName, SyntaxNode}, - NodeOrToken, SmolStr, SyntaxElement, SyntaxToken, TokenText, T, + ted, NodeOrToken, SmolStr, SyntaxElement, SyntaxToken, TokenText, T, }; impl ast::Lifetime { @@ -323,6 +323,10 @@ impl ast::UseTree { pub fn is_simple_path(&self) -> bool { self.use_tree_list().is_none() && self.star_token().is_none() } + + pub fn parent_use_tree_list(&self) -> Option { + self.syntax().parent().and_then(ast::UseTreeList::cast) + } } impl ast::UseTreeList { @@ -340,6 +344,34 @@ impl ast::UseTreeList { .find_map(ast::Comment::cast) .is_some() } + + pub fn comma(&self) -> impl Iterator { + self.syntax() + .children_with_tokens() + .filter_map(|it| it.into_token().filter(|it| it.kind() == T![,])) + } + + /// Remove the unnecessary braces in current `UseTreeList` + pub fn remove_unnecessary_braces(mut self) { + let remove_brace_in_use_tree_list = |u: &ast::UseTreeList| { + let use_tree_count = u.use_trees().count(); + if use_tree_count == 1 { + u.l_curly_token().map(ted::remove); + 
u.r_curly_token().map(ted::remove); + u.comma().for_each(ted::remove); + } + }; + + // take `use crate::{{{{A}}}}` for example + // the below remove the innermost {}, got `use crate::{{{A}}}` + remove_brace_in_use_tree_list(&self); + + // the below remove othe unnecessary {}, got `use crate::A` + while let Some(parent_use_tree_list) = self.parent_use_tree().parent_use_tree_list() { + remove_brace_in_use_tree_list(&parent_use_tree_list); + self = parent_use_tree_list; + } + } } impl ast::Impl { @@ -585,6 +617,16 @@ impl ast::Item { } } +impl ast::Type { + pub fn generic_arg_list(&self) -> Option { + if let ast::Type::PathType(path_type) = self { + path_type.path()?.segment()?.generic_arg_list() + } else { + None + } + } +} + #[derive(Debug, Clone, PartialEq, Eq)] pub enum FieldKind { Name(ast::NameRef), diff --git a/src/tools/rust-analyzer/crates/test-fixture/Cargo.toml b/src/tools/rust-analyzer/crates/test-fixture/Cargo.toml new file mode 100644 index 0000000000000..35e39229894f2 --- /dev/null +++ b/src/tools/rust-analyzer/crates/test-fixture/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "test-fixture" +version = "0.0.0" +rust-version.workspace = true +edition.workspace = true +license.workspace = true +authors.workspace = true +publish = false + +[dependencies] +hir-expand.workspace = true +test-utils.workspace = true +tt.workspace = true +cfg.workspace = true +base-db.workspace = true +rustc-hash.workspace = true +span.workspace = true +stdx.workspace = true + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/base-db/src/fixture.rs b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs similarity index 87% rename from src/tools/rust-analyzer/crates/base-db/src/fixture.rs rename to src/tools/rust-analyzer/crates/test-fixture/src/lib.rs index bfdd21555f0aa..1a042b2dea20c 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/fixture.rs +++ b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs @@ -1,27 +1,30 @@ //! A set of high-level utility fixture methods to use in tests. 
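The `remove_unnecessary_braces` helper added to `ast::UseTreeList` above works inside-out: it strips the innermost single-item brace group first and then walks outward through parent use-tree lists. A plain-string sketch of that flattening order (the real helper edits the syntax tree through `ted`; this only illustrates the idea):

```rust
// Sketch: repeatedly unwrap the innermost brace group that holds exactly one
// use tree, stopping at a group with a comma (more than one tree) or when no
// group remains.
fn flatten_single_item_braces(mut path: String) -> String {
    loop {
        // Innermost group: the first `}` paired with the nearest `{` before it.
        let Some(close) = path.find('}') else { return path };
        let Some(open) = path[..close].rfind('{') else { return path };
        let inner = path[open + 1..close].trim().to_string();
        if inner.contains(',') {
            // More than one tree in the group: these braces are load-bearing.
            return path;
        }
        path.replace_range(open..=close, &inner);
    }
}

fn main() {
    assert_eq!(flatten_single_item_braces("crate::{{{{A}}}}".into()), "crate::A");
    assert_eq!(flatten_single_item_braces("a::{b::{C}, D}".into()), "a::{b::C, D}");
}
```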
-use std::{mem, str::FromStr, sync}; +use std::{mem, ops::Not, str::FromStr, sync}; +use base_db::{ + CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, DependencyKind, + Edition, Env, FileChange, FileSet, LangCrateOrigin, SourceDatabaseExt, SourceRoot, Version, + VfsPath, +}; use cfg::CfgOptions; +use hir_expand::{ + change::Change, + db::ExpandDatabase, + proc_macro::{ + ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, ProcMacros, + }, +}; use rustc_hash::FxHashMap; +use span::{FileId, FilePosition, FileRange, Span}; use test_utils::{ extract_range_or_offset, Fixture, FixtureWithProjectMeta, RangeOrOffset, CURSOR_MARKER, ESCAPED_CURSOR_MARKER, }; -use triomphe::Arc; use tt::{Leaf, Subtree, TokenTree}; -use vfs::{file_set::FileSet, VfsPath}; - -use crate::{ - input::{CrateName, CrateOrigin, LangCrateOrigin}, - span::SpanData, - Change, CrateDisplayName, CrateGraph, CrateId, Dependency, DependencyKind, Edition, Env, - FileId, FilePosition, FileRange, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, - ProcMacros, ReleaseChannel, SourceDatabaseExt, SourceRoot, SourceRootId, -}; -pub const WORKSPACE: SourceRootId = SourceRootId(0); +pub const WORKSPACE: base_db::SourceRootId = base_db::SourceRootId(0); -pub trait WithFixture: Default + SourceDatabaseExt + 'static { +pub trait WithFixture: Default + ExpandDatabase + SourceDatabaseExt + 'static { #[track_caller] fn with_single_file(ra_fixture: &str) -> (Self, FileId) { let fixture = ChangeFixture::parse(ra_fixture); @@ -80,6 +83,7 @@ pub trait WithFixture: Default + SourceDatabaseExt + 'static { let fixture = ChangeFixture::parse(ra_fixture); let mut db = Self::default(); fixture.change.apply(&mut db); + let (file_id, range_or_offset) = fixture .file_position .expect("Could not find file position in fixture. 
Did you forget to add an `$0`?"); @@ -95,7 +99,7 @@ pub trait WithFixture: Default + SourceDatabaseExt + 'static { } } -impl WithFixture for DB {} +impl WithFixture for DB {} pub struct ChangeFixture { pub file_position: Option<(FileId, RangeOrOffset)>, @@ -116,13 +120,11 @@ impl ChangeFixture { ) -> ChangeFixture { let FixtureWithProjectMeta { fixture, mini_core, proc_macro_names, toolchain } = FixtureWithProjectMeta::parse(ra_fixture); - let toolchain = toolchain - .map(|it| { - ReleaseChannel::from_str(&it) - .unwrap_or_else(|| panic!("unknown release channel found: {it}")) - }) - .unwrap_or(ReleaseChannel::Stable); - let mut change = Change::new(); + let toolchain = Some({ + let channel = toolchain.as_deref().unwrap_or("stable"); + Version::parse(&format!("1.76.0-{channel}")).unwrap() + }); + let mut source_change = FileChange::new(); let mut files = Vec::new(); let mut crate_graph = CrateGraph::default(); @@ -187,9 +189,9 @@ impl ChangeFixture { origin, meta.target_data_layout .as_deref() - .map(Arc::from) + .map(From::from) .ok_or_else(|| "target_data_layout unset".into()), - Some(toolchain), + toolchain.clone(), ); let prev = crates.insert(crate_name.clone(), crate_id); assert!(prev.is_none(), "multiple crates with same name: {}", crate_name); @@ -206,7 +208,7 @@ impl ChangeFixture { default_target_data_layout = meta.target_data_layout; } - change.change_file(file_id, Some(Arc::from(text))); + source_change.change_file(file_id, Some(text.into())); let path = VfsPath::new_virtual_path(meta.path); file_set.insert(file_id, path); files.push(file_id); @@ -229,7 +231,7 @@ impl ChangeFixture { default_target_data_layout .map(|it| it.into()) .ok_or_else(|| "target_data_layout unset".into()), - Some(toolchain), + toolchain.clone(), ); } else { for (from, to, prelude) in crate_deps { @@ -261,7 +263,7 @@ impl ChangeFixture { fs.insert(core_file, VfsPath::new_virtual_path("/sysroot/core/lib.rs".to_string())); roots.push(SourceRoot::new_library(fs)); - change.change_file(core_file, Some(Arc::from(mini_core.source_code()))); + source_change.change_file(core_file, Some(mini_core.source_code().into())); let all_crates = crate_graph.crates_in_topological_order(); @@ -276,7 +278,7 @@ impl ChangeFixture { false, CrateOrigin::Lang(LangCrateOrigin::Core), target_layout.clone(), - Some(toolchain), + toolchain.clone(), ); for krate in all_crates { @@ -306,7 +308,7 @@ impl ChangeFixture { ); roots.push(SourceRoot::new_library(fs)); - change.change_file(proc_lib_file, Some(Arc::from(source))); + source_change.change_file(proc_lib_file, Some(source.into())); let all_crates = crate_graph.crates_in_topological_order(); @@ -321,7 +323,7 @@ impl ChangeFixture { true, CrateOrigin::Local { repo: None, name: None }, target_layout, - Some(toolchain), + toolchain, ); proc_macros.insert(proc_macros_crate, Ok(proc_macro)); @@ -344,11 +346,17 @@ impl ChangeFixture { SourceRootKind::Library => SourceRoot::new_library(mem::take(&mut file_set)), }; roots.push(root); - change.set_roots(roots); - change.set_crate_graph(crate_graph); - change.set_proc_macros(proc_macros); - - ChangeFixture { file_position, files, change } + source_change.set_roots(roots); + source_change.set_crate_graph(crate_graph); + + ChangeFixture { + file_position, + files, + change: Change { + source_change, + proc_macros: proc_macros.is_empty().not().then(|| proc_macros), + }, + } } } @@ -364,7 +372,7 @@ pub fn identity(_attr: TokenStream, item: TokenStream) -> TokenStream { .into(), ProcMacro { name: "identity".into(), - kind: 
crate::ProcMacroKind::Attr, + kind: ProcMacroKind::Attr, expander: sync::Arc::new(IdentityProcMacroExpander), }, ), @@ -378,7 +386,7 @@ pub fn derive_identity(item: TokenStream) -> TokenStream { .into(), ProcMacro { name: "DeriveIdentity".into(), - kind: crate::ProcMacroKind::CustomDerive, + kind: ProcMacroKind::CustomDerive, expander: sync::Arc::new(IdentityProcMacroExpander), }, ), @@ -392,7 +400,7 @@ pub fn input_replace(attr: TokenStream, _item: TokenStream) -> TokenStream { .into(), ProcMacro { name: "input_replace".into(), - kind: crate::ProcMacroKind::Attr, + kind: ProcMacroKind::Attr, expander: sync::Arc::new(AttributeInputReplaceProcMacroExpander), }, ), @@ -406,7 +414,7 @@ pub fn mirror(input: TokenStream) -> TokenStream { .into(), ProcMacro { name: "mirror".into(), - kind: crate::ProcMacroKind::FuncLike, + kind: ProcMacroKind::FuncLike, expander: sync::Arc::new(MirrorProcMacroExpander), }, ), @@ -420,7 +428,7 @@ pub fn shorten(input: TokenStream) -> TokenStream { .into(), ProcMacro { name: "shorten".into(), - kind: crate::ProcMacroKind::FuncLike, + kind: ProcMacroKind::FuncLike, expander: sync::Arc::new(ShortenProcMacroExpander), }, ), @@ -539,13 +547,13 @@ struct IdentityProcMacroExpander; impl ProcMacroExpander for IdentityProcMacroExpander { fn expand( &self, - subtree: &Subtree, - _: Option<&Subtree>, + subtree: &Subtree, + _: Option<&Subtree>, _: &Env, - _: SpanData, - _: SpanData, - _: SpanData, - ) -> Result, ProcMacroExpansionError> { + _: Span, + _: Span, + _: Span, + ) -> Result, ProcMacroExpansionError> { Ok(subtree.clone()) } } @@ -556,13 +564,13 @@ struct AttributeInputReplaceProcMacroExpander; impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander { fn expand( &self, - _: &Subtree, - attrs: Option<&Subtree>, + _: &Subtree, + attrs: Option<&Subtree>, _: &Env, - _: SpanData, - _: SpanData, - _: SpanData, - ) -> Result, ProcMacroExpansionError> { + _: Span, + _: Span, + _: Span, + ) -> Result, ProcMacroExpansionError> { attrs .cloned() .ok_or_else(|| ProcMacroExpansionError::Panic("Expected attribute input".into())) @@ -574,14 +582,14 @@ struct MirrorProcMacroExpander; impl ProcMacroExpander for MirrorProcMacroExpander { fn expand( &self, - input: &Subtree, - _: Option<&Subtree>, + input: &Subtree, + _: Option<&Subtree>, _: &Env, - _: SpanData, - _: SpanData, - _: SpanData, - ) -> Result, ProcMacroExpansionError> { - fn traverse(input: &Subtree) -> Subtree { + _: Span, + _: Span, + _: Span, + ) -> Result, ProcMacroExpansionError> { + fn traverse(input: &Subtree) -> Subtree { let mut token_trees = vec![]; for tt in input.token_trees.iter().rev() { let tt = match tt { @@ -604,16 +612,16 @@ struct ShortenProcMacroExpander; impl ProcMacroExpander for ShortenProcMacroExpander { fn expand( &self, - input: &Subtree, - _: Option<&Subtree>, + input: &Subtree, + _: Option<&Subtree>, _: &Env, - _: SpanData, - _: SpanData, - _: SpanData, - ) -> Result, ProcMacroExpansionError> { + _: Span, + _: Span, + _: Span, + ) -> Result, ProcMacroExpansionError> { return Ok(traverse(input)); - fn traverse(input: &Subtree) -> Subtree { + fn traverse(input: &Subtree) -> Subtree { let token_trees = input .token_trees .iter() @@ -625,7 +633,7 @@ impl ProcMacroExpander for ShortenProcMacroExpander { Subtree { delimiter: input.delimiter, token_trees } } - fn modify_leaf(leaf: &Leaf) -> Leaf { + fn modify_leaf(leaf: &Leaf) -> Leaf { let mut leaf = leaf.clone(); match &mut leaf { Leaf::Literal(it) => { diff --git a/src/tools/rust-analyzer/crates/test-utils/Cargo.toml 
b/src/tools/rust-analyzer/crates/test-utils/Cargo.toml index 438b599ffaae5..56067d8341789 100644 --- a/src/tools/rust-analyzer/crates/test-utils/Cargo.toml +++ b/src/tools/rust-analyzer/crates/test-utils/Cargo.toml @@ -15,7 +15,10 @@ doctest = false # Avoid adding deps here, this crate is widely used in tests it should compile fast! dissimilar = "1.0.7" text-size.workspace = true -rustc-hash = "1.1.0" +rustc-hash.workspace = true stdx.workspace = true profile.workspace = true + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs index f766747d707ca..1f3136404c615 100644 --- a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs +++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs @@ -1381,6 +1381,7 @@ mod macros { // region:assert #[macro_export] #[rustc_builtin_macro] + #[allow_internal_unstable(core_panic, edition_panic, generic_assert_internals)] macro_rules! assert { ($($arg:tt)*) => { /* compiler built-in */ @@ -1389,6 +1390,7 @@ mod macros { // endregion:assert // region:fmt + #[allow_internal_unstable(fmt_internals, const_fmt_arguments_new)] #[macro_export] #[rustc_builtin_macro] macro_rules! const_format_args { @@ -1396,6 +1398,7 @@ mod macros { ($fmt:expr, $($args:tt)*) => {{ /* compiler built-in */ }}; } + #[allow_internal_unstable(fmt_internals)] #[macro_export] #[rustc_builtin_macro] macro_rules! format_args { @@ -1403,6 +1406,7 @@ mod macros { ($fmt:expr, $($args:tt)*) => {{ /* compiler built-in */ }}; } + #[allow_internal_unstable(fmt_internals)] #[macro_export] #[rustc_builtin_macro] macro_rules! format_args_nl { diff --git a/src/tools/rust-analyzer/crates/text-edit/Cargo.toml b/src/tools/rust-analyzer/crates/text-edit/Cargo.toml index 4620cc72d0a22..f745674794c99 100644 --- a/src/tools/rust-analyzer/crates/text-edit/Cargo.toml +++ b/src/tools/rust-analyzer/crates/text-edit/Cargo.toml @@ -14,3 +14,6 @@ doctest = false [dependencies] itertools.workspace = true text-size.workspace = true + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/toolchain/Cargo.toml b/src/tools/rust-analyzer/crates/toolchain/Cargo.toml index a283f9a884214..f9b120772f038 100644 --- a/src/tools/rust-analyzer/crates/toolchain/Cargo.toml +++ b/src/tools/rust-analyzer/crates/toolchain/Cargo.toml @@ -13,3 +13,6 @@ doctest = false [dependencies] home = "0.5.4" + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/tt/Cargo.toml b/src/tools/rust-analyzer/crates/tt/Cargo.toml index 57222449790ec..77683fd48afb2 100644 --- a/src/tools/rust-analyzer/crates/tt/Cargo.toml +++ b/src/tools/rust-analyzer/crates/tt/Cargo.toml @@ -16,3 +16,9 @@ smol_str.workspace = true text-size.workspace = true stdx.workspace = true + +# FIXME: Remove this dependency once the `Span` trait is gone (that is once Span::DUMMY has been removed) +span.workspace = true + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/tt/src/lib.rs b/src/tools/rust-analyzer/crates/tt/src/lib.rs index 481d575403aca..b3b0eeda75af5 100644 --- a/src/tools/rust-analyzer/crates/tt/src/lib.rs +++ b/src/tools/rust-analyzer/crates/tt/src/lib.rs @@ -11,46 +11,10 @@ use stdx::impl_from; pub use smol_str::SmolStr; pub use text_size::{TextRange, TextSize}; -#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] -pub struct SpanData { - /// The text range of this span, relative to the 
anchor. - /// We need the anchor for incrementality, as storing absolute ranges will require - /// recomputation on every change in a file at all times. - pub range: TextRange, - pub anchor: Anchor, - /// The syntax context of the span. - pub ctx: Ctx, -} - -impl Span for SpanData { - #[allow(deprecated)] - const DUMMY: Self = SpanData { - range: TextRange::empty(TextSize::new(0)), - anchor: Anchor::DUMMY, - ctx: Ctx::DUMMY, - }; -} - -pub trait Span: std::fmt::Debug + Copy + Sized + Eq { - // FIXME: Should not exist. Dummy spans will always be wrong if they leak somewhere. Instead, - // the call site or def site spans should be used in relevant places, its just that we don't - // expose those everywhere in the yet. - const DUMMY: Self; -} +pub trait Span: std::fmt::Debug + Copy + Sized + Eq {} -// FIXME: Should not exist -pub trait SpanAnchor: - std::fmt::Debug + Copy + Sized + Eq + Copy + fmt::Debug + std::hash::Hash -{ - #[deprecated(note = "this should not exist")] - const DUMMY: Self; -} - -// FIXME: Should not exist -pub trait SyntaxContext: std::fmt::Debug + Copy + Sized + Eq { - #[deprecated(note = "this should not exist")] - const DUMMY: Self; -} +impl Span for span::SpanData where span::SpanData: std::fmt::Debug + Copy + Sized + Eq +{} #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum TokenTree { @@ -66,15 +30,7 @@ impl TokenTree { }) } - pub fn subtree_or_wrap(self) -> Subtree { - match self { - TokenTree::Leaf(_) => { - Subtree { delimiter: Delimiter::DUMMY_INVISIBLE, token_trees: vec![self] } - } - TokenTree::Subtree(s) => s, - } - } - pub fn subtree_or_wrap2(self, span: DelimSpan) -> Subtree { + pub fn subtree_or_wrap(self, span: DelimSpan) -> Subtree { match self { TokenTree::Leaf(_) => Subtree { delimiter: Delimiter::invisible_delim_spanned(span), @@ -83,6 +39,13 @@ impl TokenTree { TokenTree::Subtree(s) => s, } } + + pub fn first_span(&self) -> S { + match self { + TokenTree::Leaf(l) => *l.span(), + TokenTree::Subtree(s) => s.delimiter.open, + } + } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -134,11 +97,6 @@ pub struct DelimSpan { pub close: S, } -impl DelimSpan { - // FIXME should not exist - pub const DUMMY: Self = Self { open: S::DUMMY, close: S::DUMMY }; -} - #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct Delimiter { pub open: S, @@ -147,15 +105,6 @@ pub struct Delimiter { } impl Delimiter { - // FIXME should not exist - pub const DUMMY_INVISIBLE: Self = - Self { open: S::DUMMY, close: S::DUMMY, kind: DelimiterKind::Invisible }; - - // FIXME should not exist - pub const fn dummy_invisible() -> Self { - Self::DUMMY_INVISIBLE - } - pub const fn invisible_spanned(span: S) -> Self { Delimiter { open: span, close: span, kind: DelimiterKind::Invisible } } diff --git a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml index fe6cb0a2c3fd9..a6d5027c3a60a 100644 --- a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml +++ b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml @@ -20,3 +20,6 @@ notify = "6.1.1" stdx.workspace = true vfs.workspace = true paths.workspace = true + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/vfs/Cargo.toml b/src/tools/rust-analyzer/crates/vfs/Cargo.toml index 11409f2eb8195..c88f346655984 100644 --- a/src/tools/rust-analyzer/crates/vfs/Cargo.toml +++ b/src/tools/rust-analyzer/crates/vfs/Cargo.toml @@ -12,10 +12,13 @@ rust-version.workspace = true doctest = false [dependencies] -rustc-hash = "1.1.0" 
+rustc-hash.workspace = true fst = "0.4.7" indexmap.workspace = true nohash-hasher.workspace = true paths.workspace = true stdx.workspace = true + +[lints] +workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/docs/dev/architecture.md b/src/tools/rust-analyzer/docs/dev/architecture.md index b7d585cafb358..4303a800a04ac 100644 --- a/src/tools/rust-analyzer/docs/dev/architecture.md +++ b/src/tools/rust-analyzer/docs/dev/architecture.md @@ -134,29 +134,29 @@ This is to enable parallel parsing of all files. **Architecture Invariant:** Syntax trees are by design incomplete and do not enforce well-formedness. If an AST method returns an `Option`, it *can* be `None` at runtime, even if this is forbidden by the grammar. -### `crates/base_db` +### `crates/base-db` We use the [salsa](https://github.com/salsa-rs/salsa) crate for incremental and on-demand computation. Roughly, you can think of salsa as a key-value store, but it can also compute derived values using specified functions. -The `base_db` crate provides basic infrastructure for interacting with salsa. +The `base-db` crate provides basic infrastructure for interacting with salsa. Crucially, it defines most of the "input" queries: facts supplied by the client of the analyzer. Reading the docs of the `base_db::input` module should be useful: everything else is strictly derived from those inputs. **Architecture Invariant:** particularities of the build system are *not* the part of the ground state. -In particular, `base_db` knows nothing about cargo. +In particular, `base-db` knows nothing about cargo. For example, `cfg` flags are a part of `base_db`, but `feature`s are not. A `foo` feature is a Cargo-level concept, which is lowered by Cargo to `--cfg feature=foo` argument on the command line. The `CrateGraph` structure is used to represent the dependencies between the crates abstractly. -**Architecture Invariant:** `base_db` doesn't know about file system and file paths. +**Architecture Invariant:** `base-db` doesn't know about file system and file paths. Files are represented with opaque `FileId`, there's no operation to get an `std::path::Path` out of the `FileId`. -### `crates/hir_expand`, `crates/hir_def`, `crates/hir_ty` +### `crates/hir-expand`, `crates/hir-def`, `crates/hir_ty` These crates are the *brain* of rust-analyzer. This is the compiler part of the IDE. -`hir_xxx` crates have a strong [ECS](https://en.wikipedia.org/wiki/Entity_component_system) flavor, in that they work with raw ids and directly query the database. +`hir-xxx` crates have a strong [ECS](https://en.wikipedia.org/wiki/Entity_component_system) flavor, in that they work with raw ids and directly query the database. There's little abstraction here. These crates integrate deeply with salsa and chalk. @@ -186,7 +186,7 @@ If you think about "using rust-analyzer as a library", `hir` crate is most likel It wraps ECS-style internal API into a more OO-flavored API (with an extra `db` argument for each call). **Architecture Invariant:** `hir` provides a static, fully resolved view of the code. -While internal `hir_*` crates _compute_ things, `hir`, from the outside, looks like an inert data structure. +While internal `hir-*` crates _compute_ things, `hir`, from the outside, looks like an inert data structure. `hir` also handles the delicate task of going from syntax to the corresponding `hir`. Remember that the mapping here is one-to-many. @@ -200,7 +200,7 @@ Then we look for our node in the set of children. 
This is the heart of many IDE features, like goto definition, which start with figuring out the hir node at the cursor. This is some kind of (yet unnamed) uber-IDE pattern, as it is present in Roslyn and Kotlin as well. -### `crates/ide` +### `crates/ide`, `crates/ide-db`, `crates/ide-assists`, `crates/ide-completion`, `crates/ide-diagnostics`, `crates/ide-ssr` The `ide` crate builds on top of `hir` semantic model to provide high-level IDE features like completion or goto definition. It is an **API Boundary**. @@ -217,8 +217,8 @@ Shout outs to LSP developers for popularizing the idea that "UI" is a good place `AnalysisHost` is a state to which you can transactionally `apply_change`. `Analysis` is an immutable snapshot of the state. -Internally, `ide` is split across several crates. `ide_assists`, `ide_completion` and `ide_ssr` implement large isolated features. -`ide_db` implements common IDE functionality (notably, reference search is implemented here). +Internally, `ide` is split across several crates. `ide-assists`, `ide-completion`, `ide-diagnostics` and `ide-ssr` implement large isolated features. +`ide-db` implements common IDE functionality (notably, reference search is implemented here). The `ide` contains a public API/façade, as well as implementation for a plethora of smaller features. **Architecture Invariant:** `ide` crate strives to provide a _perfect_ API. @@ -251,14 +251,14 @@ This is a tricky business. **Architecture Invariant:** `rust-analyzer` should be partially available even when the build is broken. Reloading process should not prevent IDE features from working. -### `crates/toolchain`, `crates/project_model`, `crates/flycheck` +### `crates/toolchain`, `crates/project-model`, `crates/flycheck` These crates deal with invoking `cargo` to learn about project structure and get compiler errors for the "check on save" feature. -They use `crates/path` heavily instead of `std::path`. +They use `crates/paths` heavily instead of `std::path`. A single `rust-analyzer` process can serve many projects, so it is important that server's current directory does not leak. -### `crates/mbe`, `crates/tt`, `crates/proc_macro_api`, `crates/proc_macro_srv` +### `crates/mbe`, `crates/tt`, `crates/proc-macro-api`, `crates/proc-macro-srv`, `crates/proc-macro-srv-cli` These crates implement macros as token tree -> token tree transforms. They are independent from the rest of the code. @@ -268,8 +268,8 @@ They are independent from the rest of the code. And it also handles the actual parsing and expansion of declarative macro (a-la "Macros By Example" or mbe). For proc macros, the client-server model are used. -We start a separate process (`proc_macro_srv`) which loads and runs the proc-macros for us. -And the client (`proc_macro_api`) provides an interface to talk to that server separately. +We start a separate process (`proc-macro-srv-cli`) which loads and runs the proc-macros for us. +And the client (`proc-macro-api`) provides an interface to talk to that server separately. And then token trees are passed from client, and the server will load the corresponding dynamic library (which built by `cargo`). And due to the fact the api for getting result from proc macro are always unstable in `rustc`, @@ -283,7 +283,7 @@ And they may be non-deterministic which conflict how `salsa` works, so special a This crate is responsible for parsing, evaluation and general definition of `cfg` attributes. 
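"Evaluation" here means deciding whether a `cfg` expression holds for a given set of enabled atoms and key-value options. A minimal sketch of such an evaluator; the `CfgExpr` shape and names below are illustrative and not the `cfg` crate's actual API:

```rust
use std::collections::HashSet;

// Illustrative expression tree for `#[cfg(...)]`; not the `cfg` crate's API.
enum CfgExpr {
    Atom(String),             // e.g. `unix`, `test`
    KeyValue(String, String), // e.g. `feature = "serde"`
    All(Vec<CfgExpr>),
    Any(Vec<CfgExpr>),
    Not(Box<CfgExpr>),
}

fn eval(expr: &CfgExpr, enabled: &HashSet<(String, Option<String>)>) -> bool {
    match expr {
        CfgExpr::Atom(a) => enabled.contains(&(a.clone(), None)),
        CfgExpr::KeyValue(k, v) => enabled.contains(&(k.clone(), Some(v.clone()))),
        CfgExpr::All(es) => es.iter().all(|e| eval(e, enabled)),
        CfgExpr::Any(es) => es.iter().any(|e| eval(e, enabled)),
        CfgExpr::Not(e) => !eval(e, enabled),
    }
}

fn main() {
    let enabled: HashSet<_> = [
        ("unix".to_string(), None),
        ("feature".to_string(), Some("serde".to_string())),
    ]
    .into_iter()
    .collect();

    // cfg(all(unix, not(feature = "rayon")))
    let expr = CfgExpr::All(vec![
        CfgExpr::Atom("unix".into()),
        CfgExpr::Not(Box::new(CfgExpr::KeyValue("feature".into(), "rayon".into()))),
    ]);
    assert!(eval(&expr, &enabled));
}
```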
-### `crates/vfs`, `crates/vfs-notify` +### `crates/vfs`, `crates/vfs-notify`, `crates/paths` These crates implement a virtual file system. They provide consistent snapshots of the underlying file system and insulate messy OS paths. @@ -301,6 +301,25 @@ as copies of unstable std items we would like to make use of already, like `std: This crate contains utilities for CPU and memory profiling. +### `crates/intern` + +This crate contains infrastructure for globally interning things via `Arc`. + +### `crates/load-cargo` + +This crate exposes several utilities for loading projects, used by the main `rust-analyzer` crate +and other downstream consumers. + +### `crates/rustc-dependencies` + +This crate wraps the `rustc_*` crates rust-analyzer relies on and conditionally points them to +mirrored crates-io releases such that rust-analyzer keeps building on stable. + +### `crates/span` + +This crate exposes types and functions related to rust-analyzer's span for macros. + +A span is effectively a text range relative to some item in a file with a given `SyntaxContext` (hygiene). ## Cross-Cutting Concerns diff --git a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md index b66c9c943a161..3251dd752682e 100644 --- a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md +++ b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md @@ -1,5 +1,5 @@ $DIR/missing-type.rs:4:12 + --> $DIR/missing-type.rs:5:12 | LL | let x: Iter; | ^^^^ not found in this scope diff --git a/tests/ui/annotate-snippet/multispan.rs b/tests/ui/annotate-snippet/multispan.rs index 69d7e1a9d116a..d7241b8036492 100644 --- a/tests/ui/annotate-snippet/multispan.rs +++ b/tests/ui/annotate-snippet/multispan.rs @@ -1,4 +1,5 @@ // aux-build:multispan.rs +// error-pattern:hello to you, too! // compile-flags: --error-format human-annotate-rs -Z unstable-options #![feature(proc_macro_hygiene)] @@ -12,17 +13,17 @@ fn main() { hello!(); // Exactly one 'hi'. - hello!(hi); //~ ERROR hello to you, too! + hello!(hi); // Now two, back to back. - hello!(hi hi); //~ ERROR hello to you, too! + hello!(hi hi); // Now three, back to back. - hello!(hi hi hi); //~ ERROR hello to you, too! + hello!(hi hi hi); // Now several, with spacing. - hello!(hi hey hi yo hi beep beep hi hi); //~ ERROR hello to you, too! - hello!(hi there, hi how are you? hi... hi.); //~ ERROR hello to you, too! - hello!(whoah. hi di hi di ho); //~ ERROR hello to you, too! - hello!(hi good hi and good bye); //~ ERROR hello to you, too! + hello!(hi hey hi yo hi beep beep hi hi); + hello!(hi there, hi how are you? hi... hi.); + hello!(whoah. hi di hi di ho); + hello!(hi good hi and good bye); } diff --git a/tests/ui/annotate-snippet/multispan.stderr b/tests/ui/annotate-snippet/multispan.stderr index baed54c59a4e9..833b67730325e 100644 --- a/tests/ui/annotate-snippet/multispan.stderr +++ b/tests/ui/annotate-snippet/multispan.stderr @@ -1,41 +1,41 @@ error: hello to you, too! - --> $DIR/multispan.rs:15:5 + --> $DIR/multispan.rs:16:5 | LL | hello!(hi); | ^^^^^^^^^^ | error: hello to you, too! - --> $DIR/multispan.rs:18:5 + --> $DIR/multispan.rs:19:5 | LL | hello!(hi hi); | ^^^^^^^^^^^^^ | error: hello to you, too! - --> $DIR/multispan.rs:21:5 + --> $DIR/multispan.rs:22:5 | LL | hello!(hi hi hi); | ^^^^^^^^^^^^^^^^ | error: hello to you, too! - --> $DIR/multispan.rs:24:5 + --> $DIR/multispan.rs:25:5 | LL | hello!(hi hey hi yo hi beep beep hi hi); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | error: hello to you, too! 
- --> $DIR/multispan.rs:25:5 + --> $DIR/multispan.rs:26:5 | LL | hello!(hi there, hi how are you? hi... hi.); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | error: hello to you, too! - --> $DIR/multispan.rs:26:5 + --> $DIR/multispan.rs:27:5 | LL | hello!(whoah. hi di hi di ho); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | error: hello to you, too! - --> $DIR/multispan.rs:27:5 + --> $DIR/multispan.rs:28:5 | LL | hello!(hi good hi and good bye); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/tests/ui/asm/named-asm-labels.rs b/tests/ui/asm/named-asm-labels.rs index 160dbf617c4f6..24586b39aacc0 100644 --- a/tests/ui/asm/named-asm-labels.rs +++ b/tests/ui/asm/named-asm-labels.rs @@ -120,6 +120,27 @@ fn main() { // is there an example that is valid x86 for this test? asm!(":bbb nop"); + // non-ascii characters are not allowed in labels, so should not trigger the lint + asm!("Ù: nop"); + asm!("testÙ: nop"); + asm!("_Ù_: nop"); + + // Format arguments should be conservatively assumed to be valid characters in labels + // Would emit `test_rax:` or similar + #[allow(asm_sub_register)] + { + asm!("test_{}: nop", in(reg) 10); //~ ERROR avoid using named labels + } + asm!("test_{}: nop", const 10); //~ ERROR avoid using named labels + asm!("test_{}: nop", sym main); //~ ERROR avoid using named labels + asm!("{}_test: nop", const 10); //~ ERROR avoid using named labels + asm!("test_{}_test: nop", const 10); //~ ERROR avoid using named labels + asm!("{}: nop", const 10); //~ ERROR avoid using named labels + + asm!("{uwu}: nop", uwu = const 10); //~ ERROR avoid using named labels + asm!("{0}: nop", const 10); //~ ERROR avoid using named labels + asm!("{1}: nop", "/* {0} */", const 10, const 20); //~ ERROR avoid using named labels + // Test include_str in asm asm!(include_str!("named-asm-labels.s")); //~ ERROR avoid using named labels diff --git a/tests/ui/asm/named-asm-labels.stderr b/tests/ui/asm/named-asm-labels.stderr index c8380629e12ea..89c058499675c 100644 --- a/tests/ui/asm/named-asm-labels.stderr +++ b/tests/ui/asm/named-asm-labels.stderr @@ -245,7 +245,88 @@ LL | ab: nop // ab: does foo = note: see the asm section of Rust By Example for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:124:14 + --> $DIR/named-asm-labels.rs:132:19 + | +LL | asm!("test_{}: nop", in(reg) 10); + | ^^^^^^^ + | + = help: only local labels of the form `:` should be used in inline asm + = note: see the asm section of Rust By Example for more information + +error: avoid using named labels in inline assembly + --> $DIR/named-asm-labels.rs:134:15 + | +LL | asm!("test_{}: nop", const 10); + | ^^^^^^^ + | + = help: only local labels of the form `:` should be used in inline asm + = note: see the asm section of Rust By Example for more information + +error: avoid using named labels in inline assembly + --> $DIR/named-asm-labels.rs:135:15 + | +LL | asm!("test_{}: nop", sym main); + | ^^^^^^^ + | + = help: only local labels of the form `:` should be used in inline asm + = note: see the asm section of Rust By Example for more information + +error: avoid using named labels in inline assembly + --> $DIR/named-asm-labels.rs:136:15 + | +LL | asm!("{}_test: nop", const 10); + | ^^^^^^^ + | + = help: only local labels of the form `:` should be used in inline asm + = note: see the asm section of Rust By Example for more information + +error: avoid using named labels in inline assembly + --> $DIR/named-asm-labels.rs:137:15 + | +LL | asm!("test_{}_test: nop", const 10); + | ^^^^^^^^^^^^ + | + = help: 
only local labels of the form `:` should be used in inline asm + = note: see the asm section of Rust By Example for more information + +error: avoid using named labels in inline assembly + --> $DIR/named-asm-labels.rs:138:15 + | +LL | asm!("{}: nop", const 10); + | ^^ + | + = help: only local labels of the form `:` should be used in inline asm + = note: see the asm section of Rust By Example for more information + +error: avoid using named labels in inline assembly + --> $DIR/named-asm-labels.rs:140:15 + | +LL | asm!("{uwu}: nop", uwu = const 10); + | ^^^^^ + | + = help: only local labels of the form `:` should be used in inline asm + = note: see the asm section of Rust By Example for more information + +error: avoid using named labels in inline assembly + --> $DIR/named-asm-labels.rs:141:15 + | +LL | asm!("{0}: nop", const 10); + | ^^^ + | + = help: only local labels of the form `:` should be used in inline asm + = note: see the asm section of Rust By Example for more information + +error: avoid using named labels in inline assembly + --> $DIR/named-asm-labels.rs:142:15 + | +LL | asm!("{1}: nop", "/* {0} */", const 10, const 20); + | ^^^ + | + = help: only local labels of the form `:` should be used in inline asm + = note: see the asm section of Rust By Example for more information + +error: avoid using named labels in inline assembly + --> $DIR/named-asm-labels.rs:145:14 | LL | asm!(include_str!("named-asm-labels.s")); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -254,7 +335,7 @@ LL | asm!(include_str!("named-asm-labels.s")); = note: see the asm section of Rust By Example for more information warning: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:134:19 + --> $DIR/named-asm-labels.rs:155:19 | LL | asm!("warned: nop"); | ^^^^^^ @@ -262,13 +343,13 @@ LL | asm!("warned: nop"); = help: only local labels of the form `:` should be used in inline asm = note: see the asm section of Rust By Example for more information note: the lint level is defined here - --> $DIR/named-asm-labels.rs:132:16 + --> $DIR/named-asm-labels.rs:153:16 | LL | #[warn(named_asm_labels)] | ^^^^^^^^^^^^^^^^ error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:143:20 + --> $DIR/named-asm-labels.rs:164:20 | LL | unsafe { asm!(".Lfoo: mov rax, {}; ret;", "nop", const 1, options(noreturn)) } | ^^^^^ @@ -277,7 +358,7 @@ LL | unsafe { asm!(".Lfoo: mov rax, {}; ret;", "nop", const 1, options(noret = note: see the asm section of Rust By Example for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:149:20 + --> $DIR/named-asm-labels.rs:170:20 | LL | unsafe { asm!(".Lbar: mov rax, {}; ret;", "nop", const 1, options(noreturn)) } | ^^^^^ @@ -286,7 +367,7 @@ LL | unsafe { asm!(".Lbar: mov rax, {}; ret;", "nop", const 1, options(noret = note: see the asm section of Rust By Example for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:157:20 + --> $DIR/named-asm-labels.rs:178:20 | LL | unsafe { asm!(".Laaa: nop; ret;", options(noreturn)) } | ^^^^^ @@ -295,7 +376,7 @@ LL | unsafe { asm!(".Laaa: nop; ret;", options(noreturn)) } = note: see the asm section of Rust By Example for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:167:24 + --> $DIR/named-asm-labels.rs:188:24 | LL | unsafe { asm!(".Lbbb: nop; ret;", options(noreturn)) } | ^^^^^ @@ -304,7 +385,7 @@ LL | unsafe { asm!(".Lbbb: nop; ret;", options(noreturn)) } = note: see the asm 
section of Rust By Example for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:176:15 + --> $DIR/named-asm-labels.rs:197:15 | LL | asm!("closure1: nop"); | ^^^^^^^^ @@ -313,7 +394,7 @@ LL | asm!("closure1: nop"); = note: see the asm section of Rust By Example for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:180:15 + --> $DIR/named-asm-labels.rs:201:15 | LL | asm!("closure2: nop"); | ^^^^^^^^ @@ -322,7 +403,7 @@ LL | asm!("closure2: nop"); = note: see the asm section of Rust By Example for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:190:19 + --> $DIR/named-asm-labels.rs:211:19 | LL | asm!("closure3: nop"); | ^^^^^^^^ @@ -330,5 +411,5 @@ LL | asm!("closure3: nop"); = help: only local labels of the form `:` should be used in inline asm = note: see the asm section of Rust By Example for more information -error: aborting due to 35 previous errors; 1 warning emitted +error: aborting due to 44 previous errors; 1 warning emitted diff --git a/tests/ui/associated-types/associated-types-no-suitable-supertrait.stderr b/tests/ui/associated-types/associated-types-no-suitable-supertrait.stderr index 9ebc45387e86e..5443699eb019f 100644 --- a/tests/ui/associated-types/associated-types-no-suitable-supertrait.stderr +++ b/tests/ui/associated-types/associated-types-no-suitable-supertrait.stderr @@ -1,3 +1,15 @@ +error[E0277]: the trait bound `(T, U): Get` is not satisfied + --> $DIR/associated-types-no-suitable-supertrait.rs:22:5 + | +LL | fn uhoh(&self, foo: U, bar: <(T, U) as Get>::Value) {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `Get` is not implemented for `(T, U)` + | +help: this trait has no implementations, consider adding one + --> $DIR/associated-types-no-suitable-supertrait.rs:12:1 + | +LL | trait Get { + | ^^^^^^^^^ + error[E0277]: the trait bound `(T, U): Get` is not satisfied --> $DIR/associated-types-no-suitable-supertrait.rs:22:40 | @@ -21,18 +33,6 @@ help: consider further restricting `Self` LL | fn uhoh(&self, foo: U, bar: ::Value) where Self: Get {} | +++++++++++++++ -error[E0277]: the trait bound `(T, U): Get` is not satisfied - --> $DIR/associated-types-no-suitable-supertrait.rs:22:5 - | -LL | fn uhoh(&self, foo: U, bar: <(T, U) as Get>::Value) {} - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `Get` is not implemented for `(T, U)` - | -help: this trait has no implementations, consider adding one - --> $DIR/associated-types-no-suitable-supertrait.rs:12:1 - | -LL | trait Get { - | ^^^^^^^^^ - error: aborting due to 3 previous errors For more information about this error, try `rustc --explain E0277`. diff --git a/tests/ui/associated-types/defaults-cyclic-fail-1.rs b/tests/ui/associated-types/defaults-cyclic-fail-1.rs index 61ef013236e8d..1ec5faca7c5b3 100644 --- a/tests/ui/associated-types/defaults-cyclic-fail-1.rs +++ b/tests/ui/associated-types/defaults-cyclic-fail-1.rs @@ -24,13 +24,13 @@ impl Tr for u32 { // ...but not in an impl that redefines one of the types. 
impl Tr for bool { type A = Box; - //~^ ERROR overflow evaluating the requirement `::B == _` + //~^ ERROR overflow evaluating the requirement `::A == _` } // (the error is shown twice for some reason) impl Tr for usize { type B = &'static Self::A; - //~^ ERROR overflow evaluating the requirement `::A == _` + //~^ ERROR overflow evaluating the requirement `::B == _` } fn main() { diff --git a/tests/ui/associated-types/defaults-cyclic-fail-1.stderr b/tests/ui/associated-types/defaults-cyclic-fail-1.stderr index 008eddcb29dbc..876fdaec29632 100644 --- a/tests/ui/associated-types/defaults-cyclic-fail-1.stderr +++ b/tests/ui/associated-types/defaults-cyclic-fail-1.stderr @@ -1,10 +1,10 @@ -error[E0275]: overflow evaluating the requirement `::B == _` +error[E0275]: overflow evaluating the requirement `::A == _` --> $DIR/defaults-cyclic-fail-1.rs:26:14 | LL | type A = Box; | ^^^^^^^^^^^^ -error[E0275]: overflow evaluating the requirement `::A == _` +error[E0275]: overflow evaluating the requirement `::B == _` --> $DIR/defaults-cyclic-fail-1.rs:32:14 | LL | type B = &'static Self::A; diff --git a/tests/ui/associated-types/defaults-cyclic-fail-2.rs b/tests/ui/associated-types/defaults-cyclic-fail-2.rs index e91c9f2d29a82..bec1bde71a109 100644 --- a/tests/ui/associated-types/defaults-cyclic-fail-2.rs +++ b/tests/ui/associated-types/defaults-cyclic-fail-2.rs @@ -25,13 +25,13 @@ impl Tr for u32 { impl Tr for bool { type A = Box; - //~^ ERROR overflow evaluating the requirement `::B == _` + //~^ ERROR overflow evaluating the requirement `::A == _` } // (the error is shown twice for some reason) impl Tr for usize { type B = &'static Self::A; - //~^ ERROR overflow evaluating the requirement `::A == _` + //~^ ERROR overflow evaluating the requirement `::B == _` } fn main() { diff --git a/tests/ui/associated-types/defaults-cyclic-fail-2.stderr b/tests/ui/associated-types/defaults-cyclic-fail-2.stderr index d0fbab077153f..ec0c9973c0a2e 100644 --- a/tests/ui/associated-types/defaults-cyclic-fail-2.stderr +++ b/tests/ui/associated-types/defaults-cyclic-fail-2.stderr @@ -1,10 +1,10 @@ -error[E0275]: overflow evaluating the requirement `::B == _` +error[E0275]: overflow evaluating the requirement `::A == _` --> $DIR/defaults-cyclic-fail-2.rs:27:14 | LL | type A = Box; | ^^^^^^^^^^^^ -error[E0275]: overflow evaluating the requirement `::A == _` +error[E0275]: overflow evaluating the requirement `::B == _` --> $DIR/defaults-cyclic-fail-2.rs:33:14 | LL | type B = &'static Self::A; diff --git a/tests/ui/associated-types/substs-ppaux.rs b/tests/ui/associated-types/substs-ppaux.rs index 66cd94d7a1b37..db6e7a4cf051f 100644 --- a/tests/ui/associated-types/substs-ppaux.rs +++ b/tests/ui/associated-types/substs-ppaux.rs @@ -1,7 +1,7 @@ // // revisions: verbose normal // -//[verbose] compile-flags: -Z verbose +//[verbose] compile-flags: -Z verbose-internals trait Foo<'b, 'c, S=u32> { fn bar<'a, T>() where T: 'a {} diff --git a/tests/ui/async-await/issue-66312.stderr b/tests/ui/async-await/issue-66312.stderr index dad5807cb504d..2875af8a97e04 100644 --- a/tests/ui/async-await/issue-66312.stderr +++ b/tests/ui/async-await/issue-66312.stderr @@ -1,3 +1,9 @@ +error[E0308]: mismatched types + --> $DIR/issue-66312.rs:9:8 + | +LL | if x.is_some() { + | ^^^^^^^^^^^ expected `bool`, found `()` + error[E0307]: invalid `self` parameter type: T --> $DIR/issue-66312.rs:4:22 | @@ -7,12 +13,6 @@ LL | fn is_some(self: T); = note: type of `self` must be `Self` or a type that dereferences to it = help: consider changing to `self`, `&self`, 
`&mut self`, `self: Box<Self>`, `self: Rc<Self>`, `self: Arc<Self>`, or `self: Pin<P>
` (where P is one of the previous types except `Self`) -error[E0308]: mismatched types - --> $DIR/issue-66312.rs:9:8 - | -LL | if x.is_some() { - | ^^^^^^^^^^^ expected `bool`, found `()` - error: aborting due to 2 previous errors Some errors have detailed explanations: E0307, E0308. diff --git a/tests/ui/binop/binary-op-suggest-deref.fixed b/tests/ui/binop/binary-op-suggest-deref.fixed deleted file mode 100644 index 1ff3599137b16..0000000000000 --- a/tests/ui/binop/binary-op-suggest-deref.fixed +++ /dev/null @@ -1,8 +0,0 @@ -// Issue #52544 -// run-rustfix - -fn main() { - let i: &i64 = &1; - if *i < 0 {} - //~^ ERROR mismatched types [E0308] -} diff --git a/tests/ui/binop/binary-op-suggest-deref.rs b/tests/ui/binop/binary-op-suggest-deref.rs index 12505a9ac27e8..57f24a4c28ed4 100644 --- a/tests/ui/binop/binary-op-suggest-deref.rs +++ b/tests/ui/binop/binary-op-suggest-deref.rs @@ -1,8 +1,75 @@ -// Issue #52544 -// run-rustfix +#![allow(dead_code)] -fn main() { +fn foo() { + // Issue #52544 let i: &i64 = &1; if i < 0 {} //~^ ERROR mismatched types [E0308] } + +fn bar() { + // Issue #40660 + let foo = &&0; + + // Dereference LHS + _ = foo == 0; + //~^ERROR can't compare `&&{integer}` with `{integer}` [E0277] + _ = foo == &0; + //~^ERROR can't compare `&{integer}` with `{integer}` [E0277] + _ = &&&&foo == 0; + //~^ERROR can't compare `&&&&&&{integer}` with `{integer}` [E0277] + _ = *foo == 0; + //~^ERROR can't compare `&{integer}` with `{integer}` [E0277] + _ = &&foo == &&0; + //~^ERROR can't compare `&&{integer}` with `{integer}` [E0277] + _ = &Box::new(42) == 42; + //~^ERROR can't compare `&Box<{integer}>` with `{integer}` [E0277] + _ = &Box::new(&Box::new(&42)) == 42; + //~^ERROR can't compare `&Box<&Box<&{integer}>>` with `{integer}` [E0277] + + // Dereference RHS + _ = 0 == foo; + //~^ERROR can't compare `{integer}` with `&&{integer}` [E0277] + _ = &0 == foo; + //~^ERROR can't compare `{integer}` with `&{integer}` [E0277] + _ = 0 == &&&&foo; + //~^ERROR can't compare `{integer}` with `&&&&&&{integer}` [E0277] + _ = 0 == *foo; + //~^ERROR can't compare `{integer}` with `&{integer}` [E0277] + _ = &&0 == &&foo; + //~^ERROR can't compare `{integer}` with `&&{integer}` [E0277] + + // Dereference both sides + _ = &Box::new(Box::new(42)) == &foo; + //~^ERROR can't compare `Box>` with `&&{integer}` [E0277] + _ = &Box::new(42) == &foo; + //~^ERROR can't compare `Box<{integer}>` with `&&{integer}` [E0277] + _ = &Box::new(Box::new(Box::new(Box::new(42)))) == &foo; + //~^ERROR can't compare `Box>>>` with `&&{integer}` [E0277] + _ = &foo == &Box::new(Box::new(Box::new(Box::new(42)))); + //~^ERROR can't compare `&&{integer}` with `Box>>>` [E0277] + + // Don't suggest dereferencing the LHS; suggest boxing the RHS instead + _ = Box::new(42) == 42; + //~^ERROR mismatched types [E0308] + + // Don't suggest dereferencing with types that can't be compared + struct Foo; + _ = &&0 == Foo; + //~^ERROR can't compare `&&{integer}` with `Foo` [E0277] + _ = Foo == &&0; + //~^ERROR binary operation `==` cannot be applied to type `Foo` [E0369] +} + +fn baz() { + // Issue #44695 + let owned = "foo".to_owned(); + let string_ref = &owned; + let partial = "foobar"; + _ = string_ref == partial[..3]; + //~^ERROR can't compare `&String` with `str` [E0277] + _ = partial[..3] == string_ref; + //~^ERROR can't compare `str` with `&String` [E0277] +} + +fn main() {} diff --git a/tests/ui/binop/binary-op-suggest-deref.stderr b/tests/ui/binop/binary-op-suggest-deref.stderr index d1d0089ece795..68b5a24bf974d 100644 --- 
a/tests/ui/binop/binary-op-suggest-deref.stderr +++ b/tests/ui/binop/binary-op-suggest-deref.stderr @@ -9,6 +9,293 @@ help: consider dereferencing the borrow LL | if *i < 0 {} | + -error: aborting due to 1 previous error +error[E0277]: can't compare `&&{integer}` with `{integer}` + --> $DIR/binary-op-suggest-deref.rs:15:13 + | +LL | _ = foo == 0; + | ^^ no implementation for `&&{integer} == {integer}` + | + = help: the trait `PartialEq<{integer}>` is not implemented for `&&{integer}` +help: consider dereferencing here + | +LL | _ = **foo == 0; + | ++ + +error[E0277]: can't compare `&{integer}` with `{integer}` + --> $DIR/binary-op-suggest-deref.rs:17:13 + | +LL | _ = foo == &0; + | ^^ no implementation for `&{integer} == {integer}` + | + = help: the trait `PartialEq<{integer}>` is not implemented for `&{integer}` + = note: required for `&&{integer}` to implement `PartialEq<&{integer}>` +help: consider dereferencing here + | +LL | _ = *foo == &0; + | + + +error[E0277]: can't compare `&&&&&&{integer}` with `{integer}` + --> $DIR/binary-op-suggest-deref.rs:19:17 + | +LL | _ = &&&&foo == 0; + | ^^ no implementation for `&&&&&&{integer} == {integer}` + | + = help: the trait `PartialEq<{integer}>` is not implemented for `&&&&&&{integer}` +help: consider removing the borrows and dereferencing instead + | +LL - _ = &&&&foo == 0; +LL + _ = **foo == 0; + | + +error[E0277]: can't compare `&{integer}` with `{integer}` + --> $DIR/binary-op-suggest-deref.rs:21:14 + | +LL | _ = *foo == 0; + | ^^ no implementation for `&{integer} == {integer}` + | + = help: the trait `PartialEq<{integer}>` is not implemented for `&{integer}` +help: consider dereferencing here + | +LL | _ = **foo == 0; + | + + +error[E0277]: can't compare `&&{integer}` with `{integer}` + --> $DIR/binary-op-suggest-deref.rs:23:15 + | +LL | _ = &&foo == &&0; + | ^^ no implementation for `&&{integer} == {integer}` + | + = help: the trait `PartialEq<{integer}>` is not implemented for `&&{integer}` + = note: required for `&&&{integer}` to implement `PartialEq<&{integer}>` + = note: 1 redundant requirement hidden + = note: required for `&&&&{integer}` to implement `PartialEq<&&{integer}>` +help: consider removing the borrows + | +LL - _ = &&foo == &&0; +LL + _ = foo == &&0; + | + +error[E0277]: can't compare `&Box<{integer}>` with `{integer}` + --> $DIR/binary-op-suggest-deref.rs:25:23 + | +LL | _ = &Box::new(42) == 42; + | ^^ no implementation for `&Box<{integer}> == {integer}` + | + = help: the trait `PartialEq<{integer}>` is not implemented for `&Box<{integer}>` +help: consider removing the borrow and dereferencing instead + | +LL - _ = &Box::new(42) == 42; +LL + _ = *Box::new(42) == 42; + | + +error[E0277]: can't compare `&Box<&Box<&{integer}>>` with `{integer}` + --> $DIR/binary-op-suggest-deref.rs:27:35 + | +LL | _ = &Box::new(&Box::new(&42)) == 42; + | ^^ no implementation for `&Box<&Box<&{integer}>> == {integer}` + | + = help: the trait `PartialEq<{integer}>` is not implemented for `&Box<&Box<&{integer}>>` +help: consider removing the borrow and dereferencing instead + | +LL - _ = &Box::new(&Box::new(&42)) == 42; +LL + _ = ****Box::new(&Box::new(&42)) == 42; + | + +error[E0277]: can't compare `{integer}` with `&&{integer}` + --> $DIR/binary-op-suggest-deref.rs:31:11 + | +LL | _ = 0 == foo; + | ^^ no implementation for `{integer} == &&{integer}` + | + = help: the trait `PartialEq<&&{integer}>` is not implemented for `{integer}` +help: consider dereferencing here + | +LL | _ = 0 == **foo; + | ++ + +error[E0277]: can't compare `{integer}` 
with `&{integer}` + --> $DIR/binary-op-suggest-deref.rs:33:12 + | +LL | _ = &0 == foo; + | ^^ no implementation for `{integer} == &{integer}` + | + = help: the trait `PartialEq<&{integer}>` is not implemented for `{integer}` + = note: required for `&{integer}` to implement `PartialEq<&&{integer}>` +help: consider dereferencing here + | +LL | _ = &0 == *foo; + | + + +error[E0277]: can't compare `{integer}` with `&&&&&&{integer}` + --> $DIR/binary-op-suggest-deref.rs:35:11 + | +LL | _ = 0 == &&&&foo; + | ^^ no implementation for `{integer} == &&&&&&{integer}` + | + = help: the trait `PartialEq<&&&&&&{integer}>` is not implemented for `{integer}` +help: consider removing the borrows and dereferencing instead + | +LL - _ = 0 == &&&&foo; +LL + _ = 0 == **foo; + | + +error[E0277]: can't compare `{integer}` with `&{integer}` + --> $DIR/binary-op-suggest-deref.rs:37:11 + | +LL | _ = 0 == *foo; + | ^^ no implementation for `{integer} == &{integer}` + | + = help: the trait `PartialEq<&{integer}>` is not implemented for `{integer}` +help: consider dereferencing here + | +LL | _ = 0 == **foo; + | + + +error[E0277]: can't compare `{integer}` with `&&{integer}` + --> $DIR/binary-op-suggest-deref.rs:39:13 + | +LL | _ = &&0 == &&foo; + | ^^ no implementation for `{integer} == &&{integer}` + | + = help: the trait `PartialEq<&&{integer}>` is not implemented for `{integer}` + = note: required for `&{integer}` to implement `PartialEq<&&&{integer}>` + = note: 1 redundant requirement hidden + = note: required for `&&{integer}` to implement `PartialEq<&&&&{integer}>` +help: consider removing the borrows + | +LL - _ = &&0 == &&foo; +LL + _ = &&0 == foo; + | + +error[E0277]: can't compare `Box>` with `&&{integer}` + --> $DIR/binary-op-suggest-deref.rs:43:33 + | +LL | _ = &Box::new(Box::new(42)) == &foo; + | ^^ no implementation for `Box> == &&{integer}` + | + = help: the trait `PartialEq<&&{integer}>` is not implemented for `Box>` + = note: required for `&Box>` to implement `PartialEq<&&&{integer}>` +help: consider dereferencing both sides of the expression + | +LL - _ = &Box::new(Box::new(42)) == &foo; +LL + _ = **Box::new(Box::new(42)) == **foo; + | + +error[E0277]: can't compare `Box<{integer}>` with `&&{integer}` + --> $DIR/binary-op-suggest-deref.rs:45:23 + | +LL | _ = &Box::new(42) == &foo; + | ^^ no implementation for `Box<{integer}> == &&{integer}` + | + = help: the trait `PartialEq<&&{integer}>` is not implemented for `Box<{integer}>` + = note: required for `&Box<{integer}>` to implement `PartialEq<&&&{integer}>` +help: consider dereferencing both sides of the expression + | +LL - _ = &Box::new(42) == &foo; +LL + _ = *Box::new(42) == **foo; + | + +error[E0277]: can't compare `Box>>>` with `&&{integer}` + --> $DIR/binary-op-suggest-deref.rs:47:53 + | +LL | _ = &Box::new(Box::new(Box::new(Box::new(42)))) == &foo; + | ^^ no implementation for `Box>>> == &&{integer}` + | + = help: the trait `PartialEq<&&{integer}>` is not implemented for `Box>>>` + = note: required for `&Box>>>` to implement `PartialEq<&&&{integer}>` +help: consider dereferencing both sides of the expression + | +LL - _ = &Box::new(Box::new(Box::new(Box::new(42)))) == &foo; +LL + _ = ****Box::new(Box::new(Box::new(Box::new(42)))) == **foo; + | + +error[E0277]: can't compare `&&{integer}` with `Box>>>` + --> $DIR/binary-op-suggest-deref.rs:49:14 + | +LL | _ = &foo == &Box::new(Box::new(Box::new(Box::new(42)))); + | ^^ no implementation for `&&{integer} == Box>>>` + | + = help: the trait `PartialEq>>>>` is not implemented for `&&{integer}` + = 
note: required for `&&&{integer}` to implement `PartialEq<&Box>>>>` +help: consider dereferencing both sides of the expression + | +LL - _ = &foo == &Box::new(Box::new(Box::new(Box::new(42)))); +LL + _ = **foo == ****Box::new(Box::new(Box::new(Box::new(42)))); + | + +error[E0308]: mismatched types + --> $DIR/binary-op-suggest-deref.rs:53:25 + | +LL | _ = Box::new(42) == 42; + | ------------ ^^ expected `Box<{integer}>`, found integer + | | + | expected because this is `Box<{integer}>` + | + = note: expected struct `Box<{integer}>` + found type `{integer}` + = note: for more on the distinction between the stack and the heap, read https://doc.rust-lang.org/book/ch15-01-box.html, https://doc.rust-lang.org/rust-by-example/std/box.html, and https://doc.rust-lang.org/std/boxed/index.html +help: store this in the heap by calling `Box::new` + | +LL | _ = Box::new(42) == Box::new(42); + | +++++++++ + + +error[E0277]: can't compare `&&{integer}` with `Foo` + --> $DIR/binary-op-suggest-deref.rs:58:13 + | +LL | _ = &&0 == Foo; + | ^^ no implementation for `&&{integer} == Foo` + | + = help: the trait `PartialEq` is not implemented for `&&{integer}` + = help: the following other types implement trait `PartialEq`: + isize + i8 + i16 + i32 + i64 + i128 + usize + u8 + and 6 others + +error[E0369]: binary operation `==` cannot be applied to type `Foo` + --> $DIR/binary-op-suggest-deref.rs:60:13 + | +LL | _ = Foo == &&0; + | --- ^^ --- &&{integer} + | | + | Foo + | +note: an implementation of `PartialEq<&&{integer}>` might be missing for `Foo` + --> $DIR/binary-op-suggest-deref.rs:57:5 + | +LL | struct Foo; + | ^^^^^^^^^^ must implement `PartialEq<&&{integer}>` + +error[E0277]: can't compare `&String` with `str` + --> $DIR/binary-op-suggest-deref.rs:69:20 + | +LL | _ = string_ref == partial[..3]; + | ^^ no implementation for `&String == str` + | + = help: the trait `PartialEq` is not implemented for `&String` +help: consider dereferencing here + | +LL | _ = *string_ref == partial[..3]; + | + + +error[E0277]: can't compare `str` with `&String` + --> $DIR/binary-op-suggest-deref.rs:71:22 + | +LL | _ = partial[..3] == string_ref; + | ^^ no implementation for `str == &String` + | + = help: the trait `PartialEq<&String>` is not implemented for `str` +help: consider dereferencing here + | +LL | _ = partial[..3] == *string_ref; + | + + +error: aborting due to 22 previous errors -For more information about this error, try `rustc --explain E0308`. +Some errors have detailed explanations: E0277, E0308, E0369. +For more information about an error, try `rustc --explain E0277`. 
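A minimal sketch of what the expanded binary-op-suggest-deref cases above exercise (not part of the patch): each case is a `PartialEq` mismatch between a reference chain and a value, and the diagnostics suggest adding `*` or dropping `&` until both sides have the same type. Written the way the suggestions recommend, the comparisons compile and run; the variable names mirror the test but are otherwise arbitrary.

fn main() {
    // `foo == 0` is rejected: `&&{integer}` has no `PartialEq<{integer}>` impl.
    // Dereferencing twice, as the suggestion says, compares the integers themselves.
    let foo = &&0;
    assert!(**foo == 0);

    // Same mismatch from the right-hand side; one `*` makes both sides `&{integer}`.
    assert!(&0 == *foo);

    // Smart pointers: compare the pointee, not the handle.
    let boxed = Box::new(42);
    assert!(*boxed == 42);

    // `&String` vs `str`: dereference the `&String` so `String: PartialEq<str>` applies.
    let owned = "foo".to_owned();
    let string_ref = &owned;
    let partial = "foobar";
    assert!(*string_ref == partial[..3]);
}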
diff --git a/tests/ui/cast/cast-to-slice.rs b/tests/ui/cast/cast-to-slice.rs new file mode 100644 index 0000000000000..a6c784a3d4777 --- /dev/null +++ b/tests/ui/cast/cast-to-slice.rs @@ -0,0 +1,8 @@ +fn main() { + "example".as_bytes() as [char]; + //~^ ERROR cast to unsized type + + let arr: &[u8] = &[0, 2, 3]; + arr as [char]; + //~^ ERROR cast to unsized type +} diff --git a/tests/ui/cast/cast-to-slice.stderr b/tests/ui/cast/cast-to-slice.stderr new file mode 100644 index 0000000000000..8f862c0001401 --- /dev/null +++ b/tests/ui/cast/cast-to-slice.stderr @@ -0,0 +1,19 @@ +error[E0620]: cast to unsized type: `&[u8]` as `[char]` + --> $DIR/cast-to-slice.rs:2:5 + | +LL | "example".as_bytes() as [char]; + | ^^^^^^^^^^^^^^^^^^^^^^^^------ + | | + | help: try casting to a reference instead: `&[char]` + +error[E0620]: cast to unsized type: `&[u8]` as `[char]` + --> $DIR/cast-to-slice.rs:6:5 + | +LL | arr as [char]; + | ^^^^^^^------ + | | + | help: try casting to a reference instead: `&[char]` + +error: aborting due to 2 previous errors + +For more information about this error, try `rustc --explain E0620`. diff --git a/tests/ui/check-cfg/cargo-feature.none.stderr b/tests/ui/check-cfg/cargo-feature.none.stderr index 44c8f7e30728e..9a30842948478 100644 --- a/tests/ui/check-cfg/cargo-feature.none.stderr +++ b/tests/ui/check-cfg/cargo-feature.none.stderr @@ -8,9 +8,18 @@ LL | #[cfg(feature = "serde")] = note: see for more information about checking conditional configuration = note: `#[warn(unexpected_cfgs)]` on by default -warning: unexpected `cfg` condition name: `tokio_unstable` +warning: unexpected `cfg` condition name: `feature` --> $DIR/cargo-feature.rs:18:7 | +LL | #[cfg(feature)] + | ^^^^^^^ + | + = help: consider defining some features in `Cargo.toml` + = note: see for more information about checking conditional configuration + +warning: unexpected `cfg` condition name: `tokio_unstable` + --> $DIR/cargo-feature.rs:23:7 + | LL | #[cfg(tokio_unstable)] | ^^^^^^^^^^^^^^ | @@ -19,7 +28,7 @@ LL | #[cfg(tokio_unstable)] = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition name: `CONFIG_NVME` - --> $DIR/cargo-feature.rs:22:7 + --> $DIR/cargo-feature.rs:27:7 | LL | #[cfg(CONFIG_NVME = "m")] | ^^^^^^^^^^^^^^^^^ @@ -27,5 +36,5 @@ LL | #[cfg(CONFIG_NVME = "m")] = help: consider using a Cargo feature instead or adding `println!("cargo:rustc-check-cfg=cfg(CONFIG_NVME, values(\"m\"))");` to the top of a `build.rs` = note: see for more information about checking conditional configuration -warning: 3 warnings emitted +warning: 4 warnings emitted diff --git a/tests/ui/check-cfg/cargo-feature.rs b/tests/ui/check-cfg/cargo-feature.rs index fe343d0a678cd..f2fd0fd6420f6 100644 --- a/tests/ui/check-cfg/cargo-feature.rs +++ b/tests/ui/check-cfg/cargo-feature.rs @@ -15,6 +15,11 @@ //[some]~^^ WARNING unexpected `cfg` condition value fn ser() {} +#[cfg(feature)] +//[none]~^ WARNING unexpected `cfg` condition name +//[some]~^^ WARNING unexpected `cfg` condition value +fn feat() {} + #[cfg(tokio_unstable)] //~^ WARNING unexpected `cfg` condition name fn tokio() {} diff --git a/tests/ui/check-cfg/cargo-feature.some.stderr b/tests/ui/check-cfg/cargo-feature.some.stderr index 92d63d0153487..fc6951b56174a 100644 --- a/tests/ui/check-cfg/cargo-feature.some.stderr +++ b/tests/ui/check-cfg/cargo-feature.some.stderr @@ -9,9 +9,18 @@ LL | #[cfg(feature = "serde")] = note: see for more information about checking conditional configuration = note: 
`#[warn(unexpected_cfgs)]` on by default -warning: unexpected `cfg` condition name: `tokio_unstable` +warning: unexpected `cfg` condition value: (none) --> $DIR/cargo-feature.rs:18:7 | +LL | #[cfg(feature)] + | ^^^^^^^- help: specify a config value: `= "bitcode"` + | + = note: expected values for `feature` are: `bitcode` + = note: see for more information about checking conditional configuration + +warning: unexpected `cfg` condition name: `tokio_unstable` + --> $DIR/cargo-feature.rs:23:7 + | LL | #[cfg(tokio_unstable)] | ^^^^^^^^^^^^^^ | @@ -20,7 +29,7 @@ LL | #[cfg(tokio_unstable)] = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `m` - --> $DIR/cargo-feature.rs:22:7 + --> $DIR/cargo-feature.rs:27:7 | LL | #[cfg(CONFIG_NVME = "m")] | ^^^^^^^^^^^^^^--- @@ -31,5 +40,5 @@ LL | #[cfg(CONFIG_NVME = "m")] = help: consider using a Cargo feature instead or adding `println!("cargo:rustc-check-cfg=cfg(CONFIG_NVME, values(\"m\"))");` to the top of a `build.rs` = note: see for more information about checking conditional configuration -warning: 3 warnings emitted +warning: 4 warnings emitted diff --git a/tests/ui/closures/issue-112547.rs b/tests/ui/closures/issue-112547.rs deleted file mode 100644 index 8ecb2abccd4f9..0000000000000 --- a/tests/ui/closures/issue-112547.rs +++ /dev/null @@ -1,15 +0,0 @@ -#![feature(non_lifetime_binders)] - //~^ WARNING the feature `non_lifetime_binders` is incomplete and may not be safe to use and/or cause compiler crashes - -pub fn bar() -where - for V: IntoIterator -//~^ ERROR cannot find type `V` in this scope [E0412] -{ -} - -fn main() { - bar(); -} diff --git a/tests/ui/closures/print/closure-print-generic-trim-off-verbose-2.rs b/tests/ui/closures/print/closure-print-generic-trim-off-verbose-2.rs index 07bf8fe4c0076..b6c7659bc724c 100644 --- a/tests/ui/closures/print/closure-print-generic-trim-off-verbose-2.rs +++ b/tests/ui/closures/print/closure-print-generic-trim-off-verbose-2.rs @@ -1,4 +1,4 @@ -// compile-flags: -Ztrim-diagnostic-paths=off -Zverbose +// compile-flags: -Ztrim-diagnostic-paths=off -Zverbose-internals mod mod1 { pub fn f(t: T) diff --git a/tests/ui/closures/print/closure-print-generic-verbose-1.rs b/tests/ui/closures/print/closure-print-generic-verbose-1.rs index 67d37f1c59b40..6c631fabaa25d 100644 --- a/tests/ui/closures/print/closure-print-generic-verbose-1.rs +++ b/tests/ui/closures/print/closure-print-generic-verbose-1.rs @@ -1,4 +1,4 @@ -// compile-flags: -Zverbose +// compile-flags: -Zverbose-internals fn to_fn_once(f: F) -> F { f } diff --git a/tests/ui/closures/print/closure-print-generic-verbose-2.rs b/tests/ui/closures/print/closure-print-generic-verbose-2.rs index f460fedffb7fb..dcf7fb2865ccc 100644 --- a/tests/ui/closures/print/closure-print-generic-verbose-2.rs +++ b/tests/ui/closures/print/closure-print-generic-verbose-2.rs @@ -1,4 +1,4 @@ -// compile-flags: -Zverbose +// compile-flags: -Zverbose-internals mod mod1 { pub fn f(t: T) diff --git a/tests/ui/closures/print/closure-print-verbose.rs b/tests/ui/closures/print/closure-print-verbose.rs index 4b0438a91ed2e..76fe5471a601e 100644 --- a/tests/ui/closures/print/closure-print-verbose.rs +++ b/tests/ui/closures/print/closure-print-verbose.rs @@ -1,4 +1,4 @@ -// compile-flags: -Zverbose +// compile-flags: -Zverbose-internals // Same as closure-coerce-fn-1.rs diff --git a/tests/ui/const-generics/issues/issue-83765.rs b/tests/ui/const-generics/issues/issue-83765.rs index 71c164ab0a5bf..0959f771c22ed 100644 --- 
a/tests/ui/const-generics/issues/issue-83765.rs +++ b/tests/ui/const-generics/issues/issue-83765.rs @@ -4,6 +4,7 @@ trait TensorDimension { const DIM: usize; //~^ ERROR cycle detected when resolving instance + //~| ERROR cycle detected when resolving instance // FIXME Given the current state of the compiler its expected that we cycle here, // but the cycle is still wrong. const ISSCALAR: bool = Self::DIM == 0; @@ -79,6 +80,7 @@ impl<'a, R, T: Broadcastable, F: Fn(T::Element) -> R, const DIM: usize> TensorSi for BMap<'a, R, T, F, DIM> { fn size(&self) -> [usize; DIM] { + //~^ ERROR: method not compatible with trait self.reference.size() } } @@ -88,6 +90,7 @@ impl<'a, R, T: Broadcastable, F: Fn(T::Element) -> R, const DIM: usize> Broadcas { type Element = R; fn bget(&self, index: [usize; DIM]) -> Option { + //~^ ERROR: method not compatible with trait self.reference.bget(index).map(&self.closure) } } diff --git a/tests/ui/const-generics/issues/issue-83765.stderr b/tests/ui/const-generics/issues/issue-83765.stderr index d9956875cf888..c3292314f23b3 100644 --- a/tests/ui/const-generics/issues/issue-83765.stderr +++ b/tests/ui/const-generics/issues/issue-83765.stderr @@ -17,6 +17,44 @@ LL | trait TensorDimension { | ^^^^^^^^^^^^^^^^^^^^^ = note: see https://rustc-dev-guide.rust-lang.org/overview.html#queries and https://rustc-dev-guide.rust-lang.org/query.html for more information -error: aborting due to 1 previous error +error[E0391]: cycle detected when resolving instance `::DIM, DIM> as TensorDimension>::DIM` + --> $DIR/issue-83765.rs:5:5 + | +LL | const DIM: usize; + | ^^^^^^^^^^^^^^^^ + | +note: ...which requires computing candidate for `::DIM, DIM> as TensorDimension>`... + --> $DIR/issue-83765.rs:4:1 + | +LL | trait TensorDimension { + | ^^^^^^^^^^^^^^^^^^^^^ + = note: ...which again requires resolving instance `::DIM, DIM> as TensorDimension>::DIM`, completing the cycle +note: cycle used when checking that `` is well-formed + --> $DIR/issue-83765.rs:56:1 + | +LL | impl<'a, T: Broadcastable, const DIM: usize> Broadcastable for LazyUpdim<'a, T, { T::DIM }, DIM> { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: see https://rustc-dev-guide.rust-lang.org/overview.html#queries and https://rustc-dev-guide.rust-lang.org/query.html for more information + +error[E0308]: method not compatible with trait + --> $DIR/issue-83765.rs:82:5 + | +LL | fn size(&self) -> [usize; DIM] { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `Self::DIM`, found `DIM` + | + = note: expected constant `Self::DIM` + found constant `DIM` + +error[E0308]: method not compatible with trait + --> $DIR/issue-83765.rs:92:5 + | +LL | fn bget(&self, index: [usize; DIM]) -> Option { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `Self::DIM`, found `DIM` + | + = note: expected constant `Self::DIM` + found constant `DIM` + +error: aborting due to 4 previous errors -For more information about this error, try `rustc --explain E0391`. +Some errors have detailed explanations: E0308, E0391. +For more information about an error, try `rustc --explain E0308`. 
diff --git a/tests/ui/consts/const-assert-unchecked-ub.rs b/tests/ui/consts/const-assert-unchecked-ub.rs new file mode 100644 index 0000000000000..5c05b813048b8 --- /dev/null +++ b/tests/ui/consts/const-assert-unchecked-ub.rs @@ -0,0 +1,10 @@ +#![feature(hint_assert_unchecked)] +#![feature(const_hint_assert_unchecked)] + +const _: () = unsafe { + let n = u32::MAX.count_ones(); + std::hint::assert_unchecked(n < 32); //~ ERROR evaluation of constant value failed +}; + +fn main() { +} diff --git a/tests/ui/consts/const-assert-unchecked-ub.stderr b/tests/ui/consts/const-assert-unchecked-ub.stderr new file mode 100644 index 0000000000000..3957a3b1c246b --- /dev/null +++ b/tests/ui/consts/const-assert-unchecked-ub.stderr @@ -0,0 +1,9 @@ +error[E0080]: evaluation of constant value failed + --> $DIR/const-assert-unchecked-ub.rs:6:5 + | +LL | std::hint::assert_unchecked(n < 32); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `assume` called with `false` + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0080`. diff --git a/tests/ui/consts/const-eval/issue-50814-2.mir-opt.stderr b/tests/ui/consts/const-eval/issue-50814-2.mir-opt.stderr index f3ec3200f9465..7e764ca72390d 100644 --- a/tests/ui/consts/const-eval/issue-50814-2.mir-opt.stderr +++ b/tests/ui/consts/const-eval/issue-50814-2.mir-opt.stderr @@ -10,12 +10,6 @@ note: erroneous constant encountered LL | & as Foo>::BAR | ^^^^^^^^^^^^^^^^^^^^^ -note: erroneous constant encountered - --> $DIR/issue-50814-2.rs:20:5 - | -LL | & as Foo>::BAR - | ^^^^^^^^^^^^^^^^^^^^^^ - error: aborting due to 1 previous error For more information about this error, try `rustc --explain E0080`. diff --git a/tests/ui/consts/const-fn-mismatch.rs b/tests/ui/consts/const-fn-mismatch.rs index 3107b8128e602..b17e4cedd3ff6 100644 --- a/tests/ui/consts/const-fn-mismatch.rs +++ b/tests/ui/consts/const-fn-mismatch.rs @@ -9,7 +9,7 @@ trait Foo { impl Foo for u32 { const fn f() -> u32 { - //~^ ERROR functions in traits cannot be declared const + //~^ ERROR functions in trait impls cannot be declared const 22 } } diff --git a/tests/ui/consts/const-fn-mismatch.stderr b/tests/ui/consts/const-fn-mismatch.stderr index beaf52c0cfb36..9e7d93b0c97bb 100644 --- a/tests/ui/consts/const-fn-mismatch.stderr +++ b/tests/ui/consts/const-fn-mismatch.stderr @@ -1,8 +1,11 @@ -error[E0379]: functions in traits cannot be declared const +error[E0379]: functions in trait impls cannot be declared const --> $DIR/const-fn-mismatch.rs:11:5 | LL | const fn f() -> u32 { - | ^^^^^ functions in traits cannot be const + | ^^^^^- + | | + | functions in trait impls cannot be const + | help: remove the `const` error: aborting due to 1 previous error diff --git a/tests/ui/consts/const-fn-not-in-trait.stderr b/tests/ui/consts/const-fn-not-in-trait.stderr index 5d364eb882dba..04430610ad00c 100644 --- a/tests/ui/consts/const-fn-not-in-trait.stderr +++ b/tests/ui/consts/const-fn-not-in-trait.stderr @@ -2,13 +2,19 @@ error[E0379]: functions in traits cannot be declared const --> $DIR/const-fn-not-in-trait.rs:5:5 | LL | const fn f() -> u32; - | ^^^^^ functions in traits cannot be const + | ^^^^^- + | | + | functions in traits cannot be const + | help: remove the `const` error[E0379]: functions in traits cannot be declared const --> $DIR/const-fn-not-in-trait.rs:7:5 | LL | const fn g() -> u32 { - | ^^^^^ functions in traits cannot be const + | ^^^^^- + | | + | functions in traits cannot be const + | help: remove the `const` error: aborting due to 2 previous errors diff 
--git a/tests/ui/consts/const-unsized.stderr b/tests/ui/consts/const-unsized.stderr index 674f0cb99e776..f70c9b2e0774f 100644 --- a/tests/ui/consts/const-unsized.stderr +++ b/tests/ui/consts/const-unsized.stderr @@ -6,39 +6,23 @@ LL | const CONST_0: dyn Debug + Sync = *(&0 as &(dyn Debug + Sync)); | = help: the trait `Sized` is not implemented for `(dyn Debug + Sync + 'static)` -error[E0277]: the size for values of type `str` cannot be known at compilation time - --> $DIR/const-unsized.rs:7:18 - | -LL | const CONST_FOO: str = *"foo"; - | ^^^ doesn't have a size known at compile-time - | - = help: the trait `Sized` is not implemented for `str` - error[E0277]: the size for values of type `(dyn Debug + Sync + 'static)` cannot be known at compilation time - --> $DIR/const-unsized.rs:11:18 + --> $DIR/const-unsized.rs:3:35 | -LL | static STATIC_1: dyn Debug + Sync = *(&1 as &(dyn Debug + Sync)); - | ^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time +LL | const CONST_0: dyn Debug + Sync = *(&0 as &(dyn Debug + Sync)); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time | = help: the trait `Sized` is not implemented for `(dyn Debug + Sync + 'static)` + = note: constant expressions must have a statically known size error[E0277]: the size for values of type `str` cannot be known at compilation time - --> $DIR/const-unsized.rs:15:20 + --> $DIR/const-unsized.rs:7:18 | -LL | static STATIC_BAR: str = *"bar"; - | ^^^ doesn't have a size known at compile-time +LL | const CONST_FOO: str = *"foo"; + | ^^^ doesn't have a size known at compile-time | = help: the trait `Sized` is not implemented for `str` -error[E0277]: the size for values of type `(dyn Debug + Sync + 'static)` cannot be known at compilation time - --> $DIR/const-unsized.rs:3:35 - | -LL | const CONST_0: dyn Debug + Sync = *(&0 as &(dyn Debug + Sync)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time - | - = help: the trait `Sized` is not implemented for `(dyn Debug + Sync + 'static)` - = note: constant expressions must have a statically known size - error[E0277]: the size for values of type `str` cannot be known at compilation time --> $DIR/const-unsized.rs:7:24 | @@ -48,6 +32,14 @@ LL | const CONST_FOO: str = *"foo"; = help: the trait `Sized` is not implemented for `str` = note: constant expressions must have a statically known size +error[E0277]: the size for values of type `(dyn Debug + Sync + 'static)` cannot be known at compilation time + --> $DIR/const-unsized.rs:11:18 + | +LL | static STATIC_1: dyn Debug + Sync = *(&1 as &(dyn Debug + Sync)); + | ^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `(dyn Debug + Sync + 'static)` + error[E0277]: the size for values of type `(dyn Debug + Sync + 'static)` cannot be known at compilation time --> $DIR/const-unsized.rs:11:37 | @@ -57,6 +49,14 @@ LL | static STATIC_1: dyn Debug + Sync = *(&1 as &(dyn Debug + Sync)); = help: the trait `Sized` is not implemented for `(dyn Debug + Sync + 'static)` = note: constant expressions must have a statically known size +error[E0277]: the size for values of type `str` cannot be known at compilation time + --> $DIR/const-unsized.rs:15:20 + | +LL | static STATIC_BAR: str = *"bar"; + | ^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `str` + error[E0277]: the size for values of type `str` cannot be known at compilation time --> $DIR/const-unsized.rs:15:26 | diff --git 
a/tests/ui/consts/fn_trait_refs.stderr b/tests/ui/consts/fn_trait_refs.stderr index e5ebe1d852861..e6ea4108f4045 100644 --- a/tests/ui/consts/fn_trait_refs.stderr +++ b/tests/ui/consts/fn_trait_refs.stderr @@ -4,13 +4,13 @@ error[E0635]: unknown feature `const_fn_trait_ref_impls` LL | #![feature(const_fn_trait_ref_impls)] | ^^^^^^^^^^^^^^^^^^^^^^^^ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/fn_trait_refs.rs:15:15 | LL | T: ~const Fn<()> + ~const Destruct, | ^^^^^^ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/fn_trait_refs.rs:15:15 | LL | T: ~const Fn<()> + ~const Destruct, @@ -18,13 +18,13 @@ LL | T: ~const Fn<()> + ~const Destruct, | = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/fn_trait_refs.rs:22:15 | LL | T: ~const FnMut<()> + ~const Destruct, | ^^^^^^^^^ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/fn_trait_refs.rs:22:15 | LL | T: ~const FnMut<()> + ~const Destruct, @@ -32,13 +32,13 @@ LL | T: ~const FnMut<()> + ~const Destruct, | = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/fn_trait_refs.rs:29:15 | LL | T: ~const FnOnce<()>, | ^^^^^^^^^^ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/fn_trait_refs.rs:29:15 | LL | T: ~const FnOnce<()>, @@ -46,13 +46,13 @@ LL | T: ~const FnOnce<()>, | = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/fn_trait_refs.rs:36:15 | LL | T: ~const Fn<()> + ~const Destruct, | ^^^^^^ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/fn_trait_refs.rs:36:15 | LL | T: ~const Fn<()> + ~const Destruct, @@ -60,13 +60,13 @@ LL | T: ~const Fn<()> + ~const Destruct, | = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/fn_trait_refs.rs:50:15 | LL | T: ~const FnMut<()> + ~const Destruct, | ^^^^^^^^^ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/fn_trait_refs.rs:50:15 | LL | T: ~const FnMut<()> + ~const Destruct, diff --git a/tests/ui/consts/issue-39974.stderr b/tests/ui/consts/issue-39974.stderr index 4bde599039eb3..114c4cfeaf7c0 100644 --- a/tests/ui/consts/issue-39974.stderr +++ b/tests/ui/consts/issue-39974.stderr @@ -1,9 +1,3 @@ -error[E0308]: mismatched types - --> $DIR/issue-39974.rs:5:19 - | -LL | f: [[f64; 2]; LENGTH], - | ^^^^^^ expected `usize`, found `f64` - error[E0308]: mismatched types --> $DIR/issue-39974.rs:1:21 | @@ -13,6 +7,12 @@ LL | const LENGTH: f64 = 2; | expected `f64`, found integer | help: use a float literal: `2.0` +error[E0308]: mismatched types + --> $DIR/issue-39974.rs:5:19 + | +LL 
| f: [[f64; 2]; LENGTH], + | ^^^^^^ expected `usize`, found `f64` + error: aborting due to 2 previous errors For more information about this error, try `rustc --explain E0308`. diff --git a/tests/ui/consts/issue-54954.stderr b/tests/ui/consts/issue-54954.stderr index b0701bab793c5..03c47030c0e3f 100644 --- a/tests/ui/consts/issue-54954.stderr +++ b/tests/ui/consts/issue-54954.stderr @@ -2,7 +2,10 @@ error[E0379]: functions in traits cannot be declared const --> $DIR/issue-54954.rs:5:5 | LL | const fn const_val() -> usize { - | ^^^^^ functions in traits cannot be const + | ^^^^^- + | | + | functions in traits cannot be const + | help: remove the `const` error[E0790]: cannot call associated function on trait without specifying the corresponding `impl` type --> $DIR/issue-54954.rs:1:24 diff --git a/tests/ui/consts/unstable-const-fn-in-libcore.stderr b/tests/ui/consts/unstable-const-fn-in-libcore.stderr index 4b649bf43ed3d..08147a4afaf37 100644 --- a/tests/ui/consts/unstable-const-fn-in-libcore.stderr +++ b/tests/ui/consts/unstable-const-fn-in-libcore.stderr @@ -1,4 +1,4 @@ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/unstable-const-fn-in-libcore.rs:19:39 | LL | const fn unwrap_or_else T>(self, f: F) -> T { diff --git a/tests/ui/coroutine/gen_block.e2024.stderr b/tests/ui/coroutine/gen_block.e2024.stderr index f250e2f79c77d..e32f80dafa0c1 100644 --- a/tests/ui/coroutine/gen_block.e2024.stderr +++ b/tests/ui/coroutine/gen_block.e2024.stderr @@ -8,10 +8,10 @@ LL | let _ = || yield true; = help: add `#![feature(coroutines)]` to the crate attributes to enable error[E0282]: type annotations needed - --> $DIR/gen_block.rs:6:17 + --> $DIR/gen_block.rs:6:13 | LL | let x = gen {}; - | ^^ cannot infer type + | ^^^^^^ cannot infer type error: aborting due to 2 previous errors diff --git a/tests/ui/coroutine/print/coroutine-print-verbose-1.rs b/tests/ui/coroutine/print/coroutine-print-verbose-1.rs index c47d7572ca77b..f0094aa694bb5 100644 --- a/tests/ui/coroutine/print/coroutine-print-verbose-1.rs +++ b/tests/ui/coroutine/print/coroutine-print-verbose-1.rs @@ -1,4 +1,4 @@ -// compile-flags: -Zverbose +// compile-flags: -Zverbose-internals // Same as: tests/ui/coroutine/issue-68112.stderr diff --git a/tests/ui/coroutine/print/coroutine-print-verbose-2.rs b/tests/ui/coroutine/print/coroutine-print-verbose-2.rs index c65c33cb4bacb..390bfc542b7ec 100644 --- a/tests/ui/coroutine/print/coroutine-print-verbose-2.rs +++ b/tests/ui/coroutine/print/coroutine-print-verbose-2.rs @@ -1,4 +1,4 @@ -// compile-flags: -Zverbose +// compile-flags: -Zverbose-internals // Same as test/ui/coroutine/not-send-sync.rs #![feature(coroutines)] diff --git a/tests/ui/coroutine/print/coroutine-print-verbose-3.rs b/tests/ui/coroutine/print/coroutine-print-verbose-3.rs index 3e4bb6281768a..49b54a4cd5b06 100644 --- a/tests/ui/coroutine/print/coroutine-print-verbose-3.rs +++ b/tests/ui/coroutine/print/coroutine-print-verbose-3.rs @@ -1,4 +1,4 @@ -// compile-flags: -Zverbose +// compile-flags: -Zverbose-internals #![feature(coroutines, coroutine_trait)] diff --git a/tests/ui/coroutine/sized-yield.stderr b/tests/ui/coroutine/sized-yield.stderr index 40663ac12de46..bbecaffa95a12 100644 --- a/tests/ui/coroutine/sized-yield.stderr +++ b/tests/ui/coroutine/sized-yield.stderr @@ -1,8 +1,8 @@ error[E0277]: the size for values of type `str` cannot be known at compilation time - --> $DIR/sized-yield.rs:8:27 + --> $DIR/sized-yield.rs:8:19 | LL | let mut gen = 
move || { - | ___________________________^ + | ___________________^ LL | | LL | | yield s[..]; LL | | }; diff --git a/tests/ui/diagnostic-width/flag-json.rs b/tests/ui/diagnostic-width/flag-json.rs index 51a1fb447c7d2..820f1a049e1f7 100644 --- a/tests/ui/diagnostic-width/flag-json.rs +++ b/tests/ui/diagnostic-width/flag-json.rs @@ -1,9 +1,9 @@ // compile-flags: --diagnostic-width=20 --error-format=json +// error-pattern:expected `()`, found integer // This test checks that `-Z output-width` effects the JSON error output by restricting it to an // arbitrarily low value so that the effect is visible. fn main() { let _: () = 42; - //~^ ERROR arguments to this function are incorrect } diff --git a/tests/ui/diagnostic-width/flag-json.stderr b/tests/ui/diagnostic-width/flag-json.stderr index f3bf4f97942c8..0a4b54ebc8565 100644 --- a/tests/ui/diagnostic-width/flag-json.stderr +++ b/tests/ui/diagnostic-width/flag-json.stderr @@ -24,8 +24,8 @@ This error occurs when an expression was used in a place where the compiler expected an expression of a different type. It can occur in several cases, the most common being when calling a function and passing an argument which has a different type than the matching type in the function declaration. -"},"level":"error","spans":[{"file_name":"$DIR/flag-json.rs","byte_start":243,"byte_end":245,"line_start":7,"line_end":7,"column_start":17,"column_end":19,"is_primary":true,"text":[{"text":" let _: () = 42;","highlight_start":17,"highlight_end":19}],"label":"expected `()`, found integer","suggested_replacement":null,"suggestion_applicability":null,"expansion":null},{"file_name":"$DIR/flag-json.rs","byte_start":238,"byte_end":240,"line_start":7,"line_end":7,"column_start":12,"column_end":14,"is_primary":false,"text":[{"text":" let _: () = 42;","highlight_start":12,"highlight_end":14}],"label":"expected due to this","suggested_replacement":null,"suggestion_applicability":null,"expansion":null}],"children":[],"rendered":"error[E0308]: mismatched types - --> $DIR/flag-json.rs:7:17 +"},"level":"error","spans":[{"file_name":"$DIR/flag-json.rs","byte_start":289,"byte_end":291,"line_start":8,"line_end":8,"column_start":17,"column_end":19,"is_primary":true,"text":[{"text":" let _: () = 42;","highlight_start":17,"highlight_end":19}],"label":"expected `()`, found integer","suggested_replacement":null,"suggestion_applicability":null,"expansion":null},{"file_name":"$DIR/flag-json.rs","byte_start":284,"byte_end":286,"line_start":8,"line_end":8,"column_start":12,"column_end":14,"is_primary":false,"text":[{"text":" let _: () = 42;","highlight_start":12,"highlight_end":14}],"label":"expected due to this","suggested_replacement":null,"suggestion_applicability":null,"expansion":null}],"children":[],"rendered":"error[E0308]: mismatched types + --> $DIR/flag-json.rs:8:17 | LL | ..._: () = 42; | -- ^^ expected `()`, found integer diff --git a/tests/ui/dst/issue-113447.fixed b/tests/ui/dst/issue-113447.fixed deleted file mode 100644 index 536f680f697c7..0000000000000 --- a/tests/ui/dst/issue-113447.fixed +++ /dev/null @@ -1,25 +0,0 @@ -// run-rustfix - -pub struct Bytes; - -impl Bytes { - pub fn as_slice(&self) -> &[u8] { - todo!() - } -} - -impl PartialEq<[u8]> for Bytes { - fn eq(&self, other: &[u8]) -> bool { - self.as_slice() == other - } -} - -impl PartialEq for &[u8] { - fn eq(&self, other: &Bytes) -> bool { - *other == **self - } -} - -fn main() { - let _ = &[0u8] == &[0xAA][..]; //~ ERROR can't compare `&[u8; 1]` with `[{integer}; 1]` -} diff --git a/tests/ui/dst/issue-113447.rs 
b/tests/ui/dst/issue-113447.rs index c10a4f2ff8ec4..75156a117e996 100644 --- a/tests/ui/dst/issue-113447.rs +++ b/tests/ui/dst/issue-113447.rs @@ -1,5 +1,3 @@ -// run-rustfix - pub struct Bytes; impl Bytes { diff --git a/tests/ui/dst/issue-113447.stderr b/tests/ui/dst/issue-113447.stderr index 266eb228046a2..4d0ed33a643a8 100644 --- a/tests/ui/dst/issue-113447.stderr +++ b/tests/ui/dst/issue-113447.stderr @@ -1,24 +1,15 @@ error[E0277]: can't compare `&[u8; 1]` with `[{integer}; 1]` - --> $DIR/issue-113447.rs:24:20 + --> $DIR/issue-113447.rs:22:20 | LL | let _ = &[0u8] == [0xAA]; | ^^ no implementation for `&[u8; 1] == [{integer}; 1]` | = help: the trait `PartialEq<[{integer}; 1]>` is not implemented for `&[u8; 1]` - = help: the following other types implement trait `PartialEq`: - <[A; N] as PartialEq<[B; N]>> - <[A; N] as PartialEq<[B]>> - <[A; N] as PartialEq<&[B]>> - <[A; N] as PartialEq<&mut [B]>> - <[T] as PartialEq>> - <[A] as PartialEq<[B]>> - <[B] as PartialEq<[A; N]>> - <&[u8] as PartialEq> - and 4 others -help: convert the array to a `&[u8]` slice instead +help: consider removing the borrow + | +LL - let _ = &[0u8] == [0xAA]; +LL + let _ = [0u8] == [0xAA]; | -LL | let _ = &[0u8] == &[0xAA][..]; - | + ++++ error: aborting due to 1 previous error diff --git a/tests/ui/error-codes/E0453.rs b/tests/ui/error-codes/E0453.rs index ca9573c5b4613..8ed724dd3fc4e 100644 --- a/tests/ui/error-codes/E0453.rs +++ b/tests/ui/error-codes/E0453.rs @@ -2,6 +2,5 @@ #[allow(non_snake_case)] //~^ ERROR allow(non_snake_case) incompatible -//~| ERROR allow(non_snake_case) incompatible fn main() { } diff --git a/tests/ui/error-codes/E0453.stderr b/tests/ui/error-codes/E0453.stderr index bb2c39298c080..9a89f0d41e49c 100644 --- a/tests/ui/error-codes/E0453.stderr +++ b/tests/ui/error-codes/E0453.stderr @@ -7,17 +7,6 @@ LL | LL | #[allow(non_snake_case)] | ^^^^^^^^^^^^^^ overruled by previous forbid -error[E0453]: allow(non_snake_case) incompatible with previous forbid - --> $DIR/E0453.rs:3:9 - | -LL | #![forbid(non_snake_case)] - | -------------- `forbid` level set here -LL | -LL | #[allow(non_snake_case)] - | ^^^^^^^^^^^^^^ overruled by previous forbid - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -error: aborting due to 2 previous errors +error: aborting due to 1 previous error For more information about this error, try `rustc --explain E0453`. 
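A minimal sketch of the rule behind the E0453 test above (the patch only drops a duplicate diagnostic; the rule itself is unchanged): an inner `allow` may override a `deny`, but never a `forbid`. The overriding-`deny` form compiles cleanly:

#![deny(non_snake_case)]

// Overriding a `deny` locally is fine; the same `allow` against
// `#![forbid(non_snake_case)]` is rejected with E0453.
#[allow(non_snake_case)]
fn OkName() {}

fn main() {
    OkName();
}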
diff --git a/tests/ui/error-codes/E0620.stderr b/tests/ui/error-codes/E0620.stderr index 5bc8903624cef..644ba813c9608 100644 --- a/tests/ui/error-codes/E0620.stderr +++ b/tests/ui/error-codes/E0620.stderr @@ -2,13 +2,9 @@ error[E0620]: cast to unsized type: `&[usize; 2]` as `[usize]` --> $DIR/E0620.rs:2:16 | LL | let _foo = &[1_usize, 2] as [usize]; - | ^^^^^^^^^^^^^^^^^^^^^^^^ - | -help: consider using an implicit coercion to `&[usize]` instead - --> $DIR/E0620.rs:2:16 - | -LL | let _foo = &[1_usize, 2] as [usize]; - | ^^^^^^^^^^^^^^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^^^------- + | | + | help: try casting to a reference instead: `&[usize]` error: aborting due to 1 previous error diff --git a/tests/ui/feature-gates/feature-gate-cfg-sanitizer_cfi.rs b/tests/ui/feature-gates/feature-gate-cfg-sanitizer_cfi.rs new file mode 100644 index 0000000000000..76d96de750a62 --- /dev/null +++ b/tests/ui/feature-gates/feature-gate-cfg-sanitizer_cfi.rs @@ -0,0 +1,9 @@ +#[cfg(sanitizer_cfi_generalize_pointers)] +//~^ `cfg(sanitizer_cfi_generalize_pointers)` is experimental +fn foo() {} + +#[cfg(sanitizer_cfi_normalize_integers)] +//~^ `cfg(sanitizer_cfi_normalize_integers)` is experimental +fn bar() {} + +fn main() {} diff --git a/tests/ui/feature-gates/feature-gate-cfg-sanitizer_cfi.stderr b/tests/ui/feature-gates/feature-gate-cfg-sanitizer_cfi.stderr new file mode 100644 index 0000000000000..8c2a8411c7b4b --- /dev/null +++ b/tests/ui/feature-gates/feature-gate-cfg-sanitizer_cfi.stderr @@ -0,0 +1,21 @@ +error[E0658]: `cfg(sanitizer_cfi_generalize_pointers)` is experimental and subject to change + --> $DIR/feature-gate-cfg-sanitizer_cfi.rs:1:7 + | +LL | #[cfg(sanitizer_cfi_generalize_pointers)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: see issue #89653 for more information + = help: add `#![feature(cfg_sanitizer_cfi)]` to the crate attributes to enable + +error[E0658]: `cfg(sanitizer_cfi_normalize_integers)` is experimental and subject to change + --> $DIR/feature-gate-cfg-sanitizer_cfi.rs:5:7 + | +LL | #[cfg(sanitizer_cfi_normalize_integers)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: see issue #89653 for more information + = help: add `#![feature(cfg_sanitizer_cfi)]` to the crate attributes to enable + +error: aborting due to 2 previous errors + +For more information about this error, try `rustc --explain E0658`. 
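A minimal sketch of the gated form that the new feature-gate-cfg-sanitizer_cfi test protects, assuming a nightly toolchain; the feature and cfg names are taken from the diagnostics above, while the sanitizer flags mentioned in the comment are illustrative assumptions about how such cfgs get set.

#![feature(cfg_sanitizer_cfi)]

// Selected when the build enables CFI pointer generalization
// (e.g. -Zsanitizer=cfi -Zsanitizer-cfi-generalize-pointers);
// otherwise the fallback below is compiled instead.
#[cfg(sanitizer_cfi_generalize_pointers)]
fn pointer_identity_note() -> &'static str {
    "CFI with generalized pointer types"
}

#[cfg(not(sanitizer_cfi_generalize_pointers))]
fn pointer_identity_note() -> &'static str {
    "CFI pointer generalization not enabled"
}

fn main() {
    println!("{}", pointer_identity_note());
}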
diff --git a/tests/ui/feature-gates/feature-gate-gen_blocks.e2024.stderr b/tests/ui/feature-gates/feature-gate-gen_blocks.e2024.stderr index c582ca7ba3d58..526354f6cfbdf 100644 --- a/tests/ui/feature-gates/feature-gate-gen_blocks.e2024.stderr +++ b/tests/ui/feature-gates/feature-gate-gen_blocks.e2024.stderr @@ -35,16 +35,16 @@ LL | async gen {}; = help: add `#![feature(gen_blocks)]` to the crate attributes to enable error[E0282]: type annotations needed - --> $DIR/feature-gate-gen_blocks.rs:5:9 + --> $DIR/feature-gate-gen_blocks.rs:5:5 | LL | gen {}; - | ^^ cannot infer type + | ^^^^^^ cannot infer type error[E0282]: type annotations needed - --> $DIR/feature-gate-gen_blocks.rs:12:15 + --> $DIR/feature-gate-gen_blocks.rs:12:5 | LL | async gen {}; - | ^^ cannot infer type + | ^^^^^^^^^^^^ cannot infer type error: aborting due to 6 previous errors diff --git a/tests/ui/feature-gates/feature-gate-min_const_fn.rs b/tests/ui/feature-gates/feature-gate-min_const_fn.rs index 8f9b433009d3c..3d61a9eb93777 100644 --- a/tests/ui/feature-gates/feature-gate-min_const_fn.rs +++ b/tests/ui/feature-gates/feature-gate-min_const_fn.rs @@ -8,7 +8,7 @@ trait Foo { } impl Foo for u32 { - const fn foo() -> u32 { 0 } //~ ERROR functions in traits cannot be declared const + const fn foo() -> u32 { 0 } //~ ERROR functions in trait impls cannot be declared const } trait Bar {} diff --git a/tests/ui/feature-gates/feature-gate-min_const_fn.stderr b/tests/ui/feature-gates/feature-gate-min_const_fn.stderr index d7a58591364ed..0b16f9abb7071 100644 --- a/tests/ui/feature-gates/feature-gate-min_const_fn.stderr +++ b/tests/ui/feature-gates/feature-gate-min_const_fn.stderr @@ -2,19 +2,28 @@ error[E0379]: functions in traits cannot be declared const --> $DIR/feature-gate-min_const_fn.rs:6:5 | LL | const fn foo() -> u32; - | ^^^^^ functions in traits cannot be const + | ^^^^^- + | | + | functions in traits cannot be const + | help: remove the `const` error[E0379]: functions in traits cannot be declared const --> $DIR/feature-gate-min_const_fn.rs:7:5 | LL | const fn bar() -> u32 { 0 } - | ^^^^^ functions in traits cannot be const + | ^^^^^- + | | + | functions in traits cannot be const + | help: remove the `const` -error[E0379]: functions in traits cannot be declared const +error[E0379]: functions in trait impls cannot be declared const --> $DIR/feature-gate-min_const_fn.rs:11:5 | LL | const fn foo() -> u32 { 0 } - | ^^^^^ functions in traits cannot be const + | ^^^^^- + | | + | functions in trait impls cannot be const + | help: remove the `const` error: aborting due to 3 previous errors diff --git a/tests/ui/feature-gates/feature-gate-multiple_supertrait_upcastable.rs b/tests/ui/feature-gates/feature-gate-multiple_supertrait_upcastable.rs index 0467dea621b4c..4e296b96ca9c4 100644 --- a/tests/ui/feature-gates/feature-gate-multiple_supertrait_upcastable.rs +++ b/tests/ui/feature-gates/feature-gate-multiple_supertrait_upcastable.rs @@ -2,11 +2,7 @@ #![deny(multiple_supertrait_upcastable)] //~^ WARNING unknown lint: `multiple_supertrait_upcastable` -//~| WARNING unknown lint: `multiple_supertrait_upcastable` -//~| WARNING unknown lint: `multiple_supertrait_upcastable` #![warn(multiple_supertrait_upcastable)] //~^ WARNING unknown lint: `multiple_supertrait_upcastable` -//~| WARNING unknown lint: `multiple_supertrait_upcastable` -//~| WARNING unknown lint: `multiple_supertrait_upcastable` fn main() {} diff --git a/tests/ui/feature-gates/feature-gate-multiple_supertrait_upcastable.stderr 
b/tests/ui/feature-gates/feature-gate-multiple_supertrait_upcastable.stderr index 5e14bf6397fe6..f6fcf4ee3ed0d 100644 --- a/tests/ui/feature-gates/feature-gate-multiple_supertrait_upcastable.stderr +++ b/tests/ui/feature-gates/feature-gate-multiple_supertrait_upcastable.stderr @@ -9,7 +9,7 @@ LL | #![deny(multiple_supertrait_upcastable)] = note: `#[warn(unknown_lints)]` on by default warning: unknown lint: `multiple_supertrait_upcastable` - --> $DIR/feature-gate-multiple_supertrait_upcastable.rs:7:1 + --> $DIR/feature-gate-multiple_supertrait_upcastable.rs:5:1 | LL | #![warn(multiple_supertrait_upcastable)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -17,45 +17,5 @@ LL | #![warn(multiple_supertrait_upcastable)] = note: the `multiple_supertrait_upcastable` lint is unstable = help: add `#![feature(multiple_supertrait_upcastable)]` to the crate attributes to enable -warning: unknown lint: `multiple_supertrait_upcastable` - --> $DIR/feature-gate-multiple_supertrait_upcastable.rs:3:1 - | -LL | #![deny(multiple_supertrait_upcastable)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `multiple_supertrait_upcastable` lint is unstable - = help: add `#![feature(multiple_supertrait_upcastable)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: unknown lint: `multiple_supertrait_upcastable` - --> $DIR/feature-gate-multiple_supertrait_upcastable.rs:7:1 - | -LL | #![warn(multiple_supertrait_upcastable)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `multiple_supertrait_upcastable` lint is unstable - = help: add `#![feature(multiple_supertrait_upcastable)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: unknown lint: `multiple_supertrait_upcastable` - --> $DIR/feature-gate-multiple_supertrait_upcastable.rs:3:1 - | -LL | #![deny(multiple_supertrait_upcastable)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `multiple_supertrait_upcastable` lint is unstable - = help: add `#![feature(multiple_supertrait_upcastable)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: unknown lint: `multiple_supertrait_upcastable` - --> $DIR/feature-gate-multiple_supertrait_upcastable.rs:7:1 - | -LL | #![warn(multiple_supertrait_upcastable)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `multiple_supertrait_upcastable` lint is unstable - = help: add `#![feature(multiple_supertrait_upcastable)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: 6 warnings emitted +warning: 2 warnings emitted diff --git a/tests/ui/feature-gates/feature-gate-naked_functions.stderr b/tests/ui/feature-gates/feature-gate-naked_functions.stderr index 4378fb36367ad..dc6c9138c5d2d 100644 --- a/tests/ui/feature-gates/feature-gate-naked_functions.stderr +++ b/tests/ui/feature-gates/feature-gate-naked_functions.stderr @@ -4,7 +4,7 @@ error[E0658]: the `#[naked]` attribute is an experimental feature LL | #[naked] | ^^^^^^^^ | - = note: see issue #32408 for more information + = note: see issue #90957 for more information = help: add `#![feature(naked_functions)]` to the crate attributes to enable error[E0658]: the `#[naked]` attribute is an experimental feature @@ -13,7 +13,7 @@ error[E0658]: the `#[naked]` attribute is an experimental feature LL | #[naked] | ^^^^^^^^ | - = note: see issue #32408 
for more information + = note: see issue #90957 for more information = help: add `#![feature(naked_functions)]` to the crate attributes to enable error: aborting due to 2 previous errors diff --git a/tests/ui/feature-gates/feature-gate-non_exhaustive_omitted_patterns_lint.rs b/tests/ui/feature-gates/feature-gate-non_exhaustive_omitted_patterns_lint.rs index 1922bfb4913e4..1db3c2ccdde75 100644 --- a/tests/ui/feature-gates/feature-gate-non_exhaustive_omitted_patterns_lint.rs +++ b/tests/ui/feature-gates/feature-gate-non_exhaustive_omitted_patterns_lint.rs @@ -2,12 +2,8 @@ #![deny(non_exhaustive_omitted_patterns)] //~^ WARNING unknown lint: `non_exhaustive_omitted_patterns` -//~| WARNING unknown lint: `non_exhaustive_omitted_patterns` -//~| WARNING unknown lint: `non_exhaustive_omitted_patterns` #![allow(non_exhaustive_omitted_patterns)] //~^ WARNING unknown lint: `non_exhaustive_omitted_patterns` -//~| WARNING unknown lint: `non_exhaustive_omitted_patterns` -//~| WARNING unknown lint: `non_exhaustive_omitted_patterns` fn main() { enum Foo { @@ -19,9 +15,6 @@ fn main() { #[allow(non_exhaustive_omitted_patterns)] //~^ WARNING unknown lint: `non_exhaustive_omitted_patterns` //~| WARNING unknown lint: `non_exhaustive_omitted_patterns` - //~| WARNING unknown lint: `non_exhaustive_omitted_patterns` - //~| WARNING unknown lint: `non_exhaustive_omitted_patterns` - //~| WARNING unknown lint: `non_exhaustive_omitted_patterns` match Foo::A { //~^ ERROR non-exhaustive patterns: `Foo::C` not covered Foo::A => {} @@ -31,9 +24,6 @@ fn main() { #[warn(non_exhaustive_omitted_patterns)] //~^ WARNING unknown lint: `non_exhaustive_omitted_patterns` //~| WARNING unknown lint: `non_exhaustive_omitted_patterns` - //~| WARNING unknown lint: `non_exhaustive_omitted_patterns` - //~| WARNING unknown lint: `non_exhaustive_omitted_patterns` - //~| WARNING unknown lint: `non_exhaustive_omitted_patterns` match Foo::A { Foo::A => {} Foo::B => {} diff --git a/tests/ui/feature-gates/feature-gate-non_exhaustive_omitted_patterns_lint.stderr b/tests/ui/feature-gates/feature-gate-non_exhaustive_omitted_patterns_lint.stderr index a533371397762..955d7fe3f3eaf 100644 --- a/tests/ui/feature-gates/feature-gate-non_exhaustive_omitted_patterns_lint.stderr +++ b/tests/ui/feature-gates/feature-gate-non_exhaustive_omitted_patterns_lint.stderr @@ -10,7 +10,7 @@ LL | #![deny(non_exhaustive_omitted_patterns)] = note: `#[warn(unknown_lints)]` on by default warning: unknown lint: `non_exhaustive_omitted_patterns` - --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:7:1 + --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:5:1 | LL | #![allow(non_exhaustive_omitted_patterns)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -20,7 +20,7 @@ LL | #![allow(non_exhaustive_omitted_patterns)] = help: add `#![feature(non_exhaustive_omitted_patterns_lint)]` to the crate attributes to enable warning: unknown lint: `non_exhaustive_omitted_patterns` - --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:19:5 + --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:15:5 | LL | #[allow(non_exhaustive_omitted_patterns)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -30,7 +30,7 @@ LL | #[allow(non_exhaustive_omitted_patterns)] = help: add `#![feature(non_exhaustive_omitted_patterns_lint)]` to the crate attributes to enable warning: unknown lint: `non_exhaustive_omitted_patterns` - --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:19:5 + --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:15:5 | LL | 
#[allow(non_exhaustive_omitted_patterns)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -41,7 +41,7 @@ LL | #[allow(non_exhaustive_omitted_patterns)] = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` warning: unknown lint: `non_exhaustive_omitted_patterns` - --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:31:5 + --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:24:5 | LL | #[warn(non_exhaustive_omitted_patterns)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -51,73 +51,7 @@ LL | #[warn(non_exhaustive_omitted_patterns)] = help: add `#![feature(non_exhaustive_omitted_patterns_lint)]` to the crate attributes to enable warning: unknown lint: `non_exhaustive_omitted_patterns` - --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:31:5 - | -LL | #[warn(non_exhaustive_omitted_patterns)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `non_exhaustive_omitted_patterns` lint is unstable - = note: see issue #89554 for more information - = help: add `#![feature(non_exhaustive_omitted_patterns_lint)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: unknown lint: `non_exhaustive_omitted_patterns` - --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:3:1 - | -LL | #![deny(non_exhaustive_omitted_patterns)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `non_exhaustive_omitted_patterns` lint is unstable - = note: see issue #89554 for more information - = help: add `#![feature(non_exhaustive_omitted_patterns_lint)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: unknown lint: `non_exhaustive_omitted_patterns` - --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:7:1 - | -LL | #![allow(non_exhaustive_omitted_patterns)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `non_exhaustive_omitted_patterns` lint is unstable - = note: see issue #89554 for more information - = help: add `#![feature(non_exhaustive_omitted_patterns_lint)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: unknown lint: `non_exhaustive_omitted_patterns` - --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:19:5 - | -LL | #[allow(non_exhaustive_omitted_patterns)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `non_exhaustive_omitted_patterns` lint is unstable - = note: see issue #89554 for more information - = help: add `#![feature(non_exhaustive_omitted_patterns_lint)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: unknown lint: `non_exhaustive_omitted_patterns` - --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:19:5 - | -LL | #[allow(non_exhaustive_omitted_patterns)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `non_exhaustive_omitted_patterns` lint is unstable - = note: see issue #89554 for more information - = help: add `#![feature(non_exhaustive_omitted_patterns_lint)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: unknown lint: `non_exhaustive_omitted_patterns` - --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:31:5 - | -LL | #[warn(non_exhaustive_omitted_patterns)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the 
`non_exhaustive_omitted_patterns` lint is unstable - = note: see issue #89554 for more information - = help: add `#![feature(non_exhaustive_omitted_patterns_lint)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: unknown lint: `non_exhaustive_omitted_patterns` - --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:31:5 + --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:24:5 | LL | #[warn(non_exhaustive_omitted_patterns)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -128,13 +62,13 @@ LL | #[warn(non_exhaustive_omitted_patterns)] = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` error[E0004]: non-exhaustive patterns: `Foo::C` not covered - --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:25:11 + --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:18:11 | LL | match Foo::A { | ^^^^^^ pattern `Foo::C` not covered | note: `Foo` defined here - --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:13:10 + --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:9:10 | LL | enum Foo { | ^^^ @@ -148,50 +82,6 @@ LL ~ Foo::B => {}, LL + Foo::C => todo!() | -warning: unknown lint: `non_exhaustive_omitted_patterns` - --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:3:1 - | -LL | #![deny(non_exhaustive_omitted_patterns)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `non_exhaustive_omitted_patterns` lint is unstable - = note: see issue #89554 for more information - = help: add `#![feature(non_exhaustive_omitted_patterns_lint)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: unknown lint: `non_exhaustive_omitted_patterns` - --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:7:1 - | -LL | #![allow(non_exhaustive_omitted_patterns)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `non_exhaustive_omitted_patterns` lint is unstable - = note: see issue #89554 for more information - = help: add `#![feature(non_exhaustive_omitted_patterns_lint)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: unknown lint: `non_exhaustive_omitted_patterns` - --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:19:5 - | -LL | #[allow(non_exhaustive_omitted_patterns)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `non_exhaustive_omitted_patterns` lint is unstable - = note: see issue #89554 for more information - = help: add `#![feature(non_exhaustive_omitted_patterns_lint)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: unknown lint: `non_exhaustive_omitted_patterns` - --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:31:5 - | -LL | #[warn(non_exhaustive_omitted_patterns)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `non_exhaustive_omitted_patterns` lint is unstable - = note: see issue #89554 for more information - = help: add `#![feature(non_exhaustive_omitted_patterns_lint)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -error: aborting due to 1 previous error; 16 warnings emitted +error: aborting due to 1 previous error; 6 warnings emitted For more information about this error, try `rustc --explain E0004`. 
diff --git a/tests/ui/feature-gates/feature-gate-strict_provenance.rs b/tests/ui/feature-gates/feature-gate-strict_provenance.rs index 75d0ee5700d07..24b8369b3d8f4 100644 --- a/tests/ui/feature-gates/feature-gate-strict_provenance.rs +++ b/tests/ui/feature-gates/feature-gate-strict_provenance.rs @@ -2,12 +2,8 @@ #![deny(fuzzy_provenance_casts)] //~^ WARNING unknown lint: `fuzzy_provenance_casts` -//~| WARNING unknown lint: `fuzzy_provenance_casts` -//~| WARNING unknown lint: `fuzzy_provenance_casts` #![deny(lossy_provenance_casts)] //~^ WARNING unknown lint: `lossy_provenance_casts` -//~| WARNING unknown lint: `lossy_provenance_casts` -//~| WARNING unknown lint: `lossy_provenance_casts` fn main() { // no warnings emitted since the lints are not activated diff --git a/tests/ui/feature-gates/feature-gate-strict_provenance.stderr b/tests/ui/feature-gates/feature-gate-strict_provenance.stderr index 1e6d762a54060..36224ee864b34 100644 --- a/tests/ui/feature-gates/feature-gate-strict_provenance.stderr +++ b/tests/ui/feature-gates/feature-gate-strict_provenance.stderr @@ -10,7 +10,7 @@ LL | #![deny(fuzzy_provenance_casts)] = note: `#[warn(unknown_lints)]` on by default warning: unknown lint: `lossy_provenance_casts` - --> $DIR/feature-gate-strict_provenance.rs:7:1 + --> $DIR/feature-gate-strict_provenance.rs:5:1 | LL | #![deny(lossy_provenance_casts)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -19,49 +19,5 @@ LL | #![deny(lossy_provenance_casts)] = note: see issue #95228 for more information = help: add `#![feature(strict_provenance)]` to the crate attributes to enable -warning: unknown lint: `fuzzy_provenance_casts` - --> $DIR/feature-gate-strict_provenance.rs:3:1 - | -LL | #![deny(fuzzy_provenance_casts)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `fuzzy_provenance_casts` lint is unstable - = note: see issue #95228 for more information - = help: add `#![feature(strict_provenance)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: unknown lint: `lossy_provenance_casts` - --> $DIR/feature-gate-strict_provenance.rs:7:1 - | -LL | #![deny(lossy_provenance_casts)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `lossy_provenance_casts` lint is unstable - = note: see issue #95228 for more information - = help: add `#![feature(strict_provenance)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: unknown lint: `fuzzy_provenance_casts` - --> $DIR/feature-gate-strict_provenance.rs:3:1 - | -LL | #![deny(fuzzy_provenance_casts)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `fuzzy_provenance_casts` lint is unstable - = note: see issue #95228 for more information - = help: add `#![feature(strict_provenance)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: unknown lint: `lossy_provenance_casts` - --> $DIR/feature-gate-strict_provenance.rs:7:1 - | -LL | #![deny(lossy_provenance_casts)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `lossy_provenance_casts` lint is unstable - = note: see issue #95228 for more information - = help: add `#![feature(strict_provenance)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: 6 warnings emitted +warning: 2 warnings emitted diff --git a/tests/ui/feature-gates/feature-gate-test_unstable_lint.rs 
b/tests/ui/feature-gates/feature-gate-test_unstable_lint.rs index c398394cbe1a6..3882ba9a2271c 100644 --- a/tests/ui/feature-gates/feature-gate-test_unstable_lint.rs +++ b/tests/ui/feature-gates/feature-gate-test_unstable_lint.rs @@ -3,7 +3,5 @@ // `test_unstable_lint` is for testing and should never be stabilized. #![allow(test_unstable_lint)] //~^ WARNING unknown lint: `test_unstable_lint` -//~| WARNING unknown lint: `test_unstable_lint` -//~| WARNING unknown lint: `test_unstable_lint` fn main() {} diff --git a/tests/ui/feature-gates/feature-gate-test_unstable_lint.stderr b/tests/ui/feature-gates/feature-gate-test_unstable_lint.stderr index 562aa478a93eb..aec32ac4abb2e 100644 --- a/tests/ui/feature-gates/feature-gate-test_unstable_lint.stderr +++ b/tests/ui/feature-gates/feature-gate-test_unstable_lint.stderr @@ -8,25 +8,5 @@ LL | #![allow(test_unstable_lint)] = help: add `#![feature(test_unstable_lint)]` to the crate attributes to enable = note: `#[warn(unknown_lints)]` on by default -warning: unknown lint: `test_unstable_lint` - --> $DIR/feature-gate-test_unstable_lint.rs:4:1 - | -LL | #![allow(test_unstable_lint)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `test_unstable_lint` lint is unstable - = help: add `#![feature(test_unstable_lint)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: unknown lint: `test_unstable_lint` - --> $DIR/feature-gate-test_unstable_lint.rs:4:1 - | -LL | #![allow(test_unstable_lint)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `test_unstable_lint` lint is unstable - = help: add `#![feature(test_unstable_lint)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: 3 warnings emitted +warning: 1 warning emitted diff --git a/tests/ui/feature-gates/feature-gate-type_privacy_lints.rs b/tests/ui/feature-gates/feature-gate-type_privacy_lints.rs index 8bb9736f1b474..80e51b265db5a 100644 --- a/tests/ui/feature-gates/feature-gate-type_privacy_lints.rs +++ b/tests/ui/feature-gates/feature-gate-type_privacy_lints.rs @@ -1,6 +1,4 @@ // check-pass #![warn(unnameable_types)] //~ WARN unknown lint - //~| WARN unknown lint - //~| WARN unknown lint fn main() {} diff --git a/tests/ui/feature-gates/feature-gate-type_privacy_lints.stderr b/tests/ui/feature-gates/feature-gate-type_privacy_lints.stderr index 2614f2b3c35de..5cc30de9c578c 100644 --- a/tests/ui/feature-gates/feature-gate-type_privacy_lints.stderr +++ b/tests/ui/feature-gates/feature-gate-type_privacy_lints.stderr @@ -9,27 +9,5 @@ LL | #![warn(unnameable_types)] = help: add `#![feature(type_privacy_lints)]` to the crate attributes to enable = note: `#[warn(unknown_lints)]` on by default -warning: unknown lint: `unnameable_types` - --> $DIR/feature-gate-type_privacy_lints.rs:3:1 - | -LL | #![warn(unnameable_types)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `unnameable_types` lint is unstable - = note: see issue #48054 for more information - = help: add `#![feature(type_privacy_lints)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: unknown lint: `unnameable_types` - --> $DIR/feature-gate-type_privacy_lints.rs:3:1 - | -LL | #![warn(unnameable_types)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `unnameable_types` lint is unstable - = note: see issue #48054 for more information - = help: add `#![feature(type_privacy_lints)]` to the crate attributes to enable - = note: 
duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: 3 warnings emitted +warning: 1 warning emitted diff --git a/tests/ui/fn/signature-error-reporting-under-verbose.rs b/tests/ui/fn/signature-error-reporting-under-verbose.rs index d00cbd8a0f242..d28c8530d58ad 100644 --- a/tests/ui/fn/signature-error-reporting-under-verbose.rs +++ b/tests/ui/fn/signature-error-reporting-under-verbose.rs @@ -1,4 +1,4 @@ -// compile-flags: -Zverbose +// compile-flags: -Zverbose-internals fn foo(_: i32, _: i32) {} diff --git a/tests/ui/generic-associated-types/issue-84931.stderr b/tests/ui/generic-associated-types/issue-84931.stderr index 04e14b9c746f3..71d112277a37e 100644 --- a/tests/ui/generic-associated-types/issue-84931.stderr +++ b/tests/ui/generic-associated-types/issue-84931.stderr @@ -1,16 +1,3 @@ -error[E0309]: the parameter type `T` may not live long enough - --> $DIR/issue-84931.rs:14:21 - | -LL | type Item<'a> = &'a mut T; - | -- ^^^^^^^^^ ...so that the reference type `&'a mut T` does not outlive the data it points at - | | - | the parameter type `T` must be valid for the lifetime `'a` as defined here... - | -help: consider adding an explicit lifetime bound - | -LL | type Item<'a> = &'a mut T where T: 'a; - | +++++++++++ - error[E0477]: the type `StreamingSliceIter<'b, T>` does not fulfill the required lifetime --> $DIR/issue-84931.rs:14:21 | @@ -30,6 +17,19 @@ help: copy the `where` clause predicates from the trait LL | type Item<'a> = &'a mut T where Self: 'a; | ++++++++++++++ +error[E0309]: the parameter type `T` may not live long enough + --> $DIR/issue-84931.rs:14:21 + | +LL | type Item<'a> = &'a mut T; + | -- ^^^^^^^^^ ...so that the reference type `&'a mut T` does not outlive the data it points at + | | + | the parameter type `T` must be valid for the lifetime `'a` as defined here... + | +help: consider adding an explicit lifetime bound + | +LL | type Item<'a> = &'a mut T where T: 'a; + | +++++++++++ + error: aborting due to 2 previous errors Some errors have detailed explanations: E0309, E0477. diff --git a/tests/ui/generic-associated-types/unsatisfied-item-lifetime-bound.stderr b/tests/ui/generic-associated-types/unsatisfied-item-lifetime-bound.stderr index f73ed5956da21..8d21b9172c87a 100644 --- a/tests/ui/generic-associated-types/unsatisfied-item-lifetime-bound.stderr +++ b/tests/ui/generic-associated-types/unsatisfied-item-lifetime-bound.stderr @@ -11,6 +11,26 @@ note: the lint level is defined here LL | #![warn(unused_lifetimes)] | ^^^^^^^^^^^^^^^^ +error[E0478]: lifetime bound not satisfied + --> $DIR/unsatisfied-item-lifetime-bound.rs:9:18 + | +LL | type Y<'a: 'static>; + | ------------------- definition of `Y` from trait +... +LL | type Y<'a> = &'a (); + | ^^^^^^ + | +note: lifetime parameter instantiated with the lifetime `'a` as defined here + --> $DIR/unsatisfied-item-lifetime-bound.rs:9:12 + | +LL | type Y<'a> = &'a (); + | ^^ + = note: but lifetime parameter must outlive the static lifetime +help: copy the `where` clause predicates from the trait + | +LL | type Y<'a> = &'a () where 'a: 'static; + | +++++++++++++++++ + error[E0478]: lifetime bound not satisfied --> $DIR/unsatisfied-item-lifetime-bound.rs:14:8 | @@ -50,26 +70,6 @@ LL | struct D<'a> { | ^^ = note: but lifetime parameter must outlive the static lifetime -error[E0478]: lifetime bound not satisfied - --> $DIR/unsatisfied-item-lifetime-bound.rs:9:18 - | -LL | type Y<'a: 'static>; - | ------------------- definition of `Y` from trait -... 
-LL | type Y<'a> = &'a (); - | ^^^^^^ - | -note: lifetime parameter instantiated with the lifetime `'a` as defined here - --> $DIR/unsatisfied-item-lifetime-bound.rs:9:12 - | -LL | type Y<'a> = &'a (); - | ^^ - = note: but lifetime parameter must outlive the static lifetime -help: copy the `where` clause predicates from the trait - | -LL | type Y<'a> = &'a () where 'a: 'static; - | +++++++++++++++++ - error: aborting due to 4 previous errors; 1 warning emitted For more information about this error, try `rustc --explain E0478`. diff --git a/tests/ui/generic-const-items/const-trait-impl.rs b/tests/ui/generic-const-items/const-trait-impl.rs index 43cdf818c4694..04c3f3eb4340f 100644 --- a/tests/ui/generic-const-items/const-trait-impl.rs +++ b/tests/ui/generic-const-items/const-trait-impl.rs @@ -1,14 +1,12 @@ -// known-bug: #110395 -// FIXME check-pass +// check-pass // Test that we can call methods from const trait impls inside of generic const items. -#![feature(generic_const_items, const_trait_impl)] +#![feature(generic_const_items, const_trait_impl, effects)] #![allow(incomplete_features)] #![crate_type = "lib"] -// FIXME(generic_const_items, effects): Introduce `const` bounds to make this work. -const CREATE: T = T::create(); +const CREATE: T = T::create(); pub const K0: i32 = CREATE::; pub const K1: i32 = CREATE; // arg inferred @@ -23,3 +21,13 @@ impl const Create for i32 { 4096 } } + +trait Mod { // doesn't need to be a `#[const_trait]` + const CREATE: T; +} + +impl Mod for () { + const CREATE: T = T::create(); +} + +pub const K2: i32 = <() as Mod>::CREATE::; diff --git a/tests/ui/generic-const-items/const-trait-impl.stderr b/tests/ui/generic-const-items/const-trait-impl.stderr deleted file mode 100644 index cdcd24eceffe5..0000000000000 --- a/tests/ui/generic-const-items/const-trait-impl.stderr +++ /dev/null @@ -1,12 +0,0 @@ -error[E0015]: cannot call non-const fn `::create` in constants - --> $DIR/const-trait-impl.rs:11:30 - | -LL | const CREATE: T = T::create(); - | ^^^^^^^^^^^ - | - = note: calls in constants are limited to constant functions, tuple structs and tuple variants - = help: add `#![feature(effects)]` to the crate attributes to enable - -error: aborting due to 1 previous error - -For more information about this error, try `rustc --explain E0015`. 
diff --git a/tests/ui/half-open-range-patterns/range_pat_interactions1.rs b/tests/ui/half-open-range-patterns/range_pat_interactions1.rs index 9ffc2190d20d1..55353999b6788 100644 --- a/tests/ui/half-open-range-patterns/range_pat_interactions1.rs +++ b/tests/ui/half-open-range-patterns/range_pat_interactions1.rs @@ -17,7 +17,7 @@ fn main() { } match x as i32 { 0..5+1 => errors_only.push(x), - //~^ error: expected one of `,`, `=>`, `if`, `|`, or `}`, found `+` + //~^ error: expected one of `=>`, `if`, or `|`, found `+` 1 | -3..0 => first_or.push(x), y @ (0..5 | 6) => or_two.push(y), y @ 0..const { 5 + 1 } => assert_eq!(y, 5), diff --git a/tests/ui/half-open-range-patterns/range_pat_interactions1.stderr b/tests/ui/half-open-range-patterns/range_pat_interactions1.stderr index 05235c9b92295..19ebcaf0f3699 100644 --- a/tests/ui/half-open-range-patterns/range_pat_interactions1.stderr +++ b/tests/ui/half-open-range-patterns/range_pat_interactions1.stderr @@ -1,8 +1,8 @@ -error: expected one of `,`, `=>`, `if`, `|`, or `}`, found `+` +error: expected one of `=>`, `if`, or `|`, found `+` --> $DIR/range_pat_interactions1.rs:19:17 | LL | 0..5+1 => errors_only.push(x), - | ^ expected one of `,`, `=>`, `if`, `|`, or `}` + | ^ expected one of `=>`, `if`, or `|` error[E0408]: variable `n` is not bound in all patterns --> $DIR/range_pat_interactions1.rs:10:25 diff --git a/tests/ui/half-open-range-patterns/range_pat_interactions2.rs b/tests/ui/half-open-range-patterns/range_pat_interactions2.rs index b212bfbe093eb..0e96cfe785857 100644 --- a/tests/ui/half-open-range-patterns/range_pat_interactions2.rs +++ b/tests/ui/half-open-range-patterns/range_pat_interactions2.rs @@ -8,8 +8,7 @@ fn main() { for x in -9 + 1..=(9 - 2) { match x as i32 { 0..=(5+1) => errors_only.push(x), - //~^ error: inclusive range with no end - //~| error: expected one of `,`, `=>`, `if`, `|`, or `}`, found `(` + //~^ error: expected `)`, found `+` 1 | -3..0 => first_or.push(x), y @ (0..5 | 6) => or_two.push(y), y @ 0..const { 5 + 1 } => assert_eq!(y, 5), diff --git a/tests/ui/half-open-range-patterns/range_pat_interactions2.stderr b/tests/ui/half-open-range-patterns/range_pat_interactions2.stderr index 0129f927e3464..a54f29a3b3263 100644 --- a/tests/ui/half-open-range-patterns/range_pat_interactions2.stderr +++ b/tests/ui/half-open-range-patterns/range_pat_interactions2.stderr @@ -1,17 +1,8 @@ -error[E0586]: inclusive range with no end - --> $DIR/range_pat_interactions2.rs:10:14 +error: expected `)`, found `+` + --> $DIR/range_pat_interactions2.rs:10:19 | LL | 0..=(5+1) => errors_only.push(x), - | ^^^ help: use `..` instead - | - = note: inclusive ranges must be bounded at the end (`..=b` or `a..=b`) - -error: expected one of `,`, `=>`, `if`, `|`, or `}`, found `(` - --> $DIR/range_pat_interactions2.rs:10:17 - | -LL | 0..=(5+1) => errors_only.push(x), - | ^ expected one of `,`, `=>`, `if`, `|`, or `}` + | ^ expected `)` -error: aborting due to 2 previous errors +error: aborting due to 1 previous error -For more information about this error, try `rustc --explain E0586`. diff --git a/tests/ui/impl-trait/associated-impl-trait-type-issue-114325.rs b/tests/ui/impl-trait/associated-impl-trait-type-issue-114325.rs new file mode 100644 index 0000000000000..8173f8df11b0c --- /dev/null +++ b/tests/ui/impl-trait/associated-impl-trait-type-issue-114325.rs @@ -0,0 +1,55 @@ +// This is a non-regression test for issue #114325: an "unexpected unsized tail" ICE happened during +// codegen, and was fixed by MIR drop tracking #107421. 
+ +// edition: 2021 +// build-pass: ICEd during codegen. + +#![feature(impl_trait_in_assoc_type)] + +use std::future::Future; + +fn main() { + RuntimeRef::spawn_local(actor_fn(http_actor)); +} + +async fn http_actor() { + async fn respond(body: impl Body) { + body.write_message().await; + } + + respond(&()).await; +} + +trait Body { + type WriteFuture: Future; + + fn write_message(self) -> Self::WriteFuture; +} + +impl Body for &'static () { + type WriteFuture = impl Future; + + fn write_message(self) -> Self::WriteFuture { + async {} + } +} + +trait NewActor { + type RuntimeAccess; +} + +fn actor_fn(_d: T) -> (T, A) { + loop {} +} + +impl A, A> NewActor for (F, A) { + type RuntimeAccess = RuntimeRef; +} +struct RuntimeRef(Vec<()>); + +impl RuntimeRef { + fn spawn_local>(_f: NA) { + struct ActorFuture(NA::RuntimeAccess); + (ActorFuture::(RuntimeRef(vec![])), _f); + } +} diff --git a/tests/ui/impl-trait/erased-regions-in-hidden-ty.rs b/tests/ui/impl-trait/erased-regions-in-hidden-ty.rs index b1f36fc247fe1..0458b56f95f7f 100644 --- a/tests/ui/impl-trait/erased-regions-in-hidden-ty.rs +++ b/tests/ui/impl-trait/erased-regions-in-hidden-ty.rs @@ -1,5 +1,5 @@ // revisions: current next -// compile-flags: -Zverbose +// compile-flags: -Zverbose-internals //[next] compile-flags: -Znext-solver // normalize-stderr-test "DefId\([^\)]+\)" -> "DefId(..)" diff --git a/tests/ui/impl-trait/issues/issue-78722-2.stderr b/tests/ui/impl-trait/issues/issue-78722-2.stderr index 8817eb7d243fb..69c734530f2a6 100644 --- a/tests/ui/impl-trait/issues/issue-78722-2.stderr +++ b/tests/ui/impl-trait/issues/issue-78722-2.stderr @@ -1,12 +1,3 @@ -error[E0658]: `async` blocks are not allowed in constants - --> $DIR/issue-78722-2.rs:15:20 - | -LL | let f: F = async { 1 }; - | ^^^^^^^^^^^ - | - = note: see issue #85368 for more information - = help: add `#![feature(const_async_blocks)]` to the crate attributes to enable - error[E0271]: expected `{async block@$DIR/issue-78722-2.rs:13:13: 13:21}` to be a future that resolves to `u8`, but it resolves to `()` --> $DIR/issue-78722-2.rs:11:30 | @@ -26,6 +17,15 @@ note: this item must mention the opaque type in its signature in order to be abl LL | let f: F = async { 1 }; | ^^^^^^^^^^^ +error[E0658]: `async` blocks are not allowed in constants + --> $DIR/issue-78722-2.rs:15:20 + | +LL | let f: F = async { 1 }; + | ^^^^^^^^^^^ + | + = note: see issue #85368 for more information + = help: add `#![feature(const_async_blocks)]` to the crate attributes to enable + error: aborting due to 3 previous errors Some errors have detailed explanations: E0271, E0658. 
diff --git a/tests/ui/impl-trait/issues/issue-86800.rs b/tests/ui/impl-trait/issues/issue-86800.rs index df70b324c5ec7..297b012d90a72 100644 --- a/tests/ui/impl-trait/issues/issue-86800.rs +++ b/tests/ui/impl-trait/issues/issue-86800.rs @@ -1,8 +1,8 @@ #![feature(type_alias_impl_trait)] // edition:2021 -// compile-flags:-Z treat-err-as-bug=1 -// error-pattern: aborting due to `-Z treat-err-as-bug=1` +// compile-flags:-Z treat-err-as-bug=2 +// error-pattern: due to `-Z treat-err-as-bug=2 // failure-status:101 // normalize-stderr-test ".*note: .*\n\n" -> "" // normalize-stderr-test "thread 'rustc' panicked.*:\n.*\n" -> "" diff --git a/tests/ui/impl-trait/issues/issue-86800.stderr b/tests/ui/impl-trait/issues/issue-86800.stderr index 8228f8ace9d67..07ba8eb021b31 100644 --- a/tests/ui/impl-trait/issues/issue-86800.stderr +++ b/tests/ui/impl-trait/issues/issue-86800.stderr @@ -4,9 +4,18 @@ error: unconstrained opaque type LL | type TransactionFuture<'__, O> = impl '__ + Future>; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | +error[E0792]: expected generic lifetime parameter, found `'_` + --> $DIR/issue-86800.rs:39:5 + | +LL | type TransactionFuture<'__, O> = impl '__ + Future>; + | --- this generic parameter must be used with a generic lifetime parameter +... +LL | f + | ^ + error: the compiler unexpectedly panicked. this is a bug. query stack during panic: -#0 [type_of_opaque] computing type of opaque `TransactionFuture::{opaque#0}` -#1 [type_of] computing type of `TransactionFuture::{opaque#0}` +#0 [mir_borrowck] borrow-checking `execute_transaction_fut` +#1 [type_of_opaque] computing type of opaque `execute_transaction_fut::{opaque#0}` end of query stack diff --git a/tests/ui/impl-trait/normalize-tait-in-const.stderr b/tests/ui/impl-trait/normalize-tait-in-const.stderr index e0513433b8ec1..7fd2ec57b1455 100644 --- a/tests/ui/impl-trait/normalize-tait-in-const.stderr +++ b/tests/ui/impl-trait/normalize-tait-in-const.stderr @@ -1,4 +1,4 @@ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/normalize-tait-in-const.rs:25:42 | LL | const fn with_positive Fn(&'a Alias<'a>) + ~const Destruct>(fun: F) { diff --git a/tests/ui/impl-trait/not_general_enough_regression_106630.rs b/tests/ui/impl-trait/not_general_enough_regression_106630.rs new file mode 100644 index 0000000000000..439973950f397 --- /dev/null +++ b/tests/ui/impl-trait/not_general_enough_regression_106630.rs @@ -0,0 +1,33 @@ +// edition:2018 +// run-pass + +use std::future::Future; + +trait AsyncCallback<'a> { + type Out; +} + +impl<'a, Fut, T, F> AsyncCallback<'a> for F +where + F: FnOnce(&'a mut ()) -> Fut, + Fut: Future + Send + 'a, +{ + type Out = T; +} + +trait CallbackMarker {} + +impl CallbackMarker for F +where + T: 'static, + for<'a> F: AsyncCallback<'a, Out = T> + Send, +{ +} + +fn do_sth(_: F) {} + +async fn callback(_: &mut ()) -> impl Send {} + +fn main() { + do_sth(callback); +} diff --git a/tests/ui/issues/issue-13482-2.rs b/tests/ui/issues/issue-13482-2.rs index bbcb954afcc8c..b5b81dea73e8b 100644 --- a/tests/ui/issues/issue-13482-2.rs +++ b/tests/ui/issues/issue-13482-2.rs @@ -1,4 +1,4 @@ -// compile-flags:-Z verbose +// compile-flags:-Z verbose-internals fn main() { let x = [1,2]; diff --git a/tests/ui/issues/issue-17441.stderr b/tests/ui/issues/issue-17441.stderr index 4dbe50178cf3b..29e50b91c7c6f 100644 --- a/tests/ui/issues/issue-17441.stderr +++ b/tests/ui/issues/issue-17441.stderr @@ -2,13 +2,9 @@ error[E0620]: cast to unsized 
type: `&[usize; 2]` as `[usize]` --> $DIR/issue-17441.rs:2:16 | LL | let _foo = &[1_usize, 2] as [usize]; - | ^^^^^^^^^^^^^^^^^^^^^^^^ - | -help: consider using an implicit coercion to `&[usize]` instead - --> $DIR/issue-17441.rs:2:16 - | -LL | let _foo = &[1_usize, 2] as [usize]; - | ^^^^^^^^^^^^^^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^^^------- + | | + | help: try casting to a reference instead: `&[usize]` error[E0620]: cast to unsized type: `Box` as `dyn Debug` --> $DIR/issue-17441.rs:5:16 diff --git a/tests/ui/issues/issue-23122-2.stderr b/tests/ui/issues/issue-23122-2.stderr index b8aa587a7398c..10463ab2c33fe 100644 --- a/tests/ui/issues/issue-23122-2.stderr +++ b/tests/ui/issues/issue-23122-2.stderr @@ -5,7 +5,7 @@ LL | type Next = as Next>::Next; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = help: consider increasing the recursion limit by adding a `#![recursion_limit = "256"]` attribute to your crate (`issue_23122_2`) -note: required for `GetNext<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next>` to implement `Next` +note: required for `GetNext<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next 
as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next as Next>::Next>` to implement `Next` --> $DIR/issue-23122-2.rs:10:15 | LL | impl Next for GetNext { diff --git a/tests/ui/issues/issue-62375.stderr b/tests/ui/issues/issue-62375.stderr index 8750fbcf4cf75..faca94a03f018 100644 --- a/tests/ui/issues/issue-62375.stderr +++ b/tests/ui/issues/issue-62375.stderr @@ -11,11 +11,6 @@ note: an implementation of `PartialEq A {A::Value}>` might be missing | LL | enum A { | ^^^^^^ must implement `PartialEq A {A::Value}>` -help: consider annotating `A` with `#[derive(PartialEq)]` - | -LL + #[derive(PartialEq)] -LL | enum A { - | help: use parentheses to construct this tuple variant | LL | a == A::Value(/* () */); diff --git a/tests/ui/layout/issue-84108.stderr b/tests/ui/layout/issue-84108.stderr index 3a02e73f96b78..d6d7585103469 100644 --- a/tests/ui/layout/issue-84108.stderr +++ b/tests/ui/layout/issue-84108.stderr @@ -29,15 +29,6 @@ LL | const BAR: (&Path, [u8], usize) = ("hello", [], 42); = help: the trait `Sized` is not implemented for `[u8]` = note: only the last element of a tuple may have a dynamically sized type -error[E0277]: the size for values of type `[u8]` cannot be known at compilation time - --> $DIR/issue-84108.rs:14:13 - | -LL | static BAZ: ([u8], usize) = ([], 0); - | ^^^^^^^^^^^^^ doesn't have a size known at compile-time - | - = help: the trait `Sized` is not implemented for `[u8]` - = note: only the last element of a tuple may have a dynamically sized type - error[E0308]: mismatched types --> $DIR/issue-84108.rs:9:45 | @@ -47,6 +38,15 @@ LL | const BAR: (&Path, [u8], usize) = ("hello", [], 42); = note: expected slice `[u8]` found array `[_; 0]` +error[E0277]: the size for values of type `[u8]` cannot be known at compilation time + --> $DIR/issue-84108.rs:14:13 + | +LL | static BAZ: ([u8], usize) = ([], 0); + | ^^^^^^^^^^^^^ doesn't have a size known at compile-time + | + = help: the trait `Sized` is not implemented for `[u8]` + = note: only the last element of a tuple may have a dynamically sized type + error[E0308]: mismatched 
types --> $DIR/issue-84108.rs:14:30 | diff --git a/tests/ui/lint/crate_level_only_lint.rs b/tests/ui/lint/crate_level_only_lint.rs index d9673faa2142e..6679cc0862f33 100644 --- a/tests/ui/lint/crate_level_only_lint.rs +++ b/tests/ui/lint/crate_level_only_lint.rs @@ -3,20 +3,14 @@ mod foo { #![allow(uncommon_codepoints)] //~^ ERROR allow(uncommon_codepoints) is ignored unless specified at crate level [unused_attributes] -//~| ERROR allow(uncommon_codepoints) is ignored unless specified at crate level [unused_attributes] -//~| ERROR allow(uncommon_codepoints) is ignored unless specified at crate level [unused_attributes] #[allow(uncommon_codepoints)] //~^ ERROR allow(uncommon_codepoints) is ignored unless specified at crate level [unused_attributes] -//~| ERROR allow(uncommon_codepoints) is ignored unless specified at crate level [unused_attributes] -//~| ERROR allow(uncommon_codepoints) is ignored unless specified at crate level [unused_attributes] const BAR: f64 = 0.000001; } #[allow(uncommon_codepoints)] //~^ ERROR allow(uncommon_codepoints) is ignored unless specified at crate level [unused_attributes] -//~| ERROR allow(uncommon_codepoints) is ignored unless specified at crate level [unused_attributes] -//~| ERROR allow(uncommon_codepoints) is ignored unless specified at crate level [unused_attributes] fn main() { } diff --git a/tests/ui/lint/crate_level_only_lint.stderr b/tests/ui/lint/crate_level_only_lint.stderr index fbb1ec381c8ff..34d27f873f689 100644 --- a/tests/ui/lint/crate_level_only_lint.stderr +++ b/tests/ui/lint/crate_level_only_lint.stderr @@ -11,64 +11,16 @@ LL | #![deny(uncommon_codepoints, unused_attributes)] | ^^^^^^^^^^^^^^^^^ error: allow(uncommon_codepoints) is ignored unless specified at crate level - --> $DIR/crate_level_only_lint.rs:9:9 + --> $DIR/crate_level_only_lint.rs:7:9 | LL | #[allow(uncommon_codepoints)] | ^^^^^^^^^^^^^^^^^^^ error: allow(uncommon_codepoints) is ignored unless specified at crate level - --> $DIR/crate_level_only_lint.rs:17:9 + --> $DIR/crate_level_only_lint.rs:13:9 | LL | #[allow(uncommon_codepoints)] | ^^^^^^^^^^^^^^^^^^^ -error: allow(uncommon_codepoints) is ignored unless specified at crate level - --> $DIR/crate_level_only_lint.rs:4:10 - | -LL | #![allow(uncommon_codepoints)] - | ^^^^^^^^^^^^^^^^^^^ - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -error: allow(uncommon_codepoints) is ignored unless specified at crate level - --> $DIR/crate_level_only_lint.rs:9:9 - | -LL | #[allow(uncommon_codepoints)] - | ^^^^^^^^^^^^^^^^^^^ - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -error: allow(uncommon_codepoints) is ignored unless specified at crate level - --> $DIR/crate_level_only_lint.rs:17:9 - | -LL | #[allow(uncommon_codepoints)] - | ^^^^^^^^^^^^^^^^^^^ - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -error: allow(uncommon_codepoints) is ignored unless specified at crate level - --> $DIR/crate_level_only_lint.rs:4:10 - | -LL | #![allow(uncommon_codepoints)] - | ^^^^^^^^^^^^^^^^^^^ - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -error: allow(uncommon_codepoints) is ignored unless specified at crate level - --> $DIR/crate_level_only_lint.rs:9:9 - | -LL | #[allow(uncommon_codepoints)] - | ^^^^^^^^^^^^^^^^^^^ - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -error: allow(uncommon_codepoints) is ignored unless specified at crate level - --> 
$DIR/crate_level_only_lint.rs:17:9 - | -LL | #[allow(uncommon_codepoints)] - | ^^^^^^^^^^^^^^^^^^^ - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -error: aborting due to 9 previous errors +error: aborting due to 3 previous errors diff --git a/tests/ui/lint/dead-code/multiple-dead-codes-in-the-same-struct.rs b/tests/ui/lint/dead-code/multiple-dead-codes-in-the-same-struct.rs index 2003e1e293a58..a478153b3f480 100644 --- a/tests/ui/lint/dead-code/multiple-dead-codes-in-the-same-struct.rs +++ b/tests/ui/lint/dead-code/multiple-dead-codes-in-the-same-struct.rs @@ -15,6 +15,10 @@ struct Bar { _h: usize, } +// Issue 119267: this should not ICE. +#[derive(Debug)] +struct Foo(usize, #[allow(unused)] usize); + fn main() { Bar { a: 1, diff --git a/tests/ui/lint/forbid-group-group-2.rs b/tests/ui/lint/forbid-group-group-2.rs index b12fd72da7494..b3d3e30fb8d60 100644 --- a/tests/ui/lint/forbid-group-group-2.rs +++ b/tests/ui/lint/forbid-group-group-2.rs @@ -11,16 +11,4 @@ //~| WARNING previously accepted by the compiler //~| ERROR incompatible with previous //~| WARNING previously accepted by the compiler -//~| ERROR incompatible with previous -//~| WARNING previously accepted by the compiler -//~| ERROR incompatible with previous -//~| WARNING previously accepted by the compiler -//~| ERROR incompatible with previous -//~| WARNING previously accepted by the compiler -//~| ERROR incompatible with previous -//~| WARNING previously accepted by the compiler -//~| ERROR incompatible with previous -//~| WARNING previously accepted by the compiler -//~| ERROR incompatible with previous -//~| WARNING previously accepted by the compiler fn main() {} diff --git a/tests/ui/lint/forbid-group-group-2.stderr b/tests/ui/lint/forbid-group-group-2.stderr index 4a2c8fbd68ab1..80e2f566eb84d 100644 --- a/tests/ui/lint/forbid-group-group-2.stderr +++ b/tests/ui/lint/forbid-group-group-2.stderr @@ -41,83 +41,5 @@ LL | #[allow(nonstandard_style)] = note: for more information, see issue #81670 = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` -error: allow(nonstandard_style) incompatible with previous forbid - --> $DIR/forbid-group-group-2.rs:7:9 - | -LL | #![forbid(warnings)] - | -------- `forbid` level set here -... -LL | #[allow(nonstandard_style)] - | ^^^^^^^^^^^^^^^^^ overruled by previous forbid - | - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! - = note: for more information, see issue #81670 - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -error: allow(nonstandard_style) incompatible with previous forbid - --> $DIR/forbid-group-group-2.rs:7:9 - | -LL | #![forbid(warnings)] - | -------- `forbid` level set here -... -LL | #[allow(nonstandard_style)] - | ^^^^^^^^^^^^^^^^^ overruled by previous forbid - | - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! - = note: for more information, see issue #81670 - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -error: allow(nonstandard_style) incompatible with previous forbid - --> $DIR/forbid-group-group-2.rs:7:9 - | -LL | #![forbid(warnings)] - | -------- `forbid` level set here -... 
-LL | #[allow(nonstandard_style)] - | ^^^^^^^^^^^^^^^^^ overruled by previous forbid - | - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! - = note: for more information, see issue #81670 - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -error: allow(nonstandard_style) incompatible with previous forbid - --> $DIR/forbid-group-group-2.rs:7:9 - | -LL | #![forbid(warnings)] - | -------- `forbid` level set here -... -LL | #[allow(nonstandard_style)] - | ^^^^^^^^^^^^^^^^^ overruled by previous forbid - | - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! - = note: for more information, see issue #81670 - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -error: allow(nonstandard_style) incompatible with previous forbid - --> $DIR/forbid-group-group-2.rs:7:9 - | -LL | #![forbid(warnings)] - | -------- `forbid` level set here -... -LL | #[allow(nonstandard_style)] - | ^^^^^^^^^^^^^^^^^ overruled by previous forbid - | - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! - = note: for more information, see issue #81670 - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -error: allow(nonstandard_style) incompatible with previous forbid - --> $DIR/forbid-group-group-2.rs:7:9 - | -LL | #![forbid(warnings)] - | -------- `forbid` level set here -... -LL | #[allow(nonstandard_style)] - | ^^^^^^^^^^^^^^^^^ overruled by previous forbid - | - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! - = note: for more information, see issue #81670 - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -error: aborting due to 9 previous errors +error: aborting due to 3 previous errors diff --git a/tests/ui/lint/forbid-group-member.rs b/tests/ui/lint/forbid-group-member.rs index 664edeaa8b434..d03e858438b60 100644 --- a/tests/ui/lint/forbid-group-member.rs +++ b/tests/ui/lint/forbid-group-member.rs @@ -8,10 +8,6 @@ #[allow(unused_variables)] //~^ WARNING incompatible with previous forbid //~| WARNING previously accepted -//~| WARNING incompatible with previous forbid -//~| WARNING previously accepted -//~| WARNING incompatible with previous forbid -//~| WARNING previously accepted fn main() { let a: (); } diff --git a/tests/ui/lint/forbid-group-member.stderr b/tests/ui/lint/forbid-group-member.stderr index ddaaafa12ec2e..8794591bd3134 100644 --- a/tests/ui/lint/forbid-group-member.stderr +++ b/tests/ui/lint/forbid-group-member.stderr @@ -11,31 +11,5 @@ LL | #[allow(unused_variables)] = note: for more information, see issue #81670 = note: `#[warn(forbidden_lint_groups)]` on by default -warning: allow(unused_variables) incompatible with previous forbid - --> $DIR/forbid-group-member.rs:8:9 - | -LL | #![forbid(unused)] - | ------ `forbid` level set here -LL | -LL | #[allow(unused_variables)] - | ^^^^^^^^^^^^^^^^ overruled by previous forbid - | - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! 
- = note: for more information, see issue #81670 - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: allow(unused_variables) incompatible with previous forbid - --> $DIR/forbid-group-member.rs:8:9 - | -LL | #![forbid(unused)] - | ------ `forbid` level set here -LL | -LL | #[allow(unused_variables)] - | ^^^^^^^^^^^^^^^^ overruled by previous forbid - | - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! - = note: for more information, see issue #81670 - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: 3 warnings emitted +warning: 1 warning emitted diff --git a/tests/ui/lint/forbid-member-group.rs b/tests/ui/lint/forbid-member-group.rs index e2f76825a2d41..d1874aa81a53f 100644 --- a/tests/ui/lint/forbid-member-group.rs +++ b/tests/ui/lint/forbid-member-group.rs @@ -5,7 +5,6 @@ #[allow(unused)] //~^ ERROR incompatible with previous forbid -//~| ERROR incompatible with previous forbid fn main() { let a: (); } diff --git a/tests/ui/lint/forbid-member-group.stderr b/tests/ui/lint/forbid-member-group.stderr index 612dccd8d6ca6..9b32c00a3c3cb 100644 --- a/tests/ui/lint/forbid-member-group.stderr +++ b/tests/ui/lint/forbid-member-group.stderr @@ -7,17 +7,6 @@ LL | LL | #[allow(unused)] | ^^^^^^ overruled by previous forbid -error[E0453]: allow(unused) incompatible with previous forbid - --> $DIR/forbid-member-group.rs:6:9 - | -LL | #![forbid(unused_variables)] - | ---------------- `forbid` level set here -LL | -LL | #[allow(unused)] - | ^^^^^^ overruled by previous forbid - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -error: aborting due to 2 previous errors +error: aborting due to 1 previous error For more information about this error, try `rustc --explain E0453`. diff --git a/tests/ui/lint/issue-80988.rs b/tests/ui/lint/issue-80988.rs index 1e116206f7b94..5b910f1d8df9d 100644 --- a/tests/ui/lint/issue-80988.rs +++ b/tests/ui/lint/issue-80988.rs @@ -7,8 +7,4 @@ #[deny(warnings)] //~^ WARNING incompatible with previous forbid //~| WARNING being phased out -//~| WARNING incompatible with previous forbid -//~| WARNING being phased out -//~| WARNING incompatible with previous forbid -//~| WARNING being phased out fn main() {} diff --git a/tests/ui/lint/issue-80988.stderr b/tests/ui/lint/issue-80988.stderr index 7a65881b5eda5..afc93fcfeef12 100644 --- a/tests/ui/lint/issue-80988.stderr +++ b/tests/ui/lint/issue-80988.stderr @@ -11,31 +11,5 @@ LL | #[deny(warnings)] = note: for more information, see issue #81670 = note: `#[warn(forbidden_lint_groups)]` on by default -warning: deny(warnings) incompatible with previous forbid - --> $DIR/issue-80988.rs:7:8 - | -LL | #![forbid(warnings)] - | -------- `forbid` level set here -LL | -LL | #[deny(warnings)] - | ^^^^^^^^ overruled by previous forbid - | - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! - = note: for more information, see issue #81670 - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: deny(warnings) incompatible with previous forbid - --> $DIR/issue-80988.rs:7:8 - | -LL | #![forbid(warnings)] - | -------- `forbid` level set here -LL | -LL | #[deny(warnings)] - | ^^^^^^^^ overruled by previous forbid - | - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! 
- = note: for more information, see issue #81670 - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: 3 warnings emitted +warning: 1 warning emitted diff --git a/tests/ui/lint/lint-forbid-attr.rs b/tests/ui/lint/lint-forbid-attr.rs index 6d4cfd8342420..270a379c2f848 100644 --- a/tests/ui/lint/lint-forbid-attr.rs +++ b/tests/ui/lint/lint-forbid-attr.rs @@ -2,6 +2,5 @@ #[allow(deprecated)] //~^ ERROR allow(deprecated) incompatible -//~| ERROR allow(deprecated) incompatible fn main() { } diff --git a/tests/ui/lint/lint-forbid-attr.stderr b/tests/ui/lint/lint-forbid-attr.stderr index bd476a0e362d9..fa7106b5e115b 100644 --- a/tests/ui/lint/lint-forbid-attr.stderr +++ b/tests/ui/lint/lint-forbid-attr.stderr @@ -7,17 +7,6 @@ LL | LL | #[allow(deprecated)] | ^^^^^^^^^^ overruled by previous forbid -error[E0453]: allow(deprecated) incompatible with previous forbid - --> $DIR/lint-forbid-attr.rs:3:9 - | -LL | #![forbid(deprecated)] - | ---------- `forbid` level set here -LL | -LL | #[allow(deprecated)] - | ^^^^^^^^^^ overruled by previous forbid - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -error: aborting due to 2 previous errors +error: aborting due to 1 previous error For more information about this error, try `rustc --explain E0453`. diff --git a/tests/ui/lint/lint-forbid-cmdline.rs b/tests/ui/lint/lint-forbid-cmdline.rs index 5246ccb57a67a..32a92e09b14a9 100644 --- a/tests/ui/lint/lint-forbid-cmdline.rs +++ b/tests/ui/lint/lint-forbid-cmdline.rs @@ -1,6 +1,5 @@ // compile-flags: -F deprecated #[allow(deprecated)] //~ ERROR allow(deprecated) incompatible - //~| ERROR allow(deprecated) incompatible fn main() { } diff --git a/tests/ui/lint/lint-forbid-cmdline.stderr b/tests/ui/lint/lint-forbid-cmdline.stderr index ed49a2cb4274a..3920a7429763e 100644 --- a/tests/ui/lint/lint-forbid-cmdline.stderr +++ b/tests/ui/lint/lint-forbid-cmdline.stderr @@ -6,15 +6,6 @@ LL | #[allow(deprecated)] | = note: `forbid` lint level was set on command line -error[E0453]: allow(deprecated) incompatible with previous forbid - --> $DIR/lint-forbid-cmdline.rs:3:9 - | -LL | #[allow(deprecated)] - | ^^^^^^^^^^ overruled by previous forbid - | - = note: `forbid` lint level was set on command line - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -error: aborting due to 2 previous errors +error: aborting due to 1 previous error For more information about this error, try `rustc --explain E0453`. 
diff --git a/tests/ui/lint/must_not_suspend/gated.rs b/tests/ui/lint/must_not_suspend/gated.rs index b73a76555296f..fe8192b0eaa7f 100644 --- a/tests/ui/lint/must_not_suspend/gated.rs +++ b/tests/ui/lint/must_not_suspend/gated.rs @@ -3,8 +3,6 @@ // edition:2018 #![deny(must_not_suspend)] //~^ WARNING unknown lint: `must_not_suspend` -//~| WARNING unknown lint: `must_not_suspend` -//~| WARNING unknown lint: `must_not_suspend` async fn other() {} diff --git a/tests/ui/lint/must_not_suspend/gated.stderr b/tests/ui/lint/must_not_suspend/gated.stderr index f0d2117d42b5c..c238c1f3351f3 100644 --- a/tests/ui/lint/must_not_suspend/gated.stderr +++ b/tests/ui/lint/must_not_suspend/gated.stderr @@ -9,27 +9,5 @@ LL | #![deny(must_not_suspend)] = help: add `#![feature(must_not_suspend)]` to the crate attributes to enable = note: `#[warn(unknown_lints)]` on by default -warning: unknown lint: `must_not_suspend` - --> $DIR/gated.rs:4:1 - | -LL | #![deny(must_not_suspend)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `must_not_suspend` lint is unstable - = note: see issue #83310 for more information - = help: add `#![feature(must_not_suspend)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: unknown lint: `must_not_suspend` - --> $DIR/gated.rs:4:1 - | -LL | #![deny(must_not_suspend)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `must_not_suspend` lint is unstable - = note: see issue #83310 for more information - = help: add `#![feature(must_not_suspend)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: 3 warnings emitted +warning: 1 warning emitted diff --git a/tests/ui/lint/unused/issue-119383-if-let-guard.rs b/tests/ui/lint/unused/issue-119383-if-let-guard.rs new file mode 100644 index 0000000000000..71197444f45c3 --- /dev/null +++ b/tests/ui/lint/unused/issue-119383-if-let-guard.rs @@ -0,0 +1,9 @@ +#![feature(if_let_guard)] +#![deny(unused_variables)] + +fn main() { + match () { + () if let Some(b) = Some(()) => {} //~ ERROR unused variable: `b` + _ => {} + } +} diff --git a/tests/ui/lint/unused/issue-119383-if-let-guard.stderr b/tests/ui/lint/unused/issue-119383-if-let-guard.stderr new file mode 100644 index 0000000000000..5bf48bb80a8f6 --- /dev/null +++ b/tests/ui/lint/unused/issue-119383-if-let-guard.stderr @@ -0,0 +1,14 @@ +error: unused variable: `b` + --> $DIR/issue-119383-if-let-guard.rs:6:24 + | +LL | () if let Some(b) = Some(()) => {} + | ^ help: if this is intentional, prefix it with an underscore: `_b` + | +note: the lint level is defined here + --> $DIR/issue-119383-if-let-guard.rs:2:9 + | +LL | #![deny(unused_variables)] + | ^^^^^^^^^^^^^^^^ + +error: aborting due to 1 previous error + diff --git a/tests/ui/lint/unused_parens_json_suggestion.fixed b/tests/ui/lint/unused_parens_json_suggestion.fixed index c400454046748..b73197ef1bd0b 100644 --- a/tests/ui/lint/unused_parens_json_suggestion.fixed +++ b/tests/ui/lint/unused_parens_json_suggestion.fixed @@ -1,4 +1,5 @@ // compile-flags: --error-format json +// error-pattern:unnecessary parentheses // run-rustfix // The output for humans should just highlight the whole span without showing @@ -13,7 +14,7 @@ fn main() { // We want to suggest the properly-balanced expression `1 / (2 + 3)`, not // the malformed `1 / (2 + 3` - let _a = 1 / (2 + 3); //~ERROR unnecessary parentheses + let _a = 1 / (2 + 3); f(); } diff --git a/tests/ui/lint/unused_parens_json_suggestion.rs 
b/tests/ui/lint/unused_parens_json_suggestion.rs index 962c8bdd7d789..4339655cf9d51 100644 --- a/tests/ui/lint/unused_parens_json_suggestion.rs +++ b/tests/ui/lint/unused_parens_json_suggestion.rs @@ -1,4 +1,5 @@ // compile-flags: --error-format json +// error-pattern:unnecessary parentheses // run-rustfix // The output for humans should just highlight the whole span without showing @@ -13,7 +14,7 @@ fn main() { // We want to suggest the properly-balanced expression `1 / (2 + 3)`, not // the malformed `1 / (2 + 3` - let _a = (1 / (2 + 3)); //~ERROR unnecessary parentheses + let _a = (1 / (2 + 3)); f(); } diff --git a/tests/ui/lint/unused_parens_json_suggestion.stderr b/tests/ui/lint/unused_parens_json_suggestion.stderr index 4bdfee9159b2d..88f6be4236b2b 100644 --- a/tests/ui/lint/unused_parens_json_suggestion.stderr +++ b/tests/ui/lint/unused_parens_json_suggestion.stderr @@ -1,11 +1,11 @@ -{"$message_type":"diagnostic","message":"unnecessary parentheses around assigned value","code":{"code":"unused_parens","explanation":null},"level":"error","spans":[{"file_name":"$DIR/unused_parens_json_suggestion.rs","byte_start":577,"byte_end":578,"line_start":16,"line_end":16,"column_start":14,"column_end":15,"is_primary":true,"text":[{"text":" let _a = (1 / (2 + 3)); - --> $DIR/unused_parens_json_suggestion.rs:16:14 +{"$message_type":"diagnostic","message":"unnecessary parentheses around assigned value","code":{"code":"unused_parens","explanation":null},"level":"error","spans":[{"file_name":"$DIR/unused_parens_json_suggestion.rs","byte_start":618,"byte_end":619,"line_start":17,"line_end":17,"column_start":14,"column_end":15,"is_primary":true,"text":[{"text":" let _a = (1 / (2 + 3));","highlight_start":14,"highlight_end":15}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null},{"file_name":"$DIR/unused_parens_json_suggestion.rs","byte_start":630,"byte_end":631,"line_start":17,"line_end":17,"column_start":26,"column_end":27,"is_primary":true,"text":[{"text":" let _a = (1 / (2 + 3));","highlight_start":26,"highlight_end":27}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null}],"children":[{"message":"the lint level is defined here","code":null,"level":"note","spans":[{"file_name":"$DIR/unused_parens_json_suggestion.rs","byte_start":436,"byte_end":449,"line_start":11,"line_end":11,"column_start":9,"column_end":22,"is_primary":true,"text":[{"text":"#![deny(unused_parens)]","highlight_start":9,"highlight_end":22}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null}],"children":[],"rendered":null},{"message":"remove these parentheses","code":null,"level":"help","spans":[{"file_name":"$DIR/unused_parens_json_suggestion.rs","byte_start":618,"byte_end":619,"line_start":17,"line_end":17,"column_start":14,"column_end":15,"is_primary":true,"text":[{"text":" let _a = (1 / (2 + 3));","highlight_start":14,"highlight_end":15}],"label":null,"suggested_replacement":"","suggestion_applicability":"MachineApplicable","expansion":null},{"file_name":"$DIR/unused_parens_json_suggestion.rs","byte_start":630,"byte_end":631,"line_start":17,"line_end":17,"column_start":26,"column_end":27,"is_primary":true,"text":[{"text":" let _a = (1 / (2 + 3));","highlight_start":26,"highlight_end":27}],"label":null,"suggested_replacement":"","suggestion_applicability":"MachineApplicable","expansion":null}],"children":[],"rendered":null}],"rendered":"error: unnecessary parentheses around assigned value + --> 
$DIR/unused_parens_json_suggestion.rs:17:14 | LL | let _a = (1 / (2 + 3)); | ^ ^ | note: the lint level is defined here - --> $DIR/unused_parens_json_suggestion.rs:10:9 + --> $DIR/unused_parens_json_suggestion.rs:11:9 | LL | #![deny(unused_parens)] | ^^^^^^^^^^^^^ diff --git a/tests/ui/lint/unused_parens_remove_json_suggestion.fixed b/tests/ui/lint/unused_parens_remove_json_suggestion.fixed index 8a57cd57385f3..39d7a1127b642 100644 --- a/tests/ui/lint/unused_parens_remove_json_suggestion.fixed +++ b/tests/ui/lint/unused_parens_remove_json_suggestion.fixed @@ -1,4 +1,5 @@ // compile-flags: --error-format json +// error-pattern:unnecessary parentheses // run-rustfix // The output for humans should just highlight the whole span without showing @@ -14,7 +15,7 @@ fn main() { let _b = false; - if _b { //~ ERROR unnecessary parentheses + if _b { println!("hello"); } @@ -25,29 +26,29 @@ fn main() { fn f() -> bool { let c = false; - if c { //~ ERROR unnecessary parentheses + if c { println!("next"); } - if c { //~ ERROR unnecessary parentheses + if c { println!("prev"); } while false && true { - if c { //~ ERROR unnecessary parentheses + if c { println!("norm"); } } - while true && false { //~ ERROR unnecessary parentheses - for _ in 0 .. 3 { //~ ERROR unnecessary parentheses + while true && false { + for _ in 0 .. 3 { println!("e~") } } - for _ in 0 .. 3 { //~ ERROR unnecessary parentheses - while true && false { //~ ERROR unnecessary parentheses + for _ in 0 .. 3 { + while true && false { println!("e~") } } diff --git a/tests/ui/lint/unused_parens_remove_json_suggestion.rs b/tests/ui/lint/unused_parens_remove_json_suggestion.rs index 952332d54e9c1..2748bd3f73df6 100644 --- a/tests/ui/lint/unused_parens_remove_json_suggestion.rs +++ b/tests/ui/lint/unused_parens_remove_json_suggestion.rs @@ -1,4 +1,5 @@ // compile-flags: --error-format json +// error-pattern:unnecessary parentheses // run-rustfix // The output for humans should just highlight the whole span without showing @@ -14,7 +15,7 @@ fn main() { let _b = false; - if (_b) { //~ ERROR unnecessary parentheses + if (_b) { println!("hello"); } @@ -25,29 +26,29 @@ fn main() { fn f() -> bool { let c = false; - if(c) { //~ ERROR unnecessary parentheses + if(c) { println!("next"); } - if (c){ //~ ERROR unnecessary parentheses + if (c){ println!("prev"); } while (false && true){ - if (c) { //~ ERROR unnecessary parentheses + if (c) { println!("norm"); } } - while(true && false) { //~ ERROR unnecessary parentheses - for _ in (0 .. 3){ //~ ERROR unnecessary parentheses + while(true && false) { + for _ in (0 .. 3){ println!("e~") } } - for _ in (0 .. 3) { //~ ERROR unnecessary parentheses - while (true && false) { //~ ERROR unnecessary parentheses + for _ in (0 .. 
3) { + while (true && false) { println!("e~") } } diff --git a/tests/ui/lint/unused_parens_remove_json_suggestion.stderr b/tests/ui/lint/unused_parens_remove_json_suggestion.stderr index 7521d41cc939d..80371c1594f71 100644 --- a/tests/ui/lint/unused_parens_remove_json_suggestion.stderr +++ b/tests/ui/lint/unused_parens_remove_json_suggestion.stderr @@ -1,11 +1,11 @@ -{"$message_type":"diagnostic","message":"unnecessary parentheses around `if` condition","code":{"code":"unused_parens","explanation":null},"level":"error","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":481,"byte_end":482,"line_start":17,"line_end":17,"column_start":8,"column_end":9,"is_primary":true,"text":[{"text":" if (_b) { - --> $DIR/unused_parens_remove_json_suggestion.rs:17:8 +{"$message_type":"diagnostic","message":"unnecessary parentheses around `if` condition","code":{"code":"unused_parens","explanation":null},"level":"error","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":522,"byte_end":523,"line_start":18,"line_end":18,"column_start":8,"column_end":9,"is_primary":true,"text":[{"text":" if (_b) {","highlight_start":8,"highlight_end":9}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null},{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":525,"byte_end":526,"line_start":18,"line_end":18,"column_start":11,"column_end":12,"is_primary":true,"text":[{"text":" if (_b) {","highlight_start":11,"highlight_end":12}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null}],"children":[{"message":"the lint level is defined here","code":null,"level":"note","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":436,"byte_end":449,"line_start":11,"line_end":11,"column_start":9,"column_end":22,"is_primary":true,"text":[{"text":"#![deny(unused_parens)]","highlight_start":9,"highlight_end":22}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null}],"children":[],"rendered":null},{"message":"remove these parentheses","code":null,"level":"help","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":522,"byte_end":523,"line_start":18,"line_end":18,"column_start":8,"column_end":9,"is_primary":true,"text":[{"text":" if (_b) {","highlight_start":8,"highlight_end":9}],"label":null,"suggested_replacement":"","suggestion_applicability":"MachineApplicable","expansion":null},{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":525,"byte_end":526,"line_start":18,"line_end":18,"column_start":11,"column_end":12,"is_primary":true,"text":[{"text":" if (_b) {","highlight_start":11,"highlight_end":12}],"label":null,"suggested_replacement":"","suggestion_applicability":"MachineApplicable","expansion":null}],"children":[],"rendered":null}],"rendered":"error: unnecessary parentheses around `if` condition + --> $DIR/unused_parens_remove_json_suggestion.rs:18:8 | LL | if (_b) { | ^ ^ | note: the lint level is defined here - --> $DIR/unused_parens_remove_json_suggestion.rs:10:9 + --> $DIR/unused_parens_remove_json_suggestion.rs:11:9 | LL | #![deny(unused_parens)] | ^^^^^^^^^^^^^ @@ -16,8 +16,8 @@ LL + if _b { | "} -{"$message_type":"diagnostic","message":"unnecessary parentheses around `if` 
condition","code":{"code":"unused_parens","explanation":null},"level":"error","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":612,"byte_end":613,"line_start":28,"line_end":28,"column_start":7,"column_end":8,"is_primary":true,"text":[{"text":" if(c) { - --> $DIR/unused_parens_remove_json_suggestion.rs:28:7 +{"$message_type":"diagnostic","message":"unnecessary parentheses around `if` condition","code":{"code":"unused_parens","explanation":null},"level":"error","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":619,"byte_end":620,"line_start":29,"line_end":29,"column_start":7,"column_end":8,"is_primary":true,"text":[{"text":" if(c) {","highlight_start":7,"highlight_end":8}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null},{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":621,"byte_end":622,"line_start":29,"line_end":29,"column_start":9,"column_end":10,"is_primary":true,"text":[{"text":" if(c) {","highlight_start":9,"highlight_end":10}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null}],"children":[{"message":"remove these parentheses","code":null,"level":"help","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":619,"byte_end":620,"line_start":29,"line_end":29,"column_start":7,"column_end":8,"is_primary":true,"text":[{"text":" if(c) {","highlight_start":7,"highlight_end":8}],"label":null,"suggested_replacement":" ","suggestion_applicability":"MachineApplicable","expansion":null},{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":621,"byte_end":622,"line_start":29,"line_end":29,"column_start":9,"column_end":10,"is_primary":true,"text":[{"text":" if(c) {","highlight_start":9,"highlight_end":10}],"label":null,"suggested_replacement":"","suggestion_applicability":"MachineApplicable","expansion":null}],"children":[],"rendered":null}],"rendered":"error: unnecessary parentheses around `if` condition + --> $DIR/unused_parens_remove_json_suggestion.rs:29:7 | LL | if(c) { | ^ ^ @@ -29,8 +29,8 @@ LL + if c { | "} -{"$message_type":"diagnostic","message":"unnecessary parentheses around `if` condition","code":{"code":"unused_parens","explanation":null},"level":"error","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":692,"byte_end":693,"line_start":32,"line_end":32,"column_start":8,"column_end":9,"is_primary":true,"text":[{"text":" if (c){ - --> $DIR/unused_parens_remove_json_suggestion.rs:32:8 +{"$message_type":"diagnostic","message":"unnecessary parentheses around `if` condition","code":{"code":"unused_parens","explanation":null},"level":"error","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":665,"byte_end":666,"line_start":33,"line_end":33,"column_start":8,"column_end":9,"is_primary":true,"text":[{"text":" if (c){","highlight_start":8,"highlight_end":9}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null},{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":667,"byte_end":668,"line_start":33,"line_end":33,"column_start":10,"column_end":11,"is_primary":true,"text":[{"text":" if (c){","highlight_start":10,"highlight_end":11}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null}],"children":[{"message":"remove these 
parentheses","code":null,"level":"help","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":665,"byte_end":666,"line_start":33,"line_end":33,"column_start":8,"column_end":9,"is_primary":true,"text":[{"text":" if (c){","highlight_start":8,"highlight_end":9}],"label":null,"suggested_replacement":"","suggestion_applicability":"MachineApplicable","expansion":null},{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":667,"byte_end":668,"line_start":33,"line_end":33,"column_start":10,"column_end":11,"is_primary":true,"text":[{"text":" if (c){","highlight_start":10,"highlight_end":11}],"label":null,"suggested_replacement":" ","suggestion_applicability":"MachineApplicable","expansion":null}],"children":[],"rendered":null}],"rendered":"error: unnecessary parentheses around `if` condition + --> $DIR/unused_parens_remove_json_suggestion.rs:33:8 | LL | if (c){ | ^ ^ @@ -42,8 +42,8 @@ LL + if c { | "} -{"$message_type":"diagnostic","message":"unnecessary parentheses around `while` condition","code":{"code":"unused_parens","explanation":null},"level":"error","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":774,"byte_end":775,"line_start":36,"line_end":36,"column_start":11,"column_end":12,"is_primary":true,"text":[{"text":" while (false && true){","highlight_start":11,"highlight_end":12}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null},{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":788,"byte_end":789,"line_start":36,"line_end":36,"column_start":25,"column_end":26,"is_primary":true,"text":[{"text":" while (false && true){","highlight_start":25,"highlight_end":26}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null}],"children":[{"message":"remove these parentheses","code":null,"level":"help","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":774,"byte_end":775,"line_start":36,"line_end":36,"column_start":11,"column_end":12,"is_primary":true,"text":[{"text":" while (false && true){","highlight_start":11,"highlight_end":12}],"label":null,"suggested_replacement":"","suggestion_applicability":"MachineApplicable","expansion":null},{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":788,"byte_end":789,"line_start":36,"line_end":36,"column_start":25,"column_end":26,"is_primary":true,"text":[{"text":" while (false && true){","highlight_start":25,"highlight_end":26}],"label":null,"suggested_replacement":" ","suggestion_applicability":"MachineApplicable","expansion":null}],"children":[],"rendered":null}],"rendered":"error: unnecessary parentheses around `while` condition - --> $DIR/unused_parens_remove_json_suggestion.rs:36:11 +{"$message_type":"diagnostic","message":"unnecessary parentheses around `while` condition","code":{"code":"unused_parens","explanation":null},"level":"error","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":713,"byte_end":714,"line_start":37,"line_end":37,"column_start":11,"column_end":12,"is_primary":true,"text":[{"text":" while (false && true){","highlight_start":11,"highlight_end":12}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null},{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":727,"byte_end":728,"line_start":37,"line_end":37,"column_start":25,"column_end":26,"is_primary":true,"text":[{"text":" while (false && 
true){","highlight_start":25,"highlight_end":26}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null}],"children":[{"message":"remove these parentheses","code":null,"level":"help","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":713,"byte_end":714,"line_start":37,"line_end":37,"column_start":11,"column_end":12,"is_primary":true,"text":[{"text":" while (false && true){","highlight_start":11,"highlight_end":12}],"label":null,"suggested_replacement":"","suggestion_applicability":"MachineApplicable","expansion":null},{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":727,"byte_end":728,"line_start":37,"line_end":37,"column_start":25,"column_end":26,"is_primary":true,"text":[{"text":" while (false && true){","highlight_start":25,"highlight_end":26}],"label":null,"suggested_replacement":" ","suggestion_applicability":"MachineApplicable","expansion":null}],"children":[],"rendered":null}],"rendered":"error: unnecessary parentheses around `while` condition + --> $DIR/unused_parens_remove_json_suggestion.rs:37:11 | LL | while (false && true){ | ^ ^ @@ -55,8 +55,8 @@ LL + while false && true { | "} -{"$message_type":"diagnostic","message":"unnecessary parentheses around `if` condition","code":{"code":"unused_parens","explanation":null},"level":"error","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":802,"byte_end":803,"line_start":37,"line_end":37,"column_start":12,"column_end":13,"is_primary":true,"text":[{"text":" if (c) { - --> $DIR/unused_parens_remove_json_suggestion.rs:37:12 +{"$message_type":"diagnostic","message":"unnecessary parentheses around `if` condition","code":{"code":"unused_parens","explanation":null},"level":"error","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":741,"byte_end":742,"line_start":38,"line_end":38,"column_start":12,"column_end":13,"is_primary":true,"text":[{"text":" if (c) {","highlight_start":12,"highlight_end":13}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null},{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":743,"byte_end":744,"line_start":38,"line_end":38,"column_start":14,"column_end":15,"is_primary":true,"text":[{"text":" if (c) {","highlight_start":14,"highlight_end":15}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null}],"children":[{"message":"remove these parentheses","code":null,"level":"help","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":741,"byte_end":742,"line_start":38,"line_end":38,"column_start":12,"column_end":13,"is_primary":true,"text":[{"text":" if (c) {","highlight_start":12,"highlight_end":13}],"label":null,"suggested_replacement":"","suggestion_applicability":"MachineApplicable","expansion":null},{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":743,"byte_end":744,"line_start":38,"line_end":38,"column_start":14,"column_end":15,"is_primary":true,"text":[{"text":" if (c) {","highlight_start":14,"highlight_end":15}],"label":null,"suggested_replacement":"","suggestion_applicability":"MachineApplicable","expansion":null}],"children":[],"rendered":null}],"rendered":"error: unnecessary parentheses around `if` condition + --> $DIR/unused_parens_remove_json_suggestion.rs:38:12 | LL | if (c) { | ^ ^ @@ -68,8 +68,8 @@ LL + if c { | "} -{"$message_type":"diagnostic","message":"unnecessary parentheses around `while` 
condition","code":{"code":"unused_parens","explanation":null},"level":"error","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":899,"byte_end":900,"line_start":43,"line_end":43,"column_start":10,"column_end":11,"is_primary":true,"text":[{"text":" while(true && false) { - --> $DIR/unused_parens_remove_json_suggestion.rs:43:10 +{"$message_type":"diagnostic","message":"unnecessary parentheses around `while` condition","code":{"code":"unused_parens","explanation":null},"level":"error","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":804,"byte_end":805,"line_start":44,"line_end":44,"column_start":10,"column_end":11,"is_primary":true,"text":[{"text":" while(true && false) {","highlight_start":10,"highlight_end":11}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null},{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":818,"byte_end":819,"line_start":44,"line_end":44,"column_start":24,"column_end":25,"is_primary":true,"text":[{"text":" while(true && false) {","highlight_start":24,"highlight_end":25}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null}],"children":[{"message":"remove these parentheses","code":null,"level":"help","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":804,"byte_end":805,"line_start":44,"line_end":44,"column_start":10,"column_end":11,"is_primary":true,"text":[{"text":" while(true && false) {","highlight_start":10,"highlight_end":11}],"label":null,"suggested_replacement":" ","suggestion_applicability":"MachineApplicable","expansion":null},{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":818,"byte_end":819,"line_start":44,"line_end":44,"column_start":24,"column_end":25,"is_primary":true,"text":[{"text":" while(true && false) {","highlight_start":24,"highlight_end":25}],"label":null,"suggested_replacement":"","suggestion_applicability":"MachineApplicable","expansion":null}],"children":[],"rendered":null}],"rendered":"error: unnecessary parentheses around `while` condition + --> $DIR/unused_parens_remove_json_suggestion.rs:44:10 | LL | while(true && false) { | ^ ^ @@ -81,8 +81,8 @@ LL + while true && false { | "} -{"$message_type":"diagnostic","message":"unnecessary parentheses around `for` iterator expression","code":{"code":"unused_parens","explanation":null},"level":"error","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":968,"byte_end":969,"line_start":44,"line_end":44,"column_start":18,"column_end":19,"is_primary":true,"text":[{"text":" for _ in (0 .. 3){ - --> $DIR/unused_parens_remove_json_suggestion.rs:44:18 +{"$message_type":"diagnostic","message":"unnecessary parentheses around `for` iterator expression","code":{"code":"unused_parens","explanation":null},"level":"error","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":839,"byte_end":840,"line_start":45,"line_end":45,"column_start":18,"column_end":19,"is_primary":true,"text":[{"text":" for _ in (0 .. 3){","highlight_start":18,"highlight_end":19}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null},{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":846,"byte_end":847,"line_start":45,"line_end":45,"column_start":25,"column_end":26,"is_primary":true,"text":[{"text":" for _ in (0 .. 
3){","highlight_start":25,"highlight_end":26}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null}],"children":[{"message":"remove these parentheses","code":null,"level":"help","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":839,"byte_end":840,"line_start":45,"line_end":45,"column_start":18,"column_end":19,"is_primary":true,"text":[{"text":" for _ in (0 .. 3){","highlight_start":18,"highlight_end":19}],"label":null,"suggested_replacement":"","suggestion_applicability":"MachineApplicable","expansion":null},{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":846,"byte_end":847,"line_start":45,"line_end":45,"column_start":25,"column_end":26,"is_primary":true,"text":[{"text":" for _ in (0 .. 3){","highlight_start":25,"highlight_end":26}],"label":null,"suggested_replacement":" ","suggestion_applicability":"MachineApplicable","expansion":null}],"children":[],"rendered":null}],"rendered":"error: unnecessary parentheses around `for` iterator expression + --> $DIR/unused_parens_remove_json_suggestion.rs:45:18 | LL | for _ in (0 .. 3){ | ^ ^ @@ -94,8 +94,8 @@ LL + for _ in 0 .. 3 { | "} -{"$message_type":"diagnostic","message":"unnecessary parentheses around `for` iterator expression","code":{"code":"unused_parens","explanation":null},"level":"error","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":1069,"byte_end":1070,"line_start":49,"line_end":49,"column_start":14,"column_end":15,"is_primary":true,"text":[{"text":" for _ in (0 .. 3) { - --> $DIR/unused_parens_remove_json_suggestion.rs:49:14 +{"$message_type":"diagnostic","message":"unnecessary parentheses around `for` iterator expression","code":{"code":"unused_parens","explanation":null},"level":"error","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":906,"byte_end":907,"line_start":50,"line_end":50,"column_start":14,"column_end":15,"is_primary":true,"text":[{"text":" for _ in (0 .. 3) {","highlight_start":14,"highlight_end":15}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null},{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":913,"byte_end":914,"line_start":50,"line_end":50,"column_start":21,"column_end":22,"is_primary":true,"text":[{"text":" for _ in (0 .. 3) {","highlight_start":21,"highlight_end":22}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null}],"children":[{"message":"remove these parentheses","code":null,"level":"help","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":906,"byte_end":907,"line_start":50,"line_end":50,"column_start":14,"column_end":15,"is_primary":true,"text":[{"text":" for _ in (0 .. 3) {","highlight_start":14,"highlight_end":15}],"label":null,"suggested_replacement":"","suggestion_applicability":"MachineApplicable","expansion":null},{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":913,"byte_end":914,"line_start":50,"line_end":50,"column_start":21,"column_end":22,"is_primary":true,"text":[{"text":" for _ in (0 .. 3) {","highlight_start":21,"highlight_end":22}],"label":null,"suggested_replacement":"","suggestion_applicability":"MachineApplicable","expansion":null}],"children":[],"rendered":null}],"rendered":"error: unnecessary parentheses around `for` iterator expression + --> $DIR/unused_parens_remove_json_suggestion.rs:50:14 | LL | for _ in (0 .. 3) { | ^ ^ @@ -107,8 +107,8 @@ LL + for _ in 0 .. 
3 { | "} -{"$message_type":"diagnostic","message":"unnecessary parentheses around `while` condition","code":{"code":"unused_parens","explanation":null},"level":"error","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":1128,"byte_end":1129,"line_start":50,"line_end":50,"column_start":15,"column_end":16,"is_primary":true,"text":[{"text":" while (true && false) { - --> $DIR/unused_parens_remove_json_suggestion.rs:50:15 +{"$message_type":"diagnostic","message":"unnecessary parentheses around `while` condition","code":{"code":"unused_parens","explanation":null},"level":"error","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":931,"byte_end":932,"line_start":51,"line_end":51,"column_start":15,"column_end":16,"is_primary":true,"text":[{"text":" while (true && false) {","highlight_start":15,"highlight_end":16}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null},{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":945,"byte_end":946,"line_start":51,"line_end":51,"column_start":29,"column_end":30,"is_primary":true,"text":[{"text":" while (true && false) {","highlight_start":29,"highlight_end":30}],"label":null,"suggested_replacement":null,"suggestion_applicability":null,"expansion":null}],"children":[{"message":"remove these parentheses","code":null,"level":"help","spans":[{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":931,"byte_end":932,"line_start":51,"line_end":51,"column_start":15,"column_end":16,"is_primary":true,"text":[{"text":" while (true && false) {","highlight_start":15,"highlight_end":16}],"label":null,"suggested_replacement":"","suggestion_applicability":"MachineApplicable","expansion":null},{"file_name":"$DIR/unused_parens_remove_json_suggestion.rs","byte_start":945,"byte_end":946,"line_start":51,"line_end":51,"column_start":29,"column_end":30,"is_primary":true,"text":[{"text":" while (true && false) {","highlight_start":29,"highlight_end":30}],"label":null,"suggested_replacement":"","suggestion_applicability":"MachineApplicable","expansion":null}],"children":[],"rendered":null}],"rendered":"error: unnecessary parentheses around `while` condition + --> $DIR/unused_parens_remove_json_suggestion.rs:51:15 | LL | while (true && false) { | ^ ^ diff --git a/tests/ui/macros/issue-118786.stderr b/tests/ui/macros/issue-118786.stderr index ca3a40f31c1f5..1a8ac9340daa8 100644 --- a/tests/ui/macros/issue-118786.stderr +++ b/tests/ui/macros/issue-118786.stderr @@ -6,8 +6,8 @@ LL | macro_rules! $macro_name { | help: change the delimiters to curly braces | -LL | macro_rules! {} { - | ~ + +LL | macro_rules! {$macro_name} { + | + + help: add a semicolon | LL | macro_rules! $macro_name; { diff --git a/tests/ui/macros/stringify.rs b/tests/ui/macros/stringify.rs index 6fc12509aad6c..192e6e0cc985d 100644 --- a/tests/ui/macros/stringify.rs +++ b/tests/ui/macros/stringify.rs @@ -204,6 +204,16 @@ fn test_expr() { } ], "match self { Ok => 1, Err => 0, }" ); + macro_rules! c2_match_arm { + ([ $expr:expr ], $expr_expected:expr, $tokens_expected:expr $(,)?) => { + c2!(expr, [ match () { _ => $expr } ], $expr_expected, $tokens_expected); + }; + } + c2_match_arm!( + [ { 1 } - 1 ], + "match () { _ => ({ 1 }) - 1, }", + "match() { _ => { 1 } - 1 }", + ); // ExprKind::Closure c1!(expr, [ || {} ], "|| {}"); @@ -651,6 +661,16 @@ fn test_stmt() { "let (a, b): (u32, u32) = (1, 2);", "let(a, b): (u32, u32) = (1, 2)" // FIXME ); + macro_rules! 
c2_let_expr_minus_one { + ([ $expr:expr ], $stmt_expected:expr, $tokens_expected:expr $(,)?) => { + c2!(stmt, [ let _ = $expr - 1 ], $stmt_expected, $tokens_expected); + }; + } + c2_let_expr_minus_one!( + [ match void {} ], + "let _ = match void {} - 1;", + "let _ = match void {} - 1", + ); // StmtKind::Item c1!(stmt, [ struct S; ], "struct S;"); @@ -661,6 +681,46 @@ fn test_stmt() { // StmtKind::Semi c2!(stmt, [ 1 + 1 ], "1 + 1;", "1 + 1"); + macro_rules! c2_expr_as_stmt { + // Parse as expr, then reparse as stmt. + // + // The c2_minus_one macro below can't directly call `c2!(stmt, ...)` + // because `$expr - 1` cannot be parsed directly as a stmt. A statement + // boundary occurs after the `match void {}`, after which the `-` token + // hits "no rules expected this token in macro call". + // + // The unwanted statement boundary is exactly why the pretty-printer is + // injecting parentheses around the subexpression, which is the behavior + // we are interested in testing. + ([ $expr:expr ], $stmt_expected:expr, $tokens_expected:expr $(,)?) => { + c2!(stmt, [ $expr ], $stmt_expected, $tokens_expected); + }; + } + macro_rules! c2_minus_one { + ([ $expr:expr ], $stmt_expected:expr, $tokens_expected:expr $(,)?) => { + c2_expr_as_stmt!([ $expr - 1 ], $stmt_expected, $tokens_expected); + }; + } + c2_minus_one!( + [ match void {} ], + "(match void {}) - 1;", + "match void {} - 1", + ); + c2_minus_one!( + [ match void {}() ], + "(match void {})() - 1;", + "match void {}() - 1", + ); + c2_minus_one!( + [ match void {}[0] ], + "(match void {})[0] - 1;", + "match void {}[0] - 1", + ); + c2_minus_one!( + [ loop { break 1; } ], + "(loop { break 1; }) - 1;", + "loop { break 1; } - 1", + ); // StmtKind::Empty c1!(stmt, [ ; ], ";"); diff --git a/tests/ui/methods/disambiguate-associated-function-first-arg.rs b/tests/ui/methods/disambiguate-associated-function-first-arg.rs new file mode 100644 index 0000000000000..4c8192fc14bd3 --- /dev/null +++ b/tests/ui/methods/disambiguate-associated-function-first-arg.rs @@ -0,0 +1,49 @@ +struct A {} + +fn main() { + let _a = A {}; + _a.new(1); + //~^ ERROR no method named `new` found for struct `A` in the current scope +} + +trait M { + fn new(_a: i32); +} +impl M for A { + fn new(_a: i32) {} +} + +trait N { + fn new(_a: Self, _b: i32); +} +impl N for A { + fn new(_a: Self, _b: i32) {} +} + +trait O { + fn new(_a: Self, _b: i32); +} +impl O for A { + fn new(_a: A, _b: i32) {} +} + +struct S; + +trait TraitA { + fn f(self); +} +trait TraitB { + fn f(self); +} + +impl TraitA for T { + fn f(self) {} +} +impl TraitB for T { + fn f(self) {} +} + +fn test() { + S.f(); + //~^ multiple applicable items in scope +} diff --git a/tests/ui/methods/disambiguate-associated-function-first-arg.stderr b/tests/ui/methods/disambiguate-associated-function-first-arg.stderr new file mode 100644 index 0000000000000..341b7a9100329 --- /dev/null +++ b/tests/ui/methods/disambiguate-associated-function-first-arg.stderr @@ -0,0 +1,67 @@ +error[E0599]: no method named `new` found for struct `A` in the current scope + --> $DIR/disambiguate-associated-function-first-arg.rs:5:8 + | +LL | struct A {} + | -------- method `new` not found for this struct +... 
+LL | _a.new(1); + | ^^^ this is an associated function, not a method + | + = note: found the following associated functions; to be used as methods, functions must have a `self` parameter +note: candidate #1 is defined in the trait `M` + --> $DIR/disambiguate-associated-function-first-arg.rs:10:5 + | +LL | fn new(_a: i32); + | ^^^^^^^^^^^^^^^^ +note: candidate #2 is defined in the trait `N` + --> $DIR/disambiguate-associated-function-first-arg.rs:17:5 + | +LL | fn new(_a: Self, _b: i32); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ +note: candidate #3 is defined in the trait `O` + --> $DIR/disambiguate-associated-function-first-arg.rs:24:5 + | +LL | fn new(_a: Self, _b: i32); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ +help: disambiguate the associated function for candidate #1 + | +LL | ::new(1); + | ~~~~~~~~~~~~~~~~ +help: disambiguate the associated function for candidate #2 + | +LL | ::new(_a, 1); + | ~~~~~~~~~~~~~~~~~~~~ +help: disambiguate the associated function for candidate #3 + | +LL | ::new(_a, 1); + | ~~~~~~~~~~~~~~~~~~~~ + +error[E0034]: multiple applicable items in scope + --> $DIR/disambiguate-associated-function-first-arg.rs:47:7 + | +LL | S.f(); + | ^ multiple `f` found + | +note: candidate #1 is defined in an impl of the trait `TraitA` for the type `T` + --> $DIR/disambiguate-associated-function-first-arg.rs:40:5 + | +LL | fn f(self) {} + | ^^^^^^^^^^ +note: candidate #2 is defined in an impl of the trait `TraitB` for the type `T` + --> $DIR/disambiguate-associated-function-first-arg.rs:43:5 + | +LL | fn f(self) {} + | ^^^^^^^^^^ +help: disambiguate the method for candidate #1 + | +LL | TraitA::f(S); + | ~~~~~~~~~~~~ +help: disambiguate the method for candidate #2 + | +LL | TraitB::f(S); + | ~~~~~~~~~~~~ + +error: aborting due to 2 previous errors + +Some errors have detailed explanations: E0034, E0599. +For more information about an error, try `rustc --explain E0034`. 
diff --git a/tests/ui/methods/method-ambiguity-no-rcvr.stderr b/tests/ui/methods/method-ambiguity-no-rcvr.stderr index 73f6043f256ea..3b6eb07393ac8 100644 --- a/tests/ui/methods/method-ambiguity-no-rcvr.stderr +++ b/tests/ui/methods/method-ambiguity-no-rcvr.stderr @@ -20,12 +20,12 @@ LL | fn foo() {} | ^^^^^^^^ help: disambiguate the associated function for candidate #1 | -LL | ::foo(Qux); - | ~~~~~~~~~~~~~~~~~~~~~~ +LL | ::foo(); + | ~~~~~~~~~~~~~~~~~~~ help: disambiguate the associated function for candidate #2 | -LL | ::foo(Qux); - | ~~~~~~~~~~~~~~~~~~~~~~~~~ +LL | ::foo(); + | ~~~~~~~~~~~~~~~~~~~~~~ error: aborting due to 1 previous error diff --git a/tests/ui/mir/lint/storage-live.stderr b/tests/ui/mir/lint/storage-live.stderr index 1037ddc88ef65..02156dd858db2 100644 --- a/tests/ui/mir/lint/storage-live.stderr +++ b/tests/ui/mir/lint/storage-live.stderr @@ -1,4 +1,4 @@ -error: internal compiler error: broken MIR in Item(DefId(0:8 ~ storage_live[HASH]::multiple_storage)) (before pass CheckPackedRef) at bb0[1]: +error: internal compiler error: broken MIR in Item(DefId(0:8 ~ storage_live[HASH]::multiple_storage)) (after pass CheckPackedRef) at bb0[1]: StorageLive(_1) which already has storage here --> $DIR/storage-live.rs:22:13 | diff --git a/tests/ui/mir/ssa_call_ret.rs b/tests/ui/mir/ssa_call_ret.rs index 6132a6691dea3..f8a83249225c2 100644 --- a/tests/ui/mir/ssa_call_ret.rs +++ b/tests/ui/mir/ssa_call_ret.rs @@ -13,7 +13,7 @@ pub fn f() -> u32 { mir!( let a: u32; { - Call(a = g(), bb1, UnwindCleanup(bb2)) + Call(a = g(), ReturnTo(bb1), UnwindCleanup(bb2)) } bb1 = { RET = a; diff --git a/tests/ui/mir/validate/critical-edge.rs b/tests/ui/mir/validate/critical-edge.rs index 9ef655cd1bb4a..3bb732ad3f776 100644 --- a/tests/ui/mir/validate/critical-edge.rs +++ b/tests/ui/mir/validate/critical-edge.rs @@ -20,7 +20,7 @@ pub fn f(a: u32) -> u32 { } } bb1 = { - Call(RET = f(1), bb2, UnwindTerminate(ReasonAbi)) + Call(RET = f(1), ReturnTo(bb2), UnwindTerminate(ReasonAbi)) } bb2 = { diff --git a/tests/ui/mir/validate/noncleanup-cleanup.rs b/tests/ui/mir/validate/noncleanup-cleanup.rs index 0a1c4528aa6d9..a14ab44257fa1 100644 --- a/tests/ui/mir/validate/noncleanup-cleanup.rs +++ b/tests/ui/mir/validate/noncleanup-cleanup.rs @@ -11,7 +11,7 @@ use core::intrinsics::mir::*; pub fn main() { mir!( { - Call(RET = main(), block, UnwindCleanup(block)) + Call(RET = main(), ReturnTo(block), UnwindCleanup(block)) } block = { Return() diff --git a/tests/ui/mismatched_types/const-fn-in-trait.stderr b/tests/ui/mismatched_types/const-fn-in-trait.stderr index 7d1fbe45c5302..06976933b2f94 100644 --- a/tests/ui/mismatched_types/const-fn-in-trait.stderr +++ b/tests/ui/mismatched_types/const-fn-in-trait.stderr @@ -2,13 +2,19 @@ error[E0379]: functions in traits cannot be declared const --> $DIR/const-fn-in-trait.rs:3:5 | LL | const fn g(); - | ^^^^^ functions in traits cannot be const + | ^^^^^- + | | + | functions in traits cannot be const + | help: remove the `const` -error[E0379]: functions in traits cannot be declared const +error[E0379]: functions in trait impls cannot be declared const --> $DIR/const-fn-in-trait.rs:7:5 | LL | const fn f() -> u32 { 22 } - | ^^^^^ functions in traits cannot be const + | ^^^^^- + | | + | functions in trait impls cannot be const + | help: remove the `const` error: aborting due to 2 previous errors diff --git a/tests/ui/missing/missing-block-hint.stderr b/tests/ui/missing/missing-block-hint.stderr index 16954223a4521..18719289abdc8 100644 --- 
a/tests/ui/missing/missing-block-hint.stderr +++ b/tests/ui/missing/missing-block-hint.stderr @@ -9,6 +9,10 @@ note: the `if` expression is missing a block after this condition | LL | if (foo) => {} | ^^^^^ +help: you might have meant to write a "greater than or equal to" comparison + | +LL | if (foo) >= {} + | ~~ error: expected `{`, found `bar` --> $DIR/missing-block-hint.rs:7:13 diff --git a/tests/ui/nll/closure-requirements/escape-argument-callee.rs b/tests/ui/nll/closure-requirements/escape-argument-callee.rs index 3aea511b05696..d643a1b2a0d75 100644 --- a/tests/ui/nll/closure-requirements/escape-argument-callee.rs +++ b/tests/ui/nll/closure-requirements/escape-argument-callee.rs @@ -12,7 +12,7 @@ // that appear free in its type (hence, we see it before the closure's // "external requirements" report). -// compile-flags:-Zverbose +// compile-flags:-Zverbose-internals #![feature(rustc_attrs)] diff --git a/tests/ui/nll/closure-requirements/escape-argument.rs b/tests/ui/nll/closure-requirements/escape-argument.rs index 066cd436016c7..7b1e6e9c820a4 100644 --- a/tests/ui/nll/closure-requirements/escape-argument.rs +++ b/tests/ui/nll/closure-requirements/escape-argument.rs @@ -12,7 +12,7 @@ // basically checking that the MIR type checker correctly enforces the // closure signature. -// compile-flags:-Zverbose +// compile-flags:-Zverbose-internals #![feature(rustc_attrs)] diff --git a/tests/ui/nll/closure-requirements/escape-upvar-nested.rs b/tests/ui/nll/closure-requirements/escape-upvar-nested.rs index 765a3cf961c59..b104bc2479e8d 100644 --- a/tests/ui/nll/closure-requirements/escape-upvar-nested.rs +++ b/tests/ui/nll/closure-requirements/escape-upvar-nested.rs @@ -5,7 +5,7 @@ // // except that the closure does so via a second closure. -// compile-flags:-Zverbose +// compile-flags:-Zverbose-internals #![feature(rustc_attrs)] diff --git a/tests/ui/nll/closure-requirements/escape-upvar-ref.rs b/tests/ui/nll/closure-requirements/escape-upvar-ref.rs index 0a562a0a1bcff..97c2d7dc291fb 100644 --- a/tests/ui/nll/closure-requirements/escape-upvar-ref.rs +++ b/tests/ui/nll/closure-requirements/escape-upvar-ref.rs @@ -9,7 +9,7 @@ // `'b`. This relationship is propagated to the closure creator, // which reports an error. -// compile-flags:-Zverbose +// compile-flags:-Zverbose-internals #![feature(rustc_attrs)] diff --git a/tests/ui/nll/closure-requirements/propagate-approximated-fail-no-postdom.rs b/tests/ui/nll/closure-requirements/propagate-approximated-fail-no-postdom.rs index 35a864b885138..31f537d19d295 100644 --- a/tests/ui/nll/closure-requirements/propagate-approximated-fail-no-postdom.rs +++ b/tests/ui/nll/closure-requirements/propagate-approximated-fail-no-postdom.rs @@ -1,7 +1,7 @@ // Test where we fail to approximate due to demanding a postdom // relationship between our upper bounds. -// compile-flags:-Zverbose +// compile-flags:-Zverbose-internals #![feature(rustc_attrs)] diff --git a/tests/ui/nll/closure-requirements/propagate-approximated-ref.rs b/tests/ui/nll/closure-requirements/propagate-approximated-ref.rs index 7291c6e9749b2..295b9cb77551d 100644 --- a/tests/ui/nll/closure-requirements/propagate-approximated-ref.rs +++ b/tests/ui/nll/closure-requirements/propagate-approximated-ref.rs @@ -12,7 +12,7 @@ // Note: the use of `Cell` here is to introduce invariance. One less // variable. 
-// compile-flags:-Zverbose +// compile-flags:-Zverbose-internals #![feature(rustc_attrs)] diff --git a/tests/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-comparing-against-free.rs b/tests/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-comparing-against-free.rs index afe6f10a52f08..e27a7d591e789 100644 --- a/tests/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-comparing-against-free.rs +++ b/tests/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-comparing-against-free.rs @@ -2,7 +2,7 @@ // where `'x` is bound in closure type but `'a` is free. This forces // us to approximate `'x` one way or the other. -// compile-flags:-Zverbose +// compile-flags:-Zverbose-internals #![feature(rustc_attrs)] diff --git a/tests/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-no-bound.rs b/tests/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-no-bound.rs index 3722090754b3e..f11dc769a03dc 100644 --- a/tests/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-no-bound.rs +++ b/tests/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-no-bound.rs @@ -3,7 +3,7 @@ // because `'y` is higher-ranked but we know of no relations to other // regions. Note that `'static` shows up in the stderr output as `'0`. -// compile-flags:-Zverbose +// compile-flags:-Zverbose-internals #![feature(rustc_attrs)] diff --git a/tests/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-wrong-bound.rs b/tests/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-wrong-bound.rs index 9898777c72749..5e5aa3a3cce74 100644 --- a/tests/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-wrong-bound.rs +++ b/tests/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-wrong-bound.rs @@ -4,7 +4,7 @@ // relations to other regions. Note that `'static` shows up in the // stderr output as `'0`. -// compile-flags:-Zverbose +// compile-flags:-Zverbose-internals #![feature(rustc_attrs)] diff --git a/tests/ui/nll/closure-requirements/propagate-approximated-val.rs b/tests/ui/nll/closure-requirements/propagate-approximated-val.rs index 5bb5eea991bb4..83cb37516deb4 100644 --- a/tests/ui/nll/closure-requirements/propagate-approximated-val.rs +++ b/tests/ui/nll/closure-requirements/propagate-approximated-val.rs @@ -5,7 +5,7 @@ // relationships. In the 'main' variant, there are a number of // anonymous regions as well. -// compile-flags:-Zverbose +// compile-flags:-Zverbose-internals #![feature(rustc_attrs)] diff --git a/tests/ui/nll/closure-requirements/propagate-despite-same-free-region.rs b/tests/ui/nll/closure-requirements/propagate-despite-same-free-region.rs index 704a026d29b79..5a25e29816d74 100644 --- a/tests/ui/nll/closure-requirements/propagate-despite-same-free-region.rs +++ b/tests/ui/nll/closure-requirements/propagate-despite-same-free-region.rs @@ -3,7 +3,7 @@ // need to propagate; but in fact we do because identity of free // regions is erased. 
-// compile-flags:-Zverbose +// compile-flags:-Zverbose-internals // check-pass #![feature(rustc_attrs)] diff --git a/tests/ui/nll/closure-requirements/propagate-fail-to-approximate-longer-no-bounds.rs b/tests/ui/nll/closure-requirements/propagate-fail-to-approximate-longer-no-bounds.rs index dcd05d7fa2ce2..0fb57d47d2d1c 100644 --- a/tests/ui/nll/closure-requirements/propagate-fail-to-approximate-longer-no-bounds.rs +++ b/tests/ui/nll/closure-requirements/propagate-fail-to-approximate-longer-no-bounds.rs @@ -7,7 +7,7 @@ // as it knows of no relationships between `'x` and any // non-higher-ranked regions. -// compile-flags:-Zverbose +// compile-flags:-Zverbose-internals #![feature(rustc_attrs)] diff --git a/tests/ui/nll/closure-requirements/propagate-fail-to-approximate-longer-wrong-bounds.rs b/tests/ui/nll/closure-requirements/propagate-fail-to-approximate-longer-wrong-bounds.rs index 98be92d1cd638..3bdd923543efc 100644 --- a/tests/ui/nll/closure-requirements/propagate-fail-to-approximate-longer-wrong-bounds.rs +++ b/tests/ui/nll/closure-requirements/propagate-fail-to-approximate-longer-wrong-bounds.rs @@ -7,7 +7,7 @@ // as it only knows of regions that `'x` is outlived by, and none that // `'x` outlives. -// compile-flags:-Zverbose +// compile-flags:-Zverbose-internals #![feature(rustc_attrs)] diff --git a/tests/ui/nll/closure-requirements/propagate-from-trait-match.rs b/tests/ui/nll/closure-requirements/propagate-from-trait-match.rs index cda781d8e2637..5fe2f46ee79e9 100644 --- a/tests/ui/nll/closure-requirements/propagate-from-trait-match.rs +++ b/tests/ui/nll/closure-requirements/propagate-from-trait-match.rs @@ -4,7 +4,7 @@ // the same `'a` for which it implements `Trait`, which can only be the `'a` // from the function definition. -// compile-flags:-Zverbose +// compile-flags:-Zverbose-internals #![feature(rustc_attrs)] #![allow(dead_code)] diff --git a/tests/ui/nll/closure-requirements/region-lbr-anon-does-not-outlive-static.rs b/tests/ui/nll/closure-requirements/region-lbr-anon-does-not-outlive-static.rs index 8147da09d4391..6e7db4578a079 100644 --- a/tests/ui/nll/closure-requirements/region-lbr-anon-does-not-outlive-static.rs +++ b/tests/ui/nll/closure-requirements/region-lbr-anon-does-not-outlive-static.rs @@ -3,7 +3,7 @@ // a variety of errors from the older, AST-based machinery (notably // borrowck), and then we get the NLL error at the end. -// compile-flags:-Zverbose +// compile-flags:-Zverbose-internals fn foo(x: &u32) -> &'static u32 { &*x diff --git a/tests/ui/nll/closure-requirements/region-lbr-named-does-not-outlive-static.rs b/tests/ui/nll/closure-requirements/region-lbr-named-does-not-outlive-static.rs index 4acd2fc92f3a3..c1b9e249c09c4 100644 --- a/tests/ui/nll/closure-requirements/region-lbr-named-does-not-outlive-static.rs +++ b/tests/ui/nll/closure-requirements/region-lbr-named-does-not-outlive-static.rs @@ -3,7 +3,7 @@ // a variety of errors from the older, AST-based machinery (notably // borrowck), and then we get the NLL error at the end. 
-// compile-flags:-Zverbose +// compile-flags:-Zverbose-internals fn foo<'a>(x: &'a u32) -> &'static u32 { &*x diff --git a/tests/ui/nll/closure-requirements/region-lbr1-does-not-outlive-ebr2.rs b/tests/ui/nll/closure-requirements/region-lbr1-does-not-outlive-ebr2.rs index 06e96be80d5a5..1d31c9cb5a5d4 100644 --- a/tests/ui/nll/closure-requirements/region-lbr1-does-not-outlive-ebr2.rs +++ b/tests/ui/nll/closure-requirements/region-lbr1-does-not-outlive-ebr2.rs @@ -3,7 +3,7 @@ // a variety of errors from the older, AST-based machinery (notably // borrowck), and then we get the NLL error at the end. -// compile-flags:-Zverbose +// compile-flags:-Zverbose-internals fn foo<'a, 'b>(x: &'a u32, y: &'b u32) -> &'b u32 { &*x diff --git a/tests/ui/nll/closure-requirements/region-lbr1-does-outlive-lbr2-because-implied-bound.rs b/tests/ui/nll/closure-requirements/region-lbr1-does-outlive-lbr2-because-implied-bound.rs index 014959fdbd479..4e57fef167a95 100644 --- a/tests/ui/nll/closure-requirements/region-lbr1-does-outlive-lbr2-because-implied-bound.rs +++ b/tests/ui/nll/closure-requirements/region-lbr1-does-outlive-lbr2-because-implied-bound.rs @@ -2,7 +2,7 @@ // report an error because of the (implied) bound that `'b: 'a`. // check-pass -// compile-flags:-Zverbose +// compile-flags:-Zverbose-internals fn foo<'a, 'b>(x: &'a &'b u32) -> &'a u32 { &**x diff --git a/tests/ui/nll/closure-requirements/return-wrong-bound-region.rs b/tests/ui/nll/closure-requirements/return-wrong-bound-region.rs index e34a3f6f2cbda..0277715b59073 100644 --- a/tests/ui/nll/closure-requirements/return-wrong-bound-region.rs +++ b/tests/ui/nll/closure-requirements/return-wrong-bound-region.rs @@ -2,7 +2,7 @@ // the first, but actually returns the second. This should fail within // the closure. -// compile-flags:-Zverbose +// compile-flags:-Zverbose-internals #![feature(rustc_attrs)] diff --git a/tests/ui/nll/ty-outlives/impl-trait-captures.rs b/tests/ui/nll/ty-outlives/impl-trait-captures.rs index 67b31b8bcd4f5..faab2cf8bcbea 100644 --- a/tests/ui/nll/ty-outlives/impl-trait-captures.rs +++ b/tests/ui/nll/ty-outlives/impl-trait-captures.rs @@ -1,4 +1,4 @@ -// compile-flags:-Zverbose +// compile-flags:-Zverbose-internals #![allow(warnings)] diff --git a/tests/ui/nll/ty-outlives/impl-trait-outlives.rs b/tests/ui/nll/ty-outlives/impl-trait-outlives.rs index 68ccb51fcd0fb..2c2eb703a153a 100644 --- a/tests/ui/nll/ty-outlives/impl-trait-outlives.rs +++ b/tests/ui/nll/ty-outlives/impl-trait-outlives.rs @@ -1,4 +1,4 @@ -// compile-flags:-Zverbose +// compile-flags:-Zverbose-internals #![allow(warnings)] diff --git a/tests/ui/nll/ty-outlives/projection-implied-bounds.rs b/tests/ui/nll/ty-outlives/projection-implied-bounds.rs index e1dac08240906..59854fe6d8af2 100644 --- a/tests/ui/nll/ty-outlives/projection-implied-bounds.rs +++ b/tests/ui/nll/ty-outlives/projection-implied-bounds.rs @@ -1,7 +1,7 @@ // Test that we can deduce when projections like `T::Item` outlive the // function body. Test that this does not imply that `T: 'a` holds. 
-// compile-flags:-Zverbose +// compile-flags:-Zverbose-internals use std::cell::Cell; diff --git a/tests/ui/nll/ty-outlives/projection-no-regions-closure.rs b/tests/ui/nll/ty-outlives/projection-no-regions-closure.rs index 2d9c008c75924..f908381d4ac07 100644 --- a/tests/ui/nll/ty-outlives/projection-no-regions-closure.rs +++ b/tests/ui/nll/ty-outlives/projection-no-regions-closure.rs @@ -1,4 +1,4 @@ -// compile-flags:-Zverbose +// compile-flags:-Zverbose-internals // Tests closures that propagate an outlives relationship to their // creator where the subject is a projection with no regions (`, bar); // issue #111657 offset_of!(Lt<'_>, bar); // issue #111678 } diff --git a/tests/ui/offset-of/offset-of-arg-count.stderr b/tests/ui/offset-of/offset-of-arg-count.stderr index 4275a89545f50..af0d42de30d60 100644 --- a/tests/ui/offset-of/offset-of-arg-count.stderr +++ b/tests/ui/offset-of/offset-of-arg-count.stderr @@ -13,7 +13,7 @@ error: unexpected end of macro invocation LL | offset_of!(NotEnoughArgumentsWithAComma, ); | ^ missing tokens in macro arguments | -note: while trying to match meta-variable `$fields:tt` +note: while trying to match meta-variable `$fields:expr` --> $SRC_DIR/core/src/mem/mod.rs:LL:COL error: no rules expected the token `too` @@ -24,36 +24,29 @@ LL | offset_of!(Container, field, too many arguments); | = note: while trying to match sequence end -error: unexpected end of macro invocation +error: unexpected token: `)` --> $DIR/offset-of-arg-count.rs:11:21 | LL | offset_of!(S, f.); - | ^ missing tokens in macro arguments - | -note: while trying to match meta-variable `$fields:tt` - --> $SRC_DIR/core/src/mem/mod.rs:LL:COL + | ^ -error: expected identifier, found `,` +error: unexpected token: `,` --> $DIR/offset-of-arg-count.rs:12:21 | LL | offset_of!(S, f.,); - | ^ expected identifier + | ^ -error: no rules expected the token `..` - --> $DIR/offset-of-arg-count.rs:13:20 +error: offset_of expects dot-separated field and variant names + --> $DIR/offset-of-arg-count.rs:13:19 | LL | offset_of!(S, f..); - | ^^ no rules expected this token in macro call - | - = note: while trying to match sequence start + | ^^^ -error: no rules expected the token `..` - --> $DIR/offset-of-arg-count.rs:14:20 +error: offset_of expects dot-separated field and variant names + --> $DIR/offset-of-arg-count.rs:14:19 | LL | offset_of!(S, f..,); - | ^^ no rules expected this token in macro call - | - = note: while trying to match sequence start + | ^^^ error: aborting due to 7 previous errors diff --git a/tests/ui/offset-of/offset-of-builtin.rs b/tests/ui/offset-of/offset-of-builtin.rs index 1be9899887b4a..6664c10f905ef 100644 --- a/tests/ui/offset-of/offset-of-builtin.rs +++ b/tests/ui/offset-of/offset-of-builtin.rs @@ -8,37 +8,25 @@ fn main() { builtin # offset_of(NotEnoughArguments); //~ ERROR expected one of } fn t1() { - // Already errored upon at the macro level. Yielding an error would require - // extra effort. - builtin # offset_of(NotEnoughArgumentsWithAComma, ); + builtin # offset_of(NotEnoughArgumentsWithAComma, ); //~ ERROR expected expression } fn t2() { - builtin # offset_of(Container, field, too many arguments); //~ ERROR expected identifier, found - //~| ERROR found `,` - //~| ERROR found `many` - //~| ERROR found `arguments` + builtin # offset_of(S, f, too many arguments); //~ ERROR expected `)`, found `too` } fn t3() { builtin # offset_of(S, f); // compiles fine } fn t4() { - // Already errored upon at the macro level. Yielding an error would require - // extra effort. 
- builtin # offset_of(S, f); + builtin # offset_of(S, f.); //~ ERROR unexpected token } fn t5() { - builtin # offset_of(S, f.); //~ ERROR expected identifier + builtin # offset_of(S, f.,); //~ ERROR unexpected token } fn t6() { - builtin # offset_of(S, f.,); //~ ERROR expected identifier + builtin # offset_of(S, f..); //~ ERROR offset_of expects dot-separated field and variant names } fn t7() { - builtin # offset_of(S, f..); //~ ERROR expected one of -} -fn t8() { - // Already errored upon at the macro level. Yielding an error would require - // extra effort. - builtin # offset_of(S, f..,); + builtin # offset_of(S, f..,); //~ ERROR offset_of expects dot-separated field and variant names } struct S { f: u8, } diff --git a/tests/ui/offset-of/offset-of-builtin.stderr b/tests/ui/offset-of/offset-of-builtin.stderr index 1a1f33cc613a7..5917ee2936361 100644 --- a/tests/ui/offset-of/offset-of-builtin.stderr +++ b/tests/ui/offset-of/offset-of-builtin.stderr @@ -4,62 +4,43 @@ error: expected one of `!`, `(`, `+`, `,`, `::`, or `<`, found `)` LL | builtin # offset_of(NotEnoughArguments); | ^ expected one of `!`, `(`, `+`, `,`, `::`, or `<` -error: expected identifier, found `,` - --> $DIR/offset-of-builtin.rs:16:41 +error: expected expression, found `)` + --> $DIR/offset-of-builtin.rs:11:55 | -LL | builtin # offset_of(Container, field, too many arguments); - | ^ - | | - | expected identifier - | help: remove this comma +LL | builtin # offset_of(NotEnoughArgumentsWithAComma, ); + | ^ expected expression -error: expected one of `)` or `.`, found `,` - --> $DIR/offset-of-builtin.rs:16:41 +error: expected `)`, found `too` + --> $DIR/offset-of-builtin.rs:14:31 | -LL | builtin # offset_of(Container, field, too many arguments); - | ^ - | | - | expected one of `)` or `.` - | help: missing `.` - -error: expected one of `)` or `.`, found `many` - --> $DIR/offset-of-builtin.rs:16:47 - | -LL | builtin # offset_of(Container, field, too many arguments); - | -^^^^ expected one of `)` or `.` - | | - | help: missing `.` - -error: expected one of `)` or `.`, found `arguments` - --> $DIR/offset-of-builtin.rs:16:52 +LL | builtin # offset_of(S, f, too many arguments); + | ^^^ expected `)` | -LL | builtin # offset_of(Container, field, too many arguments); - | -^^^^^^^^^ expected one of `)` or `.` - | | - | help: missing `.` + = note: unexpected third argument to offset_of -error: expected identifier, found `)` - --> $DIR/offset-of-builtin.rs:30:30 +error: unexpected token: `)` + --> $DIR/offset-of-builtin.rs:20:30 | LL | builtin # offset_of(S, f.); - | ^ expected identifier + | ^ -error: expected identifier, found `,` - --> $DIR/offset-of-builtin.rs:33:30 +error: unexpected token: `,` + --> $DIR/offset-of-builtin.rs:23:30 | LL | builtin # offset_of(S, f.,); - | ^ expected identifier + | ^ -error: expected one of `)` or `.`, found `..` - --> $DIR/offset-of-builtin.rs:36:29 +error: offset_of expects dot-separated field and variant names + --> $DIR/offset-of-builtin.rs:26:28 | LL | builtin # offset_of(S, f..); - | ^^ expected one of `)` or `.` - | -help: if you meant to bind the contents of the rest of the array pattern into `f`, use `@` + | ^^^ + +error: offset_of expects dot-separated field and variant names + --> $DIR/offset-of-builtin.rs:29:28 | -LL | builtin # offset_of(S, f @ ..); - | + +LL | builtin # offset_of(S, f..,); + | ^^^ -error: aborting due to 8 previous errors +error: aborting due to 7 previous errors diff --git a/tests/ui/offset-of/offset-of-self.rs b/tests/ui/offset-of/offset-of-self.rs index 
dbeef0e74dc37..04dcaf7c0a6b6 100644 --- a/tests/ui/offset-of/offset-of-self.rs +++ b/tests/ui/offset-of/offset-of-self.rs @@ -17,9 +17,8 @@ impl S { offset_of!(Self, v) } fn v_offs_wrong_syntax() { - offset_of!(Self, Self::v); //~ ERROR no rules expected the token `::` - offset_of!(S, Self); //~ ERROR expected identifier, found keyword `Self` - //~| no field `Self` on type `S` + offset_of!(Self, Self::v); //~ offset_of expects dot-separated field and variant names + offset_of!(S, Self); //~ no field `Self` on type `S` } fn offs_in_c() -> usize { offset_of!(C, w) @@ -51,8 +50,6 @@ fn main() { offset_of!(self::S, v); offset_of!(Self, v); //~ ERROR cannot find type `Self` in this scope - offset_of!(S, self); //~ ERROR expected identifier, found keyword `self` - //~| no field `self` on type `S` - offset_of!(S, v.self); //~ ERROR expected identifier, found keyword `self` - //~| no field `self` on type `u8` + offset_of!(S, self); //~ no field `self` on type `S` + offset_of!(S, v.self); //~ no field `self` on type `u8` } diff --git a/tests/ui/offset-of/offset-of-self.stderr b/tests/ui/offset-of/offset-of-self.stderr index 2dc17189a702e..7c7576e066b6f 100644 --- a/tests/ui/offset-of/offset-of-self.stderr +++ b/tests/ui/offset-of/offset-of-self.stderr @@ -1,31 +1,11 @@ -error: no rules expected the token `::` - --> $DIR/offset-of-self.rs:20:30 +error: offset_of expects dot-separated field and variant names + --> $DIR/offset-of-self.rs:20:26 | LL | offset_of!(Self, Self::v); - | ^^ no rules expected this token in macro call - | - = note: while trying to match sequence start - -error: expected identifier, found keyword `Self` - --> $DIR/offset-of-self.rs:21:23 - | -LL | offset_of!(S, Self); - | ^^^^ expected identifier, found keyword - -error: expected identifier, found keyword `self` - --> $DIR/offset-of-self.rs:54:19 - | -LL | offset_of!(S, self); - | ^^^^ expected identifier, found keyword - -error: expected identifier, found keyword `self` - --> $DIR/offset-of-self.rs:56:21 - | -LL | offset_of!(S, v.self); - | ^^^^ expected identifier, found keyword + | ^^^^^^^ error[E0412]: cannot find type `S` in module `self` - --> $DIR/offset-of-self.rs:35:26 + --> $DIR/offset-of-self.rs:34:26 | LL | offset_of!(self::S, v); | ^ not found in `self` @@ -41,7 +21,7 @@ LL + offset_of!(S, v); | error[E0411]: cannot find type `Self` in this scope - --> $DIR/offset-of-self.rs:52:16 + --> $DIR/offset-of-self.rs:51:16 | LL | fn main() { | ---- `Self` not allowed in a function @@ -58,13 +38,13 @@ LL | offset_of!(S, Self); = note: available fields are: `v`, `w` error[E0616]: field `v` of struct `T` is private - --> $DIR/offset-of-self.rs:41:30 + --> $DIR/offset-of-self.rs:40:30 | LL | offset_of!(Self, v) | ^ private field error[E0609]: no field `self` on type `S` - --> $DIR/offset-of-self.rs:54:19 + --> $DIR/offset-of-self.rs:53:19 | LL | offset_of!(S, self); | ^^^^ @@ -72,12 +52,12 @@ LL | offset_of!(S, self); = note: available fields are: `v`, `w` error[E0609]: no field `self` on type `u8` - --> $DIR/offset-of-self.rs:56:21 + --> $DIR/offset-of-self.rs:54:21 | LL | offset_of!(S, v.self); | ^^^^ -error: aborting due to 10 previous errors +error: aborting due to 7 previous errors Some errors have detailed explanations: E0411, E0412, E0609, E0616. For more information about an error, try `rustc --explain E0411`. 
diff --git a/tests/ui/offset-of/offset-of-tuple-nested.rs b/tests/ui/offset-of/offset-of-tuple-nested.rs index 00fbb6bf8f407..59b02e3c5c433 100644 --- a/tests/ui/offset-of/offset-of-tuple-nested.rs +++ b/tests/ui/offset-of/offset-of-tuple-nested.rs @@ -3,7 +3,6 @@ // similar to why `offset-of-unsized.rs` is also build-pass #![feature(offset_of)] -#![feature(builtin_syntax)] use std::mem::offset_of; @@ -17,13 +16,13 @@ fn main() { // Complex case: do all combinations of spacings because the spacing determines what gets // sent to the lexer. println!("{}", offset_of!(ComplexTup, 0.1.1.1)); - println!("{}", builtin # offset_of(ComplexTup, 0. 1.1.1)); + println!("{}", offset_of!(ComplexTup, 0. 1.1.1)); println!("{}", offset_of!(ComplexTup, 0 . 1.1.1)); println!("{}", offset_of!(ComplexTup, 0 .1.1.1)); println!("{}", offset_of!(ComplexTup, 0.1 .1.1)); println!("{}", offset_of!(ComplexTup, 0.1 . 1.1)); println!("{}", offset_of!(ComplexTup, 0.1. 1.1)); - println!("{}", builtin # offset_of(ComplexTup, 0.1.1. 1)); + println!("{}", offset_of!(ComplexTup, 0.1.1. 1)); println!("{}", offset_of!(ComplexTup, 0.1.1 . 1)); println!("{}", offset_of!(ComplexTup, 0.1.1 .1)); diff --git a/tests/ui/offset-of/offset-of-tuple.rs b/tests/ui/offset-of/offset-of-tuple.rs index e31b037ee3e01..b6fc1e32229a1 100644 --- a/tests/ui/offset-of/offset-of-tuple.rs +++ b/tests/ui/offset-of/offset-of-tuple.rs @@ -10,9 +10,9 @@ fn main() { offset_of!((u8, u8), 1_u8); //~ ERROR no field `1_` //~| ERROR suffixes on a tuple index offset_of!((u8, u8), +1); //~ ERROR no rules expected - offset_of!((u8, u8), -1); //~ ERROR no rules expected - offset_of!((u8, u8), 1.); //~ ERROR expected identifier, found `)` - offset_of!((u8, u8), 1 .); //~ ERROR unexpected end of macro + offset_of!((u8, u8), -1); //~ ERROR offset_of expects dot-separated field and variant names + offset_of!((u8, u8), 1.); //~ ERROR offset_of expects dot-separated field and variant names + offset_of!((u8, u8), 1 .); //~ unexpected token: `)` builtin # offset_of((u8, u8), 1e2); //~ ERROR no field `1e2` builtin # offset_of((u8, u8), _0); //~ ERROR no field `_0` builtin # offset_of((u8, u8), 01); //~ ERROR no field `01` @@ -20,12 +20,12 @@ fn main() { //~| ERROR suffixes on a tuple index // We need to put these into curly braces, otherwise only one of the // errors will be emitted and the others suppressed. - { builtin # offset_of((u8, u8), +1) }; //~ ERROR expected identifier, found `+` - { builtin # offset_of((u8, u8), 1.) }; //~ ERROR expected identifier, found `)` - { builtin # offset_of((u8, u8), 1 .) }; //~ ERROR expected identifier, found `)` + { builtin # offset_of((u8, u8), +1) }; //~ ERROR leading `+` is not supported + { builtin # offset_of((u8, u8), 1.) }; //~ ERROR offset_of expects dot-separated field and variant names + { builtin # offset_of((u8, u8), 1 .) }; //~ ERROR unexpected token: `)` } -type ComplexTup = ((u8, (u8, u8)), u8); +type ComplexTup = (((u8, u8), u8), u8); fn nested() { offset_of!(((u8, u16), (u32, u16, u8)), 0.2); //~ ERROR no field `2` @@ -33,22 +33,22 @@ fn nested() { offset_of!(((u8, u16), (u32, u16, u8)), 1.2.0); //~ ERROR no field `0` // All combinations of spaces (this sends different tokens to the parser) - offset_of!(ComplexTup, 0.0.1.); //~ ERROR expected identifier - offset_of!(ComplexTup, 0 .0.1.); //~ ERROR unexpected end of macro - offset_of!(ComplexTup, 0 . 0.1.); //~ ERROR unexpected end of macro - offset_of!(ComplexTup, 0. 
0.1.); //~ ERROR no rules expected - offset_of!(ComplexTup, 0.0 .1.); //~ ERROR expected identifier, found `)` - offset_of!(ComplexTup, 0.0 . 1.); //~ ERROR expected identifier, found `)` - offset_of!(ComplexTup, 0.0. 1.); //~ ERROR expected identifier, found `)` + offset_of!(ComplexTup, 0.0.1.); //~ ERROR unexpected token: `)` + offset_of!(ComplexTup, 0 .0.1.); //~ ERROR unexpected token: `)` + offset_of!(ComplexTup, 0 . 0.1.); //~ ERROR unexpected token: `)` + offset_of!(ComplexTup, 0. 0.1.); //~ ERROR unexpected token: `)` + offset_of!(ComplexTup, 0.0 .1.); //~ ERROR unexpected token: `)` + offset_of!(ComplexTup, 0.0 . 1.); //~ ERROR unexpected token: `)` + offset_of!(ComplexTup, 0.0. 1.); //~ ERROR unexpected token: `)` // Test for builtin too to ensure that the builtin syntax can also handle these cases // We need to put these into curly braces, otherwise only one of the // errors will be emitted and the others suppressed. - { builtin # offset_of(ComplexTup, 0.0.1.) }; //~ ERROR expected identifier, found `)` - { builtin # offset_of(ComplexTup, 0 .0.1.) }; //~ ERROR expected identifier, found `)` - { builtin # offset_of(ComplexTup, 0 . 0.1.) }; //~ ERROR expected identifier, found `)` - { builtin # offset_of(ComplexTup, 0. 0.1.) }; //~ ERROR expected identifier, found `)` - { builtin # offset_of(ComplexTup, 0.0 .1.) }; //~ ERROR expected identifier, found `)` - { builtin # offset_of(ComplexTup, 0.0 . 1.) }; //~ ERROR expected identifier, found `)` - { builtin # offset_of(ComplexTup, 0.0. 1.) }; //~ ERROR expected identifier, found `)` + { builtin # offset_of(ComplexTup, 0.0.1.) }; //~ ERROR unexpected token: `)` + { builtin # offset_of(ComplexTup, 0 .0.1.) }; //~ ERROR unexpected token: `)` + { builtin # offset_of(ComplexTup, 0 . 0.1.) }; //~ ERROR unexpected token: `)` + { builtin # offset_of(ComplexTup, 0. 0.1.) }; //~ ERROR unexpected token: `)` + { builtin # offset_of(ComplexTup, 0.0 .1.) }; //~ ERROR unexpected token: `)` + { builtin # offset_of(ComplexTup, 0.0 . 1.) }; //~ ERROR unexpected token: `)` + { builtin # offset_of(ComplexTup, 0.0. 1.) }; //~ ERROR unexpected token: `)` } diff --git a/tests/ui/offset-of/offset-of-tuple.stderr b/tests/ui/offset-of/offset-of-tuple.stderr index ed9523458063d..e9aa495becdfa 100644 --- a/tests/ui/offset-of/offset-of-tuple.stderr +++ b/tests/ui/offset-of/offset-of-tuple.stderr @@ -4,65 +4,71 @@ error: suffixes on a tuple index are invalid LL | builtin # offset_of((u8, u8), 1_u8); | ^^^^ invalid suffix `u8` -error: expected identifier, found `+` +error: leading `+` is not supported --> $DIR/offset-of-tuple.rs:23:37 | LL | { builtin # offset_of((u8, u8), +1) }; - | ^ expected identifier + | ^ unexpected `+` + | +help: try removing the `+` + | +LL - { builtin # offset_of((u8, u8), +1) }; +LL + { builtin # offset_of((u8, u8), 1) }; + | -error: expected identifier, found `)` - --> $DIR/offset-of-tuple.rs:24:39 +error: offset_of expects dot-separated field and variant names + --> $DIR/offset-of-tuple.rs:24:38 | LL | { builtin # offset_of((u8, u8), 1.) }; - | ^ expected identifier + | ^ -error: expected identifier, found `)` +error: unexpected token: `)` --> $DIR/offset-of-tuple.rs:25:40 | LL | { builtin # offset_of((u8, u8), 1 .) }; - | ^ expected identifier + | ^ -error: expected identifier, found `)` +error: unexpected token: `)` --> $DIR/offset-of-tuple.rs:47:45 | LL | { builtin # offset_of(ComplexTup, 0.0.1.) 
}; - | ^ expected identifier + | ^ -error: expected identifier, found `)` +error: unexpected token: `)` --> $DIR/offset-of-tuple.rs:48:46 | LL | { builtin # offset_of(ComplexTup, 0 .0.1.) }; - | ^ expected identifier + | ^ -error: expected identifier, found `)` +error: unexpected token: `)` --> $DIR/offset-of-tuple.rs:49:47 | LL | { builtin # offset_of(ComplexTup, 0 . 0.1.) }; - | ^ expected identifier + | ^ -error: expected identifier, found `)` +error: unexpected token: `)` --> $DIR/offset-of-tuple.rs:50:46 | LL | { builtin # offset_of(ComplexTup, 0. 0.1.) }; - | ^ expected identifier + | ^ -error: expected identifier, found `)` +error: unexpected token: `)` --> $DIR/offset-of-tuple.rs:51:46 | LL | { builtin # offset_of(ComplexTup, 0.0 .1.) }; - | ^ expected identifier + | ^ -error: expected identifier, found `)` +error: unexpected token: `)` --> $DIR/offset-of-tuple.rs:52:47 | LL | { builtin # offset_of(ComplexTup, 0.0 . 1.) }; - | ^ expected identifier + | ^ -error: expected identifier, found `)` +error: unexpected token: `)` --> $DIR/offset-of-tuple.rs:53:46 | LL | { builtin # offset_of(ComplexTup, 0.0. 1.) }; - | ^ expected identifier + | ^ error: suffixes on a tuple index are invalid --> $DIR/offset-of-tuple.rs:10:26 @@ -70,96 +76,74 @@ error: suffixes on a tuple index are invalid LL | offset_of!((u8, u8), 1_u8); | ^^^^ invalid suffix `u8` -error: no rules expected the token `1` - --> $DIR/offset-of-tuple.rs:12:27 +error: no rules expected the token `+` + --> $DIR/offset-of-tuple.rs:12:26 | LL | offset_of!((u8, u8), +1); - | ^ no rules expected this token in macro call + | ^ no rules expected this token in macro call | - = note: while trying to match sequence start +note: while trying to match meta-variable `$fields:expr` + --> $SRC_DIR/core/src/mem/mod.rs:LL:COL -error: no rules expected the token `1` - --> $DIR/offset-of-tuple.rs:13:27 +error: offset_of expects dot-separated field and variant names + --> $DIR/offset-of-tuple.rs:13:26 | LL | offset_of!((u8, u8), -1); - | ^ no rules expected this token in macro call - | - = note: while trying to match sequence start + | ^^ -error: expected identifier, found `)` - --> $DIR/offset-of-tuple.rs:14:5 +error: offset_of expects dot-separated field and variant names + --> $DIR/offset-of-tuple.rs:14:27 | LL | offset_of!((u8, u8), 1.); - | ^^^^^^^^^^^^^^^^^^^^^^^^ expected identifier - | - = note: this error originates in the macro `offset_of` (in Nightly builds, run with -Z macro-backtrace for more info) + | ^ -error: unexpected end of macro invocation +error: unexpected token: `)` --> $DIR/offset-of-tuple.rs:15:29 | LL | offset_of!((u8, u8), 1 .); - | ^ missing tokens in macro arguments - | -note: while trying to match meta-variable `$fields:tt` - --> $SRC_DIR/core/src/mem/mod.rs:LL:COL + | ^ -error: expected identifier, found `)` - --> $DIR/offset-of-tuple.rs:36:5 +error: unexpected token: `)` + --> $DIR/offset-of-tuple.rs:36:34 | LL | offset_of!(ComplexTup, 0.0.1.); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected identifier - | - = note: this error originates in the macro `offset_of` (in Nightly builds, run with -Z macro-backtrace for more info) + | ^ -error: unexpected end of macro invocation +error: unexpected token: `)` --> $DIR/offset-of-tuple.rs:37:35 | LL | offset_of!(ComplexTup, 0 .0.1.); - | ^ missing tokens in macro arguments - | -note: while trying to match meta-variable `$fields:tt` - --> $SRC_DIR/core/src/mem/mod.rs:LL:COL + | ^ -error: unexpected end of macro invocation +error: unexpected token: `)` --> 
$DIR/offset-of-tuple.rs:38:36 | LL | offset_of!(ComplexTup, 0 . 0.1.); - | ^ missing tokens in macro arguments - | -note: while trying to match meta-variable `$fields:tt` - --> $SRC_DIR/core/src/mem/mod.rs:LL:COL + | ^ -error: no rules expected the token `0.1` - --> $DIR/offset-of-tuple.rs:39:31 +error: unexpected token: `)` + --> $DIR/offset-of-tuple.rs:39:35 | LL | offset_of!(ComplexTup, 0. 0.1.); - | ^^^ no rules expected this token in macro call - | - = note: while trying to match sequence start + | ^ -error: expected identifier, found `)` - --> $DIR/offset-of-tuple.rs:40:5 +error: unexpected token: `)` + --> $DIR/offset-of-tuple.rs:40:35 | LL | offset_of!(ComplexTup, 0.0 .1.); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected identifier - | - = note: this error originates in the macro `offset_of` (in Nightly builds, run with -Z macro-backtrace for more info) + | ^ -error: expected identifier, found `)` - --> $DIR/offset-of-tuple.rs:41:5 +error: unexpected token: `)` + --> $DIR/offset-of-tuple.rs:41:36 | LL | offset_of!(ComplexTup, 0.0 . 1.); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected identifier - | - = note: this error originates in the macro `offset_of` (in Nightly builds, run with -Z macro-backtrace for more info) + | ^ -error: expected identifier, found `)` - --> $DIR/offset-of-tuple.rs:42:5 +error: unexpected token: `)` + --> $DIR/offset-of-tuple.rs:42:35 | LL | offset_of!(ComplexTup, 0.0. 1.); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected identifier - | - = note: this error originates in the macro `offset_of` (in Nightly builds, run with -Z macro-backtrace for more info) + | ^ error[E0609]: no field `_0` on type `(u8, u8)` --> $DIR/offset-of-tuple.rs:7:26 @@ -212,8 +196,16 @@ LL | builtin # offset_of((u8, u8), 1_u8); error[E0609]: no field `2` on type `(u8, u16)` --> $DIR/offset-of-tuple.rs:31:47 | -LL | offset_of!(((u8, u16), (u32, u16, u8)), 0.2); - | ^ +LL | offset_of!(((u8, u16), (u32, u16, u8)), 0.2); + | _____------------------------------------------^- + | | | + | | in this macro invocation +LL | | offset_of!(((u8, u16), (u32, u16, u8)), 1.2); +LL | | offset_of!(((u8, u16), (u32, u16, u8)), 1.2.0); +LL | | +... 
| + | + = note: this error originates in the macro `offset_of` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0609]: no field `0` on type `u8` --> $DIR/offset-of-tuple.rs:33:49 diff --git a/tests/ui/parser/attribute/attr-stmt-expr-attr-bad.rs b/tests/ui/parser/attribute/attr-stmt-expr-attr-bad.rs index 2c402e4c65e23..d1950087c4c2d 100644 --- a/tests/ui/parser/attribute/attr-stmt-expr-attr-bad.rs +++ b/tests/ui/parser/attribute/attr-stmt-expr-attr-bad.rs @@ -84,15 +84,15 @@ fn main() {} #[cfg(FALSE)] fn e() { match 0 { 0..=#[attr] 10 => () } } //~^ ERROR inclusive range with no end -//~| ERROR expected one of `,`, `=>`, `if`, `|`, or `}`, found `#` +//~| ERROR expected one of `=>`, `if`, or `|`, found `#` #[cfg(FALSE)] fn e() { match 0 { 0..=#[attr] -10 => () } } //~^ ERROR inclusive range with no end -//~| ERROR expected one of `,`, `=>`, `if`, `|`, or `}`, found `#` +//~| ERROR expected one of `=>`, `if`, or `|`, found `#` #[cfg(FALSE)] fn e() { match 0 { 0..=-#[attr] 10 => () } } //~^ ERROR unexpected token: `#` #[cfg(FALSE)] fn e() { match 0 { 0..=#[attr] FOO => () } } //~^ ERROR inclusive range with no end -//~| ERROR expected one of `,`, `=>`, `if`, `|`, or `}`, found `#` +//~| ERROR expected one of `=>`, `if`, or `|`, found `#` #[cfg(FALSE)] fn e() { let _ = x.#![attr]foo(); } //~^ ERROR unexpected token: `#` diff --git a/tests/ui/parser/attribute/attr-stmt-expr-attr-bad.stderr b/tests/ui/parser/attribute/attr-stmt-expr-attr-bad.stderr index a0e95c5c1ed32..e46c591080d43 100644 --- a/tests/ui/parser/attribute/attr-stmt-expr-attr-bad.stderr +++ b/tests/ui/parser/attribute/attr-stmt-expr-attr-bad.stderr @@ -365,11 +365,11 @@ LL | #[cfg(FALSE)] fn e() { match 0 { 0..=#[attr] 10 => () } } | = note: inclusive ranges must be bounded at the end (`..=b` or `a..=b`) -error: expected one of `,`, `=>`, `if`, `|`, or `}`, found `#` +error: expected one of `=>`, `if`, or `|`, found `#` --> $DIR/attr-stmt-expr-attr-bad.rs:85:38 | LL | #[cfg(FALSE)] fn e() { match 0 { 0..=#[attr] 10 => () } } - | ^ expected one of `,`, `=>`, `if`, `|`, or `}` + | ^ expected one of `=>`, `if`, or `|` error[E0586]: inclusive range with no end --> $DIR/attr-stmt-expr-attr-bad.rs:88:35 @@ -379,11 +379,11 @@ LL | #[cfg(FALSE)] fn e() { match 0 { 0..=#[attr] -10 => () } } | = note: inclusive ranges must be bounded at the end (`..=b` or `a..=b`) -error: expected one of `,`, `=>`, `if`, `|`, or `}`, found `#` +error: expected one of `=>`, `if`, or `|`, found `#` --> $DIR/attr-stmt-expr-attr-bad.rs:88:38 | LL | #[cfg(FALSE)] fn e() { match 0 { 0..=#[attr] -10 => () } } - | ^ expected one of `,`, `=>`, `if`, `|`, or `}` + | ^ expected one of `=>`, `if`, or `|` error: unexpected token: `#` --> $DIR/attr-stmt-expr-attr-bad.rs:91:39 @@ -399,11 +399,11 @@ LL | #[cfg(FALSE)] fn e() { match 0 { 0..=#[attr] FOO => () } } | = note: inclusive ranges must be bounded at the end (`..=b` or `a..=b`) -error: expected one of `,`, `=>`, `if`, `|`, or `}`, found `#` +error: expected one of `=>`, `if`, or `|`, found `#` --> $DIR/attr-stmt-expr-attr-bad.rs:93:38 | LL | #[cfg(FALSE)] fn e() { match 0 { 0..=#[attr] FOO => () } } - | ^ expected one of `,`, `=>`, `if`, `|`, or `}` + | ^ expected one of `=>`, `if`, or `|` error: unexpected token: `#` --> $DIR/attr-stmt-expr-attr-bad.rs:97:34 diff --git a/tests/ui/parser/bounds-type.rs b/tests/ui/parser/bounds-type.rs index 4ae4549ea5896..bd5f6105f51a2 100644 --- a/tests/ui/parser/bounds-type.rs +++ b/tests/ui/parser/bounds-type.rs @@ -13,6 +13,7 @@ struct S< T: ~const ?Tr, // 
OK T: ~const Tr + 'a, // OK T: ~const 'a, //~ ERROR `~const` may only modify trait bounds, not lifetime bounds + T: const 'a, //~ ERROR `const` may only modify trait bounds, not lifetime bounds >; fn main() {} diff --git a/tests/ui/parser/bounds-type.stderr b/tests/ui/parser/bounds-type.stderr index 005bc1e54bd00..d1210e88d6678 100644 --- a/tests/ui/parser/bounds-type.stderr +++ b/tests/ui/parser/bounds-type.stderr @@ -10,5 +10,11 @@ error: `~const` may only modify trait bounds, not lifetime bounds LL | T: ~const 'a, | ^^^^^^ -error: aborting due to 2 previous errors +error: `const` may only modify trait bounds, not lifetime bounds + --> $DIR/bounds-type.rs:16:8 + | +LL | T: const 'a, + | ^^^^^ + +error: aborting due to 3 previous errors diff --git a/tests/ui/parser/eq-gt-to-gt-eq.fixed b/tests/ui/parser/eq-gt-to-gt-eq.fixed new file mode 100644 index 0000000000000..44cb464fc0c9c --- /dev/null +++ b/tests/ui/parser/eq-gt-to-gt-eq.fixed @@ -0,0 +1,45 @@ +// run-rustfix +// Check that we try to correct `=>` to `>=` in conditions. +#![allow(unused)] + +fn main() { + let a = 0; + let b = 1; + if a >= b {} //~ERROR +} + +fn foo() { + let a = 0; + if a >= 1 {} //~ERROR +} + +fn a() { + let a = 0; + if 1 >= a {} //~ERROR +} + +fn bar() { + let a = 0; + let b = 1; + if a >= b && a != b {} //~ERROR +} + +fn qux() { + let a = 0; + let b = 1; + if a != b && a >= b {} //~ERROR +} + +fn baz() { + let a = 0; + let b = 1; + let _ = a >= b; //~ERROR +} + +fn b() { + let a = 0; + let b = 1; + match a >= b { //~ERROR + _ => todo!(), + } +} diff --git a/tests/ui/parser/eq-gt-to-gt-eq.rs b/tests/ui/parser/eq-gt-to-gt-eq.rs new file mode 100644 index 0000000000000..dca67c89cc033 --- /dev/null +++ b/tests/ui/parser/eq-gt-to-gt-eq.rs @@ -0,0 +1,45 @@ +// run-rustfix +// Check that we try to correct `=>` to `>=` in conditions. 
+#![allow(unused)] + +fn main() { + let a = 0; + let b = 1; + if a => b {} //~ERROR +} + +fn foo() { + let a = 0; + if a => 1 {} //~ERROR +} + +fn a() { + let a = 0; + if 1 => a {} //~ERROR +} + +fn bar() { + let a = 0; + let b = 1; + if a => b && a != b {} //~ERROR +} + +fn qux() { + let a = 0; + let b = 1; + if a != b && a => b {} //~ERROR +} + +fn baz() { + let a = 0; + let b = 1; + let _ = a => b; //~ERROR +} + +fn b() { + let a = 0; + let b = 1; + match a => b { //~ERROR + _ => todo!(), + } +} diff --git a/tests/ui/parser/eq-gt-to-gt-eq.stderr b/tests/ui/parser/eq-gt-to-gt-eq.stderr new file mode 100644 index 0000000000000..73f465f7b9b28 --- /dev/null +++ b/tests/ui/parser/eq-gt-to-gt-eq.stderr @@ -0,0 +1,106 @@ +error: expected `{`, found `=>` + --> $DIR/eq-gt-to-gt-eq.rs:8:10 + | +LL | if a => b {} + | ^^ expected `{` + | +note: the `if` expression is missing a block after this condition + --> $DIR/eq-gt-to-gt-eq.rs:8:8 + | +LL | if a => b {} + | ^ +help: you might have meant to write a "greater than or equal to" comparison + | +LL | if a >= b {} + | ~~ + +error: expected `{`, found `=>` + --> $DIR/eq-gt-to-gt-eq.rs:13:10 + | +LL | if a => 1 {} + | ^^ expected `{` + | +note: the `if` expression is missing a block after this condition + --> $DIR/eq-gt-to-gt-eq.rs:13:8 + | +LL | if a => 1 {} + | ^ +help: you might have meant to write a "greater than or equal to" comparison + | +LL | if a >= 1 {} + | ~~ + +error: expected `{`, found `=>` + --> $DIR/eq-gt-to-gt-eq.rs:18:10 + | +LL | if 1 => a {} + | ^^ expected `{` + | +note: the `if` expression is missing a block after this condition + --> $DIR/eq-gt-to-gt-eq.rs:18:8 + | +LL | if 1 => a {} + | ^ +help: you might have meant to write a "greater than or equal to" comparison + | +LL | if 1 >= a {} + | ~~ + +error: expected `{`, found `=>` + --> $DIR/eq-gt-to-gt-eq.rs:24:10 + | +LL | if a => b && a != b {} + | ^^ expected `{` + | +note: the `if` expression is missing a block after this condition + --> $DIR/eq-gt-to-gt-eq.rs:24:8 + | +LL | if a => b && a != b {} + | ^ +help: you might have meant to write a "greater than or equal to" comparison + | +LL | if a >= b && a != b {} + | ~~ + +error: expected `{`, found `=>` + --> $DIR/eq-gt-to-gt-eq.rs:30:20 + | +LL | if a != b && a => b {} + | ^^ expected `{` + | +note: the `if` expression is missing a block after this condition + --> $DIR/eq-gt-to-gt-eq.rs:30:8 + | +LL | if a != b && a => b {} + | ^^^^^^^^^^^ +help: you might have meant to write a "greater than or equal to" comparison + | +LL | if a != b && a >= b {} + | ~~ + +error: expected one of `!`, `.`, `::`, `;`, `?`, `else`, `{`, or an operator, found `=>` + --> $DIR/eq-gt-to-gt-eq.rs:36:15 + | +LL | let _ = a => b; + | ^^ expected one of 8 possible tokens + | +help: you might have meant to write a "greater than or equal to" comparison + | +LL | let _ = a >= b; + | ~~ + +error: expected one of `!`, `.`, `::`, `?`, `{`, or an operator, found `=>` + --> $DIR/eq-gt-to-gt-eq.rs:42:13 + | +LL | match a => b { + | ----- ^^ expected one of `!`, `.`, `::`, `?`, `{`, or an operator + | | + | while parsing this `match` expression + | +help: you might have meant to write a "greater than or equal to" comparison + | +LL | match a >= b { + | ~~ + +error: aborting due to 7 previous errors + diff --git a/tests/ui/parser/fn-header-semantic-fail.rs b/tests/ui/parser/fn-header-semantic-fail.rs index f8b58cad7c144..f01e1c2277c6f 100644 --- a/tests/ui/parser/fn-header-semantic-fail.rs +++ b/tests/ui/parser/fn-header-semantic-fail.rs @@ -26,10 +26,10 @@ fn 
main() { impl X for Y { async fn ft1() {} // OK. unsafe fn ft2() {} // OK. - const fn ft3() {} //~ ERROR functions in traits cannot be declared const + const fn ft3() {} //~ ERROR functions in trait impls cannot be declared const extern "C" fn ft4() {} const async unsafe extern "C" fn ft5() {} - //~^ ERROR functions in traits cannot be declared const + //~^ ERROR functions in trait impls cannot be declared const //~| ERROR functions cannot be both `const` and `async` } diff --git a/tests/ui/parser/fn-header-semantic-fail.stderr b/tests/ui/parser/fn-header-semantic-fail.stderr index cdf01e0c5df64..696d8e01b6387 100644 --- a/tests/ui/parser/fn-header-semantic-fail.stderr +++ b/tests/ui/parser/fn-header-semantic-fail.stderr @@ -11,13 +11,19 @@ error[E0379]: functions in traits cannot be declared const --> $DIR/fn-header-semantic-fail.rs:18:9 | LL | const fn ft3(); - | ^^^^^ functions in traits cannot be const + | ^^^^^- + | | + | functions in traits cannot be const + | help: remove the `const` error[E0379]: functions in traits cannot be declared const --> $DIR/fn-header-semantic-fail.rs:20:9 | LL | const async unsafe extern "C" fn ft5(); - | ^^^^^ functions in traits cannot be const + | ^^^^^- + | | + | functions in traits cannot be const + | help: remove the `const` error: functions cannot be both `const` and `async` --> $DIR/fn-header-semantic-fail.rs:20:9 @@ -28,17 +34,23 @@ LL | const async unsafe extern "C" fn ft5(); | | `async` because of this | `const` because of this -error[E0379]: functions in traits cannot be declared const +error[E0379]: functions in trait impls cannot be declared const --> $DIR/fn-header-semantic-fail.rs:29:9 | LL | const fn ft3() {} - | ^^^^^ functions in traits cannot be const + | ^^^^^- + | | + | functions in trait impls cannot be const + | help: remove the `const` -error[E0379]: functions in traits cannot be declared const +error[E0379]: functions in trait impls cannot be declared const --> $DIR/fn-header-semantic-fail.rs:31:9 | LL | const async unsafe extern "C" fn ft5() {} - | ^^^^^ functions in traits cannot be const + | ^^^^^- + | | + | functions in trait impls cannot be const + | help: remove the `const` error: functions cannot be both `const` and `async` --> $DIR/fn-header-semantic-fail.rs:31:9 diff --git a/tests/ui/parser/generic-param-default-in-binder.rs b/tests/ui/parser/generic-param-default-in-binder.rs new file mode 100644 index 0000000000000..78dc4186b3a53 --- /dev/null +++ b/tests/ui/parser/generic-param-default-in-binder.rs @@ -0,0 +1,10 @@ +// Check that defaults for generic parameters in `for<...>` binders are +// syntactically valid. See also PR #119042. + +// check-pass + +macro_rules! a { ($ty:ty) => {} } + +a! 
{ for fn() } + +fn main() {} diff --git a/tests/ui/parser/issues/issue-24375.rs b/tests/ui/parser/issues/issue-24375.rs index 8d1bc579e7b84..1d128d33e4f49 100644 --- a/tests/ui/parser/issues/issue-24375.rs +++ b/tests/ui/parser/issues/issue-24375.rs @@ -3,7 +3,7 @@ static tmp : [&'static str; 2] = ["hello", "he"]; fn main() { let z = "hello"; match z { - tmp[0] => {} //~ ERROR expected one of `,`, `=>`, `@`, `if`, `|`, or `}`, found `[` + tmp[0] => {} //~ ERROR expected one of `=>`, `@`, `if`, or `|`, found `[` _ => {} } } diff --git a/tests/ui/parser/issues/issue-24375.stderr b/tests/ui/parser/issues/issue-24375.stderr index 2b980a5520fdb..bb1e19e9e6d8d 100644 --- a/tests/ui/parser/issues/issue-24375.stderr +++ b/tests/ui/parser/issues/issue-24375.stderr @@ -1,8 +1,8 @@ -error: expected one of `,`, `=>`, `@`, `if`, `|`, or `}`, found `[` +error: expected one of `=>`, `@`, `if`, or `|`, found `[` --> $DIR/issue-24375.rs:6:12 | LL | tmp[0] => {} - | ^ expected one of `,`, `=>`, `@`, `if`, `|`, or `}` + | ^ expected one of `=>`, `@`, `if`, or `|` error: aborting due to 1 previous error diff --git a/tests/ui/parser/issues/issue-68091-unicode-ident-after-if.stderr b/tests/ui/parser/issues/issue-68091-unicode-ident-after-if.stderr index 2423a7526be13..8e125864b8bce 100644 --- a/tests/ui/parser/issues/issue-68091-unicode-ident-after-if.stderr +++ b/tests/ui/parser/issues/issue-68091-unicode-ident-after-if.stderr @@ -1,10 +1,10 @@ error: missing condition for `if` expression - --> $DIR/issue-68091-unicode-ident-after-if.rs:3:14 + --> $DIR/issue-68091-unicode-ident-after-if.rs:3:13 | LL | $($c)ö* {} - | ^ - if this block is the condition of the `if` expression, then it must be followed by another block - | | - | expected condition here + | ^ - if this block is the condition of the `if` expression, then it must be followed by another block + | | + | expected condition here error: aborting due to 1 previous error diff --git a/tests/ui/parser/issues/issue-68092-unicode-ident-after-incomplete-expr.stderr b/tests/ui/parser/issues/issue-68092-unicode-ident-after-incomplete-expr.stderr index 43047ff88020e..15aa62e08108a 100644 --- a/tests/ui/parser/issues/issue-68092-unicode-ident-after-incomplete-expr.stderr +++ b/tests/ui/parser/issues/issue-68092-unicode-ident-after-incomplete-expr.stderr @@ -1,8 +1,8 @@ error: macro expansion ends with an incomplete expression: expected expression - --> $DIR/issue-68092-unicode-ident-after-incomplete-expr.rs:3:14 + --> $DIR/issue-68092-unicode-ident-after-incomplete-expr.rs:3:13 | LL | $($c)ö* - | ^ expected expression + | ^ expected expression error: aborting due to 1 previous error diff --git a/tests/ui/parser/macro/mbe-bare-trait-object-maybe-trait-bound.rs b/tests/ui/parser/macro/mbe-bare-trait-object-maybe-trait-bound.rs new file mode 100644 index 0000000000000..fe062d62e5aa7 --- /dev/null +++ b/tests/ui/parser/macro/mbe-bare-trait-object-maybe-trait-bound.rs @@ -0,0 +1,16 @@ +// Check that `?Trait` matches the macro fragment specifier `ty`. +// Syntactically trait object types can be "bare" (i.e., lack the prefix `dyn`), +// even in newer editions like Rust 2021. +// Therefore the arm `?$Trait:path` shouldn't get reached. + +// edition: 2021 +// check-pass + +macro_rules! check { + ($Ty:ty) => {}; + (?$Trait:path) => { compile_error!("non-ty"); }; +} + +check! 
{ ?Trait } + +fn main() {} diff --git a/tests/ui/parser/match-arm-without-body.stderr b/tests/ui/parser/match-arm-without-body.stderr index d98c7ec282601..a3f7e32c1773b 100644 --- a/tests/ui/parser/match-arm-without-body.stderr +++ b/tests/ui/parser/match-arm-without-body.stderr @@ -1,8 +1,8 @@ -error: expected one of `,`, `=>`, `if`, `|`, or `}`, found reserved identifier `_` +error: expected one of `=>`, `if`, or `|`, found reserved identifier `_` --> $DIR/match-arm-without-body.rs:13:9 | LL | Some(_) - | - expected one of `,`, `=>`, `if`, `|`, or `}` + | - expected one of `=>`, `if`, or `|` LL | _ => {} | ^ unexpected token @@ -44,11 +44,11 @@ LL + LL ~ _ => {} | -error: expected one of `,`, `.`, `=>`, `?`, `}`, or an operator, found reserved identifier `_` +error: expected one of `.`, `=>`, `?`, or an operator, found reserved identifier `_` --> $DIR/match-arm-without-body.rs:36:9 | LL | Some(_) if true - | - expected one of `,`, `.`, `=>`, `?`, `}`, or an operator + | - expected one of `.`, `=>`, `?`, or an operator LL | _ => {} | ^ unexpected token diff --git a/tests/ui/parser/pat-lt-bracket-1.rs b/tests/ui/parser/pat-lt-bracket-1.rs index 33da15adb9e40..2e2001434f28c 100644 --- a/tests/ui/parser/pat-lt-bracket-1.rs +++ b/tests/ui/parser/pat-lt-bracket-1.rs @@ -1,7 +1,7 @@ fn main() { match 42 { x < 7 => (), - //~^ error: expected one of `,`, `=>`, `@`, `if`, `|`, or `}`, found `<` + //~^ error: expected one of `=>`, `@`, `if`, or `|`, found `<` _ => () } } diff --git a/tests/ui/parser/pat-lt-bracket-1.stderr b/tests/ui/parser/pat-lt-bracket-1.stderr index f39487052ade4..14e679bbee073 100644 --- a/tests/ui/parser/pat-lt-bracket-1.stderr +++ b/tests/ui/parser/pat-lt-bracket-1.stderr @@ -1,8 +1,8 @@ -error: expected one of `,`, `=>`, `@`, `if`, `|`, or `}`, found `<` +error: expected one of `=>`, `@`, `if`, or `|`, found `<` --> $DIR/pat-lt-bracket-1.rs:3:7 | LL | x < 7 => (), - | ^ expected one of `,`, `=>`, `@`, `if`, `|`, or `}` + | ^ expected one of `=>`, `@`, `if`, or `|` error: aborting due to 1 previous error diff --git a/tests/ui/parser/pat-recover-ranges.rs b/tests/ui/parser/pat-recover-ranges.rs new file mode 100644 index 0000000000000..65a6fc6fe21f3 --- /dev/null +++ b/tests/ui/parser/pat-recover-ranges.rs @@ -0,0 +1,19 @@ +fn main() { + match -1 { + 0..=1 => (), + 0..=(1) => (), + //~^ error: range pattern bounds cannot have parentheses + (-12)..=4 => (), + //~^ error: range pattern bounds cannot have parentheses + (0)..=(-4) => (), + //~^ error: range pattern bounds cannot have parentheses + //~| error: range pattern bounds cannot have parentheses + }; +} + +macro_rules! 
m { + ($pat:pat) => {}; + (($s:literal)..($e:literal)) => {}; +} + +m!((7)..(7)); diff --git a/tests/ui/parser/pat-recover-ranges.stderr b/tests/ui/parser/pat-recover-ranges.stderr new file mode 100644 index 0000000000000..0d722b5aa95c8 --- /dev/null +++ b/tests/ui/parser/pat-recover-ranges.stderr @@ -0,0 +1,50 @@ +error: range pattern bounds cannot have parentheses + --> $DIR/pat-recover-ranges.rs:4:13 + | +LL | 0..=(1) => (), + | ^ ^ + | +help: remove these parentheses + | +LL - 0..=(1) => (), +LL + 0..=1 => (), + | + +error: range pattern bounds cannot have parentheses + --> $DIR/pat-recover-ranges.rs:6:9 + | +LL | (-12)..=4 => (), + | ^ ^ + | +help: remove these parentheses + | +LL - (-12)..=4 => (), +LL + -12..=4 => (), + | + +error: range pattern bounds cannot have parentheses + --> $DIR/pat-recover-ranges.rs:8:9 + | +LL | (0)..=(-4) => (), + | ^ ^ + | +help: remove these parentheses + | +LL - (0)..=(-4) => (), +LL + 0..=(-4) => (), + | + +error: range pattern bounds cannot have parentheses + --> $DIR/pat-recover-ranges.rs:8:15 + | +LL | (0)..=(-4) => (), + | ^ ^ + | +help: remove these parentheses + | +LL - (0)..=(-4) => (), +LL + (0)..=-4 => (), + | + +error: aborting due to 4 previous errors + diff --git a/tests/ui/parser/trait-object-delimiters.rs b/tests/ui/parser/trait-object-delimiters.rs index e9b13defe0384..240ae3084d68e 100644 --- a/tests/ui/parser/trait-object-delimiters.rs +++ b/tests/ui/parser/trait-object-delimiters.rs @@ -8,7 +8,7 @@ fn foo2(_: &dyn (Drop + AsRef)) {} //~ ERROR incorrect parentheses around t fn foo2_no_space(_: &dyn(Drop + AsRef)) {} //~ ERROR incorrect parentheses around trait bounds fn foo3(_: &dyn {Drop + AsRef}) {} //~ ERROR expected parameter name, found `{` -//~^ ERROR expected one of `!`, `(`, `)`, `*`, `,`, `?`, `for`, `~`, lifetime, or path, found `{` +//~^ ERROR expected one of `!`, `(`, `)`, `*`, `,`, `?`, `const`, `for`, `~`, lifetime, or path, found `{` //~| ERROR at least one trait is required for an object type fn foo4(_: &dyn >) {} //~ ERROR expected identifier, found `<` diff --git a/tests/ui/parser/trait-object-delimiters.stderr b/tests/ui/parser/trait-object-delimiters.stderr index 519546750938b..2ddb734cee067 100644 --- a/tests/ui/parser/trait-object-delimiters.stderr +++ b/tests/ui/parser/trait-object-delimiters.stderr @@ -34,11 +34,11 @@ error: expected parameter name, found `{` LL | fn foo3(_: &dyn {Drop + AsRef}) {} | ^ expected parameter name -error: expected one of `!`, `(`, `)`, `*`, `,`, `?`, `for`, `~`, lifetime, or path, found `{` +error: expected one of `!`, `(`, `)`, `*`, `,`, `?`, `const`, `for`, `~`, lifetime, or path, found `{` --> $DIR/trait-object-delimiters.rs:10:17 | LL | fn foo3(_: &dyn {Drop + AsRef}) {} - | -^ expected one of 10 possible tokens + | -^ expected one of 11 possible tokens | | | help: missing `,` diff --git a/tests/ui/partialeq_help.stderr b/tests/ui/partialeq_help.stderr index fdff94f425c8a..f5de1308e8714 100644 --- a/tests/ui/partialeq_help.stderr +++ b/tests/ui/partialeq_help.stderr @@ -5,6 +5,10 @@ LL | a == b; | ^^ no implementation for `&T == T` | = help: the trait `PartialEq` is not implemented for `&T` +help: consider dereferencing here + | +LL | *a == b; + | + help: consider introducing a `where` clause, but there might be an alternative better way to express this requirement | LL | fn foo(a: &T, b: T) where &T: PartialEq { @@ -17,6 +21,10 @@ LL | a == b; | ^^ no implementation for `&T == T` | = help: the trait `PartialEq` is not implemented for `&T` +help: consider dereferencing here + | 
+LL | *a == b; + | + help: consider extending the `where` clause, but there might be an alternative better way to express this requirement | LL | fn foo2(a: &T, b: T) where &T: PartialEq { diff --git a/tests/ui/pattern/issue-117626.rs b/tests/ui/pattern/issue-117626.rs new file mode 100644 index 0000000000000..f87147a5d88ec --- /dev/null +++ b/tests/ui/pattern/issue-117626.rs @@ -0,0 +1,21 @@ +// check-pass + +#[derive(PartialEq)] +struct NonMatchable; + +impl Eq for NonMatchable {} + +#[derive(PartialEq, Eq)] +enum Foo { + A(NonMatchable), + B(*const u8), +} + +const CONST: Foo = Foo::B(std::ptr::null()); + +fn main() { + match CONST { + CONST => 0, + _ => 1, + }; +} diff --git a/tests/ui/privacy/auxiliary/issue-119463-extern.rs b/tests/ui/privacy/auxiliary/issue-119463-extern.rs new file mode 100644 index 0000000000000..e703a1fb2c2df --- /dev/null +++ b/tests/ui/privacy/auxiliary/issue-119463-extern.rs @@ -0,0 +1,3 @@ +trait PrivateTrait { + const FOO: usize; +} diff --git a/tests/ui/privacy/issue-119463.rs b/tests/ui/privacy/issue-119463.rs new file mode 100644 index 0000000000000..e010bc9f536b0 --- /dev/null +++ b/tests/ui/privacy/issue-119463.rs @@ -0,0 +1,15 @@ +// aux-build:issue-119463-extern.rs + +extern crate issue_119463_extern; + +struct S; + +impl issue_119463_extern::PrivateTrait for S { + //~^ ERROR: trait `PrivateTrait` is private + const FOO: usize = 1; + + fn nonexistent() {} + //~^ ERROR: method `nonexistent` is not a member of trait +} + +fn main() {} diff --git a/tests/ui/privacy/issue-119463.stderr b/tests/ui/privacy/issue-119463.stderr new file mode 100644 index 0000000000000..4a0684de613ca --- /dev/null +++ b/tests/ui/privacy/issue-119463.stderr @@ -0,0 +1,22 @@ +error[E0407]: method `nonexistent` is not a member of trait `issue_119463_extern::PrivateTrait` + --> $DIR/issue-119463.rs:11:5 + | +LL | fn nonexistent() {} + | ^^^^^^^^^^^^^^^^^^^ not a member of trait `issue_119463_extern::PrivateTrait` + +error[E0603]: trait `PrivateTrait` is private + --> $DIR/issue-119463.rs:7:27 + | +LL | impl issue_119463_extern::PrivateTrait for S { + | ^^^^^^^^^^^^ private trait + | +note: the trait `PrivateTrait` is defined here + --> $DIR/auxiliary/issue-119463-extern.rs:1:1 + | +LL | trait PrivateTrait { + | ^^^^^^^^^^^^^^^^^^ + +error: aborting due to 2 previous errors + +Some errors have detailed explanations: E0407, E0603. +For more information about an error, try `rustc --explain E0407`. diff --git a/tests/ui/proc-macro/capture-macro-rules-invoke.stdout b/tests/ui/proc-macro/capture-macro-rules-invoke.stdout index 71e34119ba7ee..bbab08bca499e 100644 --- a/tests/ui/proc-macro/capture-macro-rules-invoke.stdout +++ b/tests/ui/proc-macro/capture-macro-rules-invoke.stdout @@ -271,7 +271,7 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: $DIR/capture-macro-rules-invoke.rs:47:19: 47:20 (#0), }, ], - span: $DIR/capture-macro-rules-invoke.rs:47:13: 47:22 (#0), + span: $DIR/capture-macro-rules-invoke.rs:15:60: 15:63 (#0), }, Punct { ch: ',', diff --git a/tests/ui/proc-macro/expand-expr.rs b/tests/ui/proc-macro/expand-expr.rs index 700aac41c449a..89cd1d767a5d8 100644 --- a/tests/ui/proc-macro/expand-expr.rs +++ b/tests/ui/proc-macro/expand-expr.rs @@ -37,7 +37,7 @@ expand_expr_is!("hello", stringify!(hello)); expand_expr_is!("10 + 20", stringify!(10 + 20)); macro_rules! echo_tts { - ($($t:tt)*) => { $($t)* }; //~ ERROR: expected expression, found `$` + ($($t:tt)*) => { $($t)* }; } macro_rules! 
echo_lit { @@ -109,7 +109,7 @@ expand_expr_fail!("string"; hello); //~ ERROR: expected one of `.`, `?`, or an o // Invalid expressions produce errors in addition to returning `Err(())`. expand_expr_fail!($); //~ ERROR: expected expression, found `$` -expand_expr_fail!(echo_tts!($)); +expand_expr_fail!(echo_tts!($)); //~ ERROR: expected expression, found `$` expand_expr_fail!(echo_pm!($)); //~ ERROR: expected expression, found `$` // We get errors reported and recover during macro expansion if the macro diff --git a/tests/ui/proc-macro/expand-expr.stderr b/tests/ui/proc-macro/expand-expr.stderr index df61e9972896b..2b92472e5ab75 100644 --- a/tests/ui/proc-macro/expand-expr.stderr +++ b/tests/ui/proc-macro/expand-expr.stderr @@ -11,10 +11,10 @@ LL | expand_expr_fail!($); | ^ expected expression error: expected expression, found `$` - --> $DIR/expand-expr.rs:40:23 + --> $DIR/expand-expr.rs:112:29 | -LL | ($($t:tt)*) => { $($t)* }; - | ^^^^ expected expression +LL | expand_expr_fail!(echo_tts!($)); + | ^ expected expression error: expected expression, found `$` --> $DIR/expand-expr.rs:113:28 diff --git a/tests/ui/proc-macro/inner-attrs.rs b/tests/ui/proc-macro/inner-attrs.rs index 1000c9c755ffc..c448294e0f64e 100644 --- a/tests/ui/proc-macro/inner-attrs.rs +++ b/tests/ui/proc-macro/inner-attrs.rs @@ -1,5 +1,6 @@ // gate-test-custom_inner_attributes // compile-flags: -Z span-debug --error-format human +// error-pattern:expected non-macro inner attribute // aux-build:test-macros.rs // edition:2018 @@ -61,19 +62,19 @@ fn bar() { ); for _ in &[true] { - #![print_attr] //~ ERROR expected non-macro inner attribute + #![print_attr] } let _ = { - #![print_attr] //~ ERROR expected non-macro inner attribute + #![print_attr] }; let _ = async { - #![print_attr] //~ ERROR expected non-macro inner attribute + #![print_attr] }; { - #![print_attr] //~ ERROR expected non-macro inner attribute + #![print_attr] }; } diff --git a/tests/ui/proc-macro/inner-attrs.stderr b/tests/ui/proc-macro/inner-attrs.stderr index a332e143a79f1..ee8732c650dc9 100644 --- a/tests/ui/proc-macro/inner-attrs.stderr +++ b/tests/ui/proc-macro/inner-attrs.stderr @@ -1,23 +1,23 @@ error: expected non-macro inner attribute, found attribute macro `print_attr` - --> $DIR/inner-attrs.rs:64:12 + --> $DIR/inner-attrs.rs:65:12 | LL | #![print_attr] | ^^^^^^^^^^ not a non-macro inner attribute error: expected non-macro inner attribute, found attribute macro `print_attr` - --> $DIR/inner-attrs.rs:68:12 + --> $DIR/inner-attrs.rs:69:12 | LL | #![print_attr] | ^^^^^^^^^^ not a non-macro inner attribute error: expected non-macro inner attribute, found attribute macro `print_attr` - --> $DIR/inner-attrs.rs:72:12 + --> $DIR/inner-attrs.rs:73:12 | LL | #![print_attr] | ^^^^^^^^^^ not a non-macro inner attribute error: expected non-macro inner attribute, found attribute macro `print_attr` - --> $DIR/inner-attrs.rs:76:12 + --> $DIR/inner-attrs.rs:77:12 | LL | #![print_attr] | ^^^^^^^^^^ not a non-macro inner attribute diff --git a/tests/ui/proc-macro/inner-attrs.stdout b/tests/ui/proc-macro/inner-attrs.stdout index 037ec044e426e..39ec6834f0658 100644 --- a/tests/ui/proc-macro/inner-attrs.stdout +++ b/tests/ui/proc-macro/inner-attrs.stdout @@ -2,7 +2,7 @@ PRINT-ATTR_ARGS INPUT (DISPLAY): first PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [ Ident { ident: "first", - span: $DIR/inner-attrs.rs:17:25: 17:30 (#0), + span: $DIR/inner-attrs.rs:18:25: 18:30 (#0), }, ] PRINT-ATTR INPUT (DISPLAY): #[print_target_and_args(second)] fn foo() @@ -13,40 +13,40 @@ PRINT-ATTR 
INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/inner-attrs.rs:18:1: 18:2 (#0), + span: $DIR/inner-attrs.rs:19:1: 19:2 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_target_and_args", - span: $DIR/inner-attrs.rs:18:3: 18:24 (#0), + span: $DIR/inner-attrs.rs:19:3: 19:24 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "second", - span: $DIR/inner-attrs.rs:18:25: 18:31 (#0), + span: $DIR/inner-attrs.rs:19:25: 19:31 (#0), }, ], - span: $DIR/inner-attrs.rs:18:24: 18:32 (#0), + span: $DIR/inner-attrs.rs:19:24: 19:32 (#0), }, ], - span: $DIR/inner-attrs.rs:18:2: 18:33 (#0), + span: $DIR/inner-attrs.rs:19:2: 19:33 (#0), }, Ident { ident: "fn", - span: $DIR/inner-attrs.rs:19:1: 19:3 (#0), + span: $DIR/inner-attrs.rs:20:1: 20:3 (#0), }, Ident { ident: "foo", - span: $DIR/inner-attrs.rs:19:4: 19:7 (#0), + span: $DIR/inner-attrs.rs:20:4: 20:7 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [], - span: $DIR/inner-attrs.rs:19:7: 19:9 (#0), + span: $DIR/inner-attrs.rs:20:7: 20:9 (#0), }, Group { delimiter: Brace, @@ -54,72 +54,72 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Joint, - span: $DIR/inner-attrs.rs:20:5: 20:6 (#0), + span: $DIR/inner-attrs.rs:21:5: 21:6 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/inner-attrs.rs:20:6: 20:7 (#0), + span: $DIR/inner-attrs.rs:21:6: 21:7 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_target_and_args", - span: $DIR/inner-attrs.rs:20:8: 20:29 (#0), + span: $DIR/inner-attrs.rs:21:8: 21:29 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "third", - span: $DIR/inner-attrs.rs:20:30: 20:35 (#0), + span: $DIR/inner-attrs.rs:21:30: 21:35 (#0), }, ], - span: $DIR/inner-attrs.rs:20:29: 20:36 (#0), + span: $DIR/inner-attrs.rs:21:29: 21:36 (#0), }, ], - span: $DIR/inner-attrs.rs:20:7: 20:37 (#0), + span: $DIR/inner-attrs.rs:21:7: 21:37 (#0), }, Punct { ch: '#', spacing: Joint, - span: $DIR/inner-attrs.rs:21:5: 21:6 (#0), + span: $DIR/inner-attrs.rs:22:5: 22:6 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/inner-attrs.rs:21:6: 21:7 (#0), + span: $DIR/inner-attrs.rs:22:6: 22:7 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_target_and_args", - span: $DIR/inner-attrs.rs:21:8: 21:29 (#0), + span: $DIR/inner-attrs.rs:22:8: 22:29 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "fourth", - span: $DIR/inner-attrs.rs:21:30: 21:36 (#0), + span: $DIR/inner-attrs.rs:22:30: 22:36 (#0), }, ], - span: $DIR/inner-attrs.rs:21:29: 21:37 (#0), + span: $DIR/inner-attrs.rs:22:29: 22:37 (#0), }, ], - span: $DIR/inner-attrs.rs:21:7: 21:38 (#0), + span: $DIR/inner-attrs.rs:22:7: 22:38 (#0), }, ], - span: $DIR/inner-attrs.rs:19:10: 22:2 (#0), + span: $DIR/inner-attrs.rs:20:10: 23:2 (#0), }, ] PRINT-ATTR_ARGS INPUT (DISPLAY): second PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [ Ident { ident: "second", - span: $DIR/inner-attrs.rs:18:25: 18:31 (#0), + span: $DIR/inner-attrs.rs:19:25: 19:31 (#0), }, ] PRINT-ATTR INPUT (DISPLAY): fn foo() @@ -129,16 +129,16 @@ PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): fn foo() PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "fn", - span: $DIR/inner-attrs.rs:19:1: 19:3 (#0), + span: $DIR/inner-attrs.rs:20:1: 20:3 (#0), }, Ident { ident: "foo", - span: $DIR/inner-attrs.rs:19:4: 19:7 (#0), + span: $DIR/inner-attrs.rs:20:4: 20:7 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [], - span: 
$DIR/inner-attrs.rs:19:7: 19:9 (#0), + span: $DIR/inner-attrs.rs:20:7: 20:9 (#0), }, Group { delimiter: Brace, @@ -146,72 +146,72 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Joint, - span: $DIR/inner-attrs.rs:20:5: 20:6 (#0), + span: $DIR/inner-attrs.rs:21:5: 21:6 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/inner-attrs.rs:20:6: 20:7 (#0), + span: $DIR/inner-attrs.rs:21:6: 21:7 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_target_and_args", - span: $DIR/inner-attrs.rs:20:8: 20:29 (#0), + span: $DIR/inner-attrs.rs:21:8: 21:29 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "third", - span: $DIR/inner-attrs.rs:20:30: 20:35 (#0), + span: $DIR/inner-attrs.rs:21:30: 21:35 (#0), }, ], - span: $DIR/inner-attrs.rs:20:29: 20:36 (#0), + span: $DIR/inner-attrs.rs:21:29: 21:36 (#0), }, ], - span: $DIR/inner-attrs.rs:20:7: 20:37 (#0), + span: $DIR/inner-attrs.rs:21:7: 21:37 (#0), }, Punct { ch: '#', spacing: Joint, - span: $DIR/inner-attrs.rs:21:5: 21:6 (#0), + span: $DIR/inner-attrs.rs:22:5: 22:6 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/inner-attrs.rs:21:6: 21:7 (#0), + span: $DIR/inner-attrs.rs:22:6: 22:7 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_target_and_args", - span: $DIR/inner-attrs.rs:21:8: 21:29 (#0), + span: $DIR/inner-attrs.rs:22:8: 22:29 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "fourth", - span: $DIR/inner-attrs.rs:21:30: 21:36 (#0), + span: $DIR/inner-attrs.rs:22:30: 22:36 (#0), }, ], - span: $DIR/inner-attrs.rs:21:29: 21:37 (#0), + span: $DIR/inner-attrs.rs:22:29: 22:37 (#0), }, ], - span: $DIR/inner-attrs.rs:21:7: 21:38 (#0), + span: $DIR/inner-attrs.rs:22:7: 22:38 (#0), }, ], - span: $DIR/inner-attrs.rs:19:10: 22:2 (#0), + span: $DIR/inner-attrs.rs:20:10: 23:2 (#0), }, ] PRINT-ATTR_ARGS INPUT (DISPLAY): third PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [ Ident { ident: "third", - span: $DIR/inner-attrs.rs:20:30: 20:35 (#0), + span: $DIR/inner-attrs.rs:21:30: 21:35 (#0), }, ] PRINT-ATTR INPUT (DISPLAY): fn foo() { #![print_target_and_args(fourth)] } @@ -219,16 +219,16 @@ PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): fn foo() { #! 
[print_target_and_args(fou PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "fn", - span: $DIR/inner-attrs.rs:19:1: 19:3 (#0), + span: $DIR/inner-attrs.rs:20:1: 20:3 (#0), }, Ident { ident: "foo", - span: $DIR/inner-attrs.rs:19:4: 19:7 (#0), + span: $DIR/inner-attrs.rs:20:4: 20:7 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [], - span: $DIR/inner-attrs.rs:19:7: 19:9 (#0), + span: $DIR/inner-attrs.rs:20:7: 20:9 (#0), }, Group { delimiter: Brace, @@ -236,70 +236,70 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Joint, - span: $DIR/inner-attrs.rs:21:5: 21:6 (#0), + span: $DIR/inner-attrs.rs:22:5: 22:6 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/inner-attrs.rs:21:6: 21:7 (#0), + span: $DIR/inner-attrs.rs:22:6: 22:7 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_target_and_args", - span: $DIR/inner-attrs.rs:21:8: 21:29 (#0), + span: $DIR/inner-attrs.rs:22:8: 22:29 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "fourth", - span: $DIR/inner-attrs.rs:21:30: 21:36 (#0), + span: $DIR/inner-attrs.rs:22:30: 22:36 (#0), }, ], - span: $DIR/inner-attrs.rs:21:29: 21:37 (#0), + span: $DIR/inner-attrs.rs:22:29: 22:37 (#0), }, ], - span: $DIR/inner-attrs.rs:21:7: 21:38 (#0), + span: $DIR/inner-attrs.rs:22:7: 22:38 (#0), }, ], - span: $DIR/inner-attrs.rs:19:10: 22:2 (#0), + span: $DIR/inner-attrs.rs:20:10: 23:2 (#0), }, ] PRINT-ATTR_ARGS INPUT (DISPLAY): fourth PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [ Ident { ident: "fourth", - span: $DIR/inner-attrs.rs:21:30: 21:36 (#0), + span: $DIR/inner-attrs.rs:22:30: 22:36 (#0), }, ] PRINT-ATTR INPUT (DISPLAY): fn foo() {} PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "fn", - span: $DIR/inner-attrs.rs:19:1: 19:3 (#0), + span: $DIR/inner-attrs.rs:20:1: 20:3 (#0), }, Ident { ident: "foo", - span: $DIR/inner-attrs.rs:19:4: 19:7 (#0), + span: $DIR/inner-attrs.rs:20:4: 20:7 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [], - span: $DIR/inner-attrs.rs:19:7: 19:9 (#0), + span: $DIR/inner-attrs.rs:20:7: 20:9 (#0), }, Group { delimiter: Brace, stream: TokenStream [], - span: $DIR/inner-attrs.rs:19:10: 22:2 (#0), + span: $DIR/inner-attrs.rs:20:10: 23:2 (#0), }, ] PRINT-ATTR_ARGS INPUT (DISPLAY): mod_first PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [ Ident { ident: "mod_first", - span: $DIR/inner-attrs.rs:24:25: 24:34 (#0), + span: $DIR/inner-attrs.rs:25:25: 25:34 (#0), }, ] PRINT-ATTR INPUT (DISPLAY): #[print_target_and_args(mod_second)] mod inline_mod @@ -313,35 +313,35 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/inner-attrs.rs:25:1: 25:2 (#0), + span: $DIR/inner-attrs.rs:26:1: 26:2 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_target_and_args", - span: $DIR/inner-attrs.rs:25:3: 25:24 (#0), + span: $DIR/inner-attrs.rs:26:3: 26:24 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "mod_second", - span: $DIR/inner-attrs.rs:25:25: 25:35 (#0), + span: $DIR/inner-attrs.rs:26:25: 26:35 (#0), }, ], - span: $DIR/inner-attrs.rs:25:24: 25:36 (#0), + span: $DIR/inner-attrs.rs:26:24: 26:36 (#0), }, ], - span: $DIR/inner-attrs.rs:25:2: 25:37 (#0), + span: $DIR/inner-attrs.rs:26:2: 26:37 (#0), }, Ident { ident: "mod", - span: $DIR/inner-attrs.rs:26:1: 26:4 (#0), + span: $DIR/inner-attrs.rs:27:1: 27:4 (#0), }, Ident { ident: "inline_mod", - span: $DIR/inner-attrs.rs:26:5: 26:15 (#0), + span: $DIR/inner-attrs.rs:27:5: 27:15 (#0), }, Group { 
delimiter: Brace, @@ -349,72 +349,72 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Joint, - span: $DIR/inner-attrs.rs:27:5: 27:6 (#0), + span: $DIR/inner-attrs.rs:28:5: 28:6 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/inner-attrs.rs:27:6: 27:7 (#0), + span: $DIR/inner-attrs.rs:28:6: 28:7 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_target_and_args", - span: $DIR/inner-attrs.rs:27:8: 27:29 (#0), + span: $DIR/inner-attrs.rs:28:8: 28:29 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "mod_third", - span: $DIR/inner-attrs.rs:27:30: 27:39 (#0), + span: $DIR/inner-attrs.rs:28:30: 28:39 (#0), }, ], - span: $DIR/inner-attrs.rs:27:29: 27:40 (#0), + span: $DIR/inner-attrs.rs:28:29: 28:40 (#0), }, ], - span: $DIR/inner-attrs.rs:27:7: 27:41 (#0), + span: $DIR/inner-attrs.rs:28:7: 28:41 (#0), }, Punct { ch: '#', spacing: Joint, - span: $DIR/inner-attrs.rs:28:5: 28:6 (#0), + span: $DIR/inner-attrs.rs:29:5: 29:6 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/inner-attrs.rs:28:6: 28:7 (#0), + span: $DIR/inner-attrs.rs:29:6: 29:7 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_target_and_args", - span: $DIR/inner-attrs.rs:28:8: 28:29 (#0), + span: $DIR/inner-attrs.rs:29:8: 29:29 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "mod_fourth", - span: $DIR/inner-attrs.rs:28:30: 28:40 (#0), + span: $DIR/inner-attrs.rs:29:30: 29:40 (#0), }, ], - span: $DIR/inner-attrs.rs:28:29: 28:41 (#0), + span: $DIR/inner-attrs.rs:29:29: 29:41 (#0), }, ], - span: $DIR/inner-attrs.rs:28:7: 28:42 (#0), + span: $DIR/inner-attrs.rs:29:7: 29:42 (#0), }, ], - span: $DIR/inner-attrs.rs:26:16: 29:2 (#0), + span: $DIR/inner-attrs.rs:27:16: 30:2 (#0), }, ] PRINT-ATTR_ARGS INPUT (DISPLAY): mod_second PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [ Ident { ident: "mod_second", - span: $DIR/inner-attrs.rs:25:25: 25:35 (#0), + span: $DIR/inner-attrs.rs:26:25: 26:35 (#0), }, ] PRINT-ATTR INPUT (DISPLAY): mod inline_mod @@ -427,11 +427,11 @@ PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): mod inline_mod PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "mod", - span: $DIR/inner-attrs.rs:26:1: 26:4 (#0), + span: $DIR/inner-attrs.rs:27:1: 27:4 (#0), }, Ident { ident: "inline_mod", - span: $DIR/inner-attrs.rs:26:5: 26:15 (#0), + span: $DIR/inner-attrs.rs:27:5: 27:15 (#0), }, Group { delimiter: Brace, @@ -439,72 +439,72 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Joint, - span: $DIR/inner-attrs.rs:27:5: 27:6 (#0), + span: $DIR/inner-attrs.rs:28:5: 28:6 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/inner-attrs.rs:27:6: 27:7 (#0), + span: $DIR/inner-attrs.rs:28:6: 28:7 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_target_and_args", - span: $DIR/inner-attrs.rs:27:8: 27:29 (#0), + span: $DIR/inner-attrs.rs:28:8: 28:29 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "mod_third", - span: $DIR/inner-attrs.rs:27:30: 27:39 (#0), + span: $DIR/inner-attrs.rs:28:30: 28:39 (#0), }, ], - span: $DIR/inner-attrs.rs:27:29: 27:40 (#0), + span: $DIR/inner-attrs.rs:28:29: 28:40 (#0), }, ], - span: $DIR/inner-attrs.rs:27:7: 27:41 (#0), + span: $DIR/inner-attrs.rs:28:7: 28:41 (#0), }, Punct { ch: '#', spacing: Joint, - span: $DIR/inner-attrs.rs:28:5: 28:6 (#0), + span: $DIR/inner-attrs.rs:29:5: 29:6 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/inner-attrs.rs:28:6: 28:7 (#0), + span: 
$DIR/inner-attrs.rs:29:6: 29:7 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_target_and_args", - span: $DIR/inner-attrs.rs:28:8: 28:29 (#0), + span: $DIR/inner-attrs.rs:29:8: 29:29 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "mod_fourth", - span: $DIR/inner-attrs.rs:28:30: 28:40 (#0), + span: $DIR/inner-attrs.rs:29:30: 29:40 (#0), }, ], - span: $DIR/inner-attrs.rs:28:29: 28:41 (#0), + span: $DIR/inner-attrs.rs:29:29: 29:41 (#0), }, ], - span: $DIR/inner-attrs.rs:28:7: 28:42 (#0), + span: $DIR/inner-attrs.rs:29:7: 29:42 (#0), }, ], - span: $DIR/inner-attrs.rs:26:16: 29:2 (#0), + span: $DIR/inner-attrs.rs:27:16: 30:2 (#0), }, ] PRINT-ATTR_ARGS INPUT (DISPLAY): mod_third PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [ Ident { ident: "mod_third", - span: $DIR/inner-attrs.rs:27:30: 27:39 (#0), + span: $DIR/inner-attrs.rs:28:30: 28:39 (#0), }, ] PRINT-ATTR INPUT (DISPLAY): mod inline_mod { #![print_target_and_args(mod_fourth)] } @@ -512,11 +512,11 @@ PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): mod inline_mod { #! [print_target_and_ar PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "mod", - span: $DIR/inner-attrs.rs:26:1: 26:4 (#0), + span: $DIR/inner-attrs.rs:27:1: 27:4 (#0), }, Ident { ident: "inline_mod", - span: $DIR/inner-attrs.rs:26:5: 26:15 (#0), + span: $DIR/inner-attrs.rs:27:5: 27:15 (#0), }, Group { delimiter: Brace, @@ -524,58 +524,58 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Joint, - span: $DIR/inner-attrs.rs:28:5: 28:6 (#0), + span: $DIR/inner-attrs.rs:29:5: 29:6 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/inner-attrs.rs:28:6: 28:7 (#0), + span: $DIR/inner-attrs.rs:29:6: 29:7 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_target_and_args", - span: $DIR/inner-attrs.rs:28:8: 28:29 (#0), + span: $DIR/inner-attrs.rs:29:8: 29:29 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "mod_fourth", - span: $DIR/inner-attrs.rs:28:30: 28:40 (#0), + span: $DIR/inner-attrs.rs:29:30: 29:40 (#0), }, ], - span: $DIR/inner-attrs.rs:28:29: 28:41 (#0), + span: $DIR/inner-attrs.rs:29:29: 29:41 (#0), }, ], - span: $DIR/inner-attrs.rs:28:7: 28:42 (#0), + span: $DIR/inner-attrs.rs:29:7: 29:42 (#0), }, ], - span: $DIR/inner-attrs.rs:26:16: 29:2 (#0), + span: $DIR/inner-attrs.rs:27:16: 30:2 (#0), }, ] PRINT-ATTR_ARGS INPUT (DISPLAY): mod_fourth PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [ Ident { ident: "mod_fourth", - span: $DIR/inner-attrs.rs:28:30: 28:40 (#0), + span: $DIR/inner-attrs.rs:29:30: 29:40 (#0), }, ] PRINT-ATTR INPUT (DISPLAY): mod inline_mod {} PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "mod", - span: $DIR/inner-attrs.rs:26:1: 26:4 (#0), + span: $DIR/inner-attrs.rs:27:1: 27:4 (#0), }, Ident { ident: "inline_mod", - span: $DIR/inner-attrs.rs:26:5: 26:15 (#0), + span: $DIR/inner-attrs.rs:27:5: 27:15 (#0), }, Group { delimiter: Brace, stream: TokenStream [], - span: $DIR/inner-attrs.rs:26:16: 29:2 (#0), + span: $DIR/inner-attrs.rs:27:16: 30:2 (#0), }, ] PRINT-DERIVE INPUT (DISPLAY): struct MyDerivePrint @@ -588,63 +588,63 @@ PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): struct MyDerivePrint PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "struct", - span: $DIR/inner-attrs.rs:36:1: 36:7 (#0), + span: $DIR/inner-attrs.rs:37:1: 37:7 (#0), }, Ident { ident: "MyDerivePrint", - span: $DIR/inner-attrs.rs:36:8: 36:21 (#0), + span: $DIR/inner-attrs.rs:37:8: 37:21 (#0), }, Group { delimiter: Brace, stream: TokenStream [ Ident 
{ ident: "field", - span: $DIR/inner-attrs.rs:37:5: 37:10 (#0), + span: $DIR/inner-attrs.rs:38:5: 38:10 (#0), }, Punct { ch: ':', spacing: Alone, - span: $DIR/inner-attrs.rs:37:10: 37:11 (#0), + span: $DIR/inner-attrs.rs:38:10: 38:11 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "u8", - span: $DIR/inner-attrs.rs:37:13: 37:15 (#0), + span: $DIR/inner-attrs.rs:38:13: 38:15 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/inner-attrs.rs:37:15: 37:16 (#0), + span: $DIR/inner-attrs.rs:38:15: 38:16 (#0), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "match", - span: $DIR/inner-attrs.rs:38:9: 38:14 (#0), + span: $DIR/inner-attrs.rs:39:9: 39:14 (#0), }, Ident { ident: "true", - span: $DIR/inner-attrs.rs:38:15: 38:19 (#0), + span: $DIR/inner-attrs.rs:39:15: 39:19 (#0), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "_", - span: $DIR/inner-attrs.rs:39:13: 39:14 (#0), + span: $DIR/inner-attrs.rs:40:13: 40:14 (#0), }, Punct { ch: '=', spacing: Joint, - span: $DIR/inner-attrs.rs:39:15: 39:16 (#0), + span: $DIR/inner-attrs.rs:40:15: 40:16 (#0), }, Punct { ch: '>', spacing: Alone, - span: $DIR/inner-attrs.rs:39:16: 39:17 (#0), + span: $DIR/inner-attrs.rs:40:16: 40:17 (#0), }, Group { delimiter: Brace, @@ -652,69 +652,69 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Joint, - span: $DIR/inner-attrs.rs:40:17: 40:18 (#0), + span: $DIR/inner-attrs.rs:41:17: 41:18 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/inner-attrs.rs:40:18: 40:19 (#0), + span: $DIR/inner-attrs.rs:41:18: 41:19 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "rustc_dummy", - span: $DIR/inner-attrs.rs:40:41: 40:52 (#0), + span: $DIR/inner-attrs.rs:41:41: 41:52 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "third", - span: $DIR/inner-attrs.rs:40:53: 40:58 (#0), + span: $DIR/inner-attrs.rs:41:53: 41:58 (#0), }, ], - span: $DIR/inner-attrs.rs:40:52: 40:59 (#0), + span: $DIR/inner-attrs.rs:41:52: 41:59 (#0), }, ], - span: $DIR/inner-attrs.rs:40:17: 40:18 (#0), + span: $DIR/inner-attrs.rs:41:17: 41:18 (#0), }, Ident { ident: "true", - span: $DIR/inner-attrs.rs:41:17: 41:21 (#0), + span: $DIR/inner-attrs.rs:42:17: 42:21 (#0), }, ], - span: $DIR/inner-attrs.rs:39:18: 42:14 (#0), + span: $DIR/inner-attrs.rs:40:18: 43:14 (#0), }, ], - span: $DIR/inner-attrs.rs:38:20: 43:10 (#0), + span: $DIR/inner-attrs.rs:39:20: 44:10 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/inner-attrs.rs:43:10: 43:11 (#0), + span: $DIR/inner-attrs.rs:44:10: 44:11 (#0), }, Literal { kind: Integer, symbol: "0", suffix: None, - span: $DIR/inner-attrs.rs:44:9: 44:10 (#0), + span: $DIR/inner-attrs.rs:45:9: 45:10 (#0), }, ], - span: $DIR/inner-attrs.rs:37:17: 45:6 (#0), + span: $DIR/inner-attrs.rs:38:17: 46:6 (#0), }, ], - span: $DIR/inner-attrs.rs:37:12: 45:7 (#0), + span: $DIR/inner-attrs.rs:38:12: 46:7 (#0), }, ], - span: $DIR/inner-attrs.rs:36:22: 46:2 (#0), + span: $DIR/inner-attrs.rs:37:22: 47:2 (#0), }, ] PRINT-ATTR_ARGS INPUT (DISPLAY): tuple_attrs PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [ Ident { ident: "tuple_attrs", - span: $DIR/inner-attrs.rs:49:29: 49:40 (#0), + span: $DIR/inner-attrs.rs:50:29: 50:40 (#0), }, ] PRINT-ATTR INPUT (DISPLAY): (3, 4, { #![cfg_attr(not(FALSE), rustc_dummy(innermost))] 5 }); @@ -728,23 +728,23 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ kind: Integer, symbol: "3", suffix: None, - span: $DIR/inner-attrs.rs:50:9: 50:10 (#0), + span: $DIR/inner-attrs.rs:51:9: 51:10 (#0), 
}, Punct { ch: ',', spacing: Alone, - span: $DIR/inner-attrs.rs:50:10: 50:11 (#0), + span: $DIR/inner-attrs.rs:51:10: 51:11 (#0), }, Literal { kind: Integer, symbol: "4", suffix: None, - span: $DIR/inner-attrs.rs:50:12: 50:13 (#0), + span: $DIR/inner-attrs.rs:51:12: 51:13 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/inner-attrs.rs:50:13: 50:14 (#0), + span: $DIR/inner-attrs.rs:51:13: 51:14 (#0), }, Group { delimiter: Brace, @@ -752,85 +752,85 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Joint, - span: $DIR/inner-attrs.rs:51:13: 51:14 (#0), + span: $DIR/inner-attrs.rs:52:13: 52:14 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/inner-attrs.rs:51:14: 51:15 (#0), + span: $DIR/inner-attrs.rs:52:14: 52:15 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg_attr", - span: $DIR/inner-attrs.rs:51:16: 51:24 (#0), + span: $DIR/inner-attrs.rs:52:16: 52:24 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/inner-attrs.rs:51:25: 51:28 (#0), + span: $DIR/inner-attrs.rs:52:25: 52:28 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/inner-attrs.rs:51:29: 51:34 (#0), + span: $DIR/inner-attrs.rs:52:29: 52:34 (#0), }, ], - span: $DIR/inner-attrs.rs:51:28: 51:35 (#0), + span: $DIR/inner-attrs.rs:52:28: 52:35 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/inner-attrs.rs:51:35: 51:36 (#0), + span: $DIR/inner-attrs.rs:52:35: 52:36 (#0), }, Ident { ident: "rustc_dummy", - span: $DIR/inner-attrs.rs:51:37: 51:48 (#0), + span: $DIR/inner-attrs.rs:52:37: 52:48 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "innermost", - span: $DIR/inner-attrs.rs:51:49: 51:58 (#0), + span: $DIR/inner-attrs.rs:52:49: 52:58 (#0), }, ], - span: $DIR/inner-attrs.rs:51:48: 51:59 (#0), + span: $DIR/inner-attrs.rs:52:48: 52:59 (#0), }, ], - span: $DIR/inner-attrs.rs:51:24: 51:60 (#0), + span: $DIR/inner-attrs.rs:52:24: 52:60 (#0), }, ], - span: $DIR/inner-attrs.rs:51:15: 51:61 (#0), + span: $DIR/inner-attrs.rs:52:15: 52:61 (#0), }, Literal { kind: Integer, symbol: "5", suffix: None, - span: $DIR/inner-attrs.rs:52:13: 52:14 (#0), + span: $DIR/inner-attrs.rs:53:13: 53:14 (#0), }, ], - span: $DIR/inner-attrs.rs:50:15: 53:10 (#0), + span: $DIR/inner-attrs.rs:51:15: 54:10 (#0), }, ], - span: $DIR/inner-attrs.rs:49:43: 54:6 (#0), + span: $DIR/inner-attrs.rs:50:43: 55:6 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/inner-attrs.rs:54:6: 54:7 (#0), + span: $DIR/inner-attrs.rs:55:6: 55:7 (#0), }, ] PRINT-ATTR_ARGS INPUT (DISPLAY): tuple_attrs PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [ Ident { ident: "tuple_attrs", - span: $DIR/inner-attrs.rs:56:29: 56:40 (#0), + span: $DIR/inner-attrs.rs:57:29: 57:40 (#0), }, ] PRINT-ATTR INPUT (DISPLAY): (3, 4, { #![cfg_attr(not(FALSE), rustc_dummy(innermost))] 5 }); @@ -844,23 +844,23 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ kind: Integer, symbol: "3", suffix: None, - span: $DIR/inner-attrs.rs:57:9: 57:10 (#0), + span: $DIR/inner-attrs.rs:58:9: 58:10 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/inner-attrs.rs:57:10: 57:11 (#0), + span: $DIR/inner-attrs.rs:58:10: 58:11 (#0), }, Literal { kind: Integer, symbol: "4", suffix: None, - span: $DIR/inner-attrs.rs:57:12: 57:13 (#0), + span: $DIR/inner-attrs.rs:58:12: 58:13 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/inner-attrs.rs:57:13: 57:14 (#0), + span: $DIR/inner-attrs.rs:58:13: 58:14 (#0), }, Group { delimiter: Brace, @@ -868,105 
+868,105 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Joint, - span: $DIR/inner-attrs.rs:58:13: 58:14 (#0), + span: $DIR/inner-attrs.rs:59:13: 59:14 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/inner-attrs.rs:58:14: 58:15 (#0), + span: $DIR/inner-attrs.rs:59:14: 59:15 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg_attr", - span: $DIR/inner-attrs.rs:58:16: 58:24 (#0), + span: $DIR/inner-attrs.rs:59:16: 59:24 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/inner-attrs.rs:58:25: 58:28 (#0), + span: $DIR/inner-attrs.rs:59:25: 59:28 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/inner-attrs.rs:58:29: 58:34 (#0), + span: $DIR/inner-attrs.rs:59:29: 59:34 (#0), }, ], - span: $DIR/inner-attrs.rs:58:28: 58:35 (#0), + span: $DIR/inner-attrs.rs:59:28: 59:35 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/inner-attrs.rs:58:35: 58:36 (#0), + span: $DIR/inner-attrs.rs:59:35: 59:36 (#0), }, Ident { ident: "rustc_dummy", - span: $DIR/inner-attrs.rs:58:37: 58:48 (#0), + span: $DIR/inner-attrs.rs:59:37: 59:48 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "innermost", - span: $DIR/inner-attrs.rs:58:49: 58:58 (#0), + span: $DIR/inner-attrs.rs:59:49: 59:58 (#0), }, ], - span: $DIR/inner-attrs.rs:58:48: 58:59 (#0), + span: $DIR/inner-attrs.rs:59:48: 59:59 (#0), }, ], - span: $DIR/inner-attrs.rs:58:24: 58:60 (#0), + span: $DIR/inner-attrs.rs:59:24: 59:60 (#0), }, ], - span: $DIR/inner-attrs.rs:58:15: 58:61 (#0), + span: $DIR/inner-attrs.rs:59:15: 59:61 (#0), }, Literal { kind: Integer, symbol: "5", suffix: None, - span: $DIR/inner-attrs.rs:59:13: 59:14 (#0), + span: $DIR/inner-attrs.rs:60:13: 60:14 (#0), }, ], - span: $DIR/inner-attrs.rs:57:15: 60:10 (#0), + span: $DIR/inner-attrs.rs:58:15: 61:10 (#0), }, ], - span: $DIR/inner-attrs.rs:56:43: 61:6 (#0), + span: $DIR/inner-attrs.rs:57:43: 62:6 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/inner-attrs.rs:61:6: 61:7 (#0), + span: $DIR/inner-attrs.rs:62:6: 62:7 (#0), }, ] PRINT-ATTR_ARGS INPUT (DISPLAY): tenth PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [ Ident { ident: "tenth", - span: $DIR/inner-attrs.rs:83:42: 83:47 (#0), + span: $DIR/inner-attrs.rs:84:42: 84:47 (#0), }, ] PRINT-ATTR INPUT (DISPLAY): fn weird_extern() {} PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "fn", - span: $DIR/inner-attrs.rs:82:5: 82:7 (#0), + span: $DIR/inner-attrs.rs:83:5: 83:7 (#0), }, Ident { ident: "weird_extern", - span: $DIR/inner-attrs.rs:82:8: 82:20 (#0), + span: $DIR/inner-attrs.rs:83:8: 83:20 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [], - span: $DIR/inner-attrs.rs:82:20: 82:22 (#0), + span: $DIR/inner-attrs.rs:83:20: 83:22 (#0), }, Group { delimiter: Brace, stream: TokenStream [], - span: $DIR/inner-attrs.rs:82:23: 84:6 (#0), + span: $DIR/inner-attrs.rs:83:23: 85:6 (#0), }, ] diff --git a/tests/ui/proc-macro/issue-75930-derive-cfg.rs b/tests/ui/proc-macro/issue-75930-derive-cfg.rs index e0213527c5005..1e37b40c9540f 100644 --- a/tests/ui/proc-macro/issue-75930-derive-cfg.rs +++ b/tests/ui/proc-macro/issue-75930-derive-cfg.rs @@ -1,13 +1,10 @@ // check-pass -// compile-flags: -Z span-debug --error-format human +// compile-flags: -Z span-debug // aux-build:test-macros.rs // Regression test for issue #75930 // Tests that we cfg-strip all targets before invoking // a derive macro -// We need '--error-format human' to stop compiletest from -// 
trying to interpret proc-macro output as JSON messages -// (a pretty-printed struct may cause a line to start with '{' ) // FIXME: We currently lose spans here (see issue #43081) #![no_std] // Don't load unnecessary hygiene information from std @@ -47,6 +44,8 @@ extern crate test_macros; // that kind of correction caused the problem seen in #76399, so maybe not. #[print_helper(a)] //~ WARN derive helper attribute is used before it is introduced + //~| WARN derive helper attribute is used before it is introduced + //~| WARN this was previously accepted //~| WARN this was previously accepted #[cfg_attr(not(FALSE), allow(dead_code))] #[print_attr] diff --git a/tests/ui/proc-macro/issue-75930-derive-cfg.stderr b/tests/ui/proc-macro/issue-75930-derive-cfg.stderr index 1017745de6f2b..df1e36d739080 100644 --- a/tests/ui/proc-macro/issue-75930-derive-cfg.stderr +++ b/tests/ui/proc-macro/issue-75930-derive-cfg.stderr @@ -1,5 +1,5 @@ warning: derive helper attribute is used before it is introduced - --> $DIR/issue-75930-derive-cfg.rs:49:3 + --> $DIR/issue-75930-derive-cfg.rs:46:3 | LL | #[print_helper(a)] | ^^^^^^^^^^^^ @@ -12,7 +12,7 @@ LL | #[derive(Print)] = note: `#[warn(legacy_derive_helpers)]` on by default warning: derive helper attribute is used before it is introduced - --> $DIR/issue-75930-derive-cfg.rs:49:3 + --> $DIR/issue-75930-derive-cfg.rs:46:3 | LL | #[print_helper(a)] | ^^^^^^^^^^^^ diff --git a/tests/ui/proc-macro/issue-75930-derive-cfg.stdout b/tests/ui/proc-macro/issue-75930-derive-cfg.stdout index 47f26451d1c53..093e37f8a8d0e 100644 --- a/tests/ui/proc-macro/issue-75930-derive-cfg.stdout +++ b/tests/ui/proc-macro/issue-75930-derive-cfg.stdout @@ -74,158 +74,158 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:49:1: 49:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:46:1: 46:2 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_helper", - span: $DIR/issue-75930-derive-cfg.rs:49:3: 49:15 (#0), + span: $DIR/issue-75930-derive-cfg.rs:46:3: 46:15 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "a", - span: $DIR/issue-75930-derive-cfg.rs:49:16: 49:17 (#0), + span: $DIR/issue-75930-derive-cfg.rs:46:16: 46:17 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:49:15: 49:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:46:15: 46:18 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:49:2: 49:19 (#0), + span: $DIR/issue-75930-derive-cfg.rs:46:2: 46:19 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:51:1: 51:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:50:1: 50:2 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "allow", - span: $DIR/issue-75930-derive-cfg.rs:51:24: 51:29 (#0), + span: $DIR/issue-75930-derive-cfg.rs:50:24: 50:29 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "dead_code", - span: $DIR/issue-75930-derive-cfg.rs:51:30: 51:39 (#0), + span: $DIR/issue-75930-derive-cfg.rs:50:30: 50:39 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:51:29: 51:40 (#0), + span: $DIR/issue-75930-derive-cfg.rs:50:29: 50:40 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:51:1: 51:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:50:1: 50:2 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:53:1: 53:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:52:1: 52:2 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "derive", - span: 
$DIR/issue-75930-derive-cfg.rs:53:3: 53:9 (#0), + span: $DIR/issue-75930-derive-cfg.rs:52:3: 52:9 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "Print", - span: $DIR/issue-75930-derive-cfg.rs:53:10: 53:15 (#0), + span: $DIR/issue-75930-derive-cfg.rs:52:10: 52:15 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:53:9: 53:16 (#0), + span: $DIR/issue-75930-derive-cfg.rs:52:9: 52:16 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:53:2: 53:17 (#0), + span: $DIR/issue-75930-derive-cfg.rs:52:2: 52:17 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:54:1: 54:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:53:1: 53:2 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_helper", - span: $DIR/issue-75930-derive-cfg.rs:54:3: 54:15 (#0), + span: $DIR/issue-75930-derive-cfg.rs:53:3: 53:15 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "b", - span: $DIR/issue-75930-derive-cfg.rs:54:16: 54:17 (#0), + span: $DIR/issue-75930-derive-cfg.rs:53:16: 53:17 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:54:15: 54:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:53:15: 53:18 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:54:2: 54:19 (#0), + span: $DIR/issue-75930-derive-cfg.rs:53:2: 53:19 (#0), }, Ident { ident: "struct", - span: $DIR/issue-75930-derive-cfg.rs:55:1: 55:7 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:1: 54:7 (#0), }, Ident { ident: "Foo", - span: $DIR/issue-75930-derive-cfg.rs:55:8: 55:11 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:8: 54:11 (#0), }, Punct { ch: '<', spacing: Joint, - span: $DIR/issue-75930-derive-cfg.rs:55:11: 55:12 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:11: 54:12 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:55:12: 55:13 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:12: 54:13 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:55:14: 55:17 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:14: 54:17 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:55:18: 55:23 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:18: 54:23 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:55:17: 55:24 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:17: 54:24 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:55:13: 55:25 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:13: 54:25 (#0), }, Ident { ident: "A", - span: $DIR/issue-75930-derive-cfg.rs:55:26: 55:27 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:26: 54:27 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:55:27: 55:28 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:27: 54:28 (#0), }, Ident { ident: "B", - span: $DIR/issue-75930-derive-cfg.rs:55:29: 55:30 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:29: 54:30 (#0), }, Punct { ch: '>', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:55:30: 55:31 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:30: 54:31 (#0), }, Group { delimiter: Brace, @@ -233,128 +233,128 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:56:5: 56:6 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:5: 55:6 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:56:7: 56:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:7: 55:10 (#0), }, Group 
{ delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:56:11: 56:16 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:11: 55:16 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:56:10: 56:17 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:10: 55:17 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:56:6: 56:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:6: 55:18 (#0), }, Ident { ident: "first", - span: $DIR/issue-75930-derive-cfg.rs:56:19: 56:24 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:19: 55:24 (#0), }, Punct { ch: ':', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:56:24: 56:25 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:24: 55:25 (#0), }, Ident { ident: "String", - span: $DIR/issue-75930-derive-cfg.rs:56:26: 56:32 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:26: 55:32 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:56:32: 56:33 (#0), + span: $DIR/issue-75930-derive-cfg.rs:55:32: 55:33 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:57:5: 57:6 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:5: 56:6 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg_attr", - span: $DIR/issue-75930-derive-cfg.rs:57:7: 57:15 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:7: 56:15 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:57:16: 57:21 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:16: 56:21 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:57:21: 57:22 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:21: 56:22 (#0), }, Ident { ident: "deny", - span: $DIR/issue-75930-derive-cfg.rs:57:23: 57:27 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:23: 56:27 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "warnings", - span: $DIR/issue-75930-derive-cfg.rs:57:28: 57:36 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:28: 56:36 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:57:27: 57:37 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:27: 56:37 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:57:15: 57:38 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:15: 56:38 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:57:6: 57:39 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:6: 56:39 (#0), }, Ident { ident: "second", - span: $DIR/issue-75930-derive-cfg.rs:57:40: 57:46 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:40: 56:46 (#0), }, Punct { ch: ':', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:57:46: 57:47 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:46: 56:47 (#0), }, Ident { ident: "bool", - span: $DIR/issue-75930-derive-cfg.rs:57:48: 57:52 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:48: 56:52 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:57:52: 57:53 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:52: 56:53 (#0), }, Ident { ident: "third", - span: $DIR/issue-75930-derive-cfg.rs:58:5: 58:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:57:5: 57:10 (#0), }, Punct { ch: ':', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:58:10: 58:11 (#0), + span: $DIR/issue-75930-derive-cfg.rs:57:10: 57:11 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "u8", - span: $DIR/issue-75930-derive-cfg.rs:58:13: 58:15 (#0), + span: $DIR/issue-75930-derive-cfg.rs:57:13: 57:15 (#0), }, Punct { ch: ';', spacing: Alone, - span: 
$DIR/issue-75930-derive-cfg.rs:58:15: 58:16 (#0), + span: $DIR/issue-75930-derive-cfg.rs:57:15: 57:16 (#0), }, Group { delimiter: Brace, @@ -362,145 +362,145 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:59:9: 59:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:58:9: 58:10 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:59:11: 59:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:58:11: 58:14 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:59:15: 59:20 (#0), + span: $DIR/issue-75930-derive-cfg.rs:58:15: 58:20 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:59:14: 59:21 (#0), + span: $DIR/issue-75930-derive-cfg.rs:58:14: 58:21 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:59:10: 59:22 (#0), + span: $DIR/issue-75930-derive-cfg.rs:58:10: 58:22 (#0), }, Ident { ident: "struct", - span: $DIR/issue-75930-derive-cfg.rs:59:23: 59:29 (#0), + span: $DIR/issue-75930-derive-cfg.rs:58:23: 58:29 (#0), }, Ident { ident: "Bar", - span: $DIR/issue-75930-derive-cfg.rs:59:30: 59:33 (#0), + span: $DIR/issue-75930-derive-cfg.rs:58:30: 58:33 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:59:33: 59:34 (#0), + span: $DIR/issue-75930-derive-cfg.rs:58:33: 58:34 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:60:9: 60:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:59:9: 59:10 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:60:11: 60:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:59:11: 59:14 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/issue-75930-derive-cfg.rs:60:15: 60:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:59:15: 59:18 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:60:19: 60:24 (#0), + span: $DIR/issue-75930-derive-cfg.rs:59:19: 59:24 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:60:18: 60:25 (#0), + span: $DIR/issue-75930-derive-cfg.rs:59:18: 59:25 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:60:14: 60:26 (#0), + span: $DIR/issue-75930-derive-cfg.rs:59:14: 59:26 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:60:10: 60:27 (#0), + span: $DIR/issue-75930-derive-cfg.rs:59:10: 59:27 (#0), }, Ident { ident: "struct", - span: $DIR/issue-75930-derive-cfg.rs:60:28: 60:34 (#0), + span: $DIR/issue-75930-derive-cfg.rs:59:28: 59:34 (#0), }, Ident { ident: "Inner", - span: $DIR/issue-75930-derive-cfg.rs:60:35: 60:40 (#0), + span: $DIR/issue-75930-derive-cfg.rs:59:35: 59:40 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:60:40: 60:41 (#0), + span: $DIR/issue-75930-derive-cfg.rs:59:40: 59:41 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:61:9: 61:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:60:9: 60:10 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:61:11: 61:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:60:11: 60:14 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:61:15: 61:20 (#0), + span: $DIR/issue-75930-derive-cfg.rs:60:15: 60:20 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:61:14: 61:21 
(#0), + span: $DIR/issue-75930-derive-cfg.rs:60:14: 60:21 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:61:10: 61:22 (#0), + span: $DIR/issue-75930-derive-cfg.rs:60:10: 60:22 (#0), }, Ident { ident: "let", - span: $DIR/issue-75930-derive-cfg.rs:61:23: 61:26 (#0), + span: $DIR/issue-75930-derive-cfg.rs:60:23: 60:26 (#0), }, Ident { ident: "a", - span: $DIR/issue-75930-derive-cfg.rs:61:27: 61:28 (#0), + span: $DIR/issue-75930-derive-cfg.rs:60:27: 60:28 (#0), }, Punct { ch: '=', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:61:29: 61:30 (#0), + span: $DIR/issue-75930-derive-cfg.rs:60:29: 60:30 (#0), }, Literal { kind: Integer, symbol: "25", suffix: None, - span: $DIR/issue-75930-derive-cfg.rs:61:31: 61:33 (#0), + span: $DIR/issue-75930-derive-cfg.rs:60:31: 60:33 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:61:33: 61:34 (#0), + span: $DIR/issue-75930-derive-cfg.rs:60:33: 60:34 (#0), }, Ident { ident: "match", - span: $DIR/issue-75930-derive-cfg.rs:62:9: 62:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:61:9: 61:14 (#0), }, Ident { ident: "true", - span: $DIR/issue-75930-derive-cfg.rs:62:15: 62:19 (#0), + span: $DIR/issue-75930-derive-cfg.rs:61:15: 61:19 (#0), }, Group { delimiter: Brace, @@ -508,194 +508,194 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:63:13: 63:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:62:13: 62:14 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:63:15: 63:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:62:15: 62:18 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:63:19: 63:24 (#0), + span: $DIR/issue-75930-derive-cfg.rs:62:19: 62:24 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:63:18: 63:25 (#0), + span: $DIR/issue-75930-derive-cfg.rs:62:18: 62:25 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:63:14: 63:26 (#0), + span: $DIR/issue-75930-derive-cfg.rs:62:14: 62:26 (#0), }, Ident { ident: "true", - span: $DIR/issue-75930-derive-cfg.rs:63:27: 63:31 (#0), + span: $DIR/issue-75930-derive-cfg.rs:62:27: 62:31 (#0), }, Punct { ch: '=', spacing: Joint, - span: $DIR/issue-75930-derive-cfg.rs:63:32: 63:33 (#0), + span: $DIR/issue-75930-derive-cfg.rs:62:32: 62:33 (#0), }, Punct { ch: '>', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:63:33: 63:34 (#0), + span: $DIR/issue-75930-derive-cfg.rs:62:33: 62:34 (#0), }, Group { delimiter: Brace, stream: TokenStream [], - span: $DIR/issue-75930-derive-cfg.rs:63:35: 63:37 (#0), + span: $DIR/issue-75930-derive-cfg.rs:62:35: 62:37 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:63:37: 63:38 (#0), + span: $DIR/issue-75930-derive-cfg.rs:62:37: 62:38 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:64:13: 64:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:13: 63:14 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg_attr", - span: $DIR/issue-75930-derive-cfg.rs:64:15: 64:23 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:15: 63:23 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/issue-75930-derive-cfg.rs:64:24: 64:27 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:24: 63:27 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:64:28: 64:33 (#0), + 
span: $DIR/issue-75930-derive-cfg.rs:63:28: 63:33 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:64:27: 64:34 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:27: 63:34 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:64:34: 64:35 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:34: 63:35 (#0), }, Ident { ident: "allow", - span: $DIR/issue-75930-derive-cfg.rs:64:36: 64:41 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:36: 63:41 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "warnings", - span: $DIR/issue-75930-derive-cfg.rs:64:42: 64:50 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:42: 63:50 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:64:41: 64:51 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:41: 63:51 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:64:23: 64:52 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:23: 63:52 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:64:14: 64:53 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:14: 63:53 (#0), }, Ident { ident: "false", - span: $DIR/issue-75930-derive-cfg.rs:64:54: 64:59 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:54: 63:59 (#0), }, Punct { ch: '=', spacing: Joint, - span: $DIR/issue-75930-derive-cfg.rs:64:60: 64:61 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:60: 63:61 (#0), }, Punct { ch: '>', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:64:61: 64:62 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:61: 63:62 (#0), }, Group { delimiter: Brace, stream: TokenStream [], - span: $DIR/issue-75930-derive-cfg.rs:64:63: 64:65 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:63: 63:65 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:64:65: 64:66 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:65: 63:66 (#0), }, Ident { ident: "_", - span: $DIR/issue-75930-derive-cfg.rs:65:13: 65:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:64:13: 64:14 (#0), }, Punct { ch: '=', spacing: Joint, - span: $DIR/issue-75930-derive-cfg.rs:65:15: 65:16 (#0), + span: $DIR/issue-75930-derive-cfg.rs:64:15: 64:16 (#0), }, Punct { ch: '>', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:65:16: 65:17 (#0), + span: $DIR/issue-75930-derive-cfg.rs:64:16: 64:17 (#0), }, Group { delimiter: Brace, stream: TokenStream [], - span: $DIR/issue-75930-derive-cfg.rs:65:18: 65:20 (#0), + span: $DIR/issue-75930-derive-cfg.rs:64:18: 64:20 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:62:20: 66:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:61:20: 65:10 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:66:10: 66:11 (#0), + span: $DIR/issue-75930-derive-cfg.rs:65:10: 65:11 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:68:9: 68:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:67:9: 67:10 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_helper", - span: $DIR/issue-75930-derive-cfg.rs:68:11: 68:23 (#0), + span: $DIR/issue-75930-derive-cfg.rs:67:11: 67:23 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "should_be_removed", - span: $DIR/issue-75930-derive-cfg.rs:68:24: 68:41 (#0), + span: $DIR/issue-75930-derive-cfg.rs:67:24: 67:41 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:68:23: 68:42 (#0), + span: $DIR/issue-75930-derive-cfg.rs:67:23: 67:42 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:68:10: 68:43 (#0), + span: $DIR/issue-75930-derive-cfg.rs:67:10: 67:43 (#0), }, Ident { ident: "fn", - span: 
$DIR/issue-75930-derive-cfg.rs:69:9: 69:11 (#0), + span: $DIR/issue-75930-derive-cfg.rs:68:9: 68:11 (#0), }, Ident { ident: "removed_fn", - span: $DIR/issue-75930-derive-cfg.rs:69:12: 69:22 (#0), + span: $DIR/issue-75930-derive-cfg.rs:68:12: 68:22 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [], - span: $DIR/issue-75930-derive-cfg.rs:69:22: 69:24 (#0), + span: $DIR/issue-75930-derive-cfg.rs:68:22: 68:24 (#0), }, Group { delimiter: Brace, @@ -703,108 +703,108 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Joint, - span: $DIR/issue-75930-derive-cfg.rs:70:13: 70:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:69:13: 69:14 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:70:14: 70:15 (#0), + span: $DIR/issue-75930-derive-cfg.rs:69:14: 69:15 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:70:16: 70:19 (#0), + span: $DIR/issue-75930-derive-cfg.rs:69:16: 69:19 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:70:20: 70:25 (#0), + span: $DIR/issue-75930-derive-cfg.rs:69:20: 69:25 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:70:19: 70:26 (#0), + span: $DIR/issue-75930-derive-cfg.rs:69:19: 69:26 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:70:15: 70:27 (#0), + span: $DIR/issue-75930-derive-cfg.rs:69:15: 69:27 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:69:25: 71:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:68:25: 70:10 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:73:9: 73:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:9: 72:10 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_helper", - span: $DIR/issue-75930-derive-cfg.rs:73:11: 73:23 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:11: 72:23 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "c", - span: $DIR/issue-75930-derive-cfg.rs:73:24: 73:25 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:24: 72:25 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:73:23: 73:26 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:23: 72:26 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:73:10: 73:27 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:10: 72:27 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:73:28: 73:29 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:28: 72:29 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:73:30: 73:33 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:30: 72:33 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/issue-75930-derive-cfg.rs:73:34: 73:37 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:34: 72:37 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:73:38: 73:43 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:38: 72:43 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:73:37: 73:44 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:37: 72:44 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:73:33: 73:45 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:33: 72:45 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:73:29: 73:46 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:29: 72:46 (#0), }, Ident { ident: "fn", - span: $DIR/issue-75930-derive-cfg.rs:73:47: 73:49 (#0), + span: 
$DIR/issue-75930-derive-cfg.rs:72:47: 72:49 (#0), }, Ident { ident: "kept_fn", - span: $DIR/issue-75930-derive-cfg.rs:73:50: 73:57 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:50: 72:57 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [], - span: $DIR/issue-75930-derive-cfg.rs:73:57: 73:59 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:57: 72:59 (#0), }, Group { delimiter: Brace, @@ -812,82 +812,82 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Joint, - span: $DIR/issue-75930-derive-cfg.rs:74:13: 74:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:73:13: 73:14 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:74:14: 74:15 (#0), + span: $DIR/issue-75930-derive-cfg.rs:73:14: 73:15 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:74:16: 74:19 (#0), + span: $DIR/issue-75930-derive-cfg.rs:73:16: 73:19 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/issue-75930-derive-cfg.rs:74:20: 74:23 (#0), + span: $DIR/issue-75930-derive-cfg.rs:73:20: 73:23 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:74:24: 74:29 (#0), + span: $DIR/issue-75930-derive-cfg.rs:73:24: 73:29 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:74:23: 74:30 (#0), + span: $DIR/issue-75930-derive-cfg.rs:73:23: 73:30 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:74:19: 74:31 (#0), + span: $DIR/issue-75930-derive-cfg.rs:73:19: 73:31 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:74:15: 74:32 (#0), + span: $DIR/issue-75930-derive-cfg.rs:73:15: 73:32 (#0), }, Ident { ident: "let", - span: $DIR/issue-75930-derive-cfg.rs:75:13: 75:16 (#0), + span: $DIR/issue-75930-derive-cfg.rs:74:13: 74:16 (#0), }, Ident { ident: "my_val", - span: $DIR/issue-75930-derive-cfg.rs:75:17: 75:23 (#0), + span: $DIR/issue-75930-derive-cfg.rs:74:17: 74:23 (#0), }, Punct { ch: '=', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:75:24: 75:25 (#0), + span: $DIR/issue-75930-derive-cfg.rs:74:24: 74:25 (#0), }, Ident { ident: "true", - span: $DIR/issue-75930-derive-cfg.rs:75:26: 75:30 (#0), + span: $DIR/issue-75930-derive-cfg.rs:74:26: 74:30 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:75:30: 75:31 (#0), + span: $DIR/issue-75930-derive-cfg.rs:74:30: 74:31 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:73:60: 76:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:60: 75:10 (#0), }, Ident { ident: "enum", - span: $DIR/issue-75930-derive-cfg.rs:78:9: 78:13 (#0), + span: $DIR/issue-75930-derive-cfg.rs:77:9: 77:13 (#0), }, Ident { ident: "TupleEnum", - span: $DIR/issue-75930-derive-cfg.rs:78:14: 78:23 (#0), + span: $DIR/issue-75930-derive-cfg.rs:77:14: 77:23 (#0), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "Foo", - span: $DIR/issue-75930-derive-cfg.rs:79:13: 79:16 (#0), + span: $DIR/issue-75930-derive-cfg.rs:78:13: 78:16 (#0), }, Group { delimiter: Parenthesis, @@ -895,166 +895,166 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:80:17: 80:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:79:17: 79:18 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:80:19: 80:22 (#0), + span: $DIR/issue-75930-derive-cfg.rs:79:19: 79:22 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: 
"FALSE", - span: $DIR/issue-75930-derive-cfg.rs:80:23: 80:28 (#0), + span: $DIR/issue-75930-derive-cfg.rs:79:23: 79:28 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:80:22: 80:29 (#0), + span: $DIR/issue-75930-derive-cfg.rs:79:22: 79:29 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:80:18: 80:30 (#0), + span: $DIR/issue-75930-derive-cfg.rs:79:18: 79:30 (#0), }, Ident { ident: "u8", - span: $DIR/issue-75930-derive-cfg.rs:80:31: 80:33 (#0), + span: $DIR/issue-75930-derive-cfg.rs:79:31: 79:33 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:80:33: 80:34 (#0), + span: $DIR/issue-75930-derive-cfg.rs:79:33: 79:34 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:81:17: 81:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:80:17: 80:18 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:81:19: 81:22 (#0), + span: $DIR/issue-75930-derive-cfg.rs:80:19: 80:22 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:81:23: 81:28 (#0), + span: $DIR/issue-75930-derive-cfg.rs:80:23: 80:28 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:81:22: 81:29 (#0), + span: $DIR/issue-75930-derive-cfg.rs:80:22: 80:29 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:81:18: 81:30 (#0), + span: $DIR/issue-75930-derive-cfg.rs:80:18: 80:30 (#0), }, Ident { ident: "bool", - span: $DIR/issue-75930-derive-cfg.rs:81:31: 81:35 (#0), + span: $DIR/issue-75930-derive-cfg.rs:80:31: 80:35 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:81:35: 81:36 (#0), + span: $DIR/issue-75930-derive-cfg.rs:80:35: 80:36 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:82:17: 82:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:81:17: 81:18 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:82:19: 82:22 (#0), + span: $DIR/issue-75930-derive-cfg.rs:81:19: 81:22 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/issue-75930-derive-cfg.rs:82:23: 82:26 (#0), + span: $DIR/issue-75930-derive-cfg.rs:81:23: 81:26 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:82:27: 82:32 (#0), + span: $DIR/issue-75930-derive-cfg.rs:81:27: 81:32 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:82:26: 82:33 (#0), + span: $DIR/issue-75930-derive-cfg.rs:81:26: 81:33 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:82:22: 82:34 (#0), + span: $DIR/issue-75930-derive-cfg.rs:81:22: 81:34 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:82:18: 82:35 (#0), + span: $DIR/issue-75930-derive-cfg.rs:81:18: 81:35 (#0), }, Ident { ident: "i32", - span: $DIR/issue-75930-derive-cfg.rs:82:36: 82:39 (#0), + span: $DIR/issue-75930-derive-cfg.rs:81:36: 81:39 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:82:39: 82:40 (#0), + span: $DIR/issue-75930-derive-cfg.rs:81:39: 81:40 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:83:17: 83:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:82:17: 82:18 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:83:19: 83:22 (#0), + span: $DIR/issue-75930-derive-cfg.rs:82:19: 82:22 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: 
"FALSE", - span: $DIR/issue-75930-derive-cfg.rs:83:23: 83:28 (#0), + span: $DIR/issue-75930-derive-cfg.rs:82:23: 82:28 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:83:22: 83:29 (#0), + span: $DIR/issue-75930-derive-cfg.rs:82:22: 82:29 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:83:18: 83:30 (#0), + span: $DIR/issue-75930-derive-cfg.rs:82:18: 82:30 (#0), }, Ident { ident: "String", - span: $DIR/issue-75930-derive-cfg.rs:83:31: 83:37 (#0), + span: $DIR/issue-75930-derive-cfg.rs:82:31: 82:37 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:83:37: 83:38 (#0), + span: $DIR/issue-75930-derive-cfg.rs:82:37: 82:38 (#0), }, Ident { ident: "u8", - span: $DIR/issue-75930-derive-cfg.rs:83:39: 83:41 (#0), + span: $DIR/issue-75930-derive-cfg.rs:82:39: 82:41 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:79:16: 84:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:78:16: 83:14 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:78:24: 85:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:77:24: 84:10 (#0), }, Ident { ident: "struct", - span: $DIR/issue-75930-derive-cfg.rs:87:9: 87:15 (#0), + span: $DIR/issue-75930-derive-cfg.rs:86:9: 86:15 (#0), }, Ident { ident: "TupleStruct", - span: $DIR/issue-75930-derive-cfg.rs:87:16: 87:27 (#0), + span: $DIR/issue-75930-derive-cfg.rs:86:16: 86:27 (#0), }, Group { delimiter: Parenthesis, @@ -1062,139 +1062,139 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:88:13: 88:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:87:13: 87:14 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:88:15: 88:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:87:15: 87:18 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:88:19: 88:24 (#0), + span: $DIR/issue-75930-derive-cfg.rs:87:19: 87:24 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:88:18: 88:25 (#0), + span: $DIR/issue-75930-derive-cfg.rs:87:18: 87:25 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:88:14: 88:26 (#0), + span: $DIR/issue-75930-derive-cfg.rs:87:14: 87:26 (#0), }, Ident { ident: "String", - span: $DIR/issue-75930-derive-cfg.rs:88:27: 88:33 (#0), + span: $DIR/issue-75930-derive-cfg.rs:87:27: 87:33 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:88:33: 88:34 (#0), + span: $DIR/issue-75930-derive-cfg.rs:87:33: 87:34 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:89:13: 89:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:88:13: 88:14 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:89:15: 89:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:88:15: 88:18 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/issue-75930-derive-cfg.rs:89:19: 89:22 (#0), + span: $DIR/issue-75930-derive-cfg.rs:88:19: 88:22 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:89:23: 89:28 (#0), + span: $DIR/issue-75930-derive-cfg.rs:88:23: 88:28 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:89:22: 89:29 (#0), + span: $DIR/issue-75930-derive-cfg.rs:88:22: 88:29 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:89:18: 89:30 (#0), + span: $DIR/issue-75930-derive-cfg.rs:88:18: 88:30 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:89:14: 89:31 
(#0), + span: $DIR/issue-75930-derive-cfg.rs:88:14: 88:31 (#0), }, Ident { ident: "i32", - span: $DIR/issue-75930-derive-cfg.rs:89:32: 89:35 (#0), + span: $DIR/issue-75930-derive-cfg.rs:88:32: 88:35 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:89:35: 89:36 (#0), + span: $DIR/issue-75930-derive-cfg.rs:88:35: 88:36 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:90:13: 90:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:89:13: 89:14 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:90:15: 90:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:89:15: 89:18 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:90:19: 90:24 (#0), + span: $DIR/issue-75930-derive-cfg.rs:89:19: 89:24 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:90:18: 90:25 (#0), + span: $DIR/issue-75930-derive-cfg.rs:89:18: 89:25 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:90:14: 90:26 (#0), + span: $DIR/issue-75930-derive-cfg.rs:89:14: 89:26 (#0), }, Ident { ident: "bool", - span: $DIR/issue-75930-derive-cfg.rs:90:27: 90:31 (#0), + span: $DIR/issue-75930-derive-cfg.rs:89:27: 89:31 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:90:31: 90:32 (#0), + span: $DIR/issue-75930-derive-cfg.rs:89:31: 89:32 (#0), }, Ident { ident: "u8", - span: $DIR/issue-75930-derive-cfg.rs:91:13: 91:15 (#0), + span: $DIR/issue-75930-derive-cfg.rs:90:13: 90:15 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:87:27: 92:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:86:27: 91:10 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:92:10: 92:11 (#0), + span: $DIR/issue-75930-derive-cfg.rs:91:10: 91:11 (#0), }, Ident { ident: "fn", - span: $DIR/issue-75930-derive-cfg.rs:94:9: 94:11 (#0), + span: $DIR/issue-75930-derive-cfg.rs:93:9: 93:11 (#0), }, Ident { ident: "plain_removed_fn", - span: $DIR/issue-75930-derive-cfg.rs:94:12: 94:28 (#0), + span: $DIR/issue-75930-derive-cfg.rs:93:12: 93:28 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [], - span: $DIR/issue-75930-derive-cfg.rs:94:28: 94:30 (#0), + span: $DIR/issue-75930-derive-cfg.rs:93:28: 93:30 (#0), }, Group { delimiter: Brace, @@ -1202,122 +1202,122 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Joint, - span: $DIR/issue-75930-derive-cfg.rs:95:13: 95:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:94:13: 94:14 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:95:14: 95:15 (#0), + span: $DIR/issue-75930-derive-cfg.rs:94:14: 94:15 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg_attr", - span: $DIR/issue-75930-derive-cfg.rs:95:16: 95:24 (#0), + span: $DIR/issue-75930-derive-cfg.rs:94:16: 94:24 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/issue-75930-derive-cfg.rs:95:25: 95:28 (#0), + span: $DIR/issue-75930-derive-cfg.rs:94:25: 94:28 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:95:29: 95:34 (#0), + span: $DIR/issue-75930-derive-cfg.rs:94:29: 94:34 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:95:28: 95:35 (#0), + span: $DIR/issue-75930-derive-cfg.rs:94:28: 94:35 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:95:35: 95:36 (#0), + span: 
$DIR/issue-75930-derive-cfg.rs:94:35: 94:36 (#0), }, Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:95:37: 95:40 (#0), + span: $DIR/issue-75930-derive-cfg.rs:94:37: 94:40 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:95:41: 95:46 (#0), + span: $DIR/issue-75930-derive-cfg.rs:94:41: 94:46 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:95:40: 95:47 (#0), + span: $DIR/issue-75930-derive-cfg.rs:94:40: 94:47 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:95:24: 95:48 (#0), + span: $DIR/issue-75930-derive-cfg.rs:94:24: 94:48 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:95:15: 95:49 (#0), + span: $DIR/issue-75930-derive-cfg.rs:94:15: 94:49 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:94:31: 96:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:93:31: 95:10 (#0), }, Literal { kind: Integer, symbol: "0", suffix: None, - span: $DIR/issue-75930-derive-cfg.rs:98:9: 98:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:97:9: 97:10 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:58:17: 99:6 (#0), + span: $DIR/issue-75930-derive-cfg.rs:57:17: 98:6 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:58:12: 99:7 (#0), + span: $DIR/issue-75930-derive-cfg.rs:57:12: 98:7 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:99:7: 99:8 (#0), + span: $DIR/issue-75930-derive-cfg.rs:98:7: 98:8 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:100:5: 100:6 (#0), + span: $DIR/issue-75930-derive-cfg.rs:99:5: 99:6 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_helper", - span: $DIR/issue-75930-derive-cfg.rs:100:7: 100:19 (#0), + span: $DIR/issue-75930-derive-cfg.rs:99:7: 99:19 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "d", - span: $DIR/issue-75930-derive-cfg.rs:100:20: 100:21 (#0), + span: $DIR/issue-75930-derive-cfg.rs:99:20: 99:21 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:100:19: 100:22 (#0), + span: $DIR/issue-75930-derive-cfg.rs:99:19: 99:22 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:100:6: 100:23 (#0), + span: $DIR/issue-75930-derive-cfg.rs:99:6: 99:23 (#0), }, Ident { ident: "fourth", - span: $DIR/issue-75930-derive-cfg.rs:101:5: 101:11 (#0), + span: $DIR/issue-75930-derive-cfg.rs:100:5: 100:11 (#0), }, Punct { ch: ':', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:101:11: 101:12 (#0), + span: $DIR/issue-75930-derive-cfg.rs:100:11: 100:12 (#0), }, Ident { ident: "B", - span: $DIR/issue-75930-derive-cfg.rs:101:13: 101:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:100:13: 100:14 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:55:32: 102:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:32: 101:2 (#0), }, ] PRINT-DERIVE INPUT (DISPLAY): #[print_helper(a)] #[allow(dead_code)] #[print_helper(b)] struct Foo @@ -1350,141 +1350,141 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:49:1: 49:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:46:1: 46:2 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_helper", - span: $DIR/issue-75930-derive-cfg.rs:49:3: 49:15 (#0), + span: $DIR/issue-75930-derive-cfg.rs:46:3: 46:15 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "a", - span: $DIR/issue-75930-derive-cfg.rs:49:16: 49:17 (#0), + span: $DIR/issue-75930-derive-cfg.rs:46:16: 46:17 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:49:15: 
49:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:46:15: 46:18 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:49:2: 49:19 (#0), + span: $DIR/issue-75930-derive-cfg.rs:46:2: 46:19 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:51:1: 51:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:50:1: 50:2 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "allow", - span: $DIR/issue-75930-derive-cfg.rs:51:24: 51:29 (#0), + span: $DIR/issue-75930-derive-cfg.rs:50:24: 50:29 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "dead_code", - span: $DIR/issue-75930-derive-cfg.rs:51:30: 51:39 (#0), + span: $DIR/issue-75930-derive-cfg.rs:50:30: 50:39 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:51:29: 51:40 (#0), + span: $DIR/issue-75930-derive-cfg.rs:50:29: 50:40 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:51:1: 51:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:50:1: 50:2 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:54:1: 54:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:53:1: 53:2 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_helper", - span: $DIR/issue-75930-derive-cfg.rs:54:3: 54:15 (#0), + span: $DIR/issue-75930-derive-cfg.rs:53:3: 53:15 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "b", - span: $DIR/issue-75930-derive-cfg.rs:54:16: 54:17 (#0), + span: $DIR/issue-75930-derive-cfg.rs:53:16: 53:17 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:54:15: 54:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:53:15: 53:18 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:54:2: 54:19 (#0), + span: $DIR/issue-75930-derive-cfg.rs:53:2: 53:19 (#0), }, Ident { ident: "struct", - span: $DIR/issue-75930-derive-cfg.rs:55:1: 55:7 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:1: 54:7 (#0), }, Ident { ident: "Foo", - span: $DIR/issue-75930-derive-cfg.rs:55:8: 55:11 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:8: 54:11 (#0), }, Punct { ch: '<', spacing: Joint, - span: $DIR/issue-75930-derive-cfg.rs:55:11: 55:12 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:11: 54:12 (#0), }, Ident { ident: "B", - span: $DIR/issue-75930-derive-cfg.rs:55:29: 55:30 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:29: 54:30 (#0), }, Punct { ch: '>', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:55:30: 55:31 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:30: 54:31 (#0), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "second", - span: $DIR/issue-75930-derive-cfg.rs:57:40: 57:46 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:40: 56:46 (#0), }, Punct { ch: ':', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:57:46: 57:47 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:46: 56:47 (#0), }, Ident { ident: "bool", - span: $DIR/issue-75930-derive-cfg.rs:57:48: 57:52 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:48: 56:52 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:57:52: 57:53 (#0), + span: $DIR/issue-75930-derive-cfg.rs:56:52: 56:53 (#0), }, Ident { ident: "third", - span: $DIR/issue-75930-derive-cfg.rs:58:5: 58:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:57:5: 57:10 (#0), }, Punct { ch: ':', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:58:10: 58:11 (#0), + span: $DIR/issue-75930-derive-cfg.rs:57:10: 57:11 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "u8", - span: $DIR/issue-75930-derive-cfg.rs:58:13: 58:15 (#0), + span: 
$DIR/issue-75930-derive-cfg.rs:57:13: 57:15 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:58:15: 58:16 (#0), + span: $DIR/issue-75930-derive-cfg.rs:57:15: 57:16 (#0), }, Group { delimiter: Brace, @@ -1492,58 +1492,58 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:60:9: 60:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:59:9: 59:10 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:60:11: 60:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:59:11: 59:14 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/issue-75930-derive-cfg.rs:60:15: 60:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:59:15: 59:18 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:60:19: 60:24 (#0), + span: $DIR/issue-75930-derive-cfg.rs:59:19: 59:24 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:60:18: 60:25 (#0), + span: $DIR/issue-75930-derive-cfg.rs:59:18: 59:25 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:60:14: 60:26 (#0), + span: $DIR/issue-75930-derive-cfg.rs:59:14: 59:26 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:60:10: 60:27 (#0), + span: $DIR/issue-75930-derive-cfg.rs:59:10: 59:27 (#0), }, Ident { ident: "struct", - span: $DIR/issue-75930-derive-cfg.rs:60:28: 60:34 (#0), + span: $DIR/issue-75930-derive-cfg.rs:59:28: 59:34 (#0), }, Ident { ident: "Inner", - span: $DIR/issue-75930-derive-cfg.rs:60:35: 60:40 (#0), + span: $DIR/issue-75930-derive-cfg.rs:59:35: 59:40 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:60:40: 60:41 (#0), + span: $DIR/issue-75930-derive-cfg.rs:59:40: 59:41 (#0), }, Ident { ident: "match", - span: $DIR/issue-75930-derive-cfg.rs:62:9: 62:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:61:9: 61:14 (#0), }, Ident { ident: "true", - span: $DIR/issue-75930-derive-cfg.rs:62:15: 62:19 (#0), + span: $DIR/issue-75930-derive-cfg.rs:61:15: 61:19 (#0), }, Group { delimiter: Brace, @@ -1551,151 +1551,151 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:64:13: 64:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:13: 63:14 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "allow", - span: $DIR/issue-75930-derive-cfg.rs:64:36: 64:41 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:36: 63:41 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "warnings", - span: $DIR/issue-75930-derive-cfg.rs:64:42: 64:50 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:42: 63:50 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:64:41: 64:51 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:41: 63:51 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:64:13: 64:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:13: 63:14 (#0), }, Ident { ident: "false", - span: $DIR/issue-75930-derive-cfg.rs:64:54: 64:59 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:54: 63:59 (#0), }, Punct { ch: '=', spacing: Joint, - span: $DIR/issue-75930-derive-cfg.rs:64:60: 64:61 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:60: 63:61 (#0), }, Punct { ch: '>', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:64:61: 64:62 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:61: 63:62 (#0), }, Group { delimiter: Brace, stream: TokenStream [], - span: $DIR/issue-75930-derive-cfg.rs:64:63: 64:65 
(#0), + span: $DIR/issue-75930-derive-cfg.rs:63:63: 63:65 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:64:65: 64:66 (#0), + span: $DIR/issue-75930-derive-cfg.rs:63:65: 63:66 (#0), }, Ident { ident: "_", - span: $DIR/issue-75930-derive-cfg.rs:65:13: 65:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:64:13: 64:14 (#0), }, Punct { ch: '=', spacing: Joint, - span: $DIR/issue-75930-derive-cfg.rs:65:15: 65:16 (#0), + span: $DIR/issue-75930-derive-cfg.rs:64:15: 64:16 (#0), }, Punct { ch: '>', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:65:16: 65:17 (#0), + span: $DIR/issue-75930-derive-cfg.rs:64:16: 64:17 (#0), }, Group { delimiter: Brace, stream: TokenStream [], - span: $DIR/issue-75930-derive-cfg.rs:65:18: 65:20 (#0), + span: $DIR/issue-75930-derive-cfg.rs:64:18: 64:20 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:62:20: 66:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:61:20: 65:10 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:66:10: 66:11 (#0), + span: $DIR/issue-75930-derive-cfg.rs:65:10: 65:11 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:73:9: 73:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:9: 72:10 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_helper", - span: $DIR/issue-75930-derive-cfg.rs:73:11: 73:23 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:11: 72:23 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "c", - span: $DIR/issue-75930-derive-cfg.rs:73:24: 73:25 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:24: 72:25 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:73:23: 73:26 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:23: 72:26 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:73:10: 73:27 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:10: 72:27 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:73:28: 73:29 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:28: 72:29 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:73:30: 73:33 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:30: 72:33 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/issue-75930-derive-cfg.rs:73:34: 73:37 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:34: 72:37 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:73:38: 73:43 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:38: 72:43 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:73:37: 73:44 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:37: 72:44 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:73:33: 73:45 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:33: 72:45 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:73:29: 73:46 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:29: 72:46 (#0), }, Ident { ident: "fn", - span: $DIR/issue-75930-derive-cfg.rs:73:47: 73:49 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:47: 72:49 (#0), }, Ident { ident: "kept_fn", - span: $DIR/issue-75930-derive-cfg.rs:73:50: 73:57 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:50: 72:57 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [], - span: $DIR/issue-75930-derive-cfg.rs:73:57: 73:59 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:57: 72:59 (#0), }, Group { delimiter: Brace, @@ -1703,82 +1703,82 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ Punct { 
ch: '#', spacing: Joint, - span: $DIR/issue-75930-derive-cfg.rs:74:13: 74:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:73:13: 73:14 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:74:14: 74:15 (#0), + span: $DIR/issue-75930-derive-cfg.rs:73:14: 73:15 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:74:16: 74:19 (#0), + span: $DIR/issue-75930-derive-cfg.rs:73:16: 73:19 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/issue-75930-derive-cfg.rs:74:20: 74:23 (#0), + span: $DIR/issue-75930-derive-cfg.rs:73:20: 73:23 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:74:24: 74:29 (#0), + span: $DIR/issue-75930-derive-cfg.rs:73:24: 73:29 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:74:23: 74:30 (#0), + span: $DIR/issue-75930-derive-cfg.rs:73:23: 73:30 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:74:19: 74:31 (#0), + span: $DIR/issue-75930-derive-cfg.rs:73:19: 73:31 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:74:15: 74:32 (#0), + span: $DIR/issue-75930-derive-cfg.rs:73:15: 73:32 (#0), }, Ident { ident: "let", - span: $DIR/issue-75930-derive-cfg.rs:75:13: 75:16 (#0), + span: $DIR/issue-75930-derive-cfg.rs:74:13: 74:16 (#0), }, Ident { ident: "my_val", - span: $DIR/issue-75930-derive-cfg.rs:75:17: 75:23 (#0), + span: $DIR/issue-75930-derive-cfg.rs:74:17: 74:23 (#0), }, Punct { ch: '=', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:75:24: 75:25 (#0), + span: $DIR/issue-75930-derive-cfg.rs:74:24: 74:25 (#0), }, Ident { ident: "true", - span: $DIR/issue-75930-derive-cfg.rs:75:26: 75:30 (#0), + span: $DIR/issue-75930-derive-cfg.rs:74:26: 74:30 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:75:30: 75:31 (#0), + span: $DIR/issue-75930-derive-cfg.rs:74:30: 74:31 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:73:60: 76:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:72:60: 75:10 (#0), }, Ident { ident: "enum", - span: $DIR/issue-75930-derive-cfg.rs:78:9: 78:13 (#0), + span: $DIR/issue-75930-derive-cfg.rs:77:9: 77:13 (#0), }, Ident { ident: "TupleEnum", - span: $DIR/issue-75930-derive-cfg.rs:78:14: 78:23 (#0), + span: $DIR/issue-75930-derive-cfg.rs:77:14: 77:23 (#0), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "Foo", - span: $DIR/issue-75930-derive-cfg.rs:79:13: 79:16 (#0), + span: $DIR/issue-75930-derive-cfg.rs:78:13: 78:16 (#0), }, Group { delimiter: Parenthesis, @@ -1786,64 +1786,64 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:82:17: 82:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:81:17: 81:18 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:82:19: 82:22 (#0), + span: $DIR/issue-75930-derive-cfg.rs:81:19: 81:22 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/issue-75930-derive-cfg.rs:82:23: 82:26 (#0), + span: $DIR/issue-75930-derive-cfg.rs:81:23: 81:26 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:82:27: 82:32 (#0), + span: $DIR/issue-75930-derive-cfg.rs:81:27: 81:32 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:82:26: 82:33 (#0), + span: $DIR/issue-75930-derive-cfg.rs:81:26: 81:33 (#0), }, ], - span: 
$DIR/issue-75930-derive-cfg.rs:82:22: 82:34 (#0), + span: $DIR/issue-75930-derive-cfg.rs:81:22: 81:34 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:82:18: 82:35 (#0), + span: $DIR/issue-75930-derive-cfg.rs:81:18: 81:35 (#0), }, Ident { ident: "i32", - span: $DIR/issue-75930-derive-cfg.rs:82:36: 82:39 (#0), + span: $DIR/issue-75930-derive-cfg.rs:81:36: 81:39 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:82:39: 82:40 (#0), + span: $DIR/issue-75930-derive-cfg.rs:81:39: 81:40 (#0), }, Ident { ident: "u8", - span: $DIR/issue-75930-derive-cfg.rs:83:39: 83:41 (#0), + span: $DIR/issue-75930-derive-cfg.rs:82:39: 82:41 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:79:16: 84:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:78:16: 83:14 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:78:24: 85:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:77:24: 84:10 (#0), }, Ident { ident: "struct", - span: $DIR/issue-75930-derive-cfg.rs:87:9: 87:15 (#0), + span: $DIR/issue-75930-derive-cfg.rs:86:9: 86:15 (#0), }, Ident { ident: "TupleStruct", - span: $DIR/issue-75930-derive-cfg.rs:87:16: 87:27 (#0), + span: $DIR/issue-75930-derive-cfg.rs:86:16: 86:27 (#0), }, Group { delimiter: Parenthesis, @@ -1851,115 +1851,115 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:89:13: 89:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:88:13: 88:14 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "cfg", - span: $DIR/issue-75930-derive-cfg.rs:89:15: 89:18 (#0), + span: $DIR/issue-75930-derive-cfg.rs:88:15: 88:18 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "not", - span: $DIR/issue-75930-derive-cfg.rs:89:19: 89:22 (#0), + span: $DIR/issue-75930-derive-cfg.rs:88:19: 88:22 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "FALSE", - span: $DIR/issue-75930-derive-cfg.rs:89:23: 89:28 (#0), + span: $DIR/issue-75930-derive-cfg.rs:88:23: 88:28 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:89:22: 89:29 (#0), + span: $DIR/issue-75930-derive-cfg.rs:88:22: 88:29 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:89:18: 89:30 (#0), + span: $DIR/issue-75930-derive-cfg.rs:88:18: 88:30 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:89:14: 89:31 (#0), + span: $DIR/issue-75930-derive-cfg.rs:88:14: 88:31 (#0), }, Ident { ident: "i32", - span: $DIR/issue-75930-derive-cfg.rs:89:32: 89:35 (#0), + span: $DIR/issue-75930-derive-cfg.rs:88:32: 88:35 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:89:35: 89:36 (#0), + span: $DIR/issue-75930-derive-cfg.rs:88:35: 88:36 (#0), }, Ident { ident: "u8", - span: $DIR/issue-75930-derive-cfg.rs:91:13: 91:15 (#0), + span: $DIR/issue-75930-derive-cfg.rs:90:13: 90:15 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:87:27: 92:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:86:27: 91:10 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:92:10: 92:11 (#0), + span: $DIR/issue-75930-derive-cfg.rs:91:10: 91:11 (#0), }, Literal { kind: Integer, symbol: "0", suffix: None, - span: $DIR/issue-75930-derive-cfg.rs:98:9: 98:10 (#0), + span: $DIR/issue-75930-derive-cfg.rs:97:9: 97:10 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:58:17: 99:6 (#0), + span: $DIR/issue-75930-derive-cfg.rs:57:17: 98:6 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:58:12: 99:7 (#0), + span: $DIR/issue-75930-derive-cfg.rs:57:12: 98:7 (#0), }, Punct { ch: ',', spacing: Alone, - 
span: $DIR/issue-75930-derive-cfg.rs:99:7: 99:8 (#0), + span: $DIR/issue-75930-derive-cfg.rs:98:7: 98:8 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:100:5: 100:6 (#0), + span: $DIR/issue-75930-derive-cfg.rs:99:5: 99:6 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "print_helper", - span: $DIR/issue-75930-derive-cfg.rs:100:7: 100:19 (#0), + span: $DIR/issue-75930-derive-cfg.rs:99:7: 99:19 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "d", - span: $DIR/issue-75930-derive-cfg.rs:100:20: 100:21 (#0), + span: $DIR/issue-75930-derive-cfg.rs:99:20: 99:21 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:100:19: 100:22 (#0), + span: $DIR/issue-75930-derive-cfg.rs:99:19: 99:22 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:100:6: 100:23 (#0), + span: $DIR/issue-75930-derive-cfg.rs:99:6: 99:23 (#0), }, Ident { ident: "fourth", - span: $DIR/issue-75930-derive-cfg.rs:101:5: 101:11 (#0), + span: $DIR/issue-75930-derive-cfg.rs:100:5: 100:11 (#0), }, Punct { ch: ':', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:101:11: 101:12 (#0), + span: $DIR/issue-75930-derive-cfg.rs:100:11: 100:12 (#0), }, Ident { ident: "B", - span: $DIR/issue-75930-derive-cfg.rs:101:13: 101:14 (#0), + span: $DIR/issue-75930-derive-cfg.rs:100:13: 100:14 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:55:32: 102:2 (#0), + span: $DIR/issue-75930-derive-cfg.rs:54:32: 101:2 (#0), }, ] diff --git a/tests/ui/rfcs/rfc-0000-never_patterns/ICE-119271-never-arm-attr-in-guard.rs b/tests/ui/rfcs/rfc-0000-never_patterns/ICE-119271-never-arm-attr-in-guard.rs new file mode 100644 index 0000000000000..2490909b6a5a9 --- /dev/null +++ b/tests/ui/rfcs/rfc-0000-never_patterns/ICE-119271-never-arm-attr-in-guard.rs @@ -0,0 +1,10 @@ +fn main() {} + +fn attr_in_guard() { + match None:: { + Some(!) //~ ERROR `!` patterns are experimental + if #[deny(unused_mut)] //~ ERROR attributes on expressions are experimental + false //~ ERROR a guard on a never pattern will never be run + } + match false {} +} diff --git a/tests/ui/rfcs/rfc-0000-never_patterns/ICE-119271-never-arm-attr-in-guard.stderr b/tests/ui/rfcs/rfc-0000-never_patterns/ICE-119271-never-arm-attr-in-guard.stderr new file mode 100644 index 0000000000000..335e6c6db5f74 --- /dev/null +++ b/tests/ui/rfcs/rfc-0000-never_patterns/ICE-119271-never-arm-attr-in-guard.stderr @@ -0,0 +1,27 @@ +error[E0658]: attributes on expressions are experimental + --> $DIR/ICE-119271-never-arm-attr-in-guard.rs:6:16 + | +LL | if #[deny(unused_mut)] + | ^^^^^^^^^^^^^^^^^^^ + | + = note: see issue #15701 for more information + = help: add `#![feature(stmt_expr_attributes)]` to the crate attributes to enable + +error[E0658]: `!` patterns are experimental + --> $DIR/ICE-119271-never-arm-attr-in-guard.rs:5:14 + | +LL | Some(!) + | ^ + | + = note: see issue #118155 for more information + = help: add `#![feature(never_patterns)]` to the crate attributes to enable + +error: a guard on a never pattern will never be run + --> $DIR/ICE-119271-never-arm-attr-in-guard.rs:7:13 + | +LL | false + | ^^^^^ help: remove this guard + +error: aborting due to 3 previous errors + +For more information about this error, try `rustc --explain E0658`. 
diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/assoc-type.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/assoc-type.stderr index 290ef6e2f5fa0..58ad1849d4fab 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/assoc-type.stderr +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/assoc-type.stderr @@ -6,7 +6,7 @@ LL | type Bar: ~const std::ops::Add; | = note: this item cannot have `~const` trait bounds -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/assoc-type.rs:17:22 | LL | type Bar: ~const std::ops::Add; diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/call-generic-method-nonconst.rs b/tests/ui/rfcs/rfc-2632-const-trait-impl/call-generic-method-nonconst.rs index 76bc738123d39..8d6176a5bace4 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/call-generic-method-nonconst.rs +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/call-generic-method-nonconst.rs @@ -21,6 +21,6 @@ const fn equals_self(t: &T) -> bool { // it not using the impl. pub const EQ: bool = equals_self(&S); -//~^ ERROR: the trait bound `S: ~const Foo` is not satisfied +//~^ ERROR: the trait bound `S: const Foo` is not satisfied fn main() {} diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/call-generic-method-nonconst.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/call-generic-method-nonconst.stderr index aea9a39b26107..3581b1fcd7dfb 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/call-generic-method-nonconst.stderr +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/call-generic-method-nonconst.stderr @@ -1,8 +1,8 @@ -error[E0277]: the trait bound `S: ~const Foo` is not satisfied +error[E0277]: the trait bound `S: const Foo` is not satisfied --> $DIR/call-generic-method-nonconst.rs:23:34 | LL | pub const EQ: bool = equals_self(&S); - | ----------- ^^ the trait `~const Foo` is not implemented for `S` + | ----------- ^^ the trait `const Foo` is not implemented for `S` | | | required by a bound introduced by this call | diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/const-bounds-non-const-trait.rs b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-bounds-non-const-trait.rs new file mode 100644 index 0000000000000..3582e5e050c98 --- /dev/null +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-bounds-non-const-trait.rs @@ -0,0 +1,12 @@ +// Regression test for issue #117244. 
+#![feature(const_trait_impl, effects)] + +trait NonConst {} + +const fn perform() {} +//~^ ERROR `~const` can only be applied to `#[const_trait]` traits + +fn operate() {} +//~^ ERROR `const` can only be applied to `#[const_trait]` traits + +fn main() {} diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/const-bounds-non-const-trait.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-bounds-non-const-trait.stderr new file mode 100644 index 0000000000000..08954987d3192 --- /dev/null +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-bounds-non-const-trait.stderr @@ -0,0 +1,14 @@ +error: `~const` can only be applied to `#[const_trait]` traits + --> $DIR/const-bounds-non-const-trait.rs:6:28 + | +LL | const fn perform() {} + | ^^^^^^^^ + +error: `const` can only be applied to `#[const_trait]` traits + --> $DIR/const-bounds-non-const-trait.rs:9:21 + | +LL | fn operate() {} + | ^^^^^^^^ + +error: aborting due to 2 previous errors + diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/const-closure-parse-not-item.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-closure-parse-not-item.stderr index fc9b5557a6496..ace2e7e46c4e8 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/const-closure-parse-not-item.stderr +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-closure-parse-not-item.stderr @@ -1,4 +1,4 @@ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/const-closure-parse-not-item.rs:7:32 | LL | const fn test() -> impl ~const Fn() { diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/const-closure-trait-method-fail.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-closure-trait-method-fail.stderr index 73ee0f2151a73..d70b0d66177eb 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/const-closure-trait-method-fail.stderr +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-closure-trait-method-fail.stderr @@ -1,4 +1,4 @@ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/const-closure-trait-method-fail.rs:14:39 | LL | const fn need_const_closure i32>(x: T) -> i32 { diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/const-closure-trait-method.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-closure-trait-method.stderr index 33ae7131b928e..1642de78692b7 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/const-closure-trait-method.stderr +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-closure-trait-method.stderr @@ -1,4 +1,4 @@ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/const-closure-trait-method.rs:14:39 | LL | const fn need_const_closure i32>(x: T) -> i32 { diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/const-closures.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-closures.stderr index 6d61b23e4b7c3..2e448c64d7a40 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/const-closures.stderr +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-closures.stderr @@ -1,22 +1,22 @@ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/const-closures.rs:8:19 | LL | F: ~const FnOnce() -> u8, | ^^^^^^^^^^^^^^ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/const-closures.rs:9:19 | LL | F: ~const FnMut() -> u8, | ^^^^^^^^^^^^^ -error: 
~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/const-closures.rs:10:19 | LL | F: ~const Fn() -> u8, | ^^^^^^^^^^ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/const-closures.rs:23:27 | LL | const fn answer u8>(f: &F) -> u8 { diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop-fail-2.precise.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop-fail-2.precise.stderr index 13350a6d14a42..7529af9293d44 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop-fail-2.precise.stderr +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop-fail-2.precise.stderr @@ -7,7 +7,7 @@ LL | impl const Drop for ConstDropImplWithBounds { = note: marking a trait with `#[const_trait]` ensures all default method bodies are `const` = note: adding a non-const method body in the future would be a breaking change -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/const-drop-fail-2.rs:29:26 | LL | const fn check(_: T) {} diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop-fail-2.stock.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop-fail-2.stock.stderr index 13350a6d14a42..7529af9293d44 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop-fail-2.stock.stderr +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop-fail-2.stock.stderr @@ -7,7 +7,7 @@ LL | impl const Drop for ConstDropImplWithBounds { = note: marking a trait with `#[const_trait]` ensures all default method bodies are `const` = note: adding a non-const method body in the future would be a breaking change -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/const-drop-fail-2.rs:29:26 | LL | const fn check(_: T) {} diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop.precise.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop.precise.stderr index daaba08d7dde9..f166bdf6cecce 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop.precise.stderr +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop.precise.stderr @@ -1,10 +1,15 @@ -error: `~const` is not allowed here - --> $DIR/const-drop.rs:67:38 +error[E0493]: destructor of `T` cannot be evaluated at compile-time + --> $DIR/const-drop.rs:19:32 | -LL | pub struct ConstDropWithBound(pub core::marker::PhantomData); - | ^^^^^^ +LL | const fn a(_: T) {} + | ^ the destructor for this type cannot be evaluated in constant functions + +error[E0493]: destructor of `S<'_>` cannot be evaluated at compile-time + --> $DIR/const-drop.rs:24:13 | - = note: this item cannot have `~const` trait bounds +LL | let _ = S(&mut c); + | ^^^^^^^^^ the destructor for this type cannot be evaluated in constant functions -error: aborting due to 1 previous error +error: aborting due to 2 previous errors +For more information about this error, try `rustc --explain E0493`. 
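The reworked `const-drop` expectations above center on E0493. As a minimal stand-alone sketch (my own, not taken from the suite; expected to be rejected by any recent rustc), the diagnostic fires whenever a `const fn` would have to drop a value whose type is not known to be destructor-free:

```rust
// The by-value parameter `_x` is dropped when this `const fn` returns, so the
// const-checker would need to evaluate `T`'s (potential) destructor at compile
// time; it refuses with E0493, the same error family as in the .stderr above.
const fn consume<T>(_x: T) {}
//~^ ERROR destructor of `T` cannot be evaluated at compile-time

fn main() {}
```

The test files themselves rely on nightly `std::marker::Destruct` bounds to make such drops legal; the sketch only isolates the error the precise/stock revisions are checking for.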
diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop.rs b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop.rs index 9da84cdb0525b..4836d2b02ce71 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop.rs +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop.rs @@ -4,7 +4,7 @@ #![feature(const_trait_impl)] #![feature(const_mut_refs)] #![feature(never_type)] -// #![cfg_attr(precise, feature(const_precise_live_drops))] +#![cfg_attr(precise, feature(const_precise_live_drops))] use std::marker::Destruct; @@ -63,8 +63,7 @@ mod t { fn foo() {} } - // FIXME(effects): This should be a `const` bound instead of a `~const` one. - pub struct ConstDropWithBound(pub core::marker::PhantomData); + pub struct ConstDropWithBound(pub core::marker::PhantomData); impl const Drop for ConstDropWithBound { fn drop(&mut self) { diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop.stock.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop.stock.stderr index daaba08d7dde9..23e368870258e 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop.stock.stderr +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-drop.stock.stderr @@ -1,10 +1,19 @@ -error: `~const` is not allowed here - --> $DIR/const-drop.rs:67:38 +error[E0493]: destructor of `T` cannot be evaluated at compile-time + --> $DIR/const-drop.rs:19:32 | -LL | pub struct ConstDropWithBound(pub core::marker::PhantomData); - | ^^^^^^ +LL | const fn a(_: T) {} + | ^ - value is dropped here + | | + | the destructor for this type cannot be evaluated in constant functions + +error[E0493]: destructor of `S<'_>` cannot be evaluated at compile-time + --> $DIR/const-drop.rs:24:13 | - = note: this item cannot have `~const` trait bounds +LL | let _ = S(&mut c); + | ^^^^^^^^^- value is dropped here + | | + | the destructor for this type cannot be evaluated in constant functions -error: aborting due to 1 previous error +error: aborting due to 2 previous errors +For more information about this error, try `rustc --explain E0493`. diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/const-trait-bounds-trait-objects.rs b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-trait-bounds-trait-objects.rs new file mode 100644 index 0000000000000..a00a6d481050d --- /dev/null +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-trait-bounds-trait-objects.rs @@ -0,0 +1,17 @@ +#![feature(const_trait_impl, effects)] +// edition: 2021 + +#[const_trait] +trait Trait {} + +fn main() { + let _: &dyn const Trait; //~ ERROR const trait bounds are not allowed in trait object types + let _: &dyn ~const Trait; //~ ERROR `~const` is not allowed here +} + +// Regression test for issue #119525. 
+trait NonConst {} +const fn handle(_: &dyn const NonConst) {} +//~^ ERROR const trait bounds are not allowed in trait object types +const fn take(_: &dyn ~const NonConst) {} +//~^ ERROR `~const` is not allowed here diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/const-trait-bounds-trait-objects.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-trait-bounds-trait-objects.stderr new file mode 100644 index 0000000000000..04c2dc2e2e0fd --- /dev/null +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-trait-bounds-trait-objects.stderr @@ -0,0 +1,30 @@ +error: const trait bounds are not allowed in trait object types + --> $DIR/const-trait-bounds-trait-objects.rs:8:17 + | +LL | let _: &dyn const Trait; + | ^^^^^^^^^^^ + +error: `~const` is not allowed here + --> $DIR/const-trait-bounds-trait-objects.rs:9:17 + | +LL | let _: &dyn ~const Trait; + | ^^^^^^ + | + = note: trait objects cannot have `~const` trait bounds + +error: const trait bounds are not allowed in trait object types + --> $DIR/const-trait-bounds-trait-objects.rs:14:25 + | +LL | const fn handle(_: &dyn const NonConst) {} + | ^^^^^^^^^^^^^^ + +error: `~const` is not allowed here + --> $DIR/const-trait-bounds-trait-objects.rs:16:23 + | +LL | const fn take(_: &dyn ~const NonConst) {} + | ^^^^^^ + | + = note: trait objects cannot have `~const` trait bounds + +error: aborting due to 4 previous errors + diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/const-trait-bounds.rs b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-trait-bounds.rs new file mode 100644 index 0000000000000..1ebebe632c70a --- /dev/null +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/const-trait-bounds.rs @@ -0,0 +1,31 @@ +// check-pass + +#![feature(const_trait_impl, effects, generic_const_exprs)] +#![allow(incomplete_features)] + +fn main() { + let _ = process::<()>([()]); + let _ = Struct::<(), 4> { field: [1, 0] }; +} + +fn process(input: [(); T::make(2)]) -> [(); T::make(2)] { + input +} + +struct Struct +where + [u32; T::make(P)]:, +{ + field: [u32; T::make(P)], +} + +#[const_trait] +trait Trait { + fn make(input: usize) -> usize; +} + +impl const Trait for () { + fn make(input: usize) -> usize { + input / 2 + } +} diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/effects/ice-112822-expected-type-for-param.rs b/tests/ui/rfcs/rfc-2632-const-trait-impl/effects/ice-112822-expected-type-for-param.rs index 61e31fc978643..c6be75a6a2f04 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/effects/ice-112822-expected-type-for-param.rs +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/effects/ice-112822-expected-type-for-param.rs @@ -1,6 +1,6 @@ #![feature(const_trait_impl, effects)] -const fn test() -> impl ~const Fn() { //~ ERROR ~const can only be applied to `#[const_trait]` traits +const fn test() -> impl ~const Fn() { //~ ERROR `~const` can only be applied to `#[const_trait]` traits const move || { //~ ERROR const closures are experimental let sl: &[u8] = b"foo"; diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/effects/ice-112822-expected-type-for-param.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/effects/ice-112822-expected-type-for-param.stderr index 658082123140c..fe6b613d1549d 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/effects/ice-112822-expected-type-for-param.stderr +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/effects/ice-112822-expected-type-for-param.stderr @@ -7,7 +7,7 @@ LL | const move || { = note: see issue #106003 for more information = help: add `#![feature(const_closures)]` to the crate attributes to enable -error: ~const 
can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/ice-112822-expected-type-for-param.rs:3:32 | LL | const fn test() -> impl ~const Fn() { diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/effects/trait-fn-const.rs b/tests/ui/rfcs/rfc-2632-const-trait-impl/effects/trait-fn-const.rs new file mode 100644 index 0000000000000..891e87d3b97d4 --- /dev/null +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/effects/trait-fn-const.rs @@ -0,0 +1,21 @@ +// Regression test for issue #113378. +#![feature(const_trait_impl, effects)] + +#[const_trait] +trait Trait { + const fn fun(); //~ ERROR functions in traits cannot be declared const +} + +impl const Trait for () { + const fn fun() {} //~ ERROR functions in trait impls cannot be declared const +} + +impl Trait for u32 { + const fn fun() {} //~ ERROR functions in trait impls cannot be declared const +} + +trait NonConst { + const fn fun(); //~ ERROR functions in traits cannot be declared const +} + +fn main() {} diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/effects/trait-fn-const.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/effects/trait-fn-const.stderr new file mode 100644 index 0000000000000..4d0b03046d27d --- /dev/null +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/effects/trait-fn-const.stderr @@ -0,0 +1,59 @@ +error[E0379]: functions in traits cannot be declared const + --> $DIR/trait-fn-const.rs:6:5 + | +LL | #[const_trait] + | -------------- this declares all associated functions implicitly const +LL | trait Trait { +LL | const fn fun(); + | ^^^^^- + | | + | functions in traits cannot be const + | help: remove the `const` + +error[E0379]: functions in trait impls cannot be declared const + --> $DIR/trait-fn-const.rs:10:5 + | +LL | impl const Trait for () { + | ----- this declares all associated functions implicitly const +LL | const fn fun() {} + | ^^^^^- + | | + | functions in trait impls cannot be const + | help: remove the `const` + +error[E0379]: functions in trait impls cannot be declared const + --> $DIR/trait-fn-const.rs:14:5 + | +LL | const fn fun() {} + | ^^^^^ functions in trait impls cannot be const + | +help: remove the `const` ... + | +LL - const fn fun() {} +LL + fn fun() {} + | +help: ... and declare the impl to be const instead + | +LL | impl const Trait for u32 { + | +++++ + +error[E0379]: functions in traits cannot be declared const + --> $DIR/trait-fn-const.rs:18:5 + | +LL | const fn fun(); + | ^^^^^ functions in traits cannot be const + | +help: remove the `const` ... + | +LL - const fn fun(); +LL + fn fun(); + | +help: ... and declare the trait to be a `#[const_trait]` instead + | +LL + #[const_trait] +LL | trait NonConst { + | + +error: aborting due to 4 previous errors + +For more information about this error, try `rustc --explain E0379`. 
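The E0379 help text in `trait-fn-const.stderr` spells out the intended shape: constness belongs on the trait (via `#[const_trait]`) and on the impl (via `impl const`), never on the individual `fn`. A hedged sketch of that shape, assuming the same nightly feature gates the test enables (`const_trait_impl`, `effects`):

```rust
#![feature(const_trait_impl, effects)]
#![allow(incomplete_features)] // `effects` is an incomplete feature on this nightly

// Methods are written as plain `fn`; `#[const_trait]` is what makes them
// implicitly const-compatible, per the "remove the `const`" suggestion.
#[const_trait]
trait Trait {
    fn fun();
}

// Likewise, the impl is marked `const` as a whole rather than per method.
impl const Trait for () {
    fn fun() {}
}

fn main() {}
```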
diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/feature-gate.gated.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/feature-gate.gated.stderr index 663cdd1fe57bb..12f9355e41d4e 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/feature-gate.gated.stderr +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/feature-gate.gated.stderr @@ -1,5 +1,5 @@ error: fatal error triggered by #[rustc_error] - --> $DIR/feature-gate.rs:14:1 + --> $DIR/feature-gate.rs:22:1 | LL | fn main() {} | ^^^^^^^^^ diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/feature-gate.rs b/tests/ui/rfcs/rfc-2632-const-trait-impl/feature-gate.rs index 0b409fbaac9e0..015d90aaf21fa 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/feature-gate.rs +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/feature-gate.rs @@ -10,5 +10,13 @@ trait T {} impl const T for S {} //[stock]~^ ERROR const trait impls are experimental +const fn f() {} //[stock]~ ERROR const trait impls are experimental +fn g() {} //[stock]~ ERROR const trait impls are experimental + +macro_rules! discard { ($ty:ty) => {} } + +discard! { impl ~const T } //[stock]~ ERROR const trait impls are experimental +discard! { impl const T } //[stock]~ ERROR const trait impls are experimental + #[rustc_error] fn main() {} //[gated]~ ERROR fatal error triggered by #[rustc_error] diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/feature-gate.stock.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/feature-gate.stock.stderr index 0e938c1c55d05..c9826aeb1665f 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/feature-gate.stock.stderr +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/feature-gate.stock.stderr @@ -7,6 +7,42 @@ LL | impl const T for S {} = note: see issue #67792 for more information = help: add `#![feature(const_trait_impl)]` to the crate attributes to enable +error[E0658]: const trait impls are experimental + --> $DIR/feature-gate.rs:13:15 + | +LL | const fn f() {} + | ^^^^^^ + | + = note: see issue #67792 for more information + = help: add `#![feature(const_trait_impl)]` to the crate attributes to enable + +error[E0658]: const trait impls are experimental + --> $DIR/feature-gate.rs:14:9 + | +LL | fn g() {} + | ^^^^^ + | + = note: see issue #67792 for more information + = help: add `#![feature(const_trait_impl)]` to the crate attributes to enable + +error[E0658]: const trait impls are experimental + --> $DIR/feature-gate.rs:18:17 + | +LL | discard! { impl ~const T } + | ^^^^^^ + | + = note: see issue #67792 for more information + = help: add `#![feature(const_trait_impl)]` to the crate attributes to enable + +error[E0658]: const trait impls are experimental + --> $DIR/feature-gate.rs:19:17 + | +LL | discard! { impl const T } + | ^^^^^ + | + = note: see issue #67792 for more information + = help: add `#![feature(const_trait_impl)]` to the crate attributes to enable + error[E0658]: `const_trait` is a temporary placeholder for marking a trait that is suitable for `const` `impls` and all default bodies as `const`, which may be removed or renamed in the future. --> $DIR/feature-gate.rs:8:1 | @@ -16,6 +52,6 @@ LL | #[const_trait] = note: see issue #67792 for more information = help: add `#![feature(const_trait_impl)]` to the crate attributes to enable -error: aborting due to 2 previous errors +error: aborting due to 6 previous errors For more information about this error, try `rustc --explain E0658`. 
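The extended `feature-gate` expectations pin down what a compiler without the feature rejects. A small illustration of the gated syntax (hypothetical file with illustrative names; without `#![feature(const_trait_impl)]` each bound below is reported as E0658 "const trait impls are experimental", mirroring the new stderr entries):

```rust
trait Tr {}

// Both spellings of a const trait bound are feature-gated today.
const fn f<T: ~const Tr>() {} // error[E0658]: const trait impls are experimental
fn g<T: const Tr>() {}        // error[E0658]: const trait impls are experimental

fn main() {}
```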
diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/mbe-bare-trait-objects-const-trait-bounds.rs b/tests/ui/rfcs/rfc-2632-const-trait-impl/mbe-bare-trait-objects-const-trait-bounds.rs new file mode 100644 index 0000000000000..2304a766aaff5 --- /dev/null +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/mbe-bare-trait-objects-const-trait-bounds.rs @@ -0,0 +1,20 @@ +// Ensure that we don't consider `const Trait` and `~const Trait` to +// match the macro fragment specifier `ty` as that would be a breaking +// change theoretically speaking. Syntactically trait object types can +// be "bare", i.e., lack the prefix `dyn`. +// By contrast, `?Trait` *does* match `ty` and therefore an arm like +// `?$Trait:path` would never be reached. +// See `parser/macro/mbe-bare-trait-object-maybe-trait-bound.rs`. + +// check-pass + +macro_rules! check { + ($Type:ty) => { compile_error!("ty"); }; + (const $Trait:path) => {}; + (~const $Trait:path) => {}; +} + +check! { const Trait } +check! { ~const Trait } + +fn main() {} diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/mbe-const-trait-bound-theoretical-regression.rs b/tests/ui/rfcs/rfc-2632-const-trait-impl/mbe-const-trait-bound-theoretical-regression.rs new file mode 100644 index 0000000000000..9105cb6b0438c --- /dev/null +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/mbe-const-trait-bound-theoretical-regression.rs @@ -0,0 +1,20 @@ +// Demonstrates and records a theoretical regressions / breaking changes caused by the +// introduction of const trait bounds. + +// Setting the edition to 2018 since we don't regress `demo! { dyn const }` in Rust <2018. +// edition:2018 + +macro_rules! demo { + ($ty:ty) => { compile_error!("ty"); }; + (impl $c:ident) => {}; + (dyn $c:ident) => {}; +} + +demo! { impl const } +//~^ ERROR expected identifier, found `` + +demo! { dyn const } +//~^ ERROR const trait impls are experimental +//~| ERROR expected identifier, found `` + +fn main() {} diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/mbe-const-trait-bound-theoretical-regression.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/mbe-const-trait-bound-theoretical-regression.stderr new file mode 100644 index 0000000000000..254d31930b366 --- /dev/null +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/mbe-const-trait-bound-theoretical-regression.stderr @@ -0,0 +1,30 @@ +error: expected identifier, found `` + --> $DIR/mbe-const-trait-bound-theoretical-regression.rs:13:14 + | +LL | ($ty:ty) => { compile_error!("ty"); }; + | ------ while parsing argument for this `ty` macro fragment +... +LL | demo! { impl const } + | ^^^^^ expected identifier + +error: expected identifier, found `` + --> $DIR/mbe-const-trait-bound-theoretical-regression.rs:16:13 + | +LL | ($ty:ty) => { compile_error!("ty"); }; + | ------ while parsing argument for this `ty` macro fragment +... +LL | demo! { dyn const } + | ^^^^^ expected identifier + +error[E0658]: const trait impls are experimental + --> $DIR/mbe-const-trait-bound-theoretical-regression.rs:16:13 + | +LL | demo! { dyn const } + | ^^^^^ + | + = note: see issue #67792 for more information + = help: add `#![feature(const_trait_impl)]` to the crate attributes to enable + +error: aborting due to 3 previous errors + +For more information about this error, try `rustc --explain E0658`. 
diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/mbe-dyn-const-2015.rs b/tests/ui/rfcs/rfc-2632-const-trait-impl/mbe-dyn-const-2015.rs new file mode 100644 index 0000000000000..817e9ee5257d6 --- /dev/null +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/mbe-dyn-const-2015.rs @@ -0,0 +1,13 @@ +// Ensure that the introduction of const trait bound didn't regress this code in Rust 2015. +// See also `mbe-const-trait-bound-theoretical-regression.rs`. + +// check-pass + +macro_rules! check { + ($ty:ty) => { compile_error!("ty"); }; + (dyn $c:ident) => {}; +} + +check! { dyn const } + +fn main() {} diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/mutually-exclusive-trait-bound-modifiers.rs b/tests/ui/rfcs/rfc-2632-const-trait-impl/mutually-exclusive-trait-bound-modifiers.rs new file mode 100644 index 0000000000000..37e285f2c6590 --- /dev/null +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/mutually-exclusive-trait-bound-modifiers.rs @@ -0,0 +1,20 @@ +#![feature(const_trait_impl)] + +const fn maybe_const_maybe() {} +//~^ ERROR `~const` and `?` are mutually exclusive + +fn const_maybe() {} +//~^ ERROR `const` and `?` are mutually exclusive + +const fn maybe_const_negative() {} +//~^ ERROR `~const` and `!` are mutually exclusive +//~| ERROR negative bounds are not supported + +fn const_negative() {} +//~^ ERROR `const` and `!` are mutually exclusive +//~| ERROR negative bounds are not supported + +#[const_trait] +trait Trait {} + +fn main() {} diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/mutually-exclusive-trait-bound-modifiers.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/mutually-exclusive-trait-bound-modifiers.stderr new file mode 100644 index 0000000000000..1938f740170b5 --- /dev/null +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/mutually-exclusive-trait-bound-modifiers.stderr @@ -0,0 +1,38 @@ +error: `~const` and `?` are mutually exclusive + --> $DIR/mutually-exclusive-trait-bound-modifiers.rs:3:31 + | +LL | const fn maybe_const_maybe() {} + | ^^^^^^^^^^^^^ + +error: `const` and `?` are mutually exclusive + --> $DIR/mutually-exclusive-trait-bound-modifiers.rs:6:19 + | +LL | fn const_maybe() {} + | ^^^^^^^^^^^^ + +error: `~const` and `!` are mutually exclusive + --> $DIR/mutually-exclusive-trait-bound-modifiers.rs:9:34 + | +LL | const fn maybe_const_negative() {} + | ^^^^^^^^^^^^^ + +error: `const` and `!` are mutually exclusive + --> $DIR/mutually-exclusive-trait-bound-modifiers.rs:13:22 + | +LL | fn const_negative() {} + | ^^^^^^^^^^^^ + +error: negative bounds are not supported + --> $DIR/mutually-exclusive-trait-bound-modifiers.rs:9:41 + | +LL | const fn maybe_const_negative() {} + | ^ + +error: negative bounds are not supported + --> $DIR/mutually-exclusive-trait-bound-modifiers.rs:13:28 + | +LL | fn const_negative() {} + | ^ + +error: aborting due to 6 previous errors + diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/non-const-op-in-closure-in-const.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/non-const-op-in-closure-in-const.stderr index b2e09d82a905e..ae76cab2f2e66 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/non-const-op-in-closure-in-const.stderr +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/non-const-op-in-closure-in-const.stderr @@ -1,4 +1,4 @@ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/non-const-op-in-closure-in-const.rs:10:51 | LL | impl const Convert for A where B: ~const From { diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-2.nn.stderr 
b/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-2.nn.stderr index fd4d7ff347501..eae313ef08748 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-2.nn.stderr +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-2.nn.stderr @@ -10,13 +10,13 @@ note: this trait is not a `#[const_trait]`, so it cannot have `~const` trait bou LL | trait Bar: ~const Foo {} | ^^^^^^^^^^^^^^^^^^^^^^^^ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/super-traits-fail-2.rs:10:19 | LL | trait Bar: ~const Foo {} | ^^^ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/super-traits-fail-2.rs:10:19 | LL | trait Bar: ~const Foo {} diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-2.ny.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-2.ny.stderr index d2e3a5cec1d9f..be3153d6a0819 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-2.ny.stderr +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-2.ny.stderr @@ -1,10 +1,10 @@ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/super-traits-fail-2.rs:10:19 | LL | trait Bar: ~const Foo {} | ^^^ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/super-traits-fail-2.rs:10:19 | LL | trait Bar: ~const Foo {} diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-2.rs b/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-2.rs index 3820d06924342..abdf0feee0384 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-2.rs +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-2.rs @@ -8,8 +8,8 @@ trait Foo { #[cfg_attr(any(yy, ny), const_trait)] trait Bar: ~const Foo {} -//[ny,nn]~^ ERROR: ~const can only be applied to `#[const_trait]` -//[ny,nn]~| ERROR: ~const can only be applied to `#[const_trait]` +//[ny,nn]~^ ERROR: `~const` can only be applied to `#[const_trait]` +//[ny,nn]~| ERROR: `~const` can only be applied to `#[const_trait]` //[yn,nn]~^^^ ERROR: `~const` is not allowed here const fn foo(x: &T) { diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-3.nn.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-3.nn.stderr index 199d2199c4ae8..834d6f4dcf382 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-3.nn.stderr +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-3.nn.stderr @@ -10,13 +10,13 @@ note: this trait is not a `#[const_trait]`, so it cannot have `~const` trait bou LL | trait Bar: ~const Foo {} | ^^^^^^^^^^^^^^^^^^^^^^^^ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/super-traits-fail-3.rs:12:19 | LL | trait Bar: ~const Foo {} | ^^^ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/super-traits-fail-3.rs:12:19 | LL | trait Bar: ~const Foo {} @@ -24,7 +24,7 @@ LL | trait Bar: ~const Foo {} | = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/super-traits-fail-3.rs:17:24 | LL | const fn 
foo(x: &T) { diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-3.ny.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-3.ny.stderr index 46eedc333f148..4fdd2284c4755 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-3.ny.stderr +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-3.ny.stderr @@ -1,10 +1,10 @@ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/super-traits-fail-3.rs:12:19 | LL | trait Bar: ~const Foo {} | ^^^ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/super-traits-fail-3.rs:12:19 | LL | trait Bar: ~const Foo {} diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-3.rs b/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-3.rs index 3e98e131930af..30131d5849c81 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-3.rs +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-3.rs @@ -10,12 +10,12 @@ trait Foo { #[cfg_attr(any(yy, ny), const_trait)] trait Bar: ~const Foo {} -//[ny,nn]~^ ERROR: ~const can only be applied to `#[const_trait]` -//[ny,nn]~| ERROR: ~const can only be applied to `#[const_trait]` +//[ny,nn]~^ ERROR: `~const` can only be applied to `#[const_trait]` +//[ny,nn]~| ERROR: `~const` can only be applied to `#[const_trait]` //[yn,nn]~^^^ ERROR: `~const` is not allowed here const fn foo(x: &T) { - //[yn,nn]~^ ERROR: ~const can only be applied to `#[const_trait]` + //[yn,nn]~^ ERROR: `~const` can only be applied to `#[const_trait]` x.a(); } diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-3.yn.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-3.yn.stderr index dc08a8997389c..ab7c814eb4985 100644 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-3.yn.stderr +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/super-traits-fail-3.yn.stderr @@ -10,7 +10,7 @@ note: this trait is not a `#[const_trait]`, so it cannot have `~const` trait bou LL | trait Bar: ~const Foo {} | ^^^^^^^^^^^^^^^^^^^^^^^^ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/super-traits-fail-3.rs:17:24 | LL | const fn foo(x: &T) { diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/tilde-const-maybe-trait.rs b/tests/ui/rfcs/rfc-2632-const-trait-impl/tilde-const-maybe-trait.rs deleted file mode 100644 index ed911d965d631..0000000000000 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/tilde-const-maybe-trait.rs +++ /dev/null @@ -1,6 +0,0 @@ -#![feature(const_trait_impl)] - -const fn tilde_question() {} -//~^ ERROR `~const` and `?` are mutually exclusive - -fn main() {} diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/tilde-const-maybe-trait.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/tilde-const-maybe-trait.stderr deleted file mode 100644 index 5850ab41c6ba6..0000000000000 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/tilde-const-maybe-trait.stderr +++ /dev/null @@ -1,8 +0,0 @@ -error: `~const` and `?` are mutually exclusive - --> $DIR/tilde-const-maybe-trait.rs:3:28 - | -LL | const fn tilde_question() {} - | ^^^^^^^^^^^^^ - -error: aborting due to 1 previous error - diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/unsatisfied-const-trait-bound.rs b/tests/ui/rfcs/rfc-2632-const-trait-impl/unsatisfied-const-trait-bound.rs new file mode 100644 index 
0000000000000..62a7b31237842 --- /dev/null +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/unsatisfied-const-trait-bound.rs @@ -0,0 +1,33 @@ +// Ensure that we print unsatisfied always-const trait bounds as `const Trait` in diagnostics. + +#![feature(const_trait_impl, effects, generic_const_exprs)] +#![allow(incomplete_features)] + +fn require() {} + +#[const_trait] +trait Trait { + fn make() -> u32; +} + +struct Ty; + +impl Trait for Ty { + fn make() -> u32 { 0 } +} + +fn main() { + require::(); //~ ERROR the trait bound `Ty: const Trait` is not satisfied +} + +struct Container; + +// FIXME(effects): Somehow emit `the trait bound `T: const Trait` is not satisfied` here instead +// and suggest changing `Trait` to `const Trait`. +fn accept0(_: Container<{ T::make() }>) {} +//~^ ERROR mismatched types + +// FIXME(effects): Instead of suggesting `+ const Trait`, suggest +// changing `~const Trait` to `const Trait`. +const fn accept1(_: Container<{ T::make() }>) {} +//~^ ERROR the trait bound `T: const Trait` is not satisfied diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/unsatisfied-const-trait-bound.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/unsatisfied-const-trait-bound.stderr new file mode 100644 index 0000000000000..2fb4fc1aa2b8e --- /dev/null +++ b/tests/ui/rfcs/rfc-2632-const-trait-impl/unsatisfied-const-trait-bound.stderr @@ -0,0 +1,37 @@ +error[E0308]: mismatched types + --> $DIR/unsatisfied-const-trait-bound.rs:27:37 + | +LL | fn accept0(_: Container<{ T::make() }>) {} + | ^^^^^^^^^ expected `false`, found `true` + | + = note: expected constant `false` + found constant `true` + +error[E0277]: the trait bound `T: const Trait` is not satisfied + --> $DIR/unsatisfied-const-trait-bound.rs:32:50 + | +LL | const fn accept1(_: Container<{ T::make() }>) {} + | ^ the trait `const Trait` is not implemented for `T` + | +help: consider further restricting this bound + | +LL | const fn accept1(_: Container<{ T::make() }>) {} + | +++++++++++++ + +error[E0277]: the trait bound `Ty: const Trait` is not satisfied + --> $DIR/unsatisfied-const-trait-bound.rs:20:15 + | +LL | require::(); + | ^^ the trait `const Trait` is not implemented for `Ty` + | + = help: the trait `Trait` is implemented for `Ty` +note: required by a bound in `require` + --> $DIR/unsatisfied-const-trait-bound.rs:6:15 + | +LL | fn require() {} + | ^^^^^^^^^^^ required by this bound in `require` + +error: aborting due to 3 previous errors + +Some errors have detailed explanations: E0277, E0308. +For more information about an error, try `rustc --explain E0277`. 
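The `unsatisfied-const-trait-bound` test locks in the new wording: a plain `impl Trait for Ty` leaves a `const Trait` bound unsatisfied. A speculative sketch of the other side of that diagnostic, reusing the test's own names under the same nightly gates (whether every caller type-checks on that nightly is an assumption; the shape of the impl is the point):

```rust
#![feature(const_trait_impl, effects)]
#![allow(incomplete_features)]

#[const_trait]
trait Trait {
    fn make() -> u32;
}

struct Ty;

// A plain `impl Trait for Ty` is what the test keeps, and it is what triggers
// the "not satisfied" error; marking the impl `const` is the change the
// diagnostic's help text implicitly asks for when a `const Trait` bound must hold.
impl const Trait for Ty {
    fn make() -> u32 { 0 }
}

// Same bound spelling as the test's `require`; with the const impl in scope,
// instantiating `T = Ty` should satisfy it (assumption based on the help text).
fn require<T: const Trait>() {}

fn main() {}
```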
diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/without-tilde.rs b/tests/ui/rfcs/rfc-2632-const-trait-impl/without-tilde.rs deleted file mode 100644 index d63381b5f2cc9..0000000000000 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/without-tilde.rs +++ /dev/null @@ -1,6 +0,0 @@ -// compile-flags: -Z parse-only - -#![feature(const_trait_impl)] - -struct S; -//~^ ERROR const bounds must start with `~` diff --git a/tests/ui/rfcs/rfc-2632-const-trait-impl/without-tilde.stderr b/tests/ui/rfcs/rfc-2632-const-trait-impl/without-tilde.stderr deleted file mode 100644 index 646cdfc78f971..0000000000000 --- a/tests/ui/rfcs/rfc-2632-const-trait-impl/without-tilde.stderr +++ /dev/null @@ -1,10 +0,0 @@ -error: const bounds must start with `~` - --> $DIR/without-tilde.rs:5:13 - | -LL | struct S; - | -^^^^ - | | - | help: add `~`: `~` - -error: aborting due to 1 previous error - diff --git a/tests/ui/sanitize/badfree.rs b/tests/ui/sanitize/badfree.rs index c8d1ce7dff25d..4a230e11d9579 100644 --- a/tests/ui/sanitize/badfree.rs +++ b/tests/ui/sanitize/badfree.rs @@ -5,7 +5,7 @@ // compile-flags: -Z sanitizer=address -O // // run-fail -// error-pattern: AddressSanitizer: SEGV +// regex-error-pattern: AddressSanitizer: (SEGV|attempting free on address which was not malloc) use std::ffi::c_void; diff --git a/tests/ui/sanitize/sanitizer-cfi-generalize-pointers-attr-cfg.rs b/tests/ui/sanitize/sanitizer-cfi-generalize-pointers-attr-cfg.rs index 3a0fc143da6fb..5b8de5c219e13 100644 --- a/tests/ui/sanitize/sanitizer-cfi-generalize-pointers-attr-cfg.rs +++ b/tests/ui/sanitize/sanitizer-cfi-generalize-pointers-attr-cfg.rs @@ -5,5 +5,7 @@ // check-pass // compile-flags: -Clto -Cno-prepopulate-passes -Ctarget-feature=-crt-static -Zsanitizer=cfi -Zsanitizer-cfi-generalize-pointers +#![feature(cfg_sanitizer_cfi)] + #[cfg(sanitizer_cfi_generalize_pointers)] fn main() {} diff --git a/tests/ui/sanitize/sanitizer-cfi-normalize-integers-attr-cfg.rs b/tests/ui/sanitize/sanitizer-cfi-normalize-integers-attr-cfg.rs index dafc20162abc1..4972ccf31678e 100644 --- a/tests/ui/sanitize/sanitizer-cfi-normalize-integers-attr-cfg.rs +++ b/tests/ui/sanitize/sanitizer-cfi-normalize-integers-attr-cfg.rs @@ -5,5 +5,7 @@ // check-pass // compile-flags: -Clto -Cno-prepopulate-passes -Ctarget-feature=-crt-static -Zsanitizer=cfi -Zsanitizer-cfi-normalize-integers +#![feature(cfg_sanitizer_cfi)] + #[cfg(sanitizer_cfi_normalize_integers)] fn main() {} diff --git a/tests/ui/sized/recursive-type-2.rs b/tests/ui/sized/recursive-type-binding.rs similarity index 100% rename from tests/ui/sized/recursive-type-2.rs rename to tests/ui/sized/recursive-type-binding.rs diff --git a/tests/ui/sized/recursive-type-2.stderr b/tests/ui/sized/recursive-type-binding.stderr similarity index 93% rename from tests/ui/sized/recursive-type-2.stderr rename to tests/ui/sized/recursive-type-binding.stderr index 4e7f40a01533c..d9c2efa4d53b7 100644 --- a/tests/ui/sized/recursive-type-2.stderr +++ b/tests/ui/sized/recursive-type-binding.stderr @@ -3,7 +3,7 @@ error[E0391]: cycle detected when computing layout of `Foo<()>` = note: ...which requires computing layout of `<() as A>::Assoc`... 
= note: ...which again requires computing layout of `Foo<()>`, completing the cycle note: cycle used when elaborating drops for `main` - --> $DIR/recursive-type-2.rs:11:1 + --> $DIR/recursive-type-binding.rs:11:1 | LL | fn main() { | ^^^^^^^^^ diff --git a/tests/ui/sized/recursive-type-coercion-from-never.rs b/tests/ui/sized/recursive-type-coercion-from-never.rs new file mode 100644 index 0000000000000..a1b654637316d --- /dev/null +++ b/tests/ui/sized/recursive-type-coercion-from-never.rs @@ -0,0 +1,16 @@ +// build-fail +//~^ ERROR cycle detected when computing layout of `Foo<()>` + +// Regression test for a stack overflow: https://github.com/rust-lang/rust/issues/113197 + +trait A { type Assoc; } + +impl A for () { + type Assoc = Foo<()>; +} + +struct Foo(T::Assoc); + +fn main() { + Foo::<()>(todo!()); +} diff --git a/tests/ui/sized/recursive-type-coercion-from-never.stderr b/tests/ui/sized/recursive-type-coercion-from-never.stderr new file mode 100644 index 0000000000000..7580e780dda59 --- /dev/null +++ b/tests/ui/sized/recursive-type-coercion-from-never.stderr @@ -0,0 +1,14 @@ +error[E0391]: cycle detected when computing layout of `Foo<()>` + | + = note: ...which requires computing layout of `<() as A>::Assoc`... + = note: ...which again requires computing layout of `Foo<()>`, completing the cycle +note: cycle used when elaborating drops for `main` + --> $DIR/recursive-type-coercion-from-never.rs:14:1 + | +LL | fn main() { + | ^^^^^^^^^ + = note: see https://rustc-dev-guide.rust-lang.org/overview.html#queries and https://rustc-dev-guide.rust-lang.org/query.html for more information + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0391`. diff --git a/tests/ui/sized/recursive-type-1.rs b/tests/ui/sized/recursive-type-pass.rs similarity index 100% rename from tests/ui/sized/recursive-type-1.rs rename to tests/ui/sized/recursive-type-pass.rs diff --git a/tests/ui/specialization/const_trait_impl.stderr b/tests/ui/specialization/const_trait_impl.stderr index d13cd8f55553d..913d51875cd70 100644 --- a/tests/ui/specialization/const_trait_impl.stderr +++ b/tests/ui/specialization/const_trait_impl.stderr @@ -1,16 +1,16 @@ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/const_trait_impl.rs:34:16 | LL | impl const A for T { | ^^^^^^^ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/const_trait_impl.rs:40:16 | LL | impl const A for T { | ^^^^^^^ -error: ~const can only be applied to `#[const_trait]` traits +error: `~const` can only be applied to `#[const_trait]` traits --> $DIR/const_trait_impl.rs:46:16 | LL | impl const A for T { diff --git a/tests/ui/specialization/min_specialization/issue-79224.stderr b/tests/ui/specialization/min_specialization/issue-79224.stderr index 7541579498e8a..37ced4cf267fe 100644 --- a/tests/ui/specialization/min_specialization/issue-79224.stderr +++ b/tests/ui/specialization/min_specialization/issue-79224.stderr @@ -11,10 +11,10 @@ LL | impl Display for Cow<'_, B> { | +++++++++++++++++++ error[E0277]: the trait bound `B: Clone` is not satisfied - --> $DIR/issue-79224.rs:20:13 + --> $DIR/issue-79224.rs:20:5 | LL | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - | ^^^^ the trait `Clone` is not implemented for `B` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `Clone` is not implemented for `B` | = note: 
required for `B` to implement `ToOwned` help: consider further restricting this bound @@ -23,10 +23,10 @@ LL | impl Display for Cow<'_, B> { | +++++++++++++++++++ error[E0277]: the trait bound `B: Clone` is not satisfied - --> $DIR/issue-79224.rs:20:5 + --> $DIR/issue-79224.rs:20:13 | LL | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `Clone` is not implemented for `B` + | ^^^^ the trait `Clone` is not implemented for `B` | = note: required for `B` to implement `ToOwned` help: consider further restricting this bound diff --git a/tests/ui/stats/hir-stats.stderr b/tests/ui/stats/hir-stats.stderr index 070dbbb10bb65..5296475c94a78 100644 --- a/tests/ui/stats/hir-stats.stderr +++ b/tests/ui/stats/hir-stats.stderr @@ -125,9 +125,9 @@ hir-stats ExprField 40 ( 0.4%) 1 40 hir-stats TraitItemRef 56 ( 0.6%) 2 28 hir-stats Local 64 ( 0.7%) 1 64 hir-stats Param 64 ( 0.7%) 2 32 +hir-stats Body 72 ( 0.8%) 3 24 hir-stats InlineAsm 72 ( 0.8%) 1 72 hir-stats ImplItemRef 72 ( 0.8%) 2 36 -hir-stats Body 96 ( 1.1%) 3 32 hir-stats FieldDef 96 ( 1.1%) 2 48 hir-stats Arm 96 ( 1.1%) 2 48 hir-stats Stmt 96 ( 1.1%) 3 32 @@ -146,7 +146,7 @@ hir-stats - Trait 192 ( 2.1%) 4 hir-stats WherePredicate 192 ( 2.1%) 3 64 hir-stats - BoundPredicate 192 ( 2.1%) 3 hir-stats Block 288 ( 3.2%) 6 48 -hir-stats Pat 360 ( 3.9%) 5 72 +hir-stats Pat 360 ( 4.0%) 5 72 hir-stats - Wild 72 ( 0.8%) 1 hir-stats - Struct 72 ( 0.8%) 1 hir-stats - Binding 216 ( 2.4%) 3 @@ -172,7 +172,7 @@ hir-stats - Impl 88 ( 1.0%) 1 hir-stats - Fn 176 ( 1.9%) 2 hir-stats - Use 352 ( 3.9%) 4 hir-stats Path 1_240 (13.6%) 31 40 -hir-stats PathSegment 1_920 (21.0%) 40 48 +hir-stats PathSegment 1_920 (21.1%) 40 48 hir-stats ---------------------------------------------------------------- -hir-stats Total 9_136 +hir-stats Total 9_112 hir-stats diff --git a/tests/ui/symbol-names/types.rs b/tests/ui/symbol-names/types.rs index 0ae699c93c2de..475e8d89abff6 100644 --- a/tests/ui/symbol-names/types.rs +++ b/tests/ui/symbol-names/types.rs @@ -1,7 +1,7 @@ // build-fail // revisions: legacy verbose-legacy // compile-flags: --crate-name=a -C symbol-mangling-version=legacy -Z unstable-options -//[verbose-legacy]compile-flags: -Zverbose +//[verbose-legacy]compile-flags: -Zverbose-internals // normalize-stderr-test: "h[[:xdigit:]]{16}" -> "h[HASH]" #![feature(never_type)] diff --git a/tests/ui/symbol-names/verbose.rs b/tests/ui/symbol-names/verbose.rs index e00c592b6d276..2aa43e8762741 100644 --- a/tests/ui/symbol-names/verbose.rs +++ b/tests/ui/symbol-names/verbose.rs @@ -1,10 +1,10 @@ -// Regression test for issue #57596, where -Zverbose flag unintentionally +// Regression test for issue #57596, where -Zverbose-internals flag unintentionally // affected produced symbols making it impossible to link between crates // with a different value of the flag (for symbols involving generic // arguments equal to defaults of their respective parameters). 
// // build-pass -// compile-flags: -Zverbose +// compile-flags: -Zverbose-internals pub fn error(msg: String) -> Box { msg.into() diff --git a/tests/ui/thir-print/thir-tree-match.stdout b/tests/ui/thir-print/thir-tree-match.stdout index e752e4a870215..a17592fd2521f 100644 --- a/tests/ui/thir-print/thir-tree-match.stdout +++ b/tests/ui/thir-print/thir-tree-match.stdout @@ -93,9 +93,9 @@ body: adt_def: AdtDef { did: DefId(0:10 ~ thir_tree_match[fcf8]::Foo) - variants: [VariantDef { def_id: DefId(0:11 ~ thir_tree_match[fcf8]::Foo::FooOne), ctor: Some((Fn, DefId(0:12 ~ thir_tree_match[fcf8]::Foo::FooOne::{constructor#0}))), name: "FooOne", discr: Relative(0), fields: [FieldDef { did: DefId(0:13 ~ thir_tree_match[fcf8]::Foo::FooOne::0), name: "0", vis: Restricted(DefId(0:0 ~ thir_tree_match[fcf8])) }], flags: NO_VARIANT_FLAGS }, VariantDef { def_id: DefId(0:14 ~ thir_tree_match[fcf8]::Foo::FooTwo), ctor: Some((Const, DefId(0:15 ~ thir_tree_match[fcf8]::Foo::FooTwo::{constructor#0}))), name: "FooTwo", discr: Relative(1), fields: [], flags: NO_VARIANT_FLAGS }] + variants: [VariantDef { def_id: DefId(0:11 ~ thir_tree_match[fcf8]::Foo::FooOne), ctor: Some((Fn, DefId(0:12 ~ thir_tree_match[fcf8]::Foo::FooOne::{constructor#0}))), name: "FooOne", discr: Relative(0), fields: [FieldDef { did: DefId(0:13 ~ thir_tree_match[fcf8]::Foo::FooOne::0), name: "0", vis: Restricted(DefId(0:0 ~ thir_tree_match[fcf8])) }], flags: }, VariantDef { def_id: DefId(0:14 ~ thir_tree_match[fcf8]::Foo::FooTwo), ctor: Some((Const, DefId(0:15 ~ thir_tree_match[fcf8]::Foo::FooTwo::{constructor#0}))), name: "FooTwo", discr: Relative(1), fields: [], flags: }] flags: IS_ENUM - repr: ReprOptions { int: None, align: None, pack: None, flags: (empty), field_shuffle_seed: 3477539199540094892 } + repr: ReprOptions { int: None, align: None, pack: None, flags: , field_shuffle_seed: 3477539199540094892 } args: [] variant_index: 0 subpatterns: [ @@ -107,9 +107,9 @@ body: adt_def: AdtDef { did: DefId(0:3 ~ thir_tree_match[fcf8]::Bar) - variants: [VariantDef { def_id: DefId(0:4 ~ thir_tree_match[fcf8]::Bar::First), ctor: Some((Const, DefId(0:5 ~ thir_tree_match[fcf8]::Bar::First::{constructor#0}))), name: "First", discr: Relative(0), fields: [], flags: NO_VARIANT_FLAGS }, VariantDef { def_id: DefId(0:6 ~ thir_tree_match[fcf8]::Bar::Second), ctor: Some((Const, DefId(0:7 ~ thir_tree_match[fcf8]::Bar::Second::{constructor#0}))), name: "Second", discr: Relative(1), fields: [], flags: NO_VARIANT_FLAGS }, VariantDef { def_id: DefId(0:8 ~ thir_tree_match[fcf8]::Bar::Third), ctor: Some((Const, DefId(0:9 ~ thir_tree_match[fcf8]::Bar::Third::{constructor#0}))), name: "Third", discr: Relative(2), fields: [], flags: NO_VARIANT_FLAGS }] + variants: [VariantDef { def_id: DefId(0:4 ~ thir_tree_match[fcf8]::Bar::First), ctor: Some((Const, DefId(0:5 ~ thir_tree_match[fcf8]::Bar::First::{constructor#0}))), name: "First", discr: Relative(0), fields: [], flags: }, VariantDef { def_id: DefId(0:6 ~ thir_tree_match[fcf8]::Bar::Second), ctor: Some((Const, DefId(0:7 ~ thir_tree_match[fcf8]::Bar::Second::{constructor#0}))), name: "Second", discr: Relative(1), fields: [], flags: }, VariantDef { def_id: DefId(0:8 ~ thir_tree_match[fcf8]::Bar::Third), ctor: Some((Const, DefId(0:9 ~ thir_tree_match[fcf8]::Bar::Third::{constructor#0}))), name: "Third", discr: Relative(2), fields: [], flags: }] flags: IS_ENUM - repr: ReprOptions { int: None, align: None, pack: None, flags: (empty), field_shuffle_seed: 10333377570083945360 } + repr: ReprOptions { int: None, align: None, 
pack: None, flags: , field_shuffle_seed: 10333377570083945360 } args: [] variant_index: 0 subpatterns: [] @@ -155,9 +155,9 @@ body: adt_def: AdtDef { did: DefId(0:10 ~ thir_tree_match[fcf8]::Foo) - variants: [VariantDef { def_id: DefId(0:11 ~ thir_tree_match[fcf8]::Foo::FooOne), ctor: Some((Fn, DefId(0:12 ~ thir_tree_match[fcf8]::Foo::FooOne::{constructor#0}))), name: "FooOne", discr: Relative(0), fields: [FieldDef { did: DefId(0:13 ~ thir_tree_match[fcf8]::Foo::FooOne::0), name: "0", vis: Restricted(DefId(0:0 ~ thir_tree_match[fcf8])) }], flags: NO_VARIANT_FLAGS }, VariantDef { def_id: DefId(0:14 ~ thir_tree_match[fcf8]::Foo::FooTwo), ctor: Some((Const, DefId(0:15 ~ thir_tree_match[fcf8]::Foo::FooTwo::{constructor#0}))), name: "FooTwo", discr: Relative(1), fields: [], flags: NO_VARIANT_FLAGS }] + variants: [VariantDef { def_id: DefId(0:11 ~ thir_tree_match[fcf8]::Foo::FooOne), ctor: Some((Fn, DefId(0:12 ~ thir_tree_match[fcf8]::Foo::FooOne::{constructor#0}))), name: "FooOne", discr: Relative(0), fields: [FieldDef { did: DefId(0:13 ~ thir_tree_match[fcf8]::Foo::FooOne::0), name: "0", vis: Restricted(DefId(0:0 ~ thir_tree_match[fcf8])) }], flags: }, VariantDef { def_id: DefId(0:14 ~ thir_tree_match[fcf8]::Foo::FooTwo), ctor: Some((Const, DefId(0:15 ~ thir_tree_match[fcf8]::Foo::FooTwo::{constructor#0}))), name: "FooTwo", discr: Relative(1), fields: [], flags: }] flags: IS_ENUM - repr: ReprOptions { int: None, align: None, pack: None, flags: (empty), field_shuffle_seed: 3477539199540094892 } + repr: ReprOptions { int: None, align: None, pack: None, flags: , field_shuffle_seed: 3477539199540094892 } args: [] variant_index: 0 subpatterns: [ @@ -207,9 +207,9 @@ body: adt_def: AdtDef { did: DefId(0:10 ~ thir_tree_match[fcf8]::Foo) - variants: [VariantDef { def_id: DefId(0:11 ~ thir_tree_match[fcf8]::Foo::FooOne), ctor: Some((Fn, DefId(0:12 ~ thir_tree_match[fcf8]::Foo::FooOne::{constructor#0}))), name: "FooOne", discr: Relative(0), fields: [FieldDef { did: DefId(0:13 ~ thir_tree_match[fcf8]::Foo::FooOne::0), name: "0", vis: Restricted(DefId(0:0 ~ thir_tree_match[fcf8])) }], flags: NO_VARIANT_FLAGS }, VariantDef { def_id: DefId(0:14 ~ thir_tree_match[fcf8]::Foo::FooTwo), ctor: Some((Const, DefId(0:15 ~ thir_tree_match[fcf8]::Foo::FooTwo::{constructor#0}))), name: "FooTwo", discr: Relative(1), fields: [], flags: NO_VARIANT_FLAGS }] + variants: [VariantDef { def_id: DefId(0:11 ~ thir_tree_match[fcf8]::Foo::FooOne), ctor: Some((Fn, DefId(0:12 ~ thir_tree_match[fcf8]::Foo::FooOne::{constructor#0}))), name: "FooOne", discr: Relative(0), fields: [FieldDef { did: DefId(0:13 ~ thir_tree_match[fcf8]::Foo::FooOne::0), name: "0", vis: Restricted(DefId(0:0 ~ thir_tree_match[fcf8])) }], flags: }, VariantDef { def_id: DefId(0:14 ~ thir_tree_match[fcf8]::Foo::FooTwo), ctor: Some((Const, DefId(0:15 ~ thir_tree_match[fcf8]::Foo::FooTwo::{constructor#0}))), name: "FooTwo", discr: Relative(1), fields: [], flags: }] flags: IS_ENUM - repr: ReprOptions { int: None, align: None, pack: None, flags: (empty), field_shuffle_seed: 3477539199540094892 } + repr: ReprOptions { int: None, align: None, pack: None, flags: , field_shuffle_seed: 3477539199540094892 } args: [] variant_index: 1 subpatterns: [] diff --git a/tests/ui/traits/cycle-cache-err-60010.rs b/tests/ui/traits/cycle-cache-err-60010.rs index 94e718317e7c8..94875a5a25a4e 100644 --- a/tests/ui/traits/cycle-cache-err-60010.rs +++ b/tests/ui/traits/cycle-cache-err-60010.rs @@ -29,9 +29,8 @@ struct SalsaStorage { } impl Database for RootDatabase { - // This would also 
be an error if we didn't abort compilation on the error - // above. type Storage = SalsaStorage; + //~^ ERROR overflow } impl HasQueryGroup for RootDatabase {} impl Query for ParseQuery diff --git a/tests/ui/traits/cycle-cache-err-60010.stderr b/tests/ui/traits/cycle-cache-err-60010.stderr index 4f9615104cbf4..4f5e318183216 100644 --- a/tests/ui/traits/cycle-cache-err-60010.stderr +++ b/tests/ui/traits/cycle-cache-err-60010.stderr @@ -21,7 +21,7 @@ note: required because it appears within the type `RootDatabase` LL | struct RootDatabase { | ^^^^^^^^^^^^ note: required for `RootDatabase` to implement `SourceDatabase` - --> $DIR/cycle-cache-err-60010.rs:44:9 + --> $DIR/cycle-cache-err-60010.rs:43:9 | LL | impl SourceDatabase for T | ^^^^^^^^^^^^^^ ^ @@ -29,7 +29,7 @@ LL | where LL | T: RefUnwindSafe, | ------------- unsatisfied trait bound introduced here note: required for `ParseQuery` to implement `Query` - --> $DIR/cycle-cache-err-60010.rs:37:10 + --> $DIR/cycle-cache-err-60010.rs:36:10 | LL | impl Query for ParseQuery | ^^^^^^^^^ ^^^^^^^^^^ @@ -37,6 +37,59 @@ LL | where LL | DB: SourceDatabase, | -------------- unsatisfied trait bound introduced here -error: aborting due to 1 previous error +error[E0275]: overflow evaluating the requirement `SalsaStorage: RefUnwindSafe` + --> $DIR/cycle-cache-err-60010.rs:32:20 + | +LL | type Storage = SalsaStorage; + | ^^^^^^^^^^^^ + | +note: required because it appears within the type `PhantomData` + --> $SRC_DIR/core/src/marker.rs:LL:COL +note: required because it appears within the type `Unique` + --> $SRC_DIR/core/src/ptr/unique.rs:LL:COL +note: required because it appears within the type `Box` + --> $SRC_DIR/alloc/src/boxed.rs:LL:COL +note: required because it appears within the type `Runtime` + --> $DIR/cycle-cache-err-60010.rs:23:8 + | +LL | struct Runtime { + | ^^^^^^^ +note: required because it appears within the type `RootDatabase` + --> $DIR/cycle-cache-err-60010.rs:20:8 + | +LL | struct RootDatabase { + | ^^^^^^^^^^^^ +note: required for `RootDatabase` to implement `SourceDatabase` + --> $DIR/cycle-cache-err-60010.rs:43:9 + | +LL | impl SourceDatabase for T + | ^^^^^^^^^^^^^^ ^ +LL | where +LL | T: RefUnwindSafe, + | ------------- unsatisfied trait bound introduced here +note: required for `ParseQuery` to implement `Query` + --> $DIR/cycle-cache-err-60010.rs:36:10 + | +LL | impl Query for ParseQuery + | ^^^^^^^^^ ^^^^^^^^^^ +LL | where +LL | DB: SourceDatabase, + | -------------- unsatisfied trait bound introduced here +note: required because it appears within the type `SalsaStorage` + --> $DIR/cycle-cache-err-60010.rs:26:8 + | +LL | struct SalsaStorage { + | ^^^^^^^^^^^^ +note: required by a bound in `Database::Storage` + --> $DIR/cycle-cache-err-60010.rs:7:5 + | +LL | type Storage; + | ^^^^^^^^^^^^^ required by this bound in `Database::Storage` +help: consider relaxing the implicit `Sized` restriction + | +LL | type Storage: ?Sized; + | ++++++++ + +error: aborting due to 2 previous errors For more information about this error, try `rustc --explain E0275`. 
diff --git a/tests/ui/traits/next-solver/specialization-transmute.rs b/tests/ui/traits/next-solver/specialization-transmute.rs index 58b62f52dfdb2..ff25656a7ff59 100644 --- a/tests/ui/traits/next-solver/specialization-transmute.rs +++ b/tests/ui/traits/next-solver/specialization-transmute.rs @@ -4,15 +4,15 @@ //~^ WARN the feature `specialization` is incomplete trait Default { - type Id; + type Id; - fn intu(&self) -> &Self::Id; + fn intu(&self) -> &Self::Id; } impl Default for T { default type Id = T; //~ ERROR type annotations needed // This will be fixed by #111994 - fn intu(&self) -> &Self::Id { //~ ERROR type annotations needed + fn intu(&self) -> &Self::Id { self } } diff --git a/tests/ui/traits/next-solver/specialization-transmute.stderr b/tests/ui/traits/next-solver/specialization-transmute.stderr index eaf32a475ac95..a5459165587ca 100644 --- a/tests/ui/traits/next-solver/specialization-transmute.stderr +++ b/tests/ui/traits/next-solver/specialization-transmute.stderr @@ -8,19 +8,12 @@ LL | #![feature(specialization)] = help: consider using `min_specialization` instead, which is more stable and complete = note: `#[warn(incomplete_features)]` on by default -error[E0284]: type annotations needed: cannot satisfy `::Id normalizes-to _` - --> $DIR/specialization-transmute.rs:15:23 - | -LL | fn intu(&self) -> &Self::Id { - | ^^^^^^^^^ cannot satisfy `::Id normalizes-to _` - error[E0282]: type annotations needed --> $DIR/specialization-transmute.rs:13:23 | LL | default type Id = T; | ^ cannot infer type for associated type `::Id` -error: aborting due to 2 previous errors; 1 warning emitted +error: aborting due to 1 previous error; 1 warning emitted -Some errors have detailed explanations: E0282, E0284. -For more information about an error, try `rustc --explain E0282`. +For more information about this error, try `rustc --explain E0282`. 
diff --git a/tests/ui/traits/non_lifetime_binders/binder-defaults-112547.rs b/tests/ui/traits/non_lifetime_binders/binder-defaults-112547.rs new file mode 100644 index 0000000000000..c6bf0dc1f720f --- /dev/null +++ b/tests/ui/traits/non_lifetime_binders/binder-defaults-112547.rs @@ -0,0 +1,16 @@ +#![feature(non_lifetime_binders)] +//~^ WARN the feature `non_lifetime_binders` is incomplete + +pub fn bar() +where + for V: IntoIterator +//~^^^ ERROR defaults for generic parameters are not allowed in `for<...>` binders +//~^^ ERROR cannot find type `V` in this scope +{ +} + +fn main() { + bar(); +} diff --git a/tests/ui/closures/issue-112547.stderr b/tests/ui/traits/non_lifetime_binders/binder-defaults-112547.stderr similarity index 62% rename from tests/ui/closures/issue-112547.stderr rename to tests/ui/traits/non_lifetime_binders/binder-defaults-112547.stderr index f47ea60729763..edc55a3c8e68f 100644 --- a/tests/ui/closures/issue-112547.stderr +++ b/tests/ui/traits/non_lifetime_binders/binder-defaults-112547.stderr @@ -1,5 +1,5 @@ error[E0412]: cannot find type `V` in this scope - --> $DIR/issue-112547.rs:8:4 + --> $DIR/binder-defaults-112547.rs:8:4 | LL | }> V: IntoIterator | ^ not found in this scope @@ -10,7 +10,7 @@ LL | pub fn bar() | +++ warning: the feature `non_lifetime_binders` is incomplete and may not be safe to use and/or cause compiler crashes - --> $DIR/issue-112547.rs:1:12 + --> $DIR/binder-defaults-112547.rs:1:12 | LL | #![feature(non_lifetime_binders)] | ^^^^^^^^^^^^^^^^^^^^ @@ -18,6 +18,15 @@ LL | #![feature(non_lifetime_binders)] = note: see issue #108185 for more information = note: `#[warn(incomplete_features)]` on by default -error: aborting due to 1 previous error; 1 warning emitted +error: defaults for generic parameters are not allowed in `for<...>` binders + --> $DIR/binder-defaults-112547.rs:6:9 + | +LL | for V: IntoIterator + | |_^ + +error: aborting due to 2 previous errors; 1 warning emitted For more information about this error, try `rustc --explain E0412`. diff --git a/tests/ui/traits/non_lifetime_binders/binder-defaults-118697.rs b/tests/ui/traits/non_lifetime_binders/binder-defaults-118697.rs new file mode 100644 index 0000000000000..2dc9fb98b153d --- /dev/null +++ b/tests/ui/traits/non_lifetime_binders/binder-defaults-118697.rs @@ -0,0 +1,9 @@ +#![allow(incomplete_features)] +#![feature(non_lifetime_binders)] + +type T = dyn for Fn(()); +//~^ ERROR defaults for generic parameters are not allowed in `for<...>` binders +//~| ERROR cannot find type `A` in this scope +//~| ERROR late-bound type parameter not allowed on trait object types + +fn main() {} diff --git a/tests/ui/traits/non_lifetime_binders/binder-defaults-118697.stderr b/tests/ui/traits/non_lifetime_binders/binder-defaults-118697.stderr new file mode 100644 index 0000000000000..6b93f52dbfcaa --- /dev/null +++ b/tests/ui/traits/non_lifetime_binders/binder-defaults-118697.stderr @@ -0,0 +1,21 @@ +error[E0412]: cannot find type `A` in this scope + --> $DIR/binder-defaults-118697.rs:4:22 + | +LL | type T = dyn for Fn(()); + | ^ not found in this scope + +error: defaults for generic parameters are not allowed in `for<...>` binders + --> $DIR/binder-defaults-118697.rs:4:18 + | +LL | type T = dyn for Fn(()); + | ^^^^^^^^^^ + +error: late-bound type parameter not allowed on trait object types + --> $DIR/binder-defaults-118697.rs:4:18 + | +LL | type T = dyn for Fn(()); + | ^^^^^^^^^^ + +error: aborting due to 3 previous errors + +For more information about this error, try `rustc --explain E0412`. 
diff --git a/tests/ui/traits/non_lifetime_binders/binder-defaults-119489.rs b/tests/ui/traits/non_lifetime_binders/binder-defaults-119489.rs new file mode 100644 index 0000000000000..f33da416ad8ae --- /dev/null +++ b/tests/ui/traits/non_lifetime_binders/binder-defaults-119489.rs @@ -0,0 +1,12 @@ +#![feature(non_lifetime_binders, generic_const_exprs)] +//~^ WARN the feature `non_lifetime_binders` is incomplete +//~| WARN the feature `generic_const_exprs` is incomplete + +fn fun() +where + for ():, +//~^ ERROR defaults for generic parameters are not allowed in `for<...>` binders +//~| ERROR defaults for generic parameters are not allowed in `for<...>` binders +{} + +fn main() {} diff --git a/tests/ui/traits/non_lifetime_binders/binder-defaults-119489.stderr b/tests/ui/traits/non_lifetime_binders/binder-defaults-119489.stderr new file mode 100644 index 0000000000000..7fe82f1f097c4 --- /dev/null +++ b/tests/ui/traits/non_lifetime_binders/binder-defaults-119489.stderr @@ -0,0 +1,31 @@ +warning: the feature `non_lifetime_binders` is incomplete and may not be safe to use and/or cause compiler crashes + --> $DIR/binder-defaults-119489.rs:1:12 + | +LL | #![feature(non_lifetime_binders, generic_const_exprs)] + | ^^^^^^^^^^^^^^^^^^^^ + | + = note: see issue #108185 for more information + = note: `#[warn(incomplete_features)]` on by default + +warning: the feature `generic_const_exprs` is incomplete and may not be safe to use and/or cause compiler crashes + --> $DIR/binder-defaults-119489.rs:1:34 + | +LL | #![feature(non_lifetime_binders, generic_const_exprs)] + | ^^^^^^^^^^^^^^^^^^^ + | + = note: see issue #76560 for more information + +error: defaults for generic parameters are not allowed in `for<...>` binders + --> $DIR/binder-defaults-119489.rs:7:9 + | +LL | for ():, + | ^^^^^^ + +error: defaults for generic parameters are not allowed in `for<...>` binders + --> $DIR/binder-defaults-119489.rs:7:17 + | +LL | for ():, + | ^^^^^^^^^^^^^^^^^^ + +error: aborting due to 2 previous errors; 2 warnings emitted + diff --git a/tests/ui/type-alias-impl-trait/generic_underconstrained.stderr b/tests/ui/type-alias-impl-trait/generic_underconstrained.stderr index 88529b370f133..be9b07823ae0a 100644 --- a/tests/ui/type-alias-impl-trait/generic_underconstrained.stderr +++ b/tests/ui/type-alias-impl-trait/generic_underconstrained.stderr @@ -1,8 +1,13 @@ error[E0277]: the trait bound `T: Trait` is not satisfied - --> $DIR/generic_underconstrained.rs:9:31 + --> $DIR/generic_underconstrained.rs:9:51 | -LL | fn underconstrain(_: T) -> Underconstrained { - | ^^^^^^^^^^^^^^^^^^^ the trait `Trait` is not implemented for `T` +LL | fn underconstrain(_: T) -> Underconstrained { + | ___________________________________________________^ +LL | | +LL | | +LL | | unimplemented!() +LL | | } + | |_^ the trait `Trait` is not implemented for `T` | note: required by a bound on the type alias `Underconstrained` --> $DIR/generic_underconstrained.rs:6:26 @@ -15,15 +20,10 @@ LL | fn underconstrain(_: T) -> Underconstrained { | +++++++ error[E0277]: the trait bound `T: Trait` is not satisfied - --> $DIR/generic_underconstrained.rs:9:51 + --> $DIR/generic_underconstrained.rs:9:31 | -LL | fn underconstrain(_: T) -> Underconstrained { - | ___________________________________________________^ -LL | | -LL | | -LL | | unimplemented!() -LL | | } - | |_^ the trait `Trait` is not implemented for `T` +LL | fn underconstrain(_: T) -> Underconstrained { + | ^^^^^^^^^^^^^^^^^^^ the trait `Trait` is not implemented for `T` | note: required by a bound on 
the type alias `Underconstrained` --> $DIR/generic_underconstrained.rs:6:26 diff --git a/tests/ui/type-alias-impl-trait/generic_underconstrained2.stderr b/tests/ui/type-alias-impl-trait/generic_underconstrained2.stderr index b3b9cbca96854..15d96191ba9e2 100644 --- a/tests/ui/type-alias-impl-trait/generic_underconstrained2.stderr +++ b/tests/ui/type-alias-impl-trait/generic_underconstrained2.stderr @@ -1,8 +1,13 @@ error[E0277]: `U` doesn't implement `Debug` - --> $DIR/generic_underconstrained2.rs:8:33 + --> $DIR/generic_underconstrained2.rs:8:53 | -LL | fn underconstrained(_: U) -> Underconstrained { - | ^^^^^^^^^^^^^^^^^^^ `U` cannot be formatted using `{:?}` because it doesn't implement `Debug` +LL | fn underconstrained(_: U) -> Underconstrained { + | _____________________________________________________^ +LL | | +LL | | +LL | | 5u32 +LL | | } + | |_^ `U` cannot be formatted using `{:?}` because it doesn't implement `Debug` | note: required by a bound on the type alias `Underconstrained` --> $DIR/generic_underconstrained2.rs:5:26 @@ -15,10 +20,15 @@ LL | fn underconstrained(_: U) -> Underconstrained { | +++++++++++++++++ error[E0277]: `V` doesn't implement `Debug` - --> $DIR/generic_underconstrained2.rs:17:43 + --> $DIR/generic_underconstrained2.rs:17:64 | -LL | fn underconstrained2(_: U, _: V) -> Underconstrained2 { - | ^^^^^^^^^^^^^^^^^^^^ `V` cannot be formatted using `{:?}` because it doesn't implement `Debug` +LL | fn underconstrained2(_: U, _: V) -> Underconstrained2 { + | ________________________________________________________________^ +LL | | +LL | | +LL | | 5u32 +LL | | } + | |_^ `V` cannot be formatted using `{:?}` because it doesn't implement `Debug` | note: required by a bound on the type alias `Underconstrained2` --> $DIR/generic_underconstrained2.rs:14:27 @@ -31,15 +41,10 @@ LL | fn underconstrained2(_: U, _: V) -> Underconstrained | +++++++++++++++++ error[E0277]: `U` doesn't implement `Debug` - --> $DIR/generic_underconstrained2.rs:8:53 + --> $DIR/generic_underconstrained2.rs:8:33 | -LL | fn underconstrained(_: U) -> Underconstrained { - | _____________________________________________________^ -LL | | -LL | | -LL | | 5u32 -LL | | } - | |_^ `U` cannot be formatted using `{:?}` because it doesn't implement `Debug` +LL | fn underconstrained(_: U) -> Underconstrained { + | ^^^^^^^^^^^^^^^^^^^ `U` cannot be formatted using `{:?}` because it doesn't implement `Debug` | note: required by a bound on the type alias `Underconstrained` --> $DIR/generic_underconstrained2.rs:5:26 @@ -52,15 +57,10 @@ LL | fn underconstrained(_: U) -> Underconstrained { | +++++++++++++++++ error[E0277]: `V` doesn't implement `Debug` - --> $DIR/generic_underconstrained2.rs:17:64 + --> $DIR/generic_underconstrained2.rs:17:43 | -LL | fn underconstrained2(_: U, _: V) -> Underconstrained2 { - | ________________________________________________________________^ -LL | | -LL | | -LL | | 5u32 -LL | | } - | |_^ `V` cannot be formatted using `{:?}` because it doesn't implement `Debug` +LL | fn underconstrained2(_: U, _: V) -> Underconstrained2 { + | ^^^^^^^^^^^^^^^^^^^^ `V` cannot be formatted using `{:?}` because it doesn't implement `Debug` | note: required by a bound on the type alias `Underconstrained2` --> $DIR/generic_underconstrained2.rs:14:27 diff --git a/tests/ui/type-alias-impl-trait/issue-53092-2.stderr b/tests/ui/type-alias-impl-trait/issue-53092-2.stderr index e4054e14abef1..e805a71ea6f30 100644 --- a/tests/ui/type-alias-impl-trait/issue-53092-2.stderr +++ 
b/tests/ui/type-alias-impl-trait/issue-53092-2.stderr @@ -17,17 +17,11 @@ LL | const CONST_BUG: Bug = unsafe { std::mem::transmute(|_: u8| ()) }; = note: ...which requires computing layout of `Bug`... = note: ...which requires normalizing `Bug`... = note: ...which again requires computing type of `Bug::{opaque#0}`, completing the cycle -note: cycle used when checking item types in top-level module - --> $DIR/issue-53092-2.rs:1:1 - | -LL | / #![feature(type_alias_impl_trait)] -LL | | #![allow(dead_code)] -LL | | -LL | | type Bug = impl Fn(T) -> U + Copy; -... | -LL | | CONST_BUG(0); -LL | | } - | |_^ +note: cycle used when checking that `Bug::{opaque#0}` is well-formed + --> $DIR/issue-53092-2.rs:4:18 + | +LL | type Bug = impl Fn(T) -> U + Copy; + | ^^^^^^^^^^^^^^^^^^^^^^ = note: see https://rustc-dev-guide.rust-lang.org/overview.html#queries and https://rustc-dev-guide.rust-lang.org/query.html for more information error[E0277]: the trait bound `U: From` is not satisfied diff --git a/tests/ui/type/issue-94187-verbose-type-name.rs b/tests/ui/type/issue-94187-verbose-type-name.rs index 3713a32eb1183..7c765d6d8104a 100644 --- a/tests/ui/type/issue-94187-verbose-type-name.rs +++ b/tests/ui/type/issue-94187-verbose-type-name.rs @@ -1,8 +1,8 @@ -// Check to insure that the output of `std::any::type_name` does not change based on `-Zverbose` +// Ensure the output of `std::any::type_name` does not change based on `-Zverbose-internals` // run-pass // edition: 2018 // revisions: normal verbose -// [verbose]compile-flags:-Zverbose +// [verbose]compile-flags:-Zverbose-internals --verbose use std::any::type_name; diff --git a/tests/ui/type/type-check/assignment-expected-bool.rs b/tests/ui/type/type-check/assignment-expected-bool.rs index 191939bdb705b..fe8af64b43d02 100644 --- a/tests/ui/type/type-check/assignment-expected-bool.rs +++ b/tests/ui/type/type-check/assignment-expected-bool.rs @@ -31,4 +31,9 @@ fn main() { let _: usize = 0 = 0; //~^ ERROR mismatched types [E0308] //~| ERROR invalid left-hand side of assignment [E0070] + + let foo = &String::new(); + let bar = ""; + if foo = bar {} + //~^ ERROR mismatched types [E0308] } diff --git a/tests/ui/type/type-check/assignment-expected-bool.stderr b/tests/ui/type/type-check/assignment-expected-bool.stderr index 56494baff6bdf..6c44e389a21a6 100644 --- a/tests/ui/type/type-check/assignment-expected-bool.stderr +++ b/tests/ui/type/type-check/assignment-expected-bool.stderr @@ -135,7 +135,18 @@ LL | let _: usize = 0 = 0; | | | expected due to this -error: aborting due to 13 previous errors +error[E0308]: mismatched types + --> $DIR/assignment-expected-bool.rs:37:8 + | +LL | if foo = bar {} + | ^^^^^^^^^ expected `bool`, found `()` + | +help: you might have meant to compare for equality + | +LL | if foo == bar {} + | + + +error: aborting due to 14 previous errors Some errors have detailed explanations: E0070, E0308. For more information about an error, try `rustc --explain E0070`. 
diff --git a/tests/ui/type/verbose.normal.stderr b/tests/ui/type/verbose.normal.stderr new file mode 100644 index 0000000000000..6cb2640333642 --- /dev/null +++ b/tests/ui/type/verbose.normal.stderr @@ -0,0 +1,14 @@ +error[E0308]: mismatched types + --> $DIR/verbose.rs:7:28 + | +LL | let _: Foo = Foo:: { x: 0, y: 0 }; + | ------------- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `Foo`, found `Foo` + | | + | expected due to this + | + = note: expected struct `Foo` + found struct `Foo` + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0308`. diff --git a/tests/ui/type/verbose.rs b/tests/ui/type/verbose.rs new file mode 100644 index 0000000000000..4ebd5cdccfcf9 --- /dev/null +++ b/tests/ui/type/verbose.rs @@ -0,0 +1,13 @@ +// revisions:verbose normal +// [verbose]compile-flags:--verbose +#![crate_type = "lib"] + +struct Foo { x: T, y: U } +fn bar() { + let _: Foo = Foo:: { x: 0, y: 0 }; + //~^ ERROR mismatched types + //[verbose]~| NOTE expected struct `Foo` + //[normal]~| NOTE expected struct `Foo` + //~| NOTE expected `Foo` + //~| NOTE expected due to this +} diff --git a/tests/ui/type/verbose.verbose.stderr b/tests/ui/type/verbose.verbose.stderr new file mode 100644 index 0000000000000..7cc7a16cdb169 --- /dev/null +++ b/tests/ui/type/verbose.verbose.stderr @@ -0,0 +1,14 @@ +error[E0308]: mismatched types + --> $DIR/verbose.rs:7:28 + | +LL | let _: Foo = Foo:: { x: 0, y: 0 }; + | ------------- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `Foo`, found `Foo` + | | + | expected due to this + | + = note: expected struct `Foo` + found struct `Foo` + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0308`. diff --git a/tests/ui/ufcs/ufcs-explicit-self-bad.stderr b/tests/ui/ufcs/ufcs-explicit-self-bad.stderr index 4c2cb0eb7536a..b0e71507a2e7d 100644 --- a/tests/ui/ufcs/ufcs-explicit-self-bad.stderr +++ b/tests/ui/ufcs/ufcs-explicit-self-bad.stderr @@ -1,3 +1,20 @@ +error[E0053]: method `dummy2` has an incompatible type for trait + --> $DIR/ufcs-explicit-self-bad.rs:37:21 + | +LL | fn dummy2(self: &Bar) {} + | ------^^^^^^^ + | | | + | | expected `&'a Bar`, found `Bar` + | help: change the self-receiver type to match the trait: `&self` + | +note: type in trait + --> $DIR/ufcs-explicit-self-bad.rs:31:15 + | +LL | fn dummy2(&self); + | ^^^^^ + = note: expected signature `fn(&&'a Bar<_>)` + found signature `fn(&Bar<_>)` + error[E0307]: invalid `self` parameter type: isize --> $DIR/ufcs-explicit-self-bad.rs:8:18 | @@ -101,23 +118,6 @@ note: ...does not necessarily outlive the anonymous lifetime defined here LL | fn dummy3(self: &&Bar) {} | ^^^^^^^ -error[E0053]: method `dummy2` has an incompatible type for trait - --> $DIR/ufcs-explicit-self-bad.rs:37:21 - | -LL | fn dummy2(self: &Bar) {} - | ------^^^^^^^ - | | | - | | expected `&'a Bar`, found `Bar` - | help: change the self-receiver type to match the trait: `&self` - | -note: type in trait - --> $DIR/ufcs-explicit-self-bad.rs:31:15 - | -LL | fn dummy2(&self); - | ^^^^^ - = note: expected signature `fn(&&'a Bar<_>)` - found signature `fn(&Bar<_>)` - error: aborting due to 8 previous errors Some errors have detailed explanations: E0053, E0307, E0308. 
diff --git a/tests/ui/union/union-unsized.mirunsafeck.stderr b/tests/ui/union/union-unsized.mirunsafeck.stderr index f8da20413b2ac..de7e690d80fc9 100644 --- a/tests/ui/union/union-unsized.mirunsafeck.stderr +++ b/tests/ui/union/union-unsized.mirunsafeck.stderr @@ -16,6 +16,18 @@ help: the `Box` type always has a statically known size and allocates its conten LL | a: Box, | ++++ + +error[E0740]: field must implement `Copy` or be wrapped in `ManuallyDrop<...>` to be used in a union + --> $DIR/union-unsized.rs:5:5 + | +LL | a: str, + | ^^^^^^ + | + = note: union fields must not have drop side-effects, which is currently enforced via either `Copy` or `ManuallyDrop<...>` +help: wrap the field type in `ManuallyDrop<...>` + | +LL | a: std::mem::ManuallyDrop, + | +++++++++++++++++++++++ + + error[E0277]: the size for values of type `str` cannot be known at compilation time --> $DIR/union-unsized.rs:14:8 | @@ -34,18 +46,6 @@ help: the `Box` type always has a statically known size and allocates its conten LL | b: Box, | ++++ + -error[E0740]: field must implement `Copy` or be wrapped in `ManuallyDrop<...>` to be used in a union - --> $DIR/union-unsized.rs:5:5 - | -LL | a: str, - | ^^^^^^ - | - = note: union fields must not have drop side-effects, which is currently enforced via either `Copy` or `ManuallyDrop<...>` -help: wrap the field type in `ManuallyDrop<...>` - | -LL | a: std::mem::ManuallyDrop, - | +++++++++++++++++++++++ + - error[E0740]: field must implement `Copy` or be wrapped in `ManuallyDrop<...>` to be used in a union --> $DIR/union-unsized.rs:14:5 | diff --git a/tests/ui/union/union-unsized.thirunsafeck.stderr b/tests/ui/union/union-unsized.thirunsafeck.stderr index f8da20413b2ac..de7e690d80fc9 100644 --- a/tests/ui/union/union-unsized.thirunsafeck.stderr +++ b/tests/ui/union/union-unsized.thirunsafeck.stderr @@ -16,6 +16,18 @@ help: the `Box` type always has a statically known size and allocates its conten LL | a: Box, | ++++ + +error[E0740]: field must implement `Copy` or be wrapped in `ManuallyDrop<...>` to be used in a union + --> $DIR/union-unsized.rs:5:5 + | +LL | a: str, + | ^^^^^^ + | + = note: union fields must not have drop side-effects, which is currently enforced via either `Copy` or `ManuallyDrop<...>` +help: wrap the field type in `ManuallyDrop<...>` + | +LL | a: std::mem::ManuallyDrop, + | +++++++++++++++++++++++ + + error[E0277]: the size for values of type `str` cannot be known at compilation time --> $DIR/union-unsized.rs:14:8 | @@ -34,18 +46,6 @@ help: the `Box` type always has a statically known size and allocates its conten LL | b: Box, | ++++ + -error[E0740]: field must implement `Copy` or be wrapped in `ManuallyDrop<...>` to be used in a union - --> $DIR/union-unsized.rs:5:5 - | -LL | a: str, - | ^^^^^^ - | - = note: union fields must not have drop side-effects, which is currently enforced via either `Copy` or `ManuallyDrop<...>` -help: wrap the field type in `ManuallyDrop<...>` - | -LL | a: std::mem::ManuallyDrop, - | +++++++++++++++++++++++ + - error[E0740]: field must implement `Copy` or be wrapped in `ManuallyDrop<...>` to be used in a union --> $DIR/union-unsized.rs:14:5 | diff --git a/tests/ui/unknown-unstable-lints/deny-unstable-lint-command-line.stderr b/tests/ui/unknown-unstable-lints/deny-unstable-lint-command-line.stderr index f0450aea49a4a..df83c10308438 100644 --- a/tests/ui/unknown-unstable-lints/deny-unstable-lint-command-line.stderr +++ b/tests/ui/unknown-unstable-lints/deny-unstable-lint-command-line.stderr @@ -4,17 +4,5 @@ error: unknown lint: 
`test_unstable_lint` = help: add `-Zcrate-attr="feature(test_unstable_lint)"` to the command-line options to enable = note: requested on the command line with `-D unknown-lints` -error: unknown lint: `test_unstable_lint` - | - = note: the `test_unstable_lint` lint is unstable - = help: add `-Zcrate-attr="feature(test_unstable_lint)"` to the command-line options to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -error: unknown lint: `test_unstable_lint` - | - = note: the `test_unstable_lint` lint is unstable - = help: add `-Zcrate-attr="feature(test_unstable_lint)"` to the command-line options to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -error: aborting due to 3 previous errors +error: aborting due to 1 previous error diff --git a/tests/ui/unknown-unstable-lints/deny-unstable-lint-inline.rs b/tests/ui/unknown-unstable-lints/deny-unstable-lint-inline.rs index c6c60b12d83e9..29c6547abc16a 100644 --- a/tests/ui/unknown-unstable-lints/deny-unstable-lint-inline.rs +++ b/tests/ui/unknown-unstable-lints/deny-unstable-lint-inline.rs @@ -3,7 +3,5 @@ #![deny(unknown_lints)] #![allow(test_unstable_lint)] //~^ ERROR unknown lint: `test_unstable_lint` -//~| ERROR unknown lint: `test_unstable_lint` -//~| ERROR unknown lint: `test_unstable_lint` fn main() {} diff --git a/tests/ui/unknown-unstable-lints/deny-unstable-lint-inline.stderr b/tests/ui/unknown-unstable-lints/deny-unstable-lint-inline.stderr index 20a36b28dc693..0afe3d55c98a5 100644 --- a/tests/ui/unknown-unstable-lints/deny-unstable-lint-inline.stderr +++ b/tests/ui/unknown-unstable-lints/deny-unstable-lint-inline.stderr @@ -12,25 +12,5 @@ note: the lint level is defined here LL | #![deny(unknown_lints)] | ^^^^^^^^^^^^^ -error: unknown lint: `test_unstable_lint` - --> $DIR/deny-unstable-lint-inline.rs:4:1 - | -LL | #![allow(test_unstable_lint)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `test_unstable_lint` lint is unstable - = help: add `#![feature(test_unstable_lint)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -error: unknown lint: `test_unstable_lint` - --> $DIR/deny-unstable-lint-inline.rs:4:1 - | -LL | #![allow(test_unstable_lint)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `test_unstable_lint` lint is unstable - = help: add `#![feature(test_unstable_lint)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -error: aborting due to 3 previous errors +error: aborting due to 1 previous error diff --git a/tests/ui/unknown-unstable-lints/warn-unknown-unstable-lint-command-line.stderr b/tests/ui/unknown-unstable-lints/warn-unknown-unstable-lint-command-line.stderr index a2deecf1caf5e..c133b880ebde3 100644 --- a/tests/ui/unknown-unstable-lints/warn-unknown-unstable-lint-command-line.stderr +++ b/tests/ui/unknown-unstable-lints/warn-unknown-unstable-lint-command-line.stderr @@ -4,17 +4,5 @@ warning: unknown lint: `test_unstable_lint` = help: add `-Zcrate-attr="feature(test_unstable_lint)"` to the command-line options to enable = note: requested on the command line with `-W unknown-lints` -warning: unknown lint: `test_unstable_lint` - | - = note: the `test_unstable_lint` lint is unstable - = help: add `-Zcrate-attr="feature(test_unstable_lint)"` to the command-line options to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: unknown lint: `test_unstable_lint` - | - = 
note: the `test_unstable_lint` lint is unstable - = help: add `-Zcrate-attr="feature(test_unstable_lint)"` to the command-line options to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: 3 warnings emitted +warning: 1 warning emitted diff --git a/tests/ui/unknown-unstable-lints/warn-unknown-unstable-lint-inline.rs b/tests/ui/unknown-unstable-lints/warn-unknown-unstable-lint-inline.rs index f4247e4569eb7..89db84e69f677 100644 --- a/tests/ui/unknown-unstable-lints/warn-unknown-unstable-lint-inline.rs +++ b/tests/ui/unknown-unstable-lints/warn-unknown-unstable-lint-inline.rs @@ -3,7 +3,5 @@ #![warn(unknown_lints)] #![allow(test_unstable_lint)] //~^ WARNING unknown lint: `test_unstable_lint` -//~| WARNING unknown lint: `test_unstable_lint` -//~| WARNING unknown lint: `test_unstable_lint` fn main() {} diff --git a/tests/ui/unknown-unstable-lints/warn-unknown-unstable-lint-inline.stderr b/tests/ui/unknown-unstable-lints/warn-unknown-unstable-lint-inline.stderr index 12afb2e294a3d..48c83b49e2962 100644 --- a/tests/ui/unknown-unstable-lints/warn-unknown-unstable-lint-inline.stderr +++ b/tests/ui/unknown-unstable-lints/warn-unknown-unstable-lint-inline.stderr @@ -12,25 +12,5 @@ note: the lint level is defined here LL | #![warn(unknown_lints)] | ^^^^^^^^^^^^^ -warning: unknown lint: `test_unstable_lint` - --> $DIR/warn-unknown-unstable-lint-inline.rs:4:1 - | -LL | #![allow(test_unstable_lint)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `test_unstable_lint` lint is unstable - = help: add `#![feature(test_unstable_lint)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: unknown lint: `test_unstable_lint` - --> $DIR/warn-unknown-unstable-lint-inline.rs:4:1 - | -LL | #![allow(test_unstable_lint)] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = note: the `test_unstable_lint` lint is unstable - = help: add `#![feature(test_unstable_lint)]` to the crate attributes to enable - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - -warning: 3 warnings emitted +warning: 1 warning emitted diff --git a/tests/ui/unpretty/flattened-format-args.stdout b/tests/ui/unpretty/flattened-format-args.stdout index a8fe8da002472..7fc5d26605996 100644 --- a/tests/ui/unpretty/flattened-format-args.stdout +++ b/tests/ui/unpretty/flattened-format-args.stdout @@ -9,8 +9,7 @@ fn main() { let x = 1; // Should flatten to println!("a 123 b {x} xyz\n"): { - ::std::io::_print(<#[lang = "format_arguments"]>::new_v1(&["a 123 b ", - " xyz\n"], - &[<#[lang = "format_argument"]>::new_display(&x)])); + ::std::io::_print(format_arguments::new_v1(&["a 123 b ", + " xyz\n"], &[format_argument::new_display(&x)])); }; } diff --git a/tests/ui/wf/hir-wf-check-erase-regions.stderr b/tests/ui/wf/hir-wf-check-erase-regions.stderr index eb0a8f8f69a25..4b696dc1d1dfe 100644 --- a/tests/ui/wf/hir-wf-check-erase-regions.stderr +++ b/tests/ui/wf/hir-wf-check-erase-regions.stderr @@ -6,15 +6,15 @@ LL | type IntoIter = std::iter::Flatten>; | = help: the trait `Iterator` is not implemented for `&'a T` = help: the trait `Iterator` is implemented for `&mut I` - = note: required for `&'a T` to implement `IntoIterator` -note: required by a bound in `Flatten` - --> $SRC_DIR/core/src/iter/adapters/flatten.rs:LL:COL + = note: required for `Flatten>` to implement `Iterator` +note: required by a bound in `std::iter::IntoIterator::IntoIter` + --> $SRC_DIR/core/src/iter/traits/collect.rs:LL:COL error[E0277]: `&'a T` is not 
an iterator - --> $DIR/hir-wf-check-erase-regions.rs:11:27 + --> $DIR/hir-wf-check-erase-regions.rs:7:21 | -LL | fn into_iter(self) -> Self::IntoIter { - | ^^^^^^^^^^^^^^ `&'a T` is not an iterator +LL | type IntoIter = std::iter::Flatten>; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `&'a T` is not an iterator | = help: the trait `Iterator` is not implemented for `&'a T` = help: the trait `Iterator` is implemented for `&mut I` @@ -23,16 +23,16 @@ note: required by a bound in `Flatten` --> $SRC_DIR/core/src/iter/adapters/flatten.rs:LL:COL error[E0277]: `&'a T` is not an iterator - --> $DIR/hir-wf-check-erase-regions.rs:7:21 + --> $DIR/hir-wf-check-erase-regions.rs:11:27 | -LL | type IntoIter = std::iter::Flatten>; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `&'a T` is not an iterator +LL | fn into_iter(self) -> Self::IntoIter { + | ^^^^^^^^^^^^^^ `&'a T` is not an iterator | = help: the trait `Iterator` is not implemented for `&'a T` = help: the trait `Iterator` is implemented for `&mut I` - = note: required for `Flatten>` to implement `Iterator` -note: required by a bound in `std::iter::IntoIterator::IntoIter` - --> $SRC_DIR/core/src/iter/traits/collect.rs:LL:COL + = note: required for `&'a T` to implement `IntoIterator` +note: required by a bound in `Flatten` + --> $SRC_DIR/core/src/iter/adapters/flatten.rs:LL:COL error: aborting due to 3 previous errors diff --git a/tests/ui/where-clauses/higher-ranked-fn-type.rs b/tests/ui/where-clauses/higher-ranked-fn-type.rs index c19e75eb7bfc8..5d7308b6c1cdc 100644 --- a/tests/ui/where-clauses/higher-ranked-fn-type.rs +++ b/tests/ui/where-clauses/higher-ranked-fn-type.rs @@ -1,5 +1,5 @@ // revisions: quiet verbose -// [verbose]compile-flags: -Zverbose +// [verbose]compile-flags: -Zverbose-internals #![allow(unused_parens)] diff --git a/triagebot.toml b/triagebot.toml index 27b174454b4e0..5406500cec302 100644 --- a/triagebot.toml +++ b/triagebot.toml @@ -639,7 +639,7 @@ cc = ["@nnethercote"] [assign] warn_non_default_branch = true contributing_url = "https://rustc-dev-guide.rust-lang.org/getting-started.html" -users_on_vacation = ["jyn514", "oli-obk", "spastorino"] +users_on_vacation = ["jyn514", "spastorino"] [assign.adhoc_groups] compiler-team = [