diff --git a/crates/cargo-test-support/src/registry.rs b/crates/cargo-test-support/src/registry.rs index cf6cff8d2c2..6d3db585a34 100644 --- a/crates/cargo-test-support/src/registry.rs +++ b/crates/cargo-test-support/src/registry.rs @@ -442,7 +442,7 @@ impl Package { } else { registry_path.join(&file) }; - let prev = fs::read_to_string(&dst).unwrap_or(String::new()); + let prev = fs::read_to_string(&dst).unwrap_or_default(); t!(fs::create_dir_all(dst.parent().unwrap())); t!(fs::write(&dst, prev + &line[..] + "\n")); diff --git a/src/bin/cargo/cli.rs b/src/bin/cargo/cli.rs index 7fc1da57a86..23ed0ca53a3 100644 --- a/src/bin/cargo/cli.rs +++ b/src/bin/cargo/cli.rs @@ -1,5 +1,5 @@ use cargo::core::features; -use cargo::{self, CliResult, Config}; +use cargo::{self, drop_print, drop_println, CliResult, Config}; use clap::{AppSettings, Arg, ArgMatches}; use super::commands; @@ -25,7 +25,8 @@ pub fn main(config: &mut Config) -> CliResult { }; if args.value_of("unstable-features") == Some("help") { - println!( + drop_println!( + config, " Available unstable (nightly-only) flags: @@ -40,7 +41,8 @@ Available unstable (nightly-only) flags: Run with 'cargo -Z [FLAG] [SUBCOMMAND]'" ); if !features::nightly_features_allowed() { - println!( + drop_println!( + config, "\nUnstable flags are only available on the nightly channel \ of Cargo, but this is the `{}` channel.\n\ {}", @@ -48,7 +50,8 @@ Run with 'cargo -Z [FLAG] [SUBCOMMAND]'" features::SEE_CHANNELS ); } - println!( + drop_println!( + config, "\nSee https://doc.rust-lang.org/nightly/cargo/reference/unstable.html \ for more information about these flags." ); @@ -58,7 +61,7 @@ Run with 'cargo -Z [FLAG] [SUBCOMMAND]'" let is_verbose = args.occurrences_of("verbose") > 0; if args.is_present("version") { let version = get_version_string(is_verbose); - print!("{}", version); + drop_print!(config, "{}", version); return Ok(()); } @@ -69,19 +72,19 @@ Run with 'cargo -Z [FLAG] [SUBCOMMAND]'" } if args.is_present("list") { - println!("Installed Commands:"); + drop_println!(config, "Installed Commands:"); for command in list_commands(config) { match command { CommandInfo::BuiltIn { name, about } => { let summary = about.unwrap_or_default(); let summary = summary.lines().next().unwrap_or(&summary); // display only the first line - println!(" {:<20} {}", name, summary) + drop_println!(config, " {:<20} {}", name, summary); } CommandInfo::External { name, path } => { if is_verbose { - println!(" {:<20} {}", name, path.display()) + drop_println!(config, " {:<20} {}", name, path.display()); } else { - println!(" {}", name) + drop_println!(config, " {}", name); } } } diff --git a/src/bin/cargo/commands/locate_project.rs b/src/bin/cargo/commands/locate_project.rs index df0c424aa4a..5897de108b3 100644 --- a/src/bin/cargo/commands/locate_project.rs +++ b/src/bin/cargo/commands/locate_project.rs @@ -1,6 +1,4 @@ use crate::command_prelude::*; - -use cargo::print_json; use serde::Serialize; pub fn cli() -> App { @@ -30,6 +28,6 @@ pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { let location = ProjectLocation { root }; - print_json(&location); + config.shell().print_json(&location); Ok(()) } diff --git a/src/bin/cargo/commands/metadata.rs b/src/bin/cargo/commands/metadata.rs index 1130f074e8f..616df735379 100644 --- a/src/bin/cargo/commands/metadata.rs +++ b/src/bin/cargo/commands/metadata.rs @@ -1,7 +1,5 @@ use crate::command_prelude::*; - use cargo::ops::{self, OutputMetadataOptions}; -use cargo::print_json; pub fn cli() -> App { 
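The `drop_print!`/`drop_println!` calls introduced above route output through the `Config`'s shell and discard I/O errors (for example a broken pipe from `cargo --list | head`) instead of panicking the way a bare `println!` would. A minimal standalone sketch of that error-dropping idea, using only the standard library (this is not Cargo's actual macro, which writes through `config.shell()`):

```rust
use std::io::Write;

// Write a formatted line to any writer and silently drop I/O errors,
// mirroring the "drop" in `drop_println!` (a broken pipe should not panic).
macro_rules! sketch_drop_println {
    ($dst:expr, $($arg:tt)*) => {
        { let _ = writeln!($dst, $($arg)*); }
    };
}

fn main() {
    let stdout = std::io::stdout();
    let mut out = stdout.lock();
    sketch_drop_println!(out, "Installed Commands:");
    sketch_drop_println!(out, "    {:<20} {}", "build", "Compile a local package");
}
```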
subcommand("metadata") @@ -54,6 +52,6 @@ pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { }; let result = ops::output_metadata(&ws, &options)?; - print_json(&result); + config.shell().print_json(&result); Ok(()) } diff --git a/src/bin/cargo/commands/pkgid.rs b/src/bin/cargo/commands/pkgid.rs index 57be0d11877..453c95a184d 100644 --- a/src/bin/cargo/commands/pkgid.rs +++ b/src/bin/cargo/commands/pkgid.rs @@ -37,6 +37,6 @@ pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { let ws = args.workspace(config)?; let spec = args.value_of("spec").or_else(|| args.value_of("package")); let spec = ops::pkgid(&ws, spec)?; - println!("{}", spec); + cargo::drop_println!(config, "{}", spec); Ok(()) } diff --git a/src/bin/cargo/commands/read_manifest.rs b/src/bin/cargo/commands/read_manifest.rs index fe2528b18aa..96cba1e082a 100644 --- a/src/bin/cargo/commands/read_manifest.rs +++ b/src/bin/cargo/commands/read_manifest.rs @@ -1,7 +1,5 @@ use crate::command_prelude::*; -use cargo::print_json; - pub fn cli() -> App { subcommand("read-manifest") .about( @@ -17,6 +15,6 @@ Deprecated, use `cargo metadata --no-deps` instead.\ pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { let ws = args.workspace(config)?; - print_json(&ws.current()?); + config.shell().print_json(&ws.current()?); Ok(()) } diff --git a/src/bin/cargo/commands/tree.rs b/src/bin/cargo/commands/tree.rs index b98ee5f7aad..7eeac6cb8e7 100644 --- a/src/bin/cargo/commands/tree.rs +++ b/src/bin/cargo/commands/tree.rs @@ -102,7 +102,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { if args.is_present("version") { let verbose = args.occurrences_of("verbose") > 0; let version = cli::get_version_string(verbose); - print!("{}", version); + cargo::drop_print!(config, "{}", version); return Ok(()); } let prefix = if args.is_present("no-indent") { @@ -206,7 +206,7 @@ fn parse_edge_kinds(config: &Config, args: &ArgMatches<'_>) -> CargoResult App { subcommand("verify-project") .about("Check correctness of crate manifest") @@ -13,19 +11,15 @@ pub fn cli() -> App { } pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - fn fail(reason: &str, value: &str) -> ! 
{ + if let Err(e) = args.workspace(config) { let mut h = HashMap::new(); - h.insert(reason.to_string(), value.to_string()); - print_json(&h); + h.insert("invalid".to_string(), e.to_string()); + config.shell().print_json(&h); process::exit(1) } - if let Err(e) = args.workspace(config) { - fail("invalid", &e.to_string()) - } - let mut h = HashMap::new(); h.insert("success".to_string(), "true".to_string()); - print_json(&h); + config.shell().print_json(&h); Ok(()) } diff --git a/src/bin/cargo/commands/version.rs b/src/bin/cargo/commands/version.rs index 81c6838e7ab..73172826150 100644 --- a/src/bin/cargo/commands/version.rs +++ b/src/bin/cargo/commands/version.rs @@ -1,6 +1,5 @@ -use crate::command_prelude::*; - use crate::cli; +use crate::command_prelude::*; pub fn cli() -> App { subcommand("version") @@ -8,9 +7,9 @@ pub fn cli() -> App { .arg(opt("quiet", "No output printed to stdout").short("q")) } -pub fn exec(_config: &mut Config, args: &ArgMatches<'_>) -> CliResult { +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { let verbose = args.occurrences_of("verbose") > 0; let version = cli::get_version_string(verbose); - print!("{}", version); + cargo::drop_print!(config, "{}", version); Ok(()) } diff --git a/src/cargo/core/compiler/build_context/mod.rs b/src/cargo/core/compiler/build_context/mod.rs index 8dac81cffd2..ac7d6eca295 100644 --- a/src/cargo/core/compiler/build_context/mod.rs +++ b/src/cargo/core/compiler/build_context/mod.rs @@ -10,7 +10,7 @@ use std::collections::HashMap; use std::path::PathBuf; mod target_info; -pub use self::target_info::{FileFlavor, RustcTargetData, TargetInfo}; +pub use self::target_info::{FileFlavor, FileType, RustcTargetData, TargetInfo}; /// The build context, containing all information about a build task. /// diff --git a/src/cargo/core/compiler/build_context/target_info.rs b/src/cargo/core/compiler/build_context/target_info.rs index 28d9589e1ef..ab3dc1c26a9 100644 --- a/src/cargo/core/compiler/build_context/target_info.rs +++ b/src/cargo/core/compiler/build_context/target_info.rs @@ -1,5 +1,5 @@ -use crate::core::compiler::{BuildOutput, CompileKind, CompileTarget}; -use crate::core::{Dependency, TargetKind, Workspace}; +use crate::core::compiler::{BuildOutput, CompileKind, CompileMode, CompileTarget, CrateType}; +use crate::core::{Dependency, Target, TargetKind, Workspace}; use crate::util::config::{Config, StringList, TargetConfig}; use crate::util::{CargoResult, CargoResultExt, ProcessBuilder, Rustc}; use cargo_platform::{Cfg, CfgExpr}; @@ -25,7 +25,7 @@ pub struct TargetInfo { /// `Some((prefix, suffix))`, for example `libcargo.so` would be /// `Some(("lib", ".so")). The value is `None` if the crate type is not /// supported. - crate_types: RefCell>>, + crate_types: RefCell>>, /// `cfg` information extracted from `rustc --print=cfg`. cfg: Vec, /// Path to the sysroot. @@ -40,8 +40,8 @@ pub struct TargetInfo { pub rustflags: Vec, /// Extra flags to pass to `rustdoc`, see `env_args`. pub rustdocflags: Vec, - /// Remove this when it hits stable (1.44) - pub supports_bitcode_in_rlib: Option, + /// Remove this when it hits stable (1.45) + pub supports_embed_bitcode: Option, } /// Kind of each file generated by a Unit, part of `FileType`. @@ -49,41 +49,73 @@ pub struct TargetInfo { pub enum FileFlavor { /// Not a special file type. Normal, - /// Like `Normal`, but not directly executable + /// Like `Normal`, but not directly executable. + /// For example, a `.wasm` file paired with the "normal" `.js` file. 
Auxiliary, /// Something you can link against (e.g., a library). - Linkable { rmeta: bool }, + Linkable, + /// An `.rmeta` Rust metadata file. + Rmeta, /// Piece of external debug information (e.g., `.dSYM`/`.pdb` file). DebugInfo, } /// Type of each file generated by a Unit. +#[derive(Debug)] pub struct FileType { /// The kind of file. pub flavor: FileFlavor, + /// The crate-type that generates this file. + /// + /// `None` for things that aren't associated with a specific crate type, + /// for example `rmeta` files. + pub crate_type: Option<CrateType>, /// The suffix for the file (for example, `.rlib`). /// This is an empty string for executables on Unix-like platforms. suffix: String, /// The prefix for the file (for example, `lib`). /// This is an empty string for things like executables. prefix: String, - /// Flag to convert hyphen to underscore. - /// - /// wasm bin targets will generate two files in deps such as - /// "web-stuff.js" and "web_stuff.wasm". Note the different usages of "-" - /// and "_". This flag indicates that the stem "web-stuff" should be - /// converted to "web_stuff". + /// Flag to convert hyphen to underscore when uplifting. should_replace_hyphens: bool, } impl FileType { - pub fn filename(&self, stem: &str) -> String { - let stem = if self.should_replace_hyphens { - stem.replace("-", "_") + /// The filename for this FileType created by rustc. + pub fn output_filename(&self, target: &Target, metadata: Option<&str>) -> String { + match metadata { + Some(metadata) => format!( + "{}{}-{}{}", + self.prefix, + target.crate_name(), + metadata, + self.suffix + ), + None => format!("{}{}{}", self.prefix, target.crate_name(), self.suffix), + } + } + + /// The filename for this FileType that Cargo should use when "uplifting" + /// it to the destination directory. + pub fn uplift_filename(&self, target: &Target) -> String { + let name = if self.should_replace_hyphens { + target.crate_name() } else { - stem.to_string() + target.name().to_string() }; - format!("{}{}{}", self.prefix, stem, self.suffix) + format!("{}{}{}", self.prefix, name, self.suffix) + } + + /// Creates a new instance representing a `.rmeta` file. + pub fn new_rmeta() -> FileType { + // Note that even binaries use the `lib` prefix.
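`output_filename` and `uplift_filename` above capture the two naming schemes in play: the hashed name rustc writes into `target/.../deps` (`{prefix}{crate_name}-{metadata}{suffix}`) and the friendlier name used when the artifact is hard-linked ("uplifted") to its destination directory. A simplified, self-contained sketch of that logic with plain strings standing in for Cargo's `Target` type (the sample values are made up):

```rust
// Simplified model of the two filenames: `crate_name` is the hyphen-to-
// underscore form ("web_stuff"), `name` keeps the hyphens ("web-stuff").
fn output_filename(prefix: &str, crate_name: &str, metadata: Option<&str>, suffix: &str) -> String {
    match metadata {
        Some(m) => format!("{}{}-{}{}", prefix, crate_name, m, suffix),
        None => format!("{}{}{}", prefix, crate_name, suffix),
    }
}

fn uplift_filename(prefix: &str, name: &str, crate_name: &str, replace_hyphens: bool, suffix: &str) -> String {
    let stem = if replace_hyphens { crate_name } else { name };
    format!("{}{}{}", prefix, stem, suffix)
}

fn main() {
    // What rustc produces in deps/ vs. what the user sees in target/debug/.
    assert_eq!(
        output_filename("lib", "web_stuff", Some("0f3c6d2a"), ".rlib"),
        "libweb_stuff-0f3c6d2a.rlib"
    );
    assert_eq!(uplift_filename("", "web-stuff", "web_stuff", false, ""), "web-stuff");
    println!("naming sketch ok");
}
```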
+ FileType { + flavor: FileFlavor::Rmeta, + crate_type: None, + suffix: ".rmeta".to_string(), + prefix: "lib".to_string(), + should_replace_hyphens: true, + } } } @@ -111,10 +143,10 @@ impl TargetInfo { .args(&rustflags) .env_remove("RUSTC_LOG"); - let mut bitcode_in_rlib_test = process.clone(); - bitcode_in_rlib_test.arg("-Cbitcode-in-rlib"); - let supports_bitcode_in_rlib = match kind { - CompileKind::Host => Some(rustc.cached_output(&bitcode_in_rlib_test).is_ok()), + let mut embed_bitcode_test = process.clone(); + embed_bitcode_test.arg("-Cembed-bitcode"); + let supports_embed_bitcode = match kind { + CompileKind::Host => Some(rustc.cached_output(&embed_bitcode_test).is_ok()), _ => None, }; @@ -123,10 +155,16 @@ impl TargetInfo { } let crate_type_process = process.clone(); - const KNOWN_CRATE_TYPES: &[&str] = - &["bin", "rlib", "dylib", "cdylib", "staticlib", "proc-macro"]; + const KNOWN_CRATE_TYPES: &[CrateType] = &[ + CrateType::Bin, + CrateType::Rlib, + CrateType::Dylib, + CrateType::Cdylib, + CrateType::Staticlib, + CrateType::ProcMacro, + ]; for crate_type in KNOWN_CRATE_TYPES.iter() { - process.arg("--crate-type").arg(crate_type); + process.arg("--crate-type").arg(crate_type.as_str()); } process.arg("--print=sysroot"); @@ -140,7 +178,7 @@ impl TargetInfo { let mut map = HashMap::new(); for crate_type in KNOWN_CRATE_TYPES { let out = parse_crate_type(crate_type, &process, &output, &error, &mut lines)?; - map.insert(crate_type.to_string(), out); + map.insert(crate_type.clone(), out); } let line = match lines.next() { @@ -202,7 +240,7 @@ impl TargetInfo { "RUSTDOCFLAGS", )?, cfg, - supports_bitcode_in_rlib, + supports_embed_bitcode, }) } @@ -226,15 +264,20 @@ impl TargetInfo { /// Returns the list of file types generated by the given crate type. /// /// Returns `None` if the target does not support the given crate type. - pub fn file_types( + fn file_types( &self, - crate_type: &str, + crate_type: &CrateType, flavor: FileFlavor, - kind: &TargetKind, target_triple: &str, ) -> CargoResult<Option<Vec<FileType>>> { + let crate_type = if *crate_type == CrateType::Lib { + CrateType::Rlib + } else { + crate_type.clone() + }; + let mut crate_types = self.crate_types.borrow_mut(); - let entry = crate_types.entry(crate_type.to_string()); + let entry = crate_types.entry(crate_type.clone()); let crate_type_info = match entry { Entry::Occupied(o) => &*o.into_mut(), Entry::Vacant(v) => { @@ -250,58 +293,95 @@ impl TargetInfo { suffix: suffix.clone(), prefix: prefix.clone(), flavor, - should_replace_hyphens: false, + crate_type: Some(crate_type.clone()), + should_replace_hyphens: crate_type != CrateType::Bin, }]; - // See rust-lang/cargo#4500. - if target_triple.ends_with("-windows-msvc") - && crate_type.ends_with("dylib") - && suffix == ".dll" - { - ret.push(FileType { - suffix: ".dll.lib".to_string(), - prefix: prefix.clone(), - flavor: FileFlavor::Normal, - should_replace_hyphens: false, - }) - } else if target_triple.ends_with("windows-gnu") - && crate_type.ends_with("dylib") - && suffix == ".dll" - { - // LD can link DLL directly, but LLD requires the import library. - ret.push(FileType { - suffix: ".dll.a".to_string(), - prefix: "lib".to_string(), - flavor: FileFlavor::Normal, - should_replace_hyphens: false, - }) + // Windows shared library import/export files.
+ if crate_type.is_dynamic() { + if target_triple.ends_with("-windows-msvc") { + assert!(suffix == ".dll"); + // See https://docs.microsoft.com/en-us/cpp/build/reference/working-with-import-libraries-and-export-files + // for more information about DLL import/export files. + ret.push(FileType { + suffix: ".dll.lib".to_string(), + prefix: prefix.clone(), + flavor: FileFlavor::Auxiliary, + crate_type: Some(crate_type.clone()), + should_replace_hyphens: true, + }); + // NOTE: lld does not produce these + ret.push(FileType { + suffix: ".dll.exp".to_string(), + prefix: prefix.clone(), + flavor: FileFlavor::Auxiliary, + crate_type: Some(crate_type.clone()), + should_replace_hyphens: true, + }); + } else if target_triple.ends_with("windows-gnu") { + assert!(suffix == ".dll"); + // See https://cygwin.com/cygwin-ug-net/dll.html for more + // information about GNU import libraries. + // LD can link DLL directly, but LLD requires the import library. + ret.push(FileType { + suffix: ".dll.a".to_string(), + prefix: "lib".to_string(), + flavor: FileFlavor::Auxiliary, + crate_type: Some(crate_type.clone()), + should_replace_hyphens: true, + }) + } } - // See rust-lang/cargo#4535. - if target_triple.starts_with("wasm32-") && crate_type == "bin" && suffix == ".js" { + if target_triple.starts_with("wasm32-") && crate_type == CrateType::Bin && suffix == ".js" { + // emscripten binaries generate a .js file, which loads a .wasm + // file. ret.push(FileType { suffix: ".wasm".to_string(), prefix: prefix.clone(), flavor: FileFlavor::Auxiliary, + crate_type: Some(crate_type.clone()), + // Name `foo-bar` will generate a `foo_bar.js` and + // `foo_bar.wasm`. Cargo will translate the underscore and + // copy `foo_bar.js` to `foo-bar.js`. However, the wasm + // filename is embedded in the .js file with an underscore, so + // it should not contain hyphens. should_replace_hyphens: true, - }) + }); + // And a map file for debugging. This is only emitted with debug=2 + // (-g4 for emcc). + ret.push(FileType { + suffix: ".wasm.map".to_string(), + prefix: prefix.clone(), + flavor: FileFlavor::DebugInfo, + crate_type: Some(crate_type.clone()), + should_replace_hyphens: true, + }); } - // See rust-lang/cargo#4490, rust-lang/cargo#4960. - // Only uplift debuginfo for binaries. - // - Tests are run directly from `target/debug/deps/` with the - // metadata hash still in the filename. - // - Examples are only uplifted for apple because the symbol file - // needs to match the executable file name to be found (i.e., it - // needs to remove the hash in the filename). On Windows, the path - // to the .pdb with the hash is embedded in the executable. + // Handle separate debug files. let is_apple = target_triple.contains("-apple-"); - if *kind == TargetKind::Bin || (*kind == TargetKind::ExampleBin && is_apple) { + if matches!( + crate_type, + CrateType::Bin | CrateType::Dylib | CrateType::Cdylib | CrateType::ProcMacro + ) { if is_apple { + let suffix = if crate_type == CrateType::Bin { + ".dSYM".to_string() + } else { + ".dylib.dSYM".to_string() + }; ret.push(FileType { - suffix: ".dSYM".to_string(), + suffix, prefix: prefix.clone(), flavor: FileFlavor::DebugInfo, + crate_type: Some(crate_type), + // macOS tools like lldb use all sorts of magic to locate + // dSYM files. See https://lldb.llvm.org/use/symbols.html + // for some details. It seems like a `.dSYM` located next + // to the executable with the same name is one method. The + // dSYM should have the same hyphens as the executable for + // the names to match. 
should_replace_hyphens: false, }) } else if target_triple.ends_with("-msvc") { @@ -309,8 +389,13 @@ impl TargetInfo { suffix: ".pdb".to_string(), prefix: prefix.clone(), flavor: FileFlavor::DebugInfo, - // rustc calls the linker with underscores, and the - // filename is embedded in the executable. + crate_type: Some(crate_type), + // The absolute path to the pdb file is embedded in the + // executable. If the exe/pdb pair is moved to another + // machine, then debuggers will look in the same directory + // of the exe with the original pdb filename. Since the + // original name contains underscores, they need to be + // preserved. should_replace_hyphens: true, }) } @@ -319,10 +404,10 @@ impl TargetInfo { Ok(Some(ret)) } - fn discover_crate_type(&self, crate_type: &str) -> CargoResult> { + fn discover_crate_type(&self, crate_type: &CrateType) -> CargoResult> { let mut process = self.crate_type_process.clone(); - process.arg("--crate-type").arg(crate_type); + process.arg("--crate-type").arg(crate_type.as_str()); let output = process.exec_with_output().chain_err(|| { format!( @@ -341,6 +426,62 @@ impl TargetInfo { &mut output.lines(), )?) } + + /// Returns all the file types generated by rustc for the given mode/target_kind. + /// + /// The first value is a Vec of file types generated, the second value is + /// a list of CrateTypes that are not supported by the given target. + pub fn rustc_outputs( + &self, + mode: CompileMode, + target_kind: &TargetKind, + target_triple: &str, + ) -> CargoResult<(Vec, Vec)> { + match mode { + CompileMode::Build => self.calc_rustc_outputs(target_kind, target_triple), + CompileMode::Test | CompileMode::Bench => { + match self.file_types(&CrateType::Bin, FileFlavor::Normal, target_triple)? { + Some(fts) => Ok((fts, Vec::new())), + None => Ok((Vec::new(), vec![CrateType::Bin])), + } + } + CompileMode::Check { .. } => Ok((vec![FileType::new_rmeta()], Vec::new())), + CompileMode::Doc { .. } | CompileMode::Doctest | CompileMode::RunCustomBuild => { + panic!("asked for rustc output for non-rustc mode") + } + } + } + + fn calc_rustc_outputs( + &self, + target_kind: &TargetKind, + target_triple: &str, + ) -> CargoResult<(Vec, Vec)> { + let mut unsupported = Vec::new(); + let mut result = Vec::new(); + let crate_types = target_kind.rustc_crate_types(); + for crate_type in &crate_types { + let flavor = if crate_type.is_linkable() { + FileFlavor::Linkable + } else { + FileFlavor::Normal + }; + let file_types = self.file_types(crate_type, flavor, target_triple)?; + match file_types { + Some(types) => { + result.extend(types); + } + None => { + unsupported.push(crate_type.clone()); + } + } + } + if !result.is_empty() && !crate_types.iter().any(|ct| ct.requires_upstream_objects()) { + // Only add rmeta if pipelining. + result.push(FileType::new_rmeta()); + } + Ok((result, unsupported)) + } } /// Takes rustc output (using specialized command line args), and calculates the file prefix and @@ -353,7 +494,7 @@ impl TargetInfo { /// This function can not handle more than one file per type (with wasm32-unknown-emscripten, there /// are two files for bin (`.wasm` and `.js`)). fn parse_crate_type( - crate_type: &str, + crate_type: &CrateType, cmd: &ProcessBuilder, output: &str, error: &str, @@ -448,7 +589,7 @@ fn env_args( // This means that, e.g., even if the specified --target is the // same as the host, build scripts in plugins won't get // RUSTFLAGS. 
- if requested_kinds != &[CompileKind::Host] && kind.is_host() { + if requested_kinds != [CompileKind::Host] && kind.is_host() { // This is probably a build script or plugin and we're // compiling with --target. In this scenario there are // no rustflags we can apply. @@ -525,9 +666,7 @@ pub struct RustcTargetData { host_info: TargetInfo, /// Build information for targets that we're building for. This will be - /// empty if the `--target` flag is not passed, and currently also only ever - /// has at most one entry, but eventually we'd like to support multi-target - /// builds with Cargo. + /// empty if the `--target` flag is not passed. target_config: HashMap, target_info: HashMap, } diff --git a/src/cargo/core/compiler/build_plan.rs b/src/cargo/core/compiler/build_plan.rs index a01fa19821b..b76e8028716 100644 --- a/src/cargo/core/compiler/build_plan.rs +++ b/src/cargo/core/compiler/build_plan.rs @@ -14,7 +14,7 @@ use serde::Serialize; use super::context::OutputFile; use super::{CompileKind, CompileMode, Context, Unit}; use crate::core::TargetKind; -use crate::util::{internal, CargoResult, ProcessBuilder}; +use crate::util::{internal, CargoResult, Config, ProcessBuilder}; #[derive(Debug, Serialize)] struct Invocation { @@ -146,9 +146,9 @@ impl BuildPlan { self.plan.inputs = inputs; } - pub fn output_plan(self) { + pub fn output_plan(self, config: &Config) { let encoded = serde_json::to_string(&self.plan).unwrap(); - println!("{}", encoded); + crate::drop_println!(config, "{}", encoded); } } diff --git a/src/cargo/core/compiler/compilation.rs b/src/cargo/core/compiler/compilation.rs index efcaff1355e..7324618ff45 100644 --- a/src/cargo/core/compiler/compilation.rs +++ b/src/cargo/core/compiler/compilation.rs @@ -167,7 +167,7 @@ impl<'cfg> Compilation<'cfg> { } for crate_type in unit.target.rustc_crate_types() { - p.arg("--crate-type").arg(crate_type); + p.arg("--crate-type").arg(crate_type.as_str()); } Ok(p) diff --git a/src/cargo/core/compiler/compile_kind.rs b/src/cargo/core/compiler/compile_kind.rs index 3f8d1e9d819..e2089d969bd 100644 --- a/src/cargo/core/compiler/compile_kind.rs +++ b/src/cargo/core/compiler/compile_kind.rs @@ -55,7 +55,7 @@ impl CompileKind { if targets.len() > 1 && !config.cli_unstable().multitarget { bail!("specifying multiple `--target` flags requires `-Zmultitarget`") } - if targets.len() != 0 { + if !targets.is_empty() { return Ok(targets .iter() .map(|value| Ok(CompileKind::Target(CompileTarget::new(value)?))) diff --git a/src/cargo/core/compiler/context/compilation_files.rs b/src/cargo/core/compiler/context/compilation_files.rs index 9fc2f4117a4..04d75fe0ac5 100644 --- a/src/cargo/core/compiler/context/compilation_files.rs +++ b/src/cargo/core/compiler/context/compilation_files.rs @@ -1,7 +1,7 @@ use std::collections::HashMap; use std::env; use std::fmt; -use std::hash::{Hash, Hasher, SipHasher}; +use std::hash::{Hash, Hasher}; use std::path::{Path, PathBuf}; use std::sync::Arc; @@ -9,9 +9,9 @@ use lazycell::LazyCell; use log::info; use super::{BuildContext, CompileKind, Context, FileFlavor, Layout}; -use crate::core::compiler::{CompileMode, CompileTarget, Unit}; +use crate::core::compiler::{CompileMode, CompileTarget, CrateType, FileType, Unit}; use crate::core::{Target, TargetKind, Workspace}; -use crate::util::{self, CargoResult}; +use crate::util::{self, CargoResult, StableHasher}; /// The `Metadata` is a hash used to make unique file names for each unit in a /// build. It is also use for symbol mangling. 
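The import changes above swap the deprecated `std::hash::SipHasher` for a Cargo-internal `StableHasher`, since the `Metadata` value described in the module docs ends up embedded in file names and should stay deterministic. As a rough illustration of how a few unit-identifying inputs become a short hex suffix such as `libfoo-17f48c4a8b6ada8a.rlib` (this sketch uses `DefaultHasher` purely for demonstration; it makes no stability promise, which is exactly why Cargo rolls its own):

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Hash a few unit-identifying inputs into a 16-hex-digit suffix.
fn short_hash(package: &str, version: &str, profile: &str, target: &str) -> String {
    let mut hasher = DefaultHasher::new(); // illustration only; not stable across releases
    package.hash(&mut hasher);
    version.hash(&mut hasher);
    profile.hash(&mut hasher);
    target.hash(&mut hasher);
    format!("{:016x}", hasher.finish())
}

fn main() {
    let meta = short_hash("foo", "1.2.3", "dev", "x86_64-unknown-linux-gnu");
    println!("libfoo-{}.rlib", meta); // e.g. libfoo-a1b2c3d4e5f60718.rlib
}
```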
@@ -85,9 +85,6 @@ pub struct CompilationFiles<'a, 'cfg> { roots: Vec, ws: &'a Workspace<'cfg>, /// Metadata hash to use for each unit. - /// - /// `None` if the unit should not use a metadata data hash (like rustdoc, - /// or some dylibs). metas: HashMap>, /// For each Unit, a list all files produced. outputs: HashMap>>>, @@ -151,12 +148,11 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { } } - /// Gets the metadata for a target in a specific profile. - /// We build to the path `"{filename}-{target_metadata}"`. - /// We use a linking step to link/copy to a predictable filename - /// like `target/debug/libfoo.{a,so,rlib}` and such. + /// Gets the metadata for the given unit. + /// + /// See module docs for more details. /// - /// Returns `None` if the unit should not use a metadata data hash (like + /// Returns `None` if the unit should not use a metadata hash (like /// rustdoc, or some dylibs). pub fn metadata(&self, unit: &Unit) -> Option { self.metas[unit] @@ -169,8 +165,8 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { util::short_hash(&hashable) } - /// Returns the appropriate output directory for the specified package and - /// target. + /// Returns the directory where the artifacts for the given unit are + /// initially created. pub fn out_dir(&self, unit: &Unit) -> PathBuf { if unit.mode.is_doc() { self.layout(unit.kind).doc().to_path_buf() @@ -191,7 +187,10 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { } /// Directory name to use for a package in the form `NAME-HASH`. - pub fn pkg_dir(&self, unit: &Unit) -> String { + /// + /// Note that some units may share the same directory, so care should be + /// taken in those cases! + fn pkg_dir(&self, unit: &Unit) -> String { let name = unit.pkg.package_id().name(); match self.metas[unit] { Some(ref meta) => format!("{}-{}", name, meta), @@ -221,9 +220,29 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { self.layout(unit.kind).fingerprint().join(dir) } + /// Returns the path for a file in the fingerprint directory. + /// + /// The "prefix" should be something to distinguish the file from other + /// files in the fingerprint directory. + pub fn fingerprint_file_path(&self, unit: &Unit, prefix: &str) -> PathBuf { + // Different targets need to be distinguished in the + let kind = unit.target.kind().description(); + let flavor = if unit.mode.is_any_test() { + "test-" + } else if unit.mode.is_doc() { + "doc-" + } else if unit.mode.is_run_custom_build() { + "run-" + } else { + "" + }; + let name = format!("{}{}{}-{}", prefix, flavor, kind, unit.target.name()); + self.fingerprint_dir(unit).join(name) + } + /// Path where compiler output is cached. pub fn message_cache_path(&self, unit: &Unit) -> PathBuf { - self.fingerprint_dir(unit).join("output") + self.fingerprint_file_path(unit, "output-") } /// Returns the directory where a compiled build script is stored. @@ -252,14 +271,6 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { self.build_script_run_dir(unit).join("out") } - /// Returns the file stem for a given target/profile combo (with metadata). - pub fn file_stem(&self, unit: &Unit) -> String { - match self.metas[unit] { - Some(ref metadata) => format!("{}-{}", unit.target.crate_name(), metadata), - None => self.bin_stem(unit), - } - } - /// Returns the path to the executable binary for the given bin target. /// /// This should only to be used when a `Unit` is not available. 
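The new `fingerprint_file_path` helper above composes names as `{prefix}{flavor}{kind}-{target_name}`, which is what the revised `.fingerprint/` layout comments later in this patch (`dep-lib-$targetname`, `output-lib-$targetname`, …) refer to. A tiny standalone sketch of that composition:

```rust
// Mirror of the name layout: a prefix ("", "dep-", "output-"), a mode flavor
// ("test-", "doc-", "run-" or ""), the target kind ("lib", "bin", ...), and
// the target name.
fn fingerprint_file_name(prefix: &str, flavor: &str, kind: &str, target_name: &str) -> String {
    format!("{}{}{}-{}", prefix, flavor, kind, target_name)
}

fn main() {
    assert_eq!(fingerprint_file_name("", "", "lib", "foo"), "lib-foo");
    assert_eq!(fingerprint_file_name("dep-", "", "lib", "foo"), "dep-lib-foo");
    assert_eq!(fingerprint_file_name("output-", "test-", "bin", "foo"), "output-test-bin-foo");
}
```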
@@ -272,13 +283,12 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { assert!(target.is_bin()); let dest = self.layout(kind).dest(); let info = bcx.target_data.info(kind); - let file_types = info - .file_types( - "bin", - FileFlavor::Normal, + let (file_types, _) = info + .rustc_outputs( + CompileMode::Build, &TargetKind::Bin, bcx.target_data.short_name(&kind), - )? + ) .expect("target must support `bin`"); let file_type = file_types @@ -286,10 +296,12 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { .find(|file_type| file_type.flavor == FileFlavor::Normal) .expect("target must support `bin`"); - Ok(dest.join(file_type.filename(target.name()))) + Ok(dest.join(file_type.uplift_filename(target))) } /// Returns the filenames that the given unit will generate. + /// + /// Note: It is not guaranteed that all of the files will be generated. pub(super) fn outputs( &self, unit: &Unit, @@ -300,57 +312,50 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { .map(Arc::clone) } - /// Returns the bin filename for a given target, without extension and metadata. - fn bin_stem(&self, unit: &Unit) -> String { - if unit.target.allows_dashes() { - unit.target.name().to_string() - } else { - unit.target.crate_name() + /// Returns the path where the output for the given unit and FileType + /// should be uplifted to. + /// + /// Returns `None` if the unit shouldn't be uplifted (for example, a + /// dependent rlib). + fn uplift_to(&self, unit: &Unit, file_type: &FileType, from_path: &Path) -> Option { + // Tests, check, doc, etc. should not be uplifted. + if unit.mode != CompileMode::Build || file_type.flavor == FileFlavor::Rmeta { + return None; + } + // Only uplift: + // - Binaries: The user always wants to see these, even if they are + // implicitly built (for example for integration tests). + // - dylibs: This ensures that the dynamic linker pulls in all the + // latest copies (even if the dylib was built from a previous cargo + // build). There are complex reasons for this, see #8139, #6167, #6162. + // - Things directly requested from the command-line (the "roots"). + // This one is a little questionable for rlibs (see #6131), but is + // historically how Cargo has operated. This is primarily useful to + // give the user access to staticlibs and cdylibs. + if !unit.target.is_bin() + && !unit.target.is_custom_build() + && file_type.crate_type != Some(CrateType::Dylib) + && !self.roots.contains(unit) + { + return None; } - } - /// Returns a tuple `(hard_link_dir, filename_stem)` for the primary - /// output file for the given unit. - /// - /// `hard_link_dir` is the directory where the file should be hard-linked - /// ("uplifted") to. For example, `/path/to/project/target`. - /// - /// `filename_stem` is the base filename without an extension. - /// - /// This function returns it in two parts so the caller can add - /// prefix/suffix to filename separately. - /// - /// Returns an `Option` because in some cases we don't want to link - /// (eg a dependent lib). - fn link_stem(&self, unit: &Unit) -> Option<(PathBuf, String)> { - let out_dir = self.out_dir(unit); - let bin_stem = self.bin_stem(unit); // Stem without metadata. - let file_stem = self.file_stem(unit); // Stem with metadata. - - // We currently only lift files up from the `deps` directory. If - // it was compiled into something like `example/` or `doc/` then - // we don't want to link it up. - if out_dir.ends_with("deps") { - // Don't lift up library dependencies. 
- if unit.target.is_bin() || self.roots.contains(unit) || unit.target.is_dylib() { - Some(( - out_dir.parent().unwrap().to_owned(), - if unit.mode.is_any_test() { - file_stem - } else { - bin_stem - }, - )) - } else { - None - } - } else if bin_stem == file_stem { - None - } else if out_dir.ends_with("examples") || out_dir.parent().unwrap().ends_with("build") { - Some((out_dir, bin_stem)) + let filename = file_type.uplift_filename(&unit.target); + let uplift_path = if unit.target.is_example() { + // Examples live in their own little world. + self.layout(unit.kind).examples().join(filename) + } else if unit.target.is_custom_build() { + self.build_script_dir(unit).join(filename) } else { - None + self.layout(unit.kind).dest().join(filename) + }; + if from_path == uplift_path { + // This can happen with things like examples that reside in the + // same directory, do not have a metadata hash (like on Windows), + // and do not have hyphens. + return None; } + Some(uplift_path) } fn calc_outputs( @@ -359,18 +364,6 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { bcx: &BuildContext<'a, 'cfg>, ) -> CargoResult>> { let ret = match unit.mode { - CompileMode::Check { .. } => { - // This may be confusing. rustc outputs a file named `lib*.rmeta` - // for both libraries and binaries. - let file_stem = self.file_stem(unit); - let path = self.out_dir(unit).join(format!("lib{}.rmeta", file_stem)); - vec![OutputFile { - path, - hardlink: None, - export_path: None, - flavor: FileFlavor::Linkable { rmeta: false }, - }] - } CompileMode::Doc { .. } => { let path = self .out_dir(unit) @@ -394,128 +387,73 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { // but Cargo does not know about that. vec![] } - CompileMode::Test | CompileMode::Build | CompileMode::Bench => { - self.calc_outputs_rustc(unit, bcx)? - } + CompileMode::Test + | CompileMode::Build + | CompileMode::Bench + | CompileMode::Check { .. } => self.calc_outputs_rustc(unit, bcx)?, }; info!("Target filenames: {:?}", ret); Ok(Arc::new(ret)) } + /// Computes the actual, full pathnames for all the files generated by rustc. + /// + /// The `OutputFile` also contains the paths where those files should be + /// "uplifted" to. fn calc_outputs_rustc( &self, unit: &Unit, bcx: &BuildContext<'a, 'cfg>, ) -> CargoResult> { - let mut ret = Vec::new(); - let mut unsupported = Vec::new(); - let out_dir = self.out_dir(unit); - let link_stem = self.link_stem(unit); - let info = bcx.target_data.info(unit.kind); - let file_stem = self.file_stem(unit); - let mut add = |crate_type: &str, flavor: FileFlavor| -> CargoResult<()> { - let crate_type = if crate_type == "lib" { - "rlib" - } else { - crate_type - }; - let file_types = info.file_types( - crate_type, - flavor, - unit.target.kind(), - bcx.target_data.short_name(&unit.kind), - )?; - - match file_types { - Some(types) => { - for file_type in types { - let path = out_dir.join(file_type.filename(&file_stem)); - // Don't create hardlink for tests - let hardlink = if unit.mode.is_any_test() { - None - } else { - link_stem - .as_ref() - .map(|&(ref ld, ref ls)| ld.join(file_type.filename(ls))) - }; - let export_path = if unit.target.is_custom_build() { - None - } else { - self.export_dir.as_ref().and_then(|export_dir| { - hardlink - .as_ref() - .map(|hardlink| export_dir.join(hardlink.file_name().unwrap())) - }) - }; - ret.push(OutputFile { - path, - hardlink, - export_path, - flavor: file_type.flavor, - }); - } - } - // Not supported; don't worry about it. 
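`uplift_to` above boils down to a predicate: only `Build`-mode outputs are ever uplifted, never `.rmeta` files, and beyond that only binaries, build-script executables, dylibs, and root units requested on the command line. A condensed sketch of that decision, with booleans standing in for Cargo's `Unit` and `FileType` (the parameter names are illustrative, not Cargo's API):

```rust
// Condensed decision from `uplift_to`: which freshly built artifacts get
// hard-linked out of target/<profile>/deps into the user-facing directory.
fn should_uplift(
    is_build_mode: bool,
    is_rmeta: bool,
    is_bin: bool,
    is_custom_build: bool,
    is_dylib: bool,
    is_root: bool,
) -> bool {
    if !is_build_mode || is_rmeta {
        return false; // tests, checks, docs, and .rmeta files stay in deps/
    }
    // Binaries, build scripts, dylibs, and anything requested on the
    // command line ("roots") are uplifted; dependent rlibs are not.
    is_bin || is_custom_build || is_dylib || is_root
}

fn main() {
    assert!(should_uplift(true, false, true, false, false, false)); // a bin
    assert!(!should_uplift(true, false, false, false, false, false)); // a dependency rlib
    assert!(should_uplift(true, false, false, false, false, true)); // a root staticlib
}
```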
- None => { - unsupported.push(crate_type.to_string()); - } - } - Ok(()) - }; - match *unit.target.kind() { - TargetKind::Bin - | TargetKind::CustomBuild - | TargetKind::ExampleBin - | TargetKind::Bench - | TargetKind::Test => { - add("bin", FileFlavor::Normal)?; - } - TargetKind::Lib(..) | TargetKind::ExampleLib(..) if unit.mode.is_any_test() => { - add("bin", FileFlavor::Normal)?; - } - TargetKind::ExampleLib(ref kinds) | TargetKind::Lib(ref kinds) => { - for kind in kinds { - add( - kind.crate_type(), - if kind.linkable() { - FileFlavor::Linkable { rmeta: false } - } else { - FileFlavor::Normal - }, - )?; - } - let path = out_dir.join(format!("lib{}.rmeta", file_stem)); - if !unit.requires_upstream_objects() { - ret.push(OutputFile { - path, - hardlink: None, - export_path: None, - flavor: FileFlavor::Linkable { rmeta: true }, - }); - } - } - } - if ret.is_empty() { + let info = bcx.target_data.info(unit.kind); + let triple = bcx.target_data.short_name(&unit.kind); + let (file_types, unsupported) = + info.rustc_outputs(unit.mode, unit.target.kind(), triple)?; + if file_types.is_empty() { if !unsupported.is_empty() { + let unsupported_strs: Vec<_> = unsupported.iter().map(|ct| ct.as_str()).collect(); anyhow::bail!( "cannot produce {} for `{}` as the target `{}` \ does not support these crate types", - unsupported.join(", "), + unsupported_strs.join(", "), unit.pkg, - bcx.target_data.short_name(&unit.kind), + triple, ) } anyhow::bail!( "cannot compile `{}` as the target `{}` does not \ support any of the output crate types", unit.pkg, - bcx.target_data.short_name(&unit.kind), + triple, ); } - Ok(ret) + + // Convert FileType to OutputFile. + let mut outputs = Vec::new(); + for file_type in file_types { + let meta = self.metadata(unit).map(|m| m.to_string()); + let path = out_dir.join(file_type.output_filename(&unit.target, meta.as_deref())); + let hardlink = self.uplift_to(unit, &file_type, &path); + let export_path = if unit.target.is_custom_build() { + None + } else { + self.export_dir.as_ref().and_then(|export_dir| { + hardlink + .as_ref() + .map(|hardlink| export_dir.join(hardlink.file_name().unwrap())) + }) + }; + outputs.push(OutputFile { + path, + hardlink, + export_path, + flavor: file_type.flavor, + }); + } + Ok(outputs) } } @@ -539,47 +477,11 @@ fn compute_metadata( cx: &Context<'_, '_>, metas: &mut HashMap>, ) -> Option { - if unit.mode.is_doc_test() { - // Doc tests do not have metadata. - return None; - } - // No metadata for dylibs because of a couple issues: - // - macOS encodes the dylib name in the executable, - // - Windows rustc multiple files of which we can't easily link all of them. - // - // No metadata for bin because of an issue: - // - wasm32 rustc/emcc encodes the `.wasm` name in the `.js` (rust-lang/cargo#4535). - // - msvc: The path to the PDB is embedded in the executable, and we don't - // want the PDB path to include the hash in it. - // - // Two exceptions: - // 1) Upstream dependencies (we aren't exporting + need to resolve name conflict), - // 2) `__CARGO_DEFAULT_LIB_METADATA` env var. - // - // Note, however, that the compiler's build system at least wants - // path dependencies (eg libstd) to have hashes in filenames. To account for - // that we have an extra hack here which reads the - // `__CARGO_DEFAULT_LIB_METADATA` environment variable and creates a - // hash in the filename if that's present. - // - // This environment variable should not be relied on! It's - // just here for rustbuild. We need a more principled method - // doing this eventually. 
let bcx = &cx.bcx; - let __cargo_default_lib_metadata = env::var("__CARGO_DEFAULT_LIB_METADATA"); - let short_name = bcx.target_data.short_name(&unit.kind); - if !(unit.mode.is_any_test() || unit.mode.is_check()) - && (unit.target.is_dylib() - || unit.target.is_cdylib() - || (unit.target.is_executable() && short_name.starts_with("wasm32-")) - || (unit.target.is_executable() && short_name.contains("msvc"))) - && unit.pkg.package_id().source_id().is_path() - && __cargo_default_lib_metadata.is_err() - { + if !should_use_metadata(bcx, unit) { return None; } - - let mut hasher = SipHasher::new(); + let mut hasher = StableHasher::new(); // This is a generic version number that can be changed to make // backwards-incompatible changes to any file structures in the output @@ -638,7 +540,7 @@ fn compute_metadata( // Seed the contents of `__CARGO_DEFAULT_LIB_METADATA` to the hasher if present. // This should be the release channel, to get a different hash for each channel. - if let Ok(ref channel) = __cargo_default_lib_metadata { + if let Ok(ref channel) = env::var("__CARGO_DEFAULT_LIB_METADATA") { channel.hash(&mut hasher); } @@ -654,7 +556,7 @@ fn compute_metadata( Some(Metadata(hasher.finish())) } -fn hash_rustc_version(bcx: &BuildContext<'_, '_>, hasher: &mut SipHasher) { +fn hash_rustc_version(bcx: &BuildContext<'_, '_>, hasher: &mut StableHasher) { let vers = &bcx.rustc().version; if vers.pre.is_empty() || bcx.config.cli_unstable().separate_nightlies { // For stable, keep the artifacts separate. This helps if someone is @@ -685,3 +587,46 @@ fn hash_rustc_version(bcx: &BuildContext<'_, '_>, hasher: &mut SipHasher) { // the future when cranelift sees more use, and people want to switch // between different backends without recompiling. } + +/// Returns whether or not this unit should use a metadata hash. +fn should_use_metadata(bcx: &BuildContext<'_, '_>, unit: &Unit) -> bool { + if unit.mode.is_doc_test() { + // Doc tests do not have metadata. + return false; + } + if unit.mode.is_any_test() || unit.mode.is_check() { + // These always use metadata. + return true; + } + // No metadata in these cases: + // + // - dylibs: + // - macOS encodes the dylib name in the executable, so it can't be renamed. + // - TODO: Are there other good reasons? If not, maybe this should be macos specific? + // - Windows MSVC executables: The path to the PDB is embedded in the + // executable, and we don't want the PDB path to include the hash in it. + // - wasm32 executables: When using emscripten, the path to the .wasm file + // is embedded in the .js file, so we don't want the hash in there. + // TODO: Is this necessary for wasm32-unknown-unknown? + // + // This is only done for local packages, as we don't expect to export + // dependencies. + // + // The __CARGO_DEFAULT_LIB_METADATA env var is used to override this to + // force metadata in the hash. This is only used for building libstd. For + // example, if libstd is placed in a common location, we don't want a file + // named /usr/lib/libstd.so which could conflict with other rustc + // installs. TODO: Is this still a realistic concern? 
+ // See https://github.com/rust-lang/cargo/issues/3005 + let short_name = bcx.target_data.short_name(&unit.kind); + if (unit.target.is_dylib() + || unit.target.is_cdylib() + || (unit.target.is_executable() && short_name.starts_with("wasm32-")) + || (unit.target.is_executable() && short_name.contains("msvc"))) + && unit.pkg.package_id().source_id().is_path() + && env::var("__CARGO_DEFAULT_LIB_METADATA").is_err() + { + return false; + } + true +} diff --git a/src/cargo/core/compiler/context/mod.rs b/src/cargo/core/compiler/context/mod.rs index 0855fc09ce7..602aba2cda8 100644 --- a/src/cargo/core/compiler/context/mod.rs +++ b/src/cargo/core/compiler/context/mod.rs @@ -1,4 +1,3 @@ -#![allow(deprecated)] use std::collections::{BTreeSet, HashMap, HashSet}; use std::path::PathBuf; use std::sync::{Arc, Mutex}; @@ -16,6 +15,7 @@ use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts}; use super::fingerprint::Fingerprint; use super::job_queue::JobQueue; use super::layout::Layout; +use super::lto::Lto; use super::unit_graph::UnitDep; use super::{BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor}; @@ -72,6 +72,11 @@ pub struct Context<'a, 'cfg> { /// jobserver clients for each Unit (which eventually becomes a rustc /// process). pub rustc_clients: HashMap, + + /// Map of the LTO-status of each unit. This indicates what sort of + /// compilation is happening (only object, only bitcode, both, etc), and is + /// precalculated early on. + pub lto: HashMap, } impl<'a, 'cfg> Context<'a, 'cfg> { @@ -111,6 +116,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { rmeta_required: HashSet::new(), rustc_clients: HashMap::new(), pipelining, + lto: HashMap::new(), }) } @@ -123,6 +129,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { self.prepare_units()?; self.prepare()?; custom_build::build_map(&mut self)?; + super::lto::generate(&mut self)?; self.check_collistions()?; for unit in &self.bcx.roots { @@ -150,7 +157,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { if build_plan { plan.set_inputs(self.build_plan_inputs()?); - plan.output_plan(); + plan.output_plan(self.bcx.config); } // Collect the result of the build into `self.compilation`. @@ -254,7 +261,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { /// Returns the executable for the specified unit (if any). pub fn get_executable(&mut self, unit: &Unit) -> CargoResult> { for output in self.outputs(unit)?.iter() { - if output.flavor == FileFlavor::DebugInfo { + if output.flavor != FileFlavor::Normal { continue; } diff --git a/src/cargo/core/compiler/crate_type.rs b/src/cargo/core/compiler/crate_type.rs new file mode 100644 index 00000000000..fd0126cbe83 --- /dev/null +++ b/src/cargo/core/compiler/crate_type.rs @@ -0,0 +1,97 @@ +use std::fmt; + +#[derive(Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub enum CrateType { + Bin, + Lib, + Rlib, + Dylib, + Cdylib, + Staticlib, + ProcMacro, + Other(String), +} + +impl CrateType { + pub fn as_str(&self) -> &str { + match self { + CrateType::Bin => "bin", + CrateType::Lib => "lib", + CrateType::Rlib => "rlib", + CrateType::Dylib => "dylib", + CrateType::Cdylib => "cdylib", + CrateType::Staticlib => "staticlib", + CrateType::ProcMacro => "proc-macro", + CrateType::Other(s) => s, + } + } + + pub fn is_linkable(&self) -> bool { + match self { + CrateType::Lib | CrateType::Rlib | CrateType::Dylib | CrateType::ProcMacro => true, + CrateType::Bin | CrateType::Cdylib | CrateType::Staticlib | CrateType::Other(..) 
=> { + false + } + } + } + + pub fn is_dynamic(&self) -> bool { + match self { + CrateType::Dylib | CrateType::Cdylib | CrateType::ProcMacro => true, + CrateType::Lib + | CrateType::Rlib + | CrateType::Bin + | CrateType::Staticlib + | CrateType::Other(..) => false, + } + } + + pub fn requires_upstream_objects(&self) -> bool { + match self { + // "lib" == "rlib" and is a compilation that doesn't actually + // require upstream object files to exist, only upstream metadata + // files. As a result, it doesn't require upstream artifacts + CrateType::Lib | CrateType::Rlib => false, + + // Everything else, however, is some form of "linkable output" or + // something that requires upstream object files. + _ => true, + } + } +} + +impl fmt::Display for CrateType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.as_str().fmt(f) + } +} + +impl<'a> From<&'a String> for CrateType { + fn from(s: &'a String) -> Self { + match s.as_str() { + "bin" => CrateType::Bin, + "lib" => CrateType::Lib, + "rlib" => CrateType::Rlib, + "dylib" => CrateType::Dylib, + "cdylib" => CrateType::Cdylib, + "staticlib" => CrateType::Staticlib, + "procmacro" => CrateType::ProcMacro, + _ => CrateType::Other(s.clone()), + } + } +} + +impl fmt::Debug for CrateType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.to_string().fmt(f) + } +} + +impl serde::Serialize for CrateType { + fn serialize(&self, s: S) -> Result + where + S: serde::ser::Serializer, + { + self.to_string().serialize(s) + } +} diff --git a/src/cargo/core/compiler/custom_build.rs b/src/cargo/core/compiler/custom_build.rs index 38af4e7d2a6..6b642392018 100644 --- a/src/cargo/core/compiler/custom_build.rs +++ b/src/cargo/core/compiler/custom_build.rs @@ -738,7 +738,7 @@ pub fn build_map(cx: &mut Context<'_, '_>) -> CargoResult<()> { if dep_unit.target.for_host() { ret.plugins.extend(dep_scripts.to_link.iter().cloned()); - } else if dep_unit.target.linkable() { + } else if dep_unit.target.is_linkable() { for &(pkg, metadata) in dep_scripts.to_link.iter() { add_to_link(&mut ret, pkg, metadata); } diff --git a/src/cargo/core/compiler/fingerprint.rs b/src/cargo/core/compiler/fingerprint.rs index 89263cc7c9b..145a622bb1f 100644 --- a/src/cargo/core/compiler/fingerprint.rs +++ b/src/cargo/core/compiler/fingerprint.rs @@ -43,8 +43,9 @@ //! The `Metadata` hash is a hash added to the output filenames to isolate //! each unit. See the documentation in the `compilation_files` module for //! more details. NOTE: Not all output files are isolated via filename hashes -//! (like dylibs), but the fingerprint directory always has the `Metadata` -//! hash in its directory name. +//! (like dylibs). The fingerprint directory uses a hash, but sometimes units +//! share the same fingerprint directory (when they don't have Metadata) so +//! care should be taken to handle this! //! //! Fingerprints and Metadata are similar, and track some of the same things. //! The Metadata contains information that is required to keep Units separate. @@ -71,6 +72,7 @@ //! -C incremental=… flag | ✓ | //! mtime of sources | ✓[^3] | //! RUSTFLAGS/RUSTDOCFLAGS | ✓ | +//! LTO flags | ✓ | //! is_std | | ✓ //! //! [^1]: Build script and bin dependencies are not included. @@ -103,8 +105,9 @@ //! - A "dep-info" file which contains a list of source filenames for the //! target. See below for details. //! - An `invoked.timestamp` file whose filesystem mtime is updated every time -//! the Unit is built. This is an experimental feature used for cleaning -//! unused artifacts. 
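The `From<&String>` conversion above keeps the new `CrateType` enum open-ended: known strings map to dedicated variants while anything else is preserved as `Other(String)` rather than rejected. A minimal standalone sketch of that "known variants plus `Other`" pattern (a cut-down enum, not the one from the patch):

```rust
// Cut-down version of the "known variants plus Other(String)" pattern.
#[derive(Clone, Debug, PartialEq)]
enum Kind {
    Bin,
    Rlib,
    Other(String),
}

impl Kind {
    fn as_str(&self) -> &str {
        match self {
            Kind::Bin => "bin",
            Kind::Rlib => "rlib",
            Kind::Other(s) => s,
        }
    }
    fn from_str(s: &str) -> Kind {
        match s {
            "bin" => Kind::Bin,
            "rlib" => Kind::Rlib,
            other => Kind::Other(other.to_string()),
        }
    }
}

fn main() {
    // Known strings map to dedicated variants; anything else is preserved.
    assert_eq!(Kind::from_str("rlib"), Kind::Rlib);
    assert_eq!(Kind::from_str("looselib").as_str(), "looselib");
}
```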
+//! the Unit is built. This is used for capturing the time when the build +//! starts, to detect if files are changed in the middle of the build. See +//! below for more details. //! //! Note that some units are a little different. A Unit for *running* a build //! script or for `rustdoc` does not have a dep-info file (it's not @@ -350,8 +353,7 @@ pub fn prepare_target(cx: &mut Context<'_, '_>, unit: &Unit, force: bool) -> Car unit.target.name() )); let bcx = cx.bcx; - let new = cx.files().fingerprint_dir(unit); - let loc = new.join(&filename(cx, unit)); + let loc = cx.files().fingerprint_file_path(unit, ""); debug!("fingerprint at: {}", loc.display()); @@ -1241,7 +1243,12 @@ fn calculate_normal(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult, unit: &Unit) -> CargoResult<()> { /// Returns the location that the dep-info file will show up at for the `unit` /// specified. pub fn dep_info_loc(cx: &mut Context<'_, '_>, unit: &Unit) -> PathBuf { - cx.files() - .fingerprint_dir(unit) - .join(&format!("dep-{}", filename(cx, unit))) + cx.files().fingerprint_file_path(unit, "dep-") } /// Returns an absolute path that target directory. @@ -1673,24 +1678,6 @@ where None } -fn filename(cx: &mut Context<'_, '_>, unit: &Unit) -> String { - // file_stem includes metadata hash. Thus we have a different - // fingerprint for every metadata hash version. This works because - // even if the package is fresh, we'll still link the fresh target - let file_stem = cx.files().file_stem(unit); - let kind = unit.target.kind().description(); - let flavor = if unit.mode.is_any_test() { - "test-" - } else if unit.mode.is_doc() { - "doc-" - } else if unit.mode.is_run_custom_build() { - "run-" - } else { - "" - }; - format!("{}{}-{}", flavor, kind, file_stem) -} - #[repr(u8)] enum DepInfoPathType { // src/, e.g. src/lib.rs diff --git a/src/cargo/core/compiler/job_queue.rs b/src/cargo/core/compiler/job_queue.rs index 7a103b13d21..a1f2bba16d3 100644 --- a/src/cargo/core/compiler/job_queue.rs +++ b/src/cargo/core/compiler/job_queue.rs @@ -500,7 +500,7 @@ impl<'cfg> DrainState<'cfg> { plan.update(&module_name, &cmd, &filenames)?; } Message::Stdout(out) => { - cx.bcx.config.shell().stdout_println(out); + writeln!(cx.bcx.config.shell().out(), "{}", out)?; } Message::Stderr(err) => { let mut shell = cx.bcx.config.shell(); @@ -700,7 +700,7 @@ impl<'cfg> DrainState<'cfg> { success: error.is_none(), } .to_json_string(); - cx.bcx.config.shell().stdout_println(msg); + writeln!(cx.bcx.config.shell().out(), "{}", msg)?; } if let Some(e) = error { diff --git a/src/cargo/core/compiler/layout.rs b/src/cargo/core/compiler/layout.rs index a896d5be38b..53c615fbe1d 100644 --- a/src/cargo/core/compiler/layout.rs +++ b/src/cargo/core/compiler/layout.rs @@ -26,19 +26,20 @@ //! # packages //! .fingerprint/ //! # Each package is in a separate directory. +//! # Note that different target kinds have different filename prefixes. //! $pkgname-$META/ //! # Set of source filenames for this package. -//! dep-lib-$pkgname-$META +//! dep-lib-$targetname //! # Timestamp when this package was last built. //! invoked.timestamp //! # The fingerprint hash. -//! lib-$pkgname-$META +//! lib-$targetname //! # Detailed information used for logging the reason why //! # something is being recompiled. -//! lib-$pkgname-$META.json +//! lib-$targetname.json //! # The console output from the compiler. This is cached //! # so that warnings can be redisplayed for "fresh" units. -//! output +//! output-lib-$targetname //! //! 
# This is the root directory for all rustc artifacts except build //! # scripts, examples, and test and bench executables. Almost every diff --git a/src/cargo/core/compiler/lto.rs b/src/cargo/core/compiler/lto.rs new file mode 100644 index 00000000000..c808520d39b --- /dev/null +++ b/src/cargo/core/compiler/lto.rs @@ -0,0 +1,117 @@ +use crate::core::compiler::{Context, Unit}; +use crate::core::interning::InternedString; +use crate::core::profiles; +use crate::util::errors::CargoResult; +use std::collections::hash_map::{Entry, HashMap}; + +/// Possible ways to run rustc and request various parts of LTO. +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub enum Lto { + /// LTO is run for this rustc, and it's `-Clto=foo` where `foo` is optional. + Run(Option), + + /// This rustc invocation only needs to produce bitcode, there's no need to + /// produce object files, so we can pass `-Clinker-plugin-lto` + OnlyBitcode, + + /// This rustc invocation needs to embed bitcode in object files. This means + /// that object files may be used for a normal link, and the crate may be + /// loaded for LTO later, so both are required. + EmbedBitcode, + + /// Nothing related to LTO is required of this compilation. + None, +} + +pub fn generate(cx: &mut Context<'_, '_>) -> CargoResult<()> { + let mut map = HashMap::new(); + for unit in cx.bcx.roots.iter() { + calculate(cx, &mut map, unit, false)?; + } + cx.lto = map; + Ok(()) +} + +fn calculate( + cx: &Context<'_, '_>, + map: &mut HashMap, + unit: &Unit, + require_bitcode: bool, +) -> CargoResult<()> { + let (lto, require_bitcode_for_deps) = if unit.target.for_host() { + // Disable LTO for host builds since we only really want to perform LTO + // for the final binary, and LTO on plugins/build scripts/proc macros is + // largely not desired. + (Lto::None, false) + } else if unit.target.can_lto() { + // Otherwise if this target can perform LTO then we're going to read the + // LTO value out of the profile. Note that we ignore `require_bitcode` + // here because if a unit depends on another unit than can LTO this + // isn't a rustc-level dependency but rather a Cargo-level dependency. + // For example this is an integration test depending on a binary. + match unit.profile.lto { + profiles::Lto::Named(s) => match s.as_str() { + "n" | "no" | "off" => (Lto::Run(Some(s)), false), + _ => (Lto::Run(Some(s)), true), + }, + profiles::Lto::Bool(true) => (Lto::Run(None), true), + profiles::Lto::Bool(false) => (Lto::None, false), + } + } else if require_bitcode { + // Otherwise we're a dependency of something, an rlib. This means that + // if our parent required bitcode of some kind then we need to generate + // bitcode. + (Lto::OnlyBitcode, true) + } else { + (Lto::None, false) + }; + + match map.entry(unit.clone()) { + // If we haven't seen this unit before then insert our value and keep + // going. + Entry::Vacant(v) => { + v.insert(lto); + } + + Entry::Occupied(mut v) => { + let result = match (lto, v.get()) { + // Once we're running LTO we keep running LTO. We should always + // calculate the same thing here each iteration because if we + // see this twice then it means, for example, two unit tests + // depend on a binary, which is normal. + (Lto::Run(s), _) | (_, &Lto::Run(s)) => Lto::Run(s), + + // If we calculated the same thing as before then we can bail + // out quickly. + (Lto::OnlyBitcode, Lto::OnlyBitcode) | (Lto::None, Lto::None) => return Ok(()), + + // This is where the trickiness happens. 
This unit needs + // bitcode and the previously calculated value for this unit + // says it didn't need bitcode (or vice versa). This means that + // we're a shared dependency between some targets which require + // LTO and some which don't. This means that instead of being + // either only-objects or only-bitcode we have to embed both in + // rlibs (used for different compilations), so we switch to + // embedding bitcode. + (Lto::OnlyBitcode, Lto::None) + | (Lto::OnlyBitcode, Lto::EmbedBitcode) + | (Lto::None, Lto::OnlyBitcode) + | (Lto::None, Lto::EmbedBitcode) => Lto::EmbedBitcode, + + // Currently this variant is never calculated above, so no need + // to handle this case. + (Lto::EmbedBitcode, _) => unreachable!(), + }; + // No need to recurse if we calculated the same value as before. + if result == *v.get() { + return Ok(()); + } + v.insert(result); + } + } + + for dep in cx.unit_deps(unit) { + calculate(cx, map, &dep.unit, require_bitcode_for_deps)?; + } + Ok(()) +} diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs index cce5301cd48..f662495a1a1 100644 --- a/src/cargo/core/compiler/mod.rs +++ b/src/cargo/core/compiler/mod.rs @@ -4,12 +4,14 @@ mod build_plan; mod compilation; mod compile_kind; mod context; +mod crate_type; mod custom_build; mod fingerprint; mod job; mod job_queue; mod layout; mod links; +mod lto; mod output_depinfo; pub mod standard_lib; mod timings; @@ -29,20 +31,22 @@ use lazycell::LazyCell; use log::debug; pub use self::build_config::{BuildConfig, CompileMode, MessageFormat}; -pub use self::build_context::{BuildContext, FileFlavor, RustcTargetData, TargetInfo}; +pub use self::build_context::{BuildContext, FileFlavor, FileType, RustcTargetData, TargetInfo}; use self::build_plan::BuildPlan; pub use self::compilation::{Compilation, Doctest}; pub use self::compile_kind::{CompileKind, CompileTarget}; pub use self::context::{Context, Metadata}; +pub use self::crate_type::CrateType; pub use self::custom_build::{BuildOutput, BuildScriptOutputs, BuildScripts}; pub use self::job::Freshness; use self::job::{Job, Work}; use self::job_queue::{JobQueue, JobState}; +pub(crate) use self::layout::Layout; use self::output_depinfo::output_depinfo; use self::unit_graph::UnitDep; pub use crate::core::compiler::unit::{Unit, UnitInterner}; use crate::core::manifest::TargetSourcePath; -use crate::core::profiles::{Lto, PanicStrategy, Profile}; +use crate::core::profiles::{PanicStrategy, Profile}; use crate::core::{Edition, Feature, InternedString, PackageId, Target}; use crate::util::errors::{self, CargoResult, CargoResultExt, ProcessError, VerboseError}; use crate::util::machine_message::Message; @@ -142,7 +146,7 @@ fn compile<'cfg>( &unit.target, cx.files().message_cache_path(unit), cx.bcx.build_config.message_format, - cx.bcx.config.shell().supports_color(), + cx.bcx.config.shell().err_supports_color(), ) } else { Work::noop() @@ -189,17 +193,12 @@ fn rustc(cx: &mut Context<'_, '_>, unit: &Unit, exec: &Arc) -> Car // don't pass the `-l` flags. let pass_l_flag = unit.target.is_lib() || !unit.pkg.targets().iter().any(|t| t.is_lib()); let pass_cdylib_link_args = unit.target.is_cdylib(); - let do_rename = unit.target.allows_dashes() && !unit.mode.is_any_test(); - let real_name = unit.target.name().to_string(); - let crate_name = unit.target.crate_name(); - // Rely on `target_filenames` iterator as source of truth rather than rederiving filestem. 
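The `calculate` function in `lto.rs` above effectively folds each unit's LTO requirement over every path from the roots: `Run` wins outright, agreeing states stay put, and a disagreement between "only bitcode" and "no bitcode needed" resolves to embedding both in the rlib. A standalone sketch of that merge rule (the enum is simplified, e.g. the real `Run` carries the requested LTO kind, and `merge` is an illustrative helper, not a function from the patch):

```rust
// Simplified mirror of the Lto states and of how two requirements for the
// same unit are reconciled.
#[derive(Copy, Clone, Debug, PartialEq)]
enum Lto {
    Run,          // this unit is compiled with -Clto
    OnlyBitcode,  // a dependency of an LTO'd unit: bitcode is enough
    EmbedBitcode, // needed both as objects and as bitcode
    None,         // ordinary compilation
}

fn merge(a: Lto, b: Lto) -> Lto {
    match (a, b) {
        (Lto::Run, _) | (_, Lto::Run) => Lto::Run,
        (x, y) if x == y => x,
        // Shared between an LTO consumer and a non-LTO consumer:
        // keep objects *and* bitcode in the rlib.
        _ => Lto::EmbedBitcode,
    }
}

fn main() {
    // An rlib used by both `cargo test` (no LTO) and a release binary (LTO).
    assert_eq!(merge(Lto::None, Lto::OnlyBitcode), Lto::EmbedBitcode);
    assert_eq!(merge(Lto::OnlyBitcode, Lto::OnlyBitcode), Lto::OnlyBitcode);
    assert_eq!(merge(Lto::Run, Lto::None), Lto::Run);
}
```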
- let rustc_dep_info_loc = if do_rename && cx.files().metadata(unit).is_none() { - root.join(&crate_name) - } else { - root.join(&cx.files().file_stem(unit)) - } - .with_extension("d"); + let dep_info_name = match cx.files().metadata(unit) { + Some(metadata) => format!("{}-{}.d", unit.target.crate_name(), metadata), + None => format!("{}.d", unit.target.crate_name()), + }; + let rustc_dep_info_loc = root.join(dep_info_name); let dep_info_loc = fingerprint::dep_info_loc(cx, unit); rustc.args(cx.bcx.rustflags_args(unit)); @@ -291,20 +290,6 @@ fn rustc(cx: &mut Context<'_, '_>, unit: &Unit, exec: &Arc) -> Car .chain_err(|| format!("could not compile `{}`.", name))?; } - if do_rename && real_name != crate_name { - let dst = &outputs[0].path; - let src = dst.with_file_name( - dst.file_name() - .unwrap() - .to_str() - .unwrap() - .replace(&real_name, &crate_name), - ); - if src.exists() && src.file_name() != dst.file_name() { - fs::rename(&src, &dst).chain_err(|| format!("could not rename crate {:?}", src))?; - } - } - if rustc_dep_info_loc.exists() { fingerprint::translate_dep_info( &rustc_dep_info_loc, @@ -531,7 +516,7 @@ where fn prepare_rustc( cx: &mut Context<'_, '_>, - crate_types: &[&str], + crate_types: &[CrateType], unit: &Unit, ) -> CargoResult { let is_primary = cx.is_primary_package(unit); @@ -733,14 +718,13 @@ fn build_base_args( cx: &mut Context<'_, '_>, cmd: &mut ProcessBuilder, unit: &Unit, - crate_types: &[&str], + crate_types: &[CrateType], ) -> CargoResult<()> { assert!(!unit.mode.is_run_custom_build()); let bcx = cx.bcx; let Profile { ref opt_level, - ref lto, codegen_units, debuginfo, debug_assertions, @@ -764,7 +748,7 @@ fn build_base_args( if !test { for crate_type in crate_types.iter() { - cmd.arg("--crate-type").arg(crate_type); + cmd.arg("--crate-type").arg(crate_type.as_str()); } } @@ -780,7 +764,7 @@ fn build_base_args( } let prefer_dynamic = (unit.target.for_host() && !unit.target.is_custom_build()) - || (crate_types.contains(&"dylib") && bcx.ws.members().any(|p| *p != unit.pkg)); + || (crate_types.contains(&CrateType::Dylib) && bcx.ws.members().any(|p| *p != unit.pkg)); if prefer_dynamic { cmd.arg("-C").arg("prefer-dynamic"); } @@ -793,32 +777,39 @@ fn build_base_args( cmd.arg("-C").arg(format!("panic={}", panic)); } - // Disable LTO for host builds as prefer_dynamic and it are mutually - // exclusive. - let lto_possible = unit.target.can_lto() && !unit.target.for_host(); - match lto { - Lto::Bool(true) => { - if lto_possible { - cmd.args(&["-C", "lto"]); - } + match cx.lto[unit] { + lto::Lto::Run(None) => { + cmd.arg("-C").arg("lto"); } - Lto::Named(s) => { - if lto_possible { - cmd.arg("-C").arg(format!("lto={}", s)); + lto::Lto::Run(Some(s)) => { + cmd.arg("-C").arg(format!("lto={}", s)); + } + lto::Lto::EmbedBitcode => {} // this is rustc's default + lto::Lto::OnlyBitcode => { + // Note that this compiler flag, like the one below, is just an + // optimization in terms of build time. If we don't pass it then + // both object code and bitcode will show up. This is lagely just + // compat until the feature lands on stable and we can remove the + // conditional branch. + if cx + .bcx + .target_data + .info(CompileKind::Host) + .supports_embed_bitcode + .unwrap() + { + cmd.arg("-Clinker-plugin-lto"); } } - // If LTO isn't being enabled then there's no need for bitcode to be - // present in the intermediate artifacts, so shave off some build time - // by removing it. 
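Aside: the flag mapping introduced here (together with the `None` arm that continues just below) can be summarized as a pure function from the computed LTO state to rustc arguments. A hedged sketch follows, with a plain bool standing in for the `supports_embed_bitcode` target-info probe used in the real code.

    #[derive(Copy, Clone)]
    enum Lto {
        Run(Option<&'static str>),
        OnlyBitcode,
        EmbedBitcode,
        None,
    }

    fn lto_args(lto: Lto, supports_embed_bitcode: bool) -> Vec<String> {
        match lto {
            // The root compilation is the one that actually runs LTO.
            Lto::Run(None) => vec!["-C".to_string(), "lto".to_string()],
            Lto::Run(Some(kind)) => vec!["-C".to_string(), format!("lto={}", kind)],
            // rustc already embeds bitcode by default, nothing to pass.
            Lto::EmbedBitcode => vec![],
            // Dependencies of an LTO build only need bitcode; skipping object
            // code is purely a build-time optimization, so it is gated on
            // compiler support.
            Lto::OnlyBitcode if supports_embed_bitcode => vec!["-Clinker-plugin-lto".to_string()],
            Lto::OnlyBitcode => vec![],
            // No LTO anywhere: drop bitcode from rlibs to save build time.
            Lto::None if supports_embed_bitcode => vec!["-Cembed-bitcode=no".to_string()],
            Lto::None => vec![],
        }
    }

    fn main() {
        assert_eq!(lto_args(Lto::Run(Some("thin")), true), ["-C", "lto=thin"]);
        assert_eq!(lto_args(Lto::None, true), ["-Cembed-bitcode=no"]);
        assert!(lto_args(Lto::EmbedBitcode, true).is_empty());
    }
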
- Lto::Bool(false) => { + lto::Lto::None => { if cx .bcx .target_data .info(CompileKind::Host) - .supports_bitcode_in_rlib + .supports_embed_bitcode .unwrap() { - cmd.arg("-Cbitcode-in-rlib=no"); + cmd.arg("-Cembed-bitcode=no"); } } } @@ -977,7 +968,7 @@ fn build_deps_args( // error in the future (see PR #4797). if !deps .iter() - .any(|dep| !dep.unit.mode.is_doc() && dep.unit.target.linkable()) + .any(|dep| !dep.unit.mode.is_doc() && dep.unit.target.is_linkable()) { if let Some(dep) = deps .iter() @@ -1060,19 +1051,18 @@ pub fn extern_args( }; let outputs = cx.outputs(&dep.unit)?; - let mut outputs = outputs.iter().filter_map(|output| match output.flavor { - FileFlavor::Linkable { rmeta } => Some((output, rmeta)), - _ => None, - }); - - if cx.only_requires_rmeta(unit, &dep.unit) { - let (output, _rmeta) = outputs - .find(|(_output, rmeta)| *rmeta) - .expect("failed to find rlib dep for pipelined dep"); + + if cx.only_requires_rmeta(unit, &dep.unit) || dep.unit.mode.is_check() { + // Example: rlib dependency for an rlib, rmeta is all that is required. + let output = outputs + .iter() + .find(|output| output.flavor == FileFlavor::Rmeta) + .expect("failed to find rmeta dep for pipelined dep"); pass(&output.path); } else { - for (output, rmeta) in outputs { - if !rmeta { + // Example: a bin needs `rlib` for dependencies, it cannot use rmeta. + for output in outputs.iter() { + if output.flavor == FileFlavor::Linkable { pass(&output.path); } } @@ -1081,7 +1071,7 @@ pub fn extern_args( }; for dep in deps { - if dep.unit.target.linkable() && !dep.unit.mode.is_doc() { + if dep.unit.target.is_linkable() && !dep.unit.mode.is_doc() { link_to(dep, dep.extern_crate_name, dep.noprelude)?; } } @@ -1119,7 +1109,7 @@ struct OutputOptions { impl OutputOptions { fn new(cx: &Context<'_, '_>, unit: &Unit) -> OutputOptions { let look_for_metadata_directive = cx.rmeta_required(unit); - let color = cx.bcx.config.shell().supports_color(); + let color = cx.bcx.config.shell().err_supports_color(); let path = cx.files().message_cache_path(unit); // Remove old cache, ignore ENOENT, which is the common case. drop(fs::remove_file(&path)); @@ -1154,7 +1144,7 @@ fn on_stderr_line( // Check if caching is enabled. if let Some((path, cell)) = &mut options.cache_cell { // Cache the output, which will be replayed later when Fresh. - let f = cell.try_borrow_mut_with(|| File::create(path))?; + let f = cell.try_borrow_mut_with(|| paths::create(path))?; debug_assert!(!line.contains('\n')); f.write_all(line.as_bytes())?; f.write_all(&[b'\n'])?; @@ -1342,7 +1332,7 @@ fn replay_output_cache( // We sometimes have gigabytes of output from the compiler, so avoid // loading it all into memory at once, as that can cause OOM where // otherwise there would be none. - let file = fs::File::open(&path)?; + let file = paths::open(&path)?; let mut reader = std::io::BufReader::new(file); let mut line = String::new(); loop { diff --git a/src/cargo/core/compiler/output_depinfo.rs b/src/cargo/core/compiler/output_depinfo.rs index 284cb338cd5..d017a81f700 100644 --- a/src/cargo/core/compiler/output_depinfo.rs +++ b/src/cargo/core/compiler/output_depinfo.rs @@ -23,7 +23,6 @@ //! be detected via changes to `Cargo.lock`. use std::collections::{BTreeSet, HashSet}; -use std::fs::File; use std::io::{BufWriter, Write}; use std::path::{Path, PathBuf}; @@ -132,7 +131,7 @@ pub fn output_depinfo(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<()> for output in cx .outputs(unit)? 
.iter() - .filter(|o| o.flavor != FileFlavor::DebugInfo) + .filter(|o| !matches!(o.flavor, FileFlavor::DebugInfo | FileFlavor::Auxiliary)) { if let Some(ref link_dst) = output.hardlink { let output_path = link_dst.with_extension("d"); @@ -148,7 +147,7 @@ pub fn output_depinfo(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<()> } // Otherwise write it all out - let mut outfile = BufWriter::new(File::create(output_path)?); + let mut outfile = BufWriter::new(paths::create(output_path)?); write!(outfile, "{}:", target_fn)?; for dep in &deps { write!(outfile, " {}", dep)?; diff --git a/src/cargo/core/compiler/standard_lib.rs b/src/cargo/core/compiler/standard_lib.rs index 0ae0b651546..c1c7f875c65 100644 --- a/src/cargo/core/compiler/standard_lib.rs +++ b/src/cargo/core/compiler/standard_lib.rs @@ -161,7 +161,7 @@ pub fn generate_std_roots( let features = std_features.activated_features(pkg.package_id(), FeaturesFor::NormalOrDev); for kind in kinds { - let list = ret.entry(*kind).or_insert(Vec::new()); + let list = ret.entry(*kind).or_insert_with(Vec::new); list.push(interner.intern( pkg, lib, @@ -173,7 +173,7 @@ pub fn generate_std_roots( )); } } - return Ok(ret); + Ok(ret) } fn detect_sysroot_src_path(target_data: &RustcTargetData) -> CargoResult { diff --git a/src/cargo/core/compiler/timings.rs b/src/cargo/core/compiler/timings.rs index 4704eb12813..8302475defd 100644 --- a/src/cargo/core/compiler/timings.rs +++ b/src/cargo/core/compiler/timings.rs @@ -10,7 +10,6 @@ use crate::util::cpu::State; use crate::util::machine_message::{self, Message}; use crate::util::{paths, CargoResult, Config}; use std::collections::HashMap; -use std::fs::File; use std::io::{BufWriter, Write}; use std::time::{Duration, Instant, SystemTime}; @@ -122,6 +121,17 @@ impl<'cfg> Timings<'cfg> { .collect(); let start_str = humantime::format_rfc3339_seconds(SystemTime::now()).to_string(); let profile = bcx.build_config.requested_profile.to_string(); + let last_cpu_state = if enabled { + match State::current() { + Ok(state) => Some(state), + Err(e) => { + log::info!("failed to get CPU state, CPU tracking disabled: {:?}", e); + None + } + } + } else { + None + }; Timings { config: bcx.config, @@ -138,7 +148,7 @@ impl<'cfg> Timings<'cfg> { unit_times: Vec::new(), active: HashMap::new(), concurrency: Vec::new(), - last_cpu_state: if enabled { State::current().ok() } else { None }, + last_cpu_state, last_cpu_recording: Instant::now(), cpu_usage: Vec::new(), } @@ -235,7 +245,7 @@ impl<'cfg> Timings<'cfg> { rmeta_time: unit_time.rmeta_time, } .to_json_string(); - self.config.shell().stdout_println(msg); + crate::drop_println!(self.config, "{}", msg); } self.unit_times.push(unit_time); } @@ -287,7 +297,10 @@ impl<'cfg> Timings<'cfg> { } let current = match State::current() { Ok(s) => s, - Err(_) => return, + Err(e) => { + log::info!("failed to get CPU state: {:?}", e); + return; + } }; let pct_idle = current.idle_since(prev); *prev = current; @@ -323,7 +336,7 @@ impl<'cfg> Timings<'cfg> { let duration = d_as_f64(self.start.elapsed()); let timestamp = self.start_str.replace(&['-', ':'][..], ""); let filename = format!("cargo-timing-{}.html", timestamp); - let mut f = BufWriter::new(File::create(&filename)?); + let mut f = BufWriter::new(paths::create(&filename)?); let roots: Vec<&str> = self .root_targets .iter() diff --git a/src/cargo/core/compiler/unit.rs b/src/cargo/core/compiler/unit.rs index ebbba387983..a7f80321d41 100644 --- a/src/cargo/core/compiler/unit.rs +++ b/src/cargo/core/compiler/unit.rs @@ -1,5 +1,5 @@ -use 
crate::core::compiler::{CompileKind, CompileMode}; -use crate::core::manifest::{LibKind, Target, TargetKind}; +use crate::core::compiler::{CompileKind, CompileMode, CrateType}; +use crate::core::manifest::{Target, TargetKind}; use crate::core::{profiles::Profile, InternedString, Package}; use crate::util::hex::short_hash; use crate::util::Config; @@ -178,9 +178,9 @@ impl UnitInterner { // // At some point in the future, it would be nice to have a // first-class way of overriding or specifying crate-types. - (true, TargetKind::Lib(crate_types)) if crate_types.contains(&LibKind::Dylib) => { + (true, TargetKind::Lib(crate_types)) if crate_types.contains(&CrateType::Dylib) => { let mut new_target = Target::clone(target); - new_target.set_kind(TargetKind::Lib(vec![LibKind::Rlib])); + new_target.set_kind(TargetKind::Lib(vec![CrateType::Rlib])); new_target } _ => target.clone(), @@ -204,6 +204,6 @@ impl UnitInterner { } let item = Rc::new(item.clone()); me.cache.insert(item.clone()); - return item; + item } } diff --git a/src/cargo/core/compiler/unit_dependencies.rs b/src/cargo/core/compiler/unit_dependencies.rs index 57cb6c68d06..da2ee2660cf 100644 --- a/src/cargo/core/compiler/unit_dependencies.rs +++ b/src/cargo/core/compiler/unit_dependencies.rs @@ -476,7 +476,7 @@ fn maybe_lib( unit.pkg .targets() .iter() - .find(|t| t.linkable()) + .find(|t| t.is_linkable()) .map(|t| { let mode = check_or_build_mode(unit.mode, t); new_unit_dep( @@ -681,7 +681,7 @@ fn connect_run_custom_build_deps(unit_dependencies: &mut UnitGraph) { // Only deps with `links`. .filter(|other| { other.unit.pkg != unit.pkg - && other.unit.target.linkable() + && other.unit.target.is_linkable() && other.unit.pkg.manifest().links().is_some() }) // Get the RunCustomBuild for other lib. diff --git a/src/cargo/core/compiler/unit_graph.rs b/src/cargo/core/compiler/unit_graph.rs index 2db9015058e..d242f6b0497 100644 --- a/src/cargo/core/compiler/unit_graph.rs +++ b/src/cargo/core/compiler/unit_graph.rs @@ -113,6 +113,6 @@ pub fn emit_serialized_unit_graph(root_units: &[Unit], unit_graph: &UnitGraph) - let stdout = std::io::stdout(); let mut lock = stdout.lock(); serde_json::to_writer(&mut lock, &s)?; - write!(lock, "\n")?; + drop(writeln!(lock)); Ok(()) } diff --git a/src/cargo/core/manifest.rs b/src/cargo/core/manifest.rs index c5c15a4a8b0..221b769295b 100644 --- a/src/cargo/core/manifest.rs +++ b/src/cargo/core/manifest.rs @@ -10,6 +10,7 @@ use serde::ser; use serde::Serialize; use url::Url; +use crate::core::compiler::CrateType; use crate::core::interning::InternedString; use crate::core::resolver::ResolveBehavior; use crate::core::{Dependency, PackageId, PackageIdSpec, SourceId, Summary}; @@ -96,73 +97,13 @@ pub struct ManifestMetadata { pub links: Option, } -#[derive(Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub enum LibKind { - Lib, - Rlib, - Dylib, - ProcMacro, - Other(String), -} - -impl LibKind { - /// Returns the argument suitable for `--crate-type` to pass to rustc. - pub fn crate_type(&self) -> &str { - match *self { - LibKind::Lib => "lib", - LibKind::Rlib => "rlib", - LibKind::Dylib => "dylib", - LibKind::ProcMacro => "proc-macro", - LibKind::Other(ref s) => s, - } - } - - pub fn linkable(&self) -> bool { - match *self { - LibKind::Lib | LibKind::Rlib | LibKind::Dylib | LibKind::ProcMacro => true, - LibKind::Other(..) 
=> false, - } - } - - pub fn requires_upstream_objects(&self) -> bool { - match *self { - // "lib" == "rlib" and is a compilation that doesn't actually - // require upstream object files to exist, only upstream metadata - // files. As a result, it doesn't require upstream artifacts - LibKind::Lib | LibKind::Rlib => false, - - // Everything else, however, is some form of "linkable output" or - // something that requires upstream object files. - _ => true, - } - } -} - -impl fmt::Debug for LibKind { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.crate_type().fmt(f) - } -} - -impl<'a> From<&'a String> for LibKind { - fn from(string: &'a String) -> Self { - match string.as_ref() { - "lib" => LibKind::Lib, - "rlib" => LibKind::Rlib, - "dylib" => LibKind::Dylib, - "proc-macro" => LibKind::ProcMacro, - s => LibKind::Other(s.to_string()), - } - } -} - #[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] pub enum TargetKind { - Lib(Vec), + Lib(Vec), Bin, Test, Bench, - ExampleLib(Vec), + ExampleLib(Vec), ExampleBin, CustomBuild, } @@ -173,8 +114,8 @@ impl ser::Serialize for TargetKind { S: ser::Serializer, { use self::TargetKind::*; - match *self { - Lib(ref kinds) => s.collect_seq(kinds.iter().map(LibKind::crate_type)), + match self { + Lib(kinds) => s.collect_seq(kinds.iter().map(|t| t.to_string())), Bin => ["bin"].serialize(s), ExampleBin | ExampleLib(_) => ["example"].serialize(s), Test => ["test"].serialize(s), @@ -223,6 +164,18 @@ impl TargetKind { _ => true, } } + + /// Returns the arguments suitable for `--crate-type` to pass to rustc. + pub fn rustc_crate_types(&self) -> Vec { + match self { + TargetKind::Lib(kinds) | TargetKind::ExampleLib(kinds) => kinds.clone(), + TargetKind::CustomBuild + | TargetKind::Bench + | TargetKind::Test + | TargetKind::ExampleBin + | TargetKind::Bin => vec![CrateType::Bin], + } + } } /// Information about a binary, a library, an example, etc. that is part of the @@ -303,7 +256,7 @@ struct SerializedTarget<'a> { kind: &'a TargetKind, /// Corresponds to `--crate-type` compiler attribute. /// See https://doc.rust-lang.org/reference/linkage.html - crate_types: Vec<&'a str>, + crate_types: Vec, name: &'a str, src_path: Option<&'a PathBuf>, edition: &'a str, @@ -547,7 +500,7 @@ impl Manifest { pub fn print_teapot(&self, config: &Config) { if let Some(teapot) = self.im_a_teapot { if config.cli_unstable().print_im_a_teapot { - println!("im-a-teapot = {}", teapot); + crate::drop_println!(config, "im-a-teapot = {}", teapot); } } } @@ -659,7 +612,7 @@ impl Target { pub fn lib_target( name: &str, - crate_targets: Vec, + crate_targets: Vec, src_path: PathBuf, edition: Edition, ) -> Target { @@ -669,7 +622,7 @@ impl Target { .set_name(name) .set_doctest(true) .set_doc(true); - return target; + target } pub fn bin_target( @@ -684,7 +637,7 @@ impl Target { .set_name(name) .set_required_features(required_features) .set_doc(true); - return target; + target } /// Builds a `Target` corresponding to the `build = "build.rs"` entry. 
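Aside: the `CrateType` that replaces `LibKind` lives in the new `crate_type.rs` module, which this patch adds but whose body is not shown here. The sketch below is a hypothetical reconstruction based only on how the type is used in this diff (`as_str`, `is_linkable`, `Display`, and the ordering/hashing derives); the exact variant list is an assumption.

    use std::fmt;

    // Hypothetical reconstruction of CrateType; the real definition is in the
    // new crate_type.rs module, not shown in this diff.
    #[derive(Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
    enum CrateType {
        Bin,
        Lib,
        Rlib,
        Dylib,
        Cdylib,
        Staticlib,
        ProcMacro,
        Other(String),
    }

    impl CrateType {
        // Exact string handed to `rustc --crate-type`.
        fn as_str(&self) -> &str {
            match self {
                CrateType::Bin => "bin",
                CrateType::Lib => "lib",
                CrateType::Rlib => "rlib",
                CrateType::Dylib => "dylib",
                CrateType::Cdylib => "cdylib",
                CrateType::Staticlib => "staticlib",
                CrateType::ProcMacro => "proc-macro",
                CrateType::Other(s) => s,
            }
        }

        // Whether an artifact of this type can be linked into another Rust crate.
        fn is_linkable(&self) -> bool {
            matches!(
                self,
                CrateType::Lib | CrateType::Rlib | CrateType::Dylib | CrateType::ProcMacro
            )
        }
    }

    impl fmt::Display for CrateType {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.write_str(self.as_str())
        }
    }

    fn main() {
        assert!(CrateType::Rlib.is_linkable());
        assert!(!CrateType::Cdylib.is_linkable());
        println!("--crate-type {}", CrateType::ProcMacro);
    }
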
@@ -696,7 +649,7 @@ impl Target { .set_for_host(true) .set_benched(false) .set_tested(false); - return target; + target } pub fn metabuild_target(name: &str) -> Target { @@ -707,20 +660,17 @@ impl Target { .set_for_host(true) .set_benched(false) .set_tested(false); - return target; + target } pub fn example_target( name: &str, - crate_targets: Vec, + crate_targets: Vec, src_path: PathBuf, required_features: Option>, edition: Edition, ) -> Target { - let kind = if crate_targets.is_empty() - || crate_targets - .iter() - .all(|t| *t == LibKind::Other("bin".into())) + let kind = if crate_targets.is_empty() || crate_targets.iter().all(|t| *t == CrateType::Bin) { TargetKind::ExampleBin } else { @@ -733,7 +683,7 @@ impl Target { .set_required_features(required_features) .set_tested(false) .set_benched(false); - return target; + target } pub fn test_target( @@ -748,7 +698,7 @@ impl Target { .set_name(name) .set_required_features(required_features) .set_benched(false); - return target; + target } pub fn bench_target( @@ -763,7 +713,7 @@ impl Target { .set_name(name) .set_required_features(required_features) .set_tested(false); - return target; + target } pub fn name(&self) -> &str { @@ -812,21 +762,13 @@ impl Target { pub fn doctestable(&self) -> bool { match self.kind() { - TargetKind::Lib(ref kinds) => kinds - .iter() - .any(|k| *k == LibKind::Rlib || *k == LibKind::Lib || *k == LibKind::ProcMacro), + TargetKind::Lib(ref kinds) => kinds.iter().any(|k| { + *k == CrateType::Rlib || *k == CrateType::Lib || *k == CrateType::ProcMacro + }), _ => false, } } - /// Whether or not this target allows dashes in its filename. - /// - /// Rustc will always emit filenames with underscores, and Cargo will - /// rename them back to dashes if this is true. - pub fn allows_dashes(&self) -> bool { - self.is_bin() || self.is_example() || self.is_custom_build() - } - pub fn is_lib(&self) -> bool { match self.kind() { TargetKind::Lib(_) => true, @@ -836,29 +778,25 @@ impl Target { pub fn is_dylib(&self) -> bool { match self.kind() { - TargetKind::Lib(libs) => libs.iter().any(|l| *l == LibKind::Dylib), + TargetKind::Lib(libs) => libs.iter().any(|l| *l == CrateType::Dylib), _ => false, } } pub fn is_cdylib(&self) -> bool { - let libs = match self.kind() { - TargetKind::Lib(libs) => libs, - _ => return false, - }; - libs.iter().any(|l| match *l { - LibKind::Other(ref s) => s == "cdylib", + match self.kind() { + TargetKind::Lib(libs) => libs.iter().any(|l| *l == CrateType::Cdylib), _ => false, - }) + } } /// Returns whether this target produces an artifact which can be linked /// into a Rust crate. /// /// This only returns true for certain kinds of libraries. - pub fn linkable(&self) -> bool { + pub fn is_linkable(&self) -> bool { match self.kind() { - TargetKind::Lib(kinds) => kinds.iter().any(|k| k.linkable()), + TargetKind::Lib(kinds) => kinds.iter().any(|k| k.is_linkable()), _ => false, } } @@ -900,25 +838,16 @@ impl Target { } /// Returns the arguments suitable for `--crate-type` to pass to rustc. 
- pub fn rustc_crate_types(&self) -> Vec<&str> { - match self.kind() { - TargetKind::Lib(ref kinds) | TargetKind::ExampleLib(ref kinds) => { - kinds.iter().map(LibKind::crate_type).collect() - } - TargetKind::CustomBuild - | TargetKind::Bench - | TargetKind::Test - | TargetKind::ExampleBin - | TargetKind::Bin => vec!["bin"], - } + pub fn rustc_crate_types(&self) -> Vec { + self.kind().rustc_crate_types() } pub fn can_lto(&self) -> bool { match self.kind() { - TargetKind::Lib(ref v) => { - !v.contains(&LibKind::Rlib) - && !v.contains(&LibKind::Dylib) - && !v.contains(&LibKind::Lib) + TargetKind::Lib(v) => { + !v.contains(&CrateType::Rlib) + && !v.contains(&CrateType::Dylib) + && !v.contains(&CrateType::Lib) } _ => true, } diff --git a/src/cargo/core/mod.rs b/src/cargo/core/mod.rs index b73c546493b..3d664d153b7 100644 --- a/src/cargo/core/mod.rs +++ b/src/cargo/core/mod.rs @@ -5,7 +5,7 @@ pub use self::features::{ pub use self::features::{CliUnstable, Edition, Feature, Features}; pub use self::interning::InternedString; pub use self::manifest::{EitherManifest, VirtualManifest}; -pub use self::manifest::{LibKind, Manifest, Target, TargetKind}; +pub use self::manifest::{Manifest, Target, TargetKind}; pub use self::package::{Package, PackageSet}; pub use self::package_id::PackageId; pub use self::package_id_spec::PackageIdSpec; diff --git a/src/cargo/core/registry.rs b/src/cargo/core/registry.rs index 5b45a4bc501..8a05b4510d6 100644 --- a/src/cargo/core/registry.rs +++ b/src/cargo/core/registry.rs @@ -622,7 +622,7 @@ fn lock( // Lock the summary's ID if possible let summary = match pair { - Some((precise, _)) => summary.override_id(precise.clone()), + Some((precise, _)) => summary.override_id(*precise), None => summary, }; summary.map_dependencies(|dep| { diff --git a/src/cargo/core/resolver/conflict_cache.rs b/src/cargo/core/resolver/conflict_cache.rs index c59e4117810..e0e64b96a5d 100644 --- a/src/cargo/core/resolver/conflict_cache.rs +++ b/src/cargo/core/resolver/conflict_cache.rs @@ -213,7 +213,7 @@ impl ConflictCache { for c in con.keys() { self.dep_from_pid - .entry(c.clone()) + .entry(*c) .or_insert_with(HashSet::new) .insert(dep.clone()); } diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs index 5fb4631952f..4d711498235 100644 --- a/src/cargo/core/resolver/encode.rs +++ b/src/cargo/core/resolver/encode.rs @@ -260,7 +260,7 @@ impl EncodableResolve { let mut g = Graph::new(); for &(ref id, _) in live_pkgs.values() { - g.add(id.clone()); + g.add(*id); } for &(ref id, pkg) in live_pkgs.values() { @@ -271,7 +271,7 @@ impl EncodableResolve { for edge in deps.iter() { if let Some(to_depend_on) = lookup_id(edge) { - g.link(id.clone(), to_depend_on); + g.link(*id, to_depend_on); } } } @@ -282,7 +282,7 @@ impl EncodableResolve { if let Some(ref replace) = pkg.replace { assert!(pkg.dependencies.is_none()); if let Some(replace_id) = lookup_id(replace) { - replacements.insert(id.clone(), replace_id); + replacements.insert(*id, replace_id); } } } diff --git a/src/cargo/core/resolver/features.rs b/src/cargo/core/resolver/features.rs index acbdec424b3..8f71007f610 100644 --- a/src/cargo/core/resolver/features.rs +++ b/src/cargo/core/resolver/features.rs @@ -208,36 +208,34 @@ impl ResolvedFeatures { pkg_id: PackageId, features_for: FeaturesFor, ) -> Vec { - self.activated_features_int(pkg_id, features_for, true) + self.activated_features_int(pkg_id, features_for) + .expect("activated_features for invalid package") } - /// Variant of `activated_features` that returns an 
empty Vec if this is + /// Variant of `activated_features` that returns `None` if this is /// not a valid pkg_id/is_build combination. Used in places which do /// not know which packages are activated (like `cargo clean`). pub fn activated_features_unverified( &self, pkg_id: PackageId, features_for: FeaturesFor, - ) -> Vec { - self.activated_features_int(pkg_id, features_for, false) + ) -> Option> { + self.activated_features_int(pkg_id, features_for).ok() } fn activated_features_int( &self, pkg_id: PackageId, features_for: FeaturesFor, - verify: bool, - ) -> Vec { + ) -> CargoResult> { if let Some(legacy) = &self.legacy { - legacy.get(&pkg_id).map_or_else(Vec::new, |v| v.clone()) + Ok(legacy.get(&pkg_id).map_or_else(Vec::new, |v| v.clone())) } else { let is_build = self.opts.decouple_host_deps && features_for == FeaturesFor::HostDep; if let Some(fs) = self.activated_features.get(&(pkg_id, is_build)) { - fs.iter().cloned().collect() - } else if verify { - panic!("features did not find {:?} {:?}", pkg_id, is_build) + Ok(fs.iter().cloned().collect()) } else { - Vec::new() + anyhow::bail!("features did not find {:?} {:?}", pkg_id, is_build) } } } @@ -575,9 +573,13 @@ impl<'a, 'cfg> FeatureResolver<'a, 'cfg> { for ((pkg_id, dep_kind), features) in &self.activated_features { let r_features = self.resolve.features(*pkg_id); if !r_features.iter().eq(features.iter()) { - eprintln!( + crate::drop_eprintln!( + self.ws.config(), "{}/{:?} features mismatch\nresolve: {:?}\nnew: {:?}\n", - pkg_id, dep_kind, r_features, features + pkg_id, + dep_kind, + r_features, + features ); found = true; } diff --git a/src/cargo/core/resolver/resolve.rs b/src/cargo/core/resolver/resolve.rs index 2d442c0214a..030f956cdb2 100644 --- a/src/cargo/core/resolver/resolve.rs +++ b/src/cargo/core/resolver/resolve.rs @@ -16,14 +16,14 @@ use std::fmt; /// for each package. pub struct Resolve { /// A graph, whose vertices are packages and edges are dependency specifications - /// from `Cargo.toml`. We need a `Vec` here because the same package + /// from `Cargo.toml`. We need a `HashSet` here because the same package /// might be present in both `[dependencies]` and `[build-dependencies]`. graph: Graph>, /// Replacements from the `[replace]` table. replacements: HashMap, /// Inverted version of `replacements`. reverse_replacements: HashMap, - /// An empty `HashSet` to avoid creating a new `HashSet` for every package + /// An empty `Vec` to avoid creating a new `Vec` for every package /// that does not have any features, and to avoid using `Option` to /// simplify the API. empty_features: Vec, diff --git a/src/cargo/core/shell.rs b/src/cargo/core/shell.rs index eae8a70cc8d..e1198f17689 100644 --- a/src/cargo/core/shell.rs +++ b/src/cargo/core/shell.rs @@ -14,13 +14,13 @@ pub enum Verbosity { Quiet, } -/// An abstraction around a `Write`able object that remembers preferences for output verbosity and -/// color. +/// An abstraction around console output that remembers preferences for output +/// verbosity and color. pub struct Shell { - /// the `Write`able object, either with or without color support (represented by different enum - /// variants) - err: ShellOut, - /// How verbose messages should be + /// Wrapper around stdout/stderr. This helps with supporting sending + /// output to a memory buffer which is useful for tests. + output: ShellOut, + /// How verbose messages should be. verbosity: Verbosity, /// Flag that indicates the current line needs to be cleared before /// printing. 
Used when a progress bar is currently displayed. @@ -29,7 +29,7 @@ pub struct Shell { impl fmt::Debug for Shell { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.err { + match self.output { ShellOut::Write(_) => f .debug_struct("Shell") .field("verbosity", &self.verbosity) @@ -49,8 +49,9 @@ enum ShellOut { Write(Box), /// Color-enabled stdio, with information on whether color should be used Stream { - stream: StandardStream, - tty: bool, + stdout: StandardStream, + stderr: StandardStream, + stderr_tty: bool, color_choice: ColorChoice, }, } @@ -70,11 +71,13 @@ impl Shell { /// Creates a new shell (color choice and verbosity), defaulting to 'auto' color and verbose /// output. pub fn new() -> Shell { + let auto = ColorChoice::CargoAuto.to_termcolor_color_choice(); Shell { - err: ShellOut::Stream { - stream: StandardStream::stderr(ColorChoice::CargoAuto.to_termcolor_color_choice()), + output: ShellOut::Stream { + stdout: StandardStream::stdout(auto), + stderr: StandardStream::stderr(auto), color_choice: ColorChoice::CargoAuto, - tty: atty::is(atty::Stream::Stderr), + stderr_tty: atty::is(atty::Stream::Stderr), }, verbosity: Verbosity::Verbose, needs_clear: false, @@ -84,7 +87,7 @@ impl Shell { /// Creates a shell from a plain writable object, with no color, and max verbosity. pub fn from_write(out: Box) -> Shell { Shell { - err: ShellOut::Write(out), + output: ShellOut::Write(out), verbosity: Verbosity::Verbose, needs_clear: false, } @@ -105,18 +108,12 @@ impl Shell { if self.needs_clear { self.err_erase_line(); } - self.err.print(status, message, color, justified) + self.output + .message_stderr(status, message, color, justified) } } } - pub fn stdout_println(&mut self, message: impl fmt::Display) { - if self.needs_clear { - self.err_erase_line(); - } - println!("{}", message); - } - /// Sets whether the next print should clear the current line. pub fn set_needs_clear(&mut self, needs_clear: bool) { self.needs_clear = needs_clear; @@ -129,31 +126,44 @@ impl Shell { /// Returns the width of the terminal in spaces, if any. pub fn err_width(&self) -> Option { - match self.err { - ShellOut::Stream { tty: true, .. } => imp::stderr_width(), + match self.output { + ShellOut::Stream { + stderr_tty: true, .. + } => imp::stderr_width(), _ => None, } } /// Returns `true` if stderr is a tty. pub fn is_err_tty(&self) -> bool { - match self.err { - ShellOut::Stream { tty, .. } => tty, + match self.output { + ShellOut::Stream { stderr_tty, .. } => stderr_tty, _ => false, } } - /// Gets a reference to the underlying writer. + /// Gets a reference to the underlying stdout writer. + pub fn out(&mut self) -> &mut dyn Write { + if self.needs_clear { + self.err_erase_line(); + } + self.output.stdout() + } + + /// Gets a reference to the underlying stderr writer. pub fn err(&mut self) -> &mut dyn Write { if self.needs_clear { self.err_erase_line(); } - self.err.as_write() + self.output.stderr() } /// Erase from cursor to end of line. pub fn err_erase_line(&mut self) { - if let ShellOut::Stream { tty: true, .. } = self.err { + if let ShellOut::Stream { + stderr_tty: true, .. + } = self.output + { imp::err_erase_line(self); self.needs_clear = false; } @@ -216,7 +226,8 @@ impl Shell { if self.needs_clear { self.err_erase_line(); } - self.err.print(&"error", Some(&message), Red, false) + self.output + .message_stderr(&"error", Some(&message), Red, false) } /// Prints an amber 'warning' message. 
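Aside: the `drop_print!`/`drop_println!` macros used throughout this change route output through the `Config`'s `Shell` and discard write errors, so a closed pipe no longer panics the way `println!` would. Their real definitions are elsewhere in the crate; the stand-alone version below only illustrates the idea, with heavily simplified `Config`/`Shell` types.

    use std::io::Write;

    // Heavily simplified stand-ins; cargo's Config/Shell are far richer.
    struct Shell {
        out: Box<dyn Write>,
    }

    impl Shell {
        fn out(&mut self) -> &mut dyn Write {
            &mut *self.out
        }
    }

    struct Config {
        shell: Shell,
    }

    impl Config {
        fn shell(&mut self) -> &mut Shell {
            &mut self.shell
        }
    }

    // Like println!, but writes through the shell's stdout handle and drops
    // the result instead of panicking on errors such as a closed pipe.
    macro_rules! drop_println {
        ($config:expr, $($arg:tt)*) => {
            drop(writeln!($config.shell().out(), $($arg)*))
        };
    }

    fn main() {
        let mut config = Config {
            shell: Shell { out: Box::new(std::io::stdout()) },
        };
        drop_println!(config, "im-a-teapot = {}", true);
        drop_println!(config, "{:<20} {}", "left-aligned", "value");
    }
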
@@ -245,10 +256,11 @@ impl Shell { /// Updates the color choice (always, never, or auto) from a string.. pub fn set_color_choice(&mut self, color: Option<&str>) -> CargoResult<()> { if let ShellOut::Stream { - ref mut stream, + ref mut stdout, + ref mut stderr, ref mut color_choice, .. - } = self.err + } = self.output { let cfg = match color { Some("always") => ColorChoice::Always, @@ -263,7 +275,9 @@ impl Shell { ), }; *color_choice = cfg; - *stream = StandardStream::stderr(cfg.to_termcolor_color_choice()); + let choice = cfg.to_termcolor_color_choice(); + *stdout = StandardStream::stdout(choice); + *stderr = StandardStream::stderr(choice); } Ok(()) } @@ -273,17 +287,17 @@ impl Shell { /// If we are not using a color stream, this will always return `Never`, even if the color /// choice has been set to something else. pub fn color_choice(&self) -> ColorChoice { - match self.err { + match self.output { ShellOut::Stream { color_choice, .. } => color_choice, ShellOut::Write(_) => ColorChoice::Never, } } /// Whether the shell supports color. - pub fn supports_color(&self) -> bool { - match &self.err { + pub fn err_supports_color(&self) -> bool { + match &self.output { ShellOut::Write(_) => false, - ShellOut::Stream { stream, .. } => stream.supports_color(), + ShellOut::Stream { stderr, .. } => stderr.supports_color(), } } @@ -294,14 +308,19 @@ impl Shell { } #[cfg(windows)] { - if let ShellOut::Stream { stream, .. } = &mut self.err { - ::fwdansi::write_ansi(stream, message)?; + if let ShellOut::Stream { stderr, .. } = &mut self.output { + ::fwdansi::write_ansi(stderr, message)?; return Ok(()); } } self.err().write_all(message)?; Ok(()) } + + pub fn print_json(&mut self, obj: &T) { + let encoded = serde_json::to_string(&obj).unwrap(); + drop(writeln!(self.out(), "{}", encoded)); + } } impl Default for Shell { @@ -314,7 +333,7 @@ impl ShellOut { /// Prints out a message with a status. The status comes first, and is bold plus the given /// color. The status can be justified, in which case the max width that will right align is /// 12 chars. - fn print( + fn message_stderr( &mut self, status: &dyn fmt::Display, message: Option<&dyn fmt::Display>, @@ -322,20 +341,20 @@ impl ShellOut { justified: bool, ) -> CargoResult<()> { match *self { - ShellOut::Stream { ref mut stream, .. } => { - stream.reset()?; - stream.set_color(ColorSpec::new().set_bold(true).set_fg(Some(color)))?; + ShellOut::Stream { ref mut stderr, .. } => { + stderr.reset()?; + stderr.set_color(ColorSpec::new().set_bold(true).set_fg(Some(color)))?; if justified { - write!(stream, "{:>12}", status)?; + write!(stderr, "{:>12}", status)?; } else { - write!(stream, "{}", status)?; - stream.set_color(ColorSpec::new().set_bold(true))?; - write!(stream, ":")?; + write!(stderr, "{}", status)?; + stderr.set_color(ColorSpec::new().set_bold(true))?; + write!(stderr, ":")?; } - stream.reset()?; + stderr.reset()?; match message { - Some(message) => writeln!(stream, " {}", message)?, - None => write!(stream, " ")?, + Some(message) => writeln!(stderr, " {}", message)?, + None => write!(stderr, " ")?, } } ShellOut::Write(ref mut w) => { @@ -353,10 +372,18 @@ impl ShellOut { Ok(()) } - /// Gets this object as a `io::Write`. - fn as_write(&mut self) -> &mut dyn Write { + /// Gets stdout as a `io::Write`. + fn stdout(&mut self) -> &mut dyn Write { + match *self { + ShellOut::Stream { ref mut stdout, .. } => stdout, + ShellOut::Write(ref mut w) => w, + } + } + + /// Gets stderr as a `io::Write`. 
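Aside: `Shell::print_json` above is essentially "serialize, write one line to stdout, ignore I/O errors". A minimal sketch of that flow, assuming the `serde`/`serde_json` dependencies cargo already uses and a plain writer in place of the real `Shell`:

    use serde::Serialize;
    use std::io::Write;

    // Serialize to a single JSON line and ignore write errors (e.g. EPIPE when
    // the output is piped into `head`).
    fn print_json<T: ?Sized + Serialize>(out: &mut dyn Write, obj: &T) {
        let encoded = serde_json::to_string(obj).unwrap();
        drop(writeln!(out, "{}", encoded));
    }

    #[derive(Serialize)]
    struct Example {
        name: &'static str,
        success: bool,
    }

    fn main() {
        let value = Example { name: "demo", success: true };
        print_json(&mut std::io::stdout(), &value);
    }
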
+ fn stderr(&mut self) -> &mut dyn Write { match *self { - ShellOut::Stream { ref mut stream, .. } => stream, + ShellOut::Stream { ref mut stderr, .. } => stderr, ShellOut::Write(ref mut w) => w, } } @@ -404,7 +431,7 @@ mod imp { // This is the "EL - Erase in Line" sequence. It clears from the cursor // to the end of line. // https://en.wikipedia.org/wiki/ANSI_escape_code#CSI_sequences - let _ = shell.err.as_write().write_all(b"\x1B[K"); + let _ = shell.output.stderr().write_all(b"\x1B[K"); } } @@ -468,6 +495,6 @@ mod imp { fn default_err_erase_line(shell: &mut Shell) { if let Some(max_width) = imp::stderr_width() { let blank = " ".repeat(max_width); - drop(write!(shell.err.as_write(), "{}\r", blank)); + drop(write!(shell.output.stderr(), "{}\r", blank)); } } diff --git a/src/cargo/lib.rs b/src/cargo/lib.rs index a5c94f73fa8..19cbd49c101 100644 --- a/src/cargo/lib.rs +++ b/src/cargo/lib.rs @@ -34,7 +34,6 @@ use crate::core::shell::Verbosity::Verbose; use crate::core::Shell; use anyhow::Error; use log::debug; -use serde::ser; use std::fmt; pub use crate::util::errors::{InternalError, VerboseError}; @@ -93,11 +92,6 @@ impl fmt::Display for VersionInfo { } } -pub fn print_json(obj: &T) { - let encoded = serde_json::to_string(&obj).unwrap(); - println!("{}", encoded); -} - pub fn exit_with_error(err: CliError, shell: &mut Shell) -> ! { debug!("exit_with_error; err={:?}", err); if let Some(ref err) = err.error { @@ -144,7 +138,7 @@ pub fn display_error(err: &Error, shell: &mut Shell) { /// and context. pub fn display_warning_with_error(warning: &str, err: &Error, shell: &mut Shell) { drop(shell.warn(warning)); - drop(writeln!(shell.err(), "")); + drop(writeln!(shell.err())); _display_error(err, shell, false); } diff --git a/src/cargo/ops/cargo_clean.rs b/src/cargo/ops/cargo_clean.rs index beb9b03cd39..a8b39c4b5b9 100644 --- a/src/cargo/ops/cargo_clean.rs +++ b/src/cargo/ops/cargo_clean.rs @@ -1,22 +1,12 @@ -use crate::core::InternedString; -use std::collections::HashMap; -use std::fs; -use std::path::Path; - -use crate::core::compiler::unit_dependencies; -use crate::core::compiler::BuildContext; -use crate::core::compiler::{ - BuildConfig, CompileKind, CompileMode, Context, RustcTargetData, UnitInterner, -}; -use crate::core::profiles::{Profiles, UnitFor}; -use crate::core::resolver::features::HasDevUnits; -use crate::core::resolver::ResolveOpts; -use crate::core::{PackageIdSpec, Workspace}; +use crate::core::compiler::{CompileKind, CompileMode, Layout, RustcTargetData}; +use crate::core::profiles::Profiles; +use crate::core::{InternedString, PackageIdSpec, Workspace}; use crate::ops; -use crate::ops::resolve::WorkspaceResolve; use crate::util::errors::{CargoResult, CargoResultExt}; use crate::util::paths; use crate::util::Config; +use std::fs; +use std::path::Path; pub struct CleanOptions<'a> { pub config: &'a Config, @@ -61,134 +51,131 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> { if opts.spec.is_empty() { return rm_rf(&target_dir.into_path_unlocked(), config); } - let mut build_config = BuildConfig::new(config, Some(1), &opts.targets, CompileMode::Build)?; - build_config.requested_profile = opts.requested_profile; - let target_data = RustcTargetData::new(ws, &build_config.requested_kinds)?; - // Resolve for default features. In the future, `cargo clean` should be rewritten - // so that it doesn't need to guess filename hashes. 
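Aside: the rewritten `cargo clean -p` below stops reconstructing exact unit file names and instead deletes everything matching `name-*` patterns inside the layout directories. The following is a rough, simplified sketch of that strategy using the `glob` crate (which the new code also relies on); the shell warning and error chaining of the real `rm_rf`/`rm_rf_glob` helpers are omitted.

    use std::error::Error;
    use std::fs;
    use std::path::Path;

    // Delete everything under `dir` matching `pattern`, e.g. "mycrate-*".
    fn rm_rf_matching(dir: &Path, pattern: &str) -> Result<(), Box<dyn Error>> {
        let full = dir.join(pattern);
        let full = full.to_str().ok_or("expected utf-8 path")?;
        for entry in glob::glob(full)? {
            let path = entry?;
            // symlink_metadata keeps symlinks from being followed and their
            // targets recursively deleted.
            let meta = fs::symlink_metadata(&path)?;
            if meta.is_dir() {
                fs::remove_dir_all(&path)?;
            } else {
                fs::remove_file(&path)?;
            }
        }
        Ok(())
    }

    fn main() -> Result<(), Box<dyn Error>> {
        // e.g. wipe all fingerprints for one package, whatever its hash suffix.
        rm_rf_matching(Path::new("target/debug/.fingerprint"), "mycrate-*")
    }
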
- let resolve_opts = ResolveOpts::new( - /*dev_deps*/ true, - &[], - /*all features*/ false, - /*default*/ true, - ); - let specs = opts - .spec - .iter() - .map(|spec| PackageIdSpec::parse(spec)) - .collect::>>()?; - let ws_resolve = ops::resolve_ws_with_opts( - ws, - &target_data, - &build_config.requested_kinds, - &resolve_opts, - &specs, - HasDevUnits::Yes, - )?; - let WorkspaceResolve { - pkg_set, - targeted_resolve: resolve, - resolved_features: features, - .. - } = ws_resolve; - - let interner = UnitInterner::new(); - let mut units = Vec::new(); - - for spec in opts.spec.iter() { - // Translate the spec to a Package - let pkgid = resolve.query(spec)?; - let pkg = pkg_set.get_one(pkgid)?; - - // Generate all relevant `Unit` targets for this package - for target in pkg.targets() { - for kind in build_config - .requested_kinds - .iter() - .chain(Some(&CompileKind::Host)) - { - for mode in CompileMode::all_modes() { - for unit_for in UnitFor::all_values() { - let profile = if mode.is_run_custom_build() { - profiles.get_profile_run_custom_build(&profiles.get_profile( - pkg.package_id(), - ws.is_member(pkg), - /*is_local*/ true, - *unit_for, - CompileMode::Build, - )) - } else { - profiles.get_profile( - pkg.package_id(), - ws.is_member(pkg), - /*is_local*/ true, - *unit_for, - *mode, - ) - }; - // Use unverified here since this is being more - // exhaustive than what is actually needed. - let features_for = unit_for.map_to_features_for(); - let features = - features.activated_features_unverified(pkg.package_id(), features_for); - units.push(interner.intern( - pkg, target, profile, *kind, *mode, features, /*is_std*/ false, - )); - } - } - } + + // Clean specific packages. + let requested_kinds = CompileKind::from_requested_targets(config, &opts.targets)?; + let target_data = RustcTargetData::new(ws, &requested_kinds)?; + let (pkg_set, resolve) = ops::resolve_ws(ws)?; + let prof_dir_name = profiles.get_dir_name(); + let host_layout = Layout::new(ws, None, &prof_dir_name)?; + // Convert requested kinds to a Vec of layouts. + let target_layouts: Vec<(CompileKind, Layout)> = requested_kinds + .into_iter() + .filter_map(|kind| match kind { + CompileKind::Target(target) => match Layout::new(ws, Some(target), &prof_dir_name) { + Ok(layout) => Some(Ok((kind, layout))), + Err(e) => Some(Err(e)), + }, + CompileKind::Host => None, + }) + .collect::>()?; + // A Vec of layouts. This is a little convoluted because there can only be + // one host_layout. + let layouts = if opts.targets.is_empty() { + vec![(CompileKind::Host, &host_layout)] + } else { + target_layouts + .iter() + .map(|(kind, layout)| (*kind, layout)) + .collect() + }; + // Create a Vec that also includes the host for things that need to clean both. + let layouts_with_host: Vec<(CompileKind, &Layout)> = + std::iter::once((CompileKind::Host, &host_layout)) + .chain(layouts.iter().map(|(k, l)| (*k, *l))) + .collect(); + + // Cleaning individual rustdoc crates is currently not supported. + // For example, the search index would need to be rebuilt to fully + // remove it (otherwise you're left with lots of broken links). + // Doc tests produce no output. + + // Get Packages for the specified specs. + let mut pkg_ids = Vec::new(); + for spec_str in opts.spec.iter() { + // Translate the spec to a Package. 
+ let spec = PackageIdSpec::parse(spec_str)?; + if spec.version().is_some() { + config.shell().warn(&format!( + "version qualifier in `-p {}` is ignored, \ + cleaning all versions of `{}` found", + spec_str, + spec.name() + ))?; + } + if spec.url().is_some() { + config.shell().warn(&format!( + "url qualifier in `-p {}` ignored, \ + cleaning all versions of `{}` found", + spec_str, + spec.name() + ))?; } + let matches: Vec<_> = resolve.iter().filter(|id| spec.matches(*id)).collect(); + if matches.is_empty() { + anyhow::bail!("package ID specification `{}` matched no packages", spec); + } + pkg_ids.extend(matches); } + let packages = pkg_set.get_many(pkg_ids)?; + + for pkg in packages { + let pkg_dir = format!("{}-*", pkg.name()); - let unit_graph = unit_dependencies::build_unit_dependencies( - ws, - &pkg_set, - &resolve, - &features, - None, - &units, - &Default::default(), - build_config.mode, - &target_data, - &profiles, - &interner, - )?; - let extra_args = HashMap::new(); - let bcx = BuildContext::new( - ws, - pkg_set, - &build_config, - profiles, - extra_args, - target_data, - units, - unit_graph, - )?; - let mut cx = Context::new(&bcx)?; - cx.prepare_units()?; - - for unit in &bcx.roots { - if unit.mode.is_doc() || unit.mode.is_doc_test() { - // Cleaning individual rustdoc crates is currently not supported. - // For example, the search index would need to be rebuilt to fully - // remove it (otherwise you're left with lots of broken links). - // Doc tests produce no output. - continue; + // Clean fingerprints. + for (_, layout) in &layouts_with_host { + rm_rf_glob(&layout.fingerprint().join(&pkg_dir), config)?; } - rm_rf(&cx.files().fingerprint_dir(unit), config)?; - if unit.target.is_custom_build() { - if unit.mode.is_run_custom_build() { - rm_rf(&cx.files().build_script_out_dir(unit), config)?; - } else { - rm_rf(&cx.files().build_script_dir(unit), config)?; + + for target in pkg.targets() { + if target.is_custom_build() { + // Get both the build_script_build and the output directory. + for (_, layout) in &layouts_with_host { + rm_rf_glob(&layout.build().join(&pkg_dir), config)?; + } + continue; } - continue; - } + let crate_name = target.crate_name(); + for &mode in &[ + CompileMode::Build, + CompileMode::Test, + CompileMode::Check { test: false }, + ] { + for (compile_kind, layout) in &layouts { + let triple = target_data.short_name(compile_kind); + + let (file_types, _unsupported) = target_data + .info(*compile_kind) + .rustc_outputs(mode, target.kind(), triple)?; + let (dir, uplift_dir) = if target.is_example() { + (layout.examples(), layout.examples()) + } else { + (layout.deps(), layout.dest()) + }; + for file_type in file_types { + // Some files include a hash in the filename, some don't. + let hashed_name = file_type.output_filename(target, Some("*")); + let unhashed_name = file_type.output_filename(target, None); + rm_rf_glob(&dir.join(&hashed_name), config)?; + rm_rf(&dir.join(&unhashed_name), config)?; + // Remove dep-info file generated by rustc. It is not tracked in + // file_types. It does not have a prefix. + let hashed_dep_info = dir.join(format!("{}-*.d", crate_name)); + let unhashed_dep_info = dir.join(format!("{}.d", crate_name)); + rm_rf_glob(&hashed_dep_info, config)?; + rm_rf(&unhashed_dep_info, config)?; - for output in cx.outputs(unit)?.iter() { - rm_rf(&output.path, config)?; - if let Some(ref dst) = output.hardlink { - rm_rf(dst, config)?; + // Remove the uplifted copy. 
+ let uplifted_path = uplift_dir.join(file_type.uplift_filename(target)); + rm_rf(&uplifted_path, config)?; + // Dep-info generated by Cargo itself. + let dep_info = uplifted_path.with_extension("d"); + rm_rf(&dep_info, config)?; + } + // TODO: what to do about build_script_build? + let incremental = layout.incremental().join(format!("{}-*", crate_name)); + rm_rf_glob(&incremental, config)?; + } } } } @@ -196,8 +183,19 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> { Ok(()) } +fn rm_rf_glob(pattern: &Path, config: &Config) -> CargoResult<()> { + // TODO: Display utf8 warning to user? Or switch to globset? + let pattern = pattern + .to_str() + .ok_or_else(|| anyhow::anyhow!("expected utf-8 path"))?; + for path in glob::glob(pattern)? { + rm_rf(&path?, config)?; + } + Ok(()) +} + fn rm_rf(path: &Path, config: &Config) -> CargoResult<()> { - let m = fs::metadata(path); + let m = fs::symlink_metadata(path); if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) { config .shell() diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs index 29e3abf018e..24ce6f901bb 100644 --- a/src/cargo/ops/cargo_compile.rs +++ b/src/cargo/ops/cargo_compile.rs @@ -832,9 +832,11 @@ fn generate_targets( for proposal in filter_targets(packages, Target::is_lib, false, mode) { let Proposal { target, pkg, .. } = proposal; if mode.is_doc_test() && !target.doctestable() { + let types = target.rustc_crate_types(); + let types_str: Vec<&str> = types.iter().map(|t| t.as_str()).collect(); ws.config().shell().warn(format!( "doc tests are not supported for crate type(s) `{}` in package `{}`", - target.rustc_crate_types().join(", "), + types_str.join(", "), pkg.name() ))?; } else { @@ -978,7 +980,10 @@ pub fn resolve_all_features( .proc_macro(); for dep in deps { let features_for = FeaturesFor::from_for_host(is_proc_macro || dep.is_build()); - for feature in resolved_features.activated_features_unverified(dep_id, features_for) { + for feature in resolved_features + .activated_features_unverified(dep_id, features_for) + .unwrap_or_default() + { features.insert(format!("{}/{}", dep.name_in_toml(), feature)); } } diff --git a/src/cargo/ops/cargo_fetch.rs b/src/cargo/ops/cargo_fetch.rs index a21c9443eb0..1e0d855d0d1 100644 --- a/src/cargo/ops/cargo_fetch.rs +++ b/src/cargo/ops/cargo_fetch.rs @@ -39,7 +39,7 @@ pub fn fetch<'a>( deps.iter().any(|d| { // If no target was specified then all dependencies are // fetched. 
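Aside: the `activated_features_unverified` change above splits feature lookup into a verified accessor (where a miss indicates a cargo bug) and an `Option`-returning one for callers such as `cargo clean` that probe packages speculatively; call sites then use `.unwrap_or_default()`. A simplified stand-in sketch of that API shape, with package names instead of `PackageId`:

    use std::collections::HashMap;

    // Simplified stand-in: package names instead of PackageId, &str features.
    struct ResolvedFeatures {
        activated: HashMap<&'static str, Vec<&'static str>>,
    }

    impl ResolvedFeatures {
        // Verified lookup: a miss here would indicate a bug in cargo itself.
        fn activated_features(&self, pkg: &'static str) -> Vec<&'static str> {
            self.activated_features_unverified(pkg)
                .expect("activated_features for invalid package")
        }

        // Unverified lookup for callers that probe packages speculatively.
        fn activated_features_unverified(&self, pkg: &'static str) -> Option<Vec<&'static str>> {
            self.activated.get(pkg).cloned()
        }
    }

    fn main() {
        let mut activated = HashMap::new();
        activated.insert("serde", vec!["derive"]);
        let features = ResolvedFeatures { activated };

        // A caller that cannot know whether the package was ever activated:
        let feats = features
            .activated_features_unverified("rand")
            .unwrap_or_default();
        assert!(feats.is_empty());
        assert_eq!(features.activated_features("serde"), vec!["derive"]);
    }
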
- if options.targets.len() == 0 { + if options.targets.is_empty() { return true; } diff --git a/src/cargo/ops/cargo_install.rs b/src/cargo/ops/cargo_install.rs index ac2fff45f04..9434b1b3def 100644 --- a/src/cargo/ops/cargo_install.rs +++ b/src/cargo/ops/cargo_install.rs @@ -4,17 +4,17 @@ use std::sync::Arc; use std::{env, fs}; use anyhow::{bail, format_err}; +use semver::VersionReq; use tempfile::Builder as TempFileBuilder; use crate::core::compiler::Freshness; use crate::core::compiler::{CompileKind, DefaultExecutor, Executor}; use crate::core::{Dependency, Edition, Package, PackageId, Source, SourceId, Workspace}; -use crate::ops; use crate::ops::common_for_install_and_uninstall::*; use crate::sources::{GitSource, PathSource, SourceConfigMap}; use crate::util::errors::{CargoResult, CargoResultExt}; use crate::util::{paths, Config, Filesystem, Rustc, ToSemver}; -use semver::VersionReq; +use crate::{drop_println, ops}; struct Transaction { bins: Vec, @@ -406,9 +406,7 @@ fn install_one( if !source_id.is_path() && fs::rename(src, &dst).is_ok() { continue; } - fs::copy(src, &dst).chain_err(|| { - format_err!("failed to copy `{}` to `{}`", src.display(), dst.display()) - })?; + paths::copy(src, &dst)?; } let (to_replace, to_install): (Vec<&str>, Vec<&str>) = binaries @@ -686,9 +684,9 @@ pub fn install_list(dst: Option<&str>, config: &Config) -> CargoResult<()> { let root = resolve_root(dst, config)?; let tracker = InstallTracker::load(config, &root)?; for (k, v) in tracker.all_installed_bins() { - println!("{}:", k); + drop_println!(config, "{}:", k); for bin in v { - println!(" {}", bin); + drop_println!(config, " {}", bin); } } Ok(()) diff --git a/src/cargo/ops/cargo_new.rs b/src/cargo/ops/cargo_new.rs index bb6262c5f2e..fbd2a1637b3 100644 --- a/src/cargo/ops/cargo_new.rs +++ b/src/cargo/ops/cargo_new.rs @@ -9,7 +9,6 @@ use serde::Deserialize; use std::collections::BTreeMap; use std::env; use std::fmt; -use std::fs; use std::io::{BufRead, BufReader, ErrorKind}; use std::path::{Path, PathBuf}; use std::process::Command; @@ -562,10 +561,10 @@ fn write_ignore_file( VersionControl::NoVcs => return Ok("".to_string()), }; - let ignore: String = match fs::File::open(&fp_ignore) { - Err(why) => match why.kind() { - ErrorKind::NotFound => list.format_new(vcs), - _ => return Err(anyhow::format_err!("{}", why)), + let ignore: String = match paths::open(&fp_ignore) { + Err(err) => match err.downcast_ref::() { + Some(io_err) if io_err.kind() == ErrorKind::NotFound => list.format_new(vcs), + _ => return Err(err), }, Ok(file) => list.format_existing(BufReader::new(file), vcs), }; diff --git a/src/cargo/ops/cargo_output_metadata.rs b/src/cargo/ops/cargo_output_metadata.rs index d10a6a4f3c9..e6261d3a927 100644 --- a/src/cargo/ops/cargo_output_metadata.rs +++ b/src/cargo/ops/cargo_output_metadata.rs @@ -176,7 +176,7 @@ fn build_resolve_graph_r( let deps: Vec = resolve .deps(pkg_id) .filter(|(_dep_id, deps)| { - if requested_kinds == &[CompileKind::Host] { + if requested_kinds == [CompileKind::Host] { true } else { requested_kinds.iter().any(|kind| { diff --git a/src/cargo/ops/cargo_package.rs b/src/cargo/ops/cargo_package.rs index b84e6b3caf1..4b59afdae1e 100644 --- a/src/cargo/ops/cargo_package.rs +++ b/src/cargo/ops/cargo_package.rs @@ -15,12 +15,12 @@ use tar::{Archive, Builder, EntryType, Header}; use crate::core::compiler::{BuildConfig, CompileMode, DefaultExecutor, Executor}; use crate::core::{Feature, Shell, Verbosity, Workspace}; use crate::core::{Package, PackageId, PackageSet, Resolve, Source, 
SourceId}; -use crate::ops; use crate::sources::PathSource; use crate::util::errors::{CargoResult, CargoResultExt}; use crate::util::paths; use crate::util::toml::TomlManifest; use crate::util::{self, restricted_names, Config, FileLock}; +use crate::{drop_println, ops}; pub struct PackageOpts<'cfg> { pub config: &'cfg Config, @@ -102,7 +102,7 @@ pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult