diff --git a/src/cargo/core/compiler/build_context/mod.rs b/src/cargo/core/compiler/build_context/mod.rs
index bf3e1c4c98c..8ce24a0f28d 100644
--- a/src/cargo/core/compiler/build_context/mod.rs
+++ b/src/cargo/core/compiler/build_context/mod.rs
@@ -1,4 +1,3 @@
-use crate::core::compiler::unit::UnitInterner;
 use crate::core::compiler::{BuildConfig, BuildOutput, CompileKind, Unit};
 use crate::core::profiles::Profiles;
 use crate::core::{InternedString, Workspace};
@@ -8,6 +7,7 @@ use crate::util::errors::CargoResult;
 use crate::util::Rustc;
 use std::collections::HashMap;
 use std::path::PathBuf;
+use std::rc::Rc;
 use std::str;
 
 mod target_info;
@@ -27,13 +27,10 @@ pub struct BuildContext<'a, 'cfg> {
     pub profiles: Profiles,
     pub build_config: &'a BuildConfig,
     /// Extra compiler args for either `rustc` or `rustdoc`.
-    pub extra_compiler_args: HashMap<Unit<'a>, Vec<String>>,
+    pub extra_compiler_args: HashMap<Rc<Unit>, Vec<String>>,
     /// Package downloader.
     pub packages: &'a PackageSet<'cfg>,
-    /// Source of interning new units as they're created.
-    pub units: &'a UnitInterner<'a>,
-
     /// Information about rustc and the target platform.
     pub target_data: RustcTargetData,
 }
@@ -45,8 +42,7 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
         config: &'cfg Config,
         build_config: &'a BuildConfig,
         profiles: Profiles,
-        units: &'a UnitInterner<'a>,
-        extra_compiler_args: HashMap<Unit<'a>, Vec<String>>,
+        extra_compiler_args: HashMap<Rc<Unit>, Vec<String>>,
         target_data: RustcTargetData,
     ) -> CargoResult<BuildContext<'a, 'cfg>> {
         Ok(BuildContext {
@@ -56,7 +52,6 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
             build_config,
             profiles,
             extra_compiler_args,
-            units,
             target_data,
         })
     }
@@ -89,11 +84,11 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
         self.build_config.jobs
     }
 
-    pub fn rustflags_args(&self, unit: &Unit<'_>) -> &[String] {
+    pub fn rustflags_args(&self, unit: &Unit) -> &[String] {
         &self.target_data.info(unit.kind).rustflags
     }
 
-    pub fn rustdocflags_args(&self, unit: &Unit<'_>) -> &[String] {
+    pub fn rustdocflags_args(&self, unit: &Unit) -> &[String] {
         &self.target_data.info(unit.kind).rustdocflags
     }
 
@@ -101,7 +96,7 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
         pkg.source_id().is_path() || self.config.extra_verbose()
     }
 
-    pub fn extra_args_for(&self, unit: &Unit<'a>) -> Option<&Vec<String>> {
+    pub fn extra_args_for(&self, unit: &Unit) -> Option<&Vec<String>> {
        self.extra_compiler_args.get(unit)
    }
 
diff --git a/src/cargo/core/compiler/build_plan.rs b/src/cargo/core/compiler/build_plan.rs
index 48072ba7aeb..a257c3095e5 100644
--- a/src/cargo/core/compiler/build_plan.rs
+++ b/src/cargo/core/compiler/build_plan.rs
@@ -45,7 +45,7 @@ struct SerializedBuildPlan {
 }
 
 impl Invocation {
-    pub fn new(unit: &Unit<'_>, deps: Vec<usize>) -> Invocation {
+    pub fn new(unit: &Unit, deps: Vec<usize>) -> Invocation {
         let id = unit.pkg.package_id();
         Invocation {
             package_name: id.name().to_string(),
@@ -109,7 +109,7 @@ impl BuildPlan {
         }
     }
 
-    pub fn add<'a>(&mut self, cx: &Context<'a, '_>, unit: &Unit<'a>) -> CargoResult<()> {
+    pub fn add<'a>(&mut self, cx: &Context<'a, '_>, unit: &Unit) -> CargoResult<()> {
         let id = self.plan.invocations.len();
         self.invocation_map.insert(unit.buildkey(), id);
         let deps = cx
diff --git a/src/cargo/core/compiler/compilation.rs b/src/cargo/core/compiler/compilation.rs
index 58a6c39d2da..7563880661d 100644
--- a/src/cargo/core/compiler/compilation.rs
+++ b/src/cargo/core/compiler/compilation.rs
@@ -2,6 +2,7 @@ use std::collections::{BTreeSet, HashMap, HashSet};
 use std::env;
 use std::ffi::{OsStr, OsString};
 use std::path::PathBuf;
+use std::rc::Rc;
 
 use cargo_platform::CfgExpr;
 use semver::Version;
 
@@ -14,9 +15,9 @@ use crate::util::{self, config, join_paths, process, CargoResult, Config, Proces
 /// Structure with enough information to run `rustdoc --test`.
 pub struct Doctest {
     /// The package being doc-tested.
-    pub package: Package,
+    pub package: Rc<Package>,
     /// The target being tested (currently always the package's lib).
-    pub target: Target,
+    pub target: Rc<Target>,
     /// Arguments needed to pass to rustdoc to run this test.
     pub args: Vec<OsString>,
     /// Whether or not -Zunstable-options is needed.
@@ -27,7 +28,7 @@ pub struct Compilation<'cfg> {
     /// An array of all tests created during this compilation.
     /// `(package, target, path_to_test_exe)`
-    pub tests: Vec<(Package, Target, PathBuf)>,
+    pub tests: Vec<(Rc<Package>, Rc<Target>, PathBuf)>,
 
     /// An array of all binaries created.
     pub binaries: Vec<PathBuf>,
diff --git a/src/cargo/core/compiler/context/compilation_files.rs b/src/cargo/core/compiler/context/compilation_files.rs
index 8df63c4ce28..8402b51f539 100644
--- a/src/cargo/core/compiler/context/compilation_files.rs
+++ b/src/cargo/core/compiler/context/compilation_files.rs
@@ -3,6 +3,7 @@ use std::env;
 use std::fmt;
 use std::hash::{Hash, Hasher, SipHasher};
 use std::path::{Path, PathBuf};
+use std::rc::Rc;
 use std::sync::Arc;
 
 use lazycell::LazyCell;
@@ -67,15 +68,15 @@ pub struct CompilationFiles<'a, 'cfg> {
     export_dir: Option<PathBuf>,
     /// The root targets requested by the user on the command line (does not
     /// include dependencies).
-    roots: Vec<Unit<'a>>,
+    roots: Vec<Rc<Unit>>,
     ws: &'a Workspace<'cfg>,
     /// Metadata hash to use for each unit.
     ///
     /// `None` if the unit should not use a metadata data hash (like rustdoc,
     /// or some dylibs).
-    metas: HashMap<Unit<'a>, Option<Metadata>>,
+    metas: HashMap<Rc<Unit>, Option<Metadata>>,
     /// For each Unit, a list all files produced.
-    outputs: HashMap<Unit<'a>, LazyCell<Arc<Vec<OutputFile>>>>,
+    outputs: HashMap<Rc<Unit>, LazyCell<Arc<Vec<OutputFile>>>>,
 }
 
 /// Info about a single file emitted by the compiler.
@@ -104,7 +105,7 @@ impl OutputFile {
 
 impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
     pub(super) fn new(
-        roots: &[Unit<'a>],
+        roots: &[Rc<Unit>],
         host: Layout,
         target: HashMap<CompileKind, Layout>,
         export_dir: Option<PathBuf>,
@@ -146,20 +147,20 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
     ///
     /// Returns `None` if the unit should not use a metadata data hash (like
     /// rustdoc, or some dylibs).
-    pub fn metadata(&self, unit: &Unit<'a>) -> Option<Metadata> {
+    pub fn metadata(&self, unit: &Unit) -> Option<Metadata> {
         self.metas[unit]
     }
 
     /// Gets the short hash based only on the `PackageId`.
     /// Used for the metadata when `metadata` returns `None`.
-    pub fn target_short_hash(&self, unit: &Unit<'_>) -> String {
+    pub fn target_short_hash(&self, unit: &Unit) -> String {
         let hashable = unit.pkg.package_id().stable_hash(self.ws.root());
         util::short_hash(&hashable)
     }
 
     /// Returns the appropriate output directory for the specified package and
     /// target.
-    pub fn out_dir(&self, unit: &Unit<'a>) -> PathBuf {
+    pub fn out_dir(&self, unit: &Unit) -> PathBuf {
         if unit.mode.is_doc() {
             self.layout(unit.kind).doc().to_path_buf()
         } else if unit.mode.is_doc_test() {
@@ -179,7 +180,7 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
     }
 
     /// Directory name to use for a package in the form `NAME-HASH`.
-    pub fn pkg_dir(&self, unit: &Unit<'a>) -> String {
+    pub fn pkg_dir(&self, unit: &Unit) -> String {
         let name = unit.pkg.package_id().name();
         match self.metas[unit] {
             Some(ref meta) => format!("{}-{}", name, meta),
@@ -199,24 +200,24 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
 
     /// Returns the directories where Rust crate dependencies are found for the
     /// specified unit.
-    pub fn deps_dir(&self, unit: &Unit<'_>) -> &Path {
+    pub fn deps_dir(&self, unit: &Unit) -> &Path {
         self.layout(unit.kind).deps()
     }
 
     /// Directory where the fingerprint for the given unit should go.
-    pub fn fingerprint_dir(&self, unit: &Unit<'a>) -> PathBuf {
+    pub fn fingerprint_dir(&self, unit: &Unit) -> PathBuf {
         let dir = self.pkg_dir(unit);
         self.layout(unit.kind).fingerprint().join(dir)
     }
 
     /// Path where compiler output is cached.
-    pub fn message_cache_path(&self, unit: &Unit<'a>) -> PathBuf {
+    pub fn message_cache_path(&self, unit: &Unit) -> PathBuf {
         self.fingerprint_dir(unit).join("output")
     }
 
     /// Returns the directory where a compiled build script is stored.
     /// `/path/to/target/{debug,release}/build/PKG-HASH`
-    pub fn build_script_dir(&self, unit: &Unit<'a>) -> PathBuf {
+    pub fn build_script_dir(&self, unit: &Unit) -> PathBuf {
         assert!(unit.target.is_custom_build());
         assert!(!unit.mode.is_run_custom_build());
         assert!(self.metas.contains_key(unit));
@@ -227,7 +228,7 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
     /// Returns the directory where information about running a build script
     /// is stored.
     /// `/path/to/target/{debug,release}/build/PKG-HASH`
-    pub fn build_script_run_dir(&self, unit: &Unit<'a>) -> PathBuf {
+    pub fn build_script_run_dir(&self, unit: &Unit) -> PathBuf {
         assert!(unit.target.is_custom_build());
         assert!(unit.mode.is_run_custom_build());
         let dir = self.pkg_dir(unit);
@@ -236,12 +237,12 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
 
     /// Returns the "OUT_DIR" directory for running a build script.
     /// `/path/to/target/{debug,release}/build/PKG-HASH/out`
-    pub fn build_script_out_dir(&self, unit: &Unit<'a>) -> PathBuf {
+    pub fn build_script_out_dir(&self, unit: &Unit) -> PathBuf {
         self.build_script_run_dir(unit).join("out")
     }
 
     /// Returns the file stem for a given target/profile combo (with metadata).
-    pub fn file_stem(&self, unit: &Unit<'a>) -> String {
+    pub fn file_stem(&self, unit: &Unit) -> String {
         match self.metas[unit] {
             Some(ref metadata) => format!("{}-{}", unit.target.crate_name(), metadata),
             None => self.bin_stem(unit),
@@ -280,7 +281,7 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
     /// Returns the filenames that the given unit will generate.
     pub(super) fn outputs(
         &self,
-        unit: &Unit<'a>,
+        unit: &Unit,
         bcx: &BuildContext<'a, 'cfg>,
     ) -> CargoResult<Arc<Vec<OutputFile>>> {
         self.outputs[unit]
@@ -289,7 +290,7 @@
     }
 
     /// Returns the bin filename for a given target, without extension and metadata.
-    fn bin_stem(&self, unit: &Unit<'_>) -> String {
+    fn bin_stem(&self, unit: &Unit) -> String {
         if unit.target.allows_underscores() {
             unit.target.name().to_string()
         } else {
@@ -310,7 +311,7 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
     ///
     /// Returns an `Option` because in some cases we don't want to link
     /// (eg a dependent lib).
-    fn link_stem(&self, unit: &Unit<'a>) -> Option<(PathBuf, String)> {
+    fn link_stem(&self, unit: &Unit) -> Option<(PathBuf, String)> {
         let out_dir = self.out_dir(unit);
         let bin_stem = self.bin_stem(unit); // Stem without metadata.
         let file_stem = self.file_stem(unit); // Stem with metadata.
@@ -320,7 +321,7 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
         // we don't want to link it up.
         if out_dir.ends_with("deps") {
             // Don't lift up library dependencies.
-            if unit.target.is_bin() || self.roots.contains(unit) {
+            if unit.target.is_bin() || self.roots.iter().any(|root| &**root == unit) {
                 Some((
                     out_dir.parent().unwrap().to_owned(),
                     if unit.mode.is_any_test() {
@@ -343,7 +344,7 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
 
     fn calc_outputs(
         &self,
-        unit: &Unit<'a>,
+        unit: &Unit,
         bcx: &BuildContext<'a, 'cfg>,
     ) -> CargoResult<Arc<Vec<OutputFile>>> {
         let ret = match unit.mode {
@@ -393,7 +394,7 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
 
     fn calc_outputs_rustc(
         &self,
-        unit: &Unit<'a>,
+        unit: &Unit,
         bcx: &BuildContext<'a, 'cfg>,
     ) -> CargoResult<Vec<OutputFile>> {
         let mut ret = Vec::new();
@@ -508,14 +509,14 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
 }
 
 fn metadata_of<'a, 'cfg>(
-    unit: &Unit<'a>,
+    unit: &Rc<Unit>,
     cx: &Context<'a, 'cfg>,
-    metas: &mut HashMap<Unit<'a>, Option<Metadata>>,
+    metas: &mut HashMap<Rc<Unit>, Option<Metadata>>,
 ) -> Option<Metadata> {
     if !metas.contains_key(unit) {
-        let meta = compute_metadata(unit, cx, metas);
-        metas.insert(*unit, meta);
-        for dep in cx.unit_deps(unit) {
+        let meta = compute_metadata(&unit, cx, metas);
+        metas.insert(Rc::clone(unit), meta);
+        for dep in cx.unit_deps(&unit) {
             metadata_of(&dep.unit, cx, metas);
         }
     }
@@ -523,9 +524,9 @@ fn metadata_of<'a, 'cfg>(
 }
 
 fn compute_metadata<'a, 'cfg>(
-    unit: &Unit<'a>,
+    unit: &Unit,
     cx: &Context<'a, 'cfg>,
-    metas: &mut HashMap<Unit<'a>, Option<Metadata>>,
+    metas: &mut HashMap<Rc<Unit>, Option<Metadata>>,
 ) -> Option<Metadata> {
     if unit.mode.is_doc_test() {
         // Doc tests do not have metadata.
@@ -616,7 +617,7 @@ fn compute_metadata<'a, 'cfg>(
 
     bcx.rustc().verbose_version.hash(&mut hasher);
 
-    if cx.bcx.ws.is_member(unit.pkg) {
+    if cx.bcx.ws.is_member(&unit.pkg) {
         // This is primarily here for clippy. This ensures that the clippy
         // artifacts are separate from the `check` ones.
         if let Some(path) = &cx.bcx.rustc().workspace_wrapper {
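Reviewer note on the lookups above: expressions like `self.metas[unit]` and `metas.contains_key(unit)` keep taking a plain `&Unit` even though the maps are now keyed by `Rc<Unit>`, because the standard library provides `impl<T> Borrow<T> for Rc<T>`. A minimal, self-contained sketch of that mechanism, using a hypothetical `MockUnit` in place of cargo's `Unit`:

use std::collections::HashMap;
use std::rc::Rc;

#[derive(Debug, Hash, PartialEq, Eq)]
struct MockUnit {
    pkg_name: String,
}

fn main() {
    let unit = Rc::new(MockUnit {
        pkg_name: "foo".to_string(),
    });
    let mut metas: HashMap<Rc<MockUnit>, Option<u64>> = HashMap::new();
    metas.insert(Rc::clone(&unit), Some(42));

    // Query with a plain `&MockUnit`; `Rc<T>: Borrow<T>` makes this legal.
    let borrowed: &MockUnit = &unit;
    assert!(metas.contains_key(borrowed));
    assert_eq!(metas[borrowed], Some(42));
}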
diff --git a/src/cargo/core/compiler/context/mod.rs b/src/cargo/core/compiler/context/mod.rs
index 29f39f3b7e1..e5ac973eed1 100644
--- a/src/cargo/core/compiler/context/mod.rs
+++ b/src/cargo/core/compiler/context/mod.rs
@@ -1,6 +1,7 @@
 #![allow(deprecated)]
 use std::collections::{BTreeSet, HashMap, HashSet};
 use std::path::PathBuf;
+use std::rc::Rc;
 use std::sync::{Arc, Mutex};
 
 use filetime::FileTime;
@@ -34,18 +35,18 @@ pub struct Context<'a, 'cfg> {
     /// Dependencies (like rerun-if-changed) declared by a build script.
     /// This is *only* populated from the output from previous runs.
     /// If the build script hasn't ever been run, then it must be run.
-    pub build_explicit_deps: HashMap<Unit<'a>, BuildDeps>,
+    pub build_explicit_deps: HashMap<Rc<Unit>, BuildDeps>,
     /// Fingerprints used to detect if a unit is out-of-date.
-    pub fingerprints: HashMap<Unit<'a>, Arc<Fingerprint>>,
+    pub fingerprints: HashMap<Rc<Unit>, Arc<Fingerprint>>,
     /// Cache of file mtimes to reduce filesystem hits.
     pub mtime_cache: HashMap<PathBuf, FileTime>,
     /// A set used to track which units have been compiled.
     /// A unit may appear in the job graph multiple times as a dependency of
     /// multiple packages, but it only needs to run once.
-    pub compiled: HashSet<Unit<'a>>,
+    pub compiled: HashSet<Rc<Unit>>,
     /// Linking information for each `Unit`.
     /// See `build_map` for details.
-    pub build_scripts: HashMap<Unit<'a>, Arc<BuildScripts>>,
+    pub build_scripts: HashMap<Rc<Unit>, Arc<BuildScripts>>,
     /// Job server client to manage concurrency with other processes.
     pub jobserver: Client,
     /// "Primary" packages are the ones the user selected on the command-line
@@ -53,7 +54,7 @@ pub struct Context<'a, 'cfg> {
     /// based on the current directory and the default workspace members.
     primary_packages: HashSet<PackageId>,
     /// The dependency graph of units to compile.
-    unit_dependencies: UnitGraph<'a>,
+    unit_dependencies: UnitGraph,
     /// An abstraction of the files and directories that will be generated by
     /// the compilation. This is `None` until after `unit_dependencies` has
     /// been computed.
@@ -68,19 +69,19 @@ pub struct Context<'a, 'cfg> {
     /// A set of units which are compiling rlibs and are expected to produce
     /// metadata files in addition to the rlib itself. This is only filled in
     /// when `pipelining` above is enabled.
-    rmeta_required: HashSet<Unit<'a>>,
+    rmeta_required: HashSet<Rc<Unit>>,
 
     /// When we're in jobserver-per-rustc process mode, this keeps those
     /// jobserver clients for each Unit (which eventually becomes a rustc
     /// process).
-    pub rustc_clients: HashMap<Unit<'a>, Client>,
+    pub rustc_clients: HashMap<Rc<Unit>, Client>,
 }
 
 impl<'a, 'cfg> Context<'a, 'cfg> {
     pub fn new(
         config: &'cfg Config,
         bcx: &'a BuildContext<'a, 'cfg>,
-        unit_dependencies: UnitGraph<'a>,
+        unit_dependencies: UnitGraph,
         default_kind: CompileKind,
     ) -> CargoResult<Self> {
         // Load up the jobserver that we'll use to manage our parallelism. This
@@ -126,7 +127,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
     /// about the result of compilation.
     pub fn compile(
         mut self,
-        units: &[Unit<'a>],
+        units: &'a [Rc<Unit>],
         export_dir: Option<PathBuf>,
         exec: &Arc<dyn Executor>,
     ) -> CargoResult<Compilation<'cfg>> {
@@ -266,7 +267,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
     }
 
     /// Returns the executable for the specified unit (if any).
-    pub fn get_executable(&mut self, unit: &Unit<'a>) -> CargoResult<Option<PathBuf>> {
+    pub fn get_executable(&mut self, unit: &Unit) -> CargoResult<Option<PathBuf>> {
         for output in self.outputs(unit)?.iter() {
             if output.flavor == FileFlavor::DebugInfo {
                 continue;
@@ -285,7 +286,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
     pub fn prepare_units(
         &mut self,
         export_dir: Option<PathBuf>,
-        units: &[Unit<'a>],
+        units: &[Rc<Unit>],
     ) -> CargoResult<()> {
         let dest = self.bcx.profiles.get_dir_name();
         let host_layout = Layout::new(self.bcx.ws, None, &dest)?;
@@ -338,49 +339,51 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
     }
 
     /// Returns the filenames that the given unit will generate.
-    pub fn outputs(&self, unit: &Unit<'a>) -> CargoResult<Arc<Vec<OutputFile>>> {
+    pub fn outputs(&self, unit: &Unit) -> CargoResult<Arc<Vec<OutputFile>>> {
         self.files.as_ref().unwrap().outputs(unit, self.bcx)
     }
 
     /// Direct dependencies for the given unit.
-    pub fn unit_deps(&self, unit: &Unit<'a>) -> &[UnitDep<'a>] {
+    pub fn unit_deps(&self, unit: &Unit) -> &[UnitDep] {
         &self.unit_dependencies[unit]
     }
 
     /// Returns the RunCustomBuild Unit associated with the given Unit.
     ///
     /// If the package does not have a build script, this returns None.
-    pub fn find_build_script_unit(&self, unit: Unit<'a>) -> Option<Unit<'a>> {
+    pub fn find_build_script_unit<'u>(&'u self, unit: &'u Unit) -> Option<&'u Unit> {
+        // TODO: Consider returning Rc<Unit> to avoid weird lifetime annotation?
+        // Or take &Rc<Unit> as parameter, and return &Rc<Unit>.
         if unit.mode.is_run_custom_build() {
             return Some(unit);
         }
-        self.unit_dependencies[&unit]
+        self.unit_dependencies[unit]
             .iter()
             .find(|unit_dep| {
                 unit_dep.unit.mode.is_run_custom_build()
                     && unit_dep.unit.pkg.package_id() == unit.pkg.package_id()
             })
-            .map(|unit_dep| unit_dep.unit)
+            .map(|unit_dep| &*unit_dep.unit)
     }
 
     /// Returns the metadata hash for the RunCustomBuild Unit associated with
     /// the given unit.
     ///
     /// If the package does not have a build script, this returns None.
-    pub fn find_build_script_metadata(&self, unit: Unit<'a>) -> Option<Metadata> {
+    pub fn find_build_script_metadata(&self, unit: &Unit) -> Option<Metadata> {
         let script_unit = self.find_build_script_unit(unit)?;
         Some(self.get_run_build_script_metadata(&script_unit))
     }
 
     /// Returns the metadata hash for a RunCustomBuild unit.
-    pub fn get_run_build_script_metadata(&self, unit: &Unit<'a>) -> Metadata {
+    pub fn get_run_build_script_metadata(&self, unit: &Unit) -> Metadata {
         assert!(unit.mode.is_run_custom_build());
         self.files()
             .metadata(unit)
             .expect("build script should always have hash")
     }
 
-    pub fn is_primary_package(&self, unit: &Unit<'a>) -> bool {
+    pub fn is_primary_package(&self, unit: &Unit) -> bool {
         self.primary_packages.contains(&unit.pkg.package_id())
     }
 
@@ -398,21 +401,20 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
     fn check_collistions(&self) -> CargoResult<()> {
         let mut output_collisions = HashMap::new();
-        let describe_collision =
-            |unit: &Unit<'_>, other_unit: &Unit<'_>, path: &PathBuf| -> String {
-                format!(
-                    "The {} target `{}` in package `{}` has the same output \
+        let describe_collision = |unit: &Unit, other_unit: &Unit, path: &PathBuf| -> String {
+            format!(
+                "The {} target `{}` in package `{}` has the same output \
                  filename as the {} target `{}` in package `{}`.\n\
                  Colliding filename is: {}\n",
-                    unit.target.kind().description(),
-                    unit.target.name(),
-                    unit.pkg.package_id(),
-                    other_unit.target.kind().description(),
-                    other_unit.target.name(),
-                    other_unit.pkg.package_id(),
-                    path.display()
-                )
-            };
+                unit.target.kind().description(),
+                unit.target.name(),
+                unit.pkg.package_id(),
+                other_unit.target.kind().description(),
+                other_unit.target.name(),
+                other_unit.pkg.package_id(),
+                path.display()
+            )
+        };
         let suggestion =
             "Consider changing their names to be unique or compiling them separately.\n\
              This may become a hard error in the future; see \
             <https://github.com/rust-lang/cargo/issues/6313>.";
@@ -420,8 +422,8 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
         let rustdoc_suggestion =
             "This is a known bug where multiple crates with the same name use\n\
             the same path; see <https://github.com/rust-lang/cargo/issues/6313>.";
-        let report_collision = |unit: &Unit<'_>,
-                                other_unit: &Unit<'_>,
+        let report_collision = |unit: &Unit,
+                                other_unit: &Unit,
                                 path: &PathBuf,
                                 suggestion: &str|
          -> CargoResult<()> {
@@ -504,7 +506,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
         for (key, deps) in self.unit_dependencies.iter() {
             for dep in deps {
                 if self.only_requires_rmeta(key, &dep.unit) {
-                    self.rmeta_required.insert(dep.unit);
+                    self.rmeta_required.insert(Rc::clone(&dep.unit));
                 }
             }
         }
@@ -512,7 +514,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
 
     /// Returns whether when `parent` depends on `dep` if it only requires the
     /// metadata file from `dep`.
-    pub fn only_requires_rmeta(&self, parent: &Unit<'a>, dep: &Unit<'a>) -> bool {
+    pub fn only_requires_rmeta(&self, parent: &Unit, dep: &Unit) -> bool {
         // this is only enabled when pipelining is enabled
         self.pipelining
             // We're only a candidate for requiring an `rmeta` file if we
@@ -527,7 +529,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
 
     /// Returns whether when `unit` is built whether it should emit metadata as
     /// well because some compilations rely on that.
-    pub fn rmeta_required(&self, unit: &Unit<'a>) -> bool {
+    pub fn rmeta_required(&self, unit: &Unit) -> bool {
         self.rmeta_required.contains(unit) || self.bcx.config.cli_unstable().timings.is_some()
     }
 
diff --git a/src/cargo/core/compiler/custom_build.rs b/src/cargo/core/compiler/custom_build.rs
index 6853bd3a758..d369c036d12 100644
--- a/src/cargo/core/compiler/custom_build.rs
+++ b/src/cargo/core/compiler/custom_build.rs
@@ -10,6 +10,7 @@ use cargo_platform::Cfg;
 use std::collections::hash_map::{Entry, HashMap};
 use std::collections::{BTreeSet, HashSet};
 use std::path::{Path, PathBuf};
+use std::rc::Rc;
 use std::str;
 use std::sync::{Arc, Mutex};
 
@@ -102,7 +103,7 @@ pub struct BuildDeps {
 }
 
 /// Prepares a `Work` that executes the target as a custom build script.
-pub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<Job> {
+pub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Rc<Unit>) -> CargoResult<Job> {
     let _p = profile::start(format!(
         "build script prepare: {}/{}",
         unit.pkg,
@@ -147,7 +148,7 @@ fn emit_build_output(
     state.stdout(msg);
 }
 
-fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<Job> {
+fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Rc<Unit>) -> CargoResult<Job> {
     assert!(unit.mode.is_run_custom_build());
     let bcx = &cx.bcx;
     let dependencies = cx.unit_deps(unit);
@@ -177,7 +178,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes
     // `Profiles::get_profile_run_custom_build` so that those flags get
     // carried over.
     let to_exec = to_exec.into_os_string();
-    let mut cmd = cx.compilation.host_process(to_exec, unit.pkg)?;
+    let mut cmd = cx.compilation.host_process(to_exec, &unit.pkg)?;
     let debug = unit.profile.debuginfo.unwrap_or(0) != 0;
     cmd.env("OUT_DIR", &script_out_dir)
         .env("CARGO_MANIFEST_DIR", unit.pkg.root())
@@ -619,7 +620,7 @@ impl BuildOutput {
 
 fn prepare_metabuild<'a, 'cfg>(
     cx: &Context<'a, 'cfg>,
-    unit: &Unit<'a>,
+    unit: &Unit,
     deps: &[String],
 ) -> CargoResult<()> {
     let mut output = Vec::new();
@@ -681,7 +682,7 @@ impl BuildDeps {
 ///
 /// The given set of units to this function is the initial set of
 /// targets/profiles which are being built.
-pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> CargoResult<()> {
+pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Rc<Unit>]) -> CargoResult<()> {
     let mut ret = HashMap::new();
     for unit in units {
         build(&mut ret, cx, unit)?;
@@ -693,9 +694,9 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> Ca
     // Recursive function to build up the map we're constructing. This function
     // memoizes all of its return values as it goes along.
     fn build<'a, 'b, 'cfg>(
-        out: &'a mut HashMap<Unit<'b>, BuildScripts>,
+        out: &'a mut HashMap<Rc<Unit>, BuildScripts>,
         cx: &mut Context<'b, 'cfg>,
-        unit: &Unit<'b>,
+        unit: &Rc<Unit>,
     ) -> CargoResult<&'a BuildScripts> {
         // Do a quick pre-flight check to see if we've already calculated the
         // set of dependencies.
@@ -722,7 +723,7 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> Ca
         // If a package has a build script, add itself as something to inspect for linking.
         if !unit.target.is_custom_build() && unit.pkg.has_custom_build() {
             let script_meta = cx
-                .find_build_script_metadata(*unit)
+                .find_build_script_metadata(unit)
                 .expect("has_custom_build should have RunCustomBuild");
             add_to_link(&mut ret, unit.pkg.package_id(), script_meta);
         }
@@ -736,7 +737,12 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> Ca
         // to rustc invocation caching schemes, so be sure to generate the same
         // set of build script dependency orderings via sorting the targets that
         // come out of the `Context`.
-        let mut dependencies: Vec<Unit<'b>> = cx.unit_deps(unit).iter().map(|d| d.unit).collect();
+        let mut dependencies: Vec<Rc<Unit>> = cx
+            .unit_deps(unit)
+            .iter()
+            .map(|d| Rc::clone(&d.unit))
+            .collect();
+        // TODO: This sort can maybe be removed since the UnitGraph is sorted.
         dependencies.sort_by_key(|u| u.pkg.package_id());
 
         for dep_unit in dependencies.iter() {
@@ -751,7 +757,7 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> Ca
             }
         }
 
-        match out.entry(*unit) {
+        match out.entry(Rc::clone(unit)) {
             Entry::Vacant(entry) => Ok(entry.insert(ret)),
             Entry::Occupied(_) => panic!("cyclic dependencies in `build_map`"),
         }
@@ -767,13 +773,13 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> Ca
 
     fn parse_previous_explicit_deps<'a, 'cfg>(
         cx: &mut Context<'a, 'cfg>,
-        unit: &Unit<'a>,
+        unit: &Rc<Unit>,
     ) -> CargoResult<()> {
         let script_run_dir = cx.files().build_script_run_dir(unit);
         let output_file = script_run_dir.join("output");
         let (prev_output, _) = prev_build_output(cx, unit);
         let deps = BuildDeps::new(&output_file, prev_output.as_ref());
-        cx.build_explicit_deps.insert(*unit, deps);
+        cx.build_explicit_deps.insert(unit.clone(), deps);
         Ok(())
     }
 }
@@ -785,7 +791,7 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Rc<Unit>]) -> Ca
 /// processing.
 fn prev_build_output<'a, 'cfg>(
     cx: &mut Context<'a, 'cfg>,
-    unit: &Unit<'a>,
+    unit: &Unit,
 ) -> (Option<BuildOutput>, PathBuf) {
     let script_out_dir = cx.files().build_script_out_dir(unit);
     let script_run_dir = cx.files().build_script_run_dir(unit);
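The fingerprint changes below lean on the same ownership shape when memoizing `calculate`: the memo map owns `Rc<Unit>` keys, and inserting costs one reference-count bump via `Rc::clone` instead of relying on the old `Copy` impl. A self-contained sketch under illustrative names (`MockUnit` and `Fingerprint` here are stand-ins, not cargo's types):

use std::collections::HashMap;
use std::rc::Rc;
use std::sync::Arc;

#[derive(Hash, PartialEq, Eq)]
struct MockUnit {
    name: String,
}
struct Fingerprint(u64);

// Memoized computation keyed by `Rc<MockUnit>`, mirroring the shape of
// `calculate` and `cx.fingerprints` (the real work is elided).
fn calculate(
    memo: &mut HashMap<Rc<MockUnit>, Arc<Fingerprint>>,
    unit: &Rc<MockUnit>,
) -> Arc<Fingerprint> {
    if let Some(f) = memo.get(unit) {
        return Arc::clone(f); // cache hit: no recomputation
    }
    let f = Arc::new(Fingerprint(unit.name.len() as u64)); // stand-in work
    memo.insert(Rc::clone(unit), Arc::clone(&f)); // refcount bump, not a deep clone
    f
}

fn main() {
    let mut memo = HashMap::new();
    let unit = Rc::new(MockUnit { name: "foo".into() });
    let first = calculate(&mut memo, &unit);
    let second = calculate(&mut memo, &unit);
    assert!(Arc::ptr_eq(&first, &second)); // second call was memoized
}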
diff --git a/src/cargo/core/compiler/fingerprint.rs b/src/cargo/core/compiler/fingerprint.rs
index 4c1674b1493..1bca770ad18 100644
--- a/src/cargo/core/compiler/fingerprint.rs
+++ b/src/cargo/core/compiler/fingerprint.rs
@@ -192,6 +192,7 @@ use std::collections::hash_map::{Entry, HashMap};
 use std::env;
 use std::hash::{self, Hasher};
 use std::path::{Path, PathBuf};
+use std::rc::Rc;
 use std::sync::{Arc, Mutex};
 use std::time::SystemTime;
 
@@ -232,7 +233,7 @@ use super::{BuildContext, Context, FileFlavor, Unit};
 /// exclusively talking about top-level units.
 pub fn prepare_target<'a, 'cfg>(
     cx: &mut Context<'a, 'cfg>,
-    unit: &Unit<'a>,
+    unit: &Rc<Unit>,
     force: bool,
 ) -> CargoResult<Job> {
     let _p = profile::start(format!(
@@ -957,8 +958,8 @@ impl<'de> de::Deserialize<'de> for MtimeSlot {
 impl DepFingerprint {
     fn new<'a, 'cfg>(
         cx: &mut Context<'a, 'cfg>,
-        parent: &Unit<'a>,
-        dep: &UnitDep<'a>,
+        parent: &Unit,
+        dep: &UnitDep,
     ) -> CargoResult<DepFingerprint> {
         let fingerprint = calculate(cx, &dep.unit)?;
         // We need to be careful about what we hash here. We have a goal of
@@ -1027,7 +1028,7 @@ impl StaleFile {
 /// dependencies.
 fn calculate<'a, 'cfg>(
     cx: &mut Context<'a, 'cfg>,
-    unit: &Unit<'a>,
+    unit: &Rc<Unit>,
 ) -> CargoResult<Arc<Fingerprint>> {
     // This function is slammed quite a lot, so the result is memoized.
     if let Some(s) = cx.fingerprints.get(unit) {
@@ -1047,16 +1048,14 @@ fn calculate<'a, 'cfg>(
     fingerprint.check_filesystem(&mut cx.mtime_cache, unit.pkg.root(), &target_root)?;
 
     let fingerprint = Arc::new(fingerprint);
-    cx.fingerprints.insert(*unit, Arc::clone(&fingerprint));
+    cx.fingerprints
+        .insert(Rc::clone(unit), Arc::clone(&fingerprint));
     Ok(fingerprint)
 }
 
 /// Calculate a fingerprint for a "normal" unit, or anything that's not a build
 /// script. This is an internal helper of `calculate`, don't call directly.
-fn calculate_normal<'a, 'cfg>(
-    cx: &mut Context<'a, 'cfg>,
-    unit: &Unit<'a>,
-) -> CargoResult<Fingerprint> {
+fn calculate_normal<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit) -> CargoResult<Fingerprint> {
     // Recursively calculate the fingerprint for all of our dependencies.
     //
     // Skip fingerprints of binaries because they don't actually induce a
@@ -1083,7 +1082,7 @@ fn calculate_normal<'a, 'cfg>(
         let dep_info = dep_info.strip_prefix(&target_root).unwrap().to_path_buf();
         vec![LocalFingerprint::CheckDepInfo { dep_info }]
     } else {
-        let fingerprint = pkg_fingerprint(cx.bcx, unit.pkg)?;
+        let fingerprint = pkg_fingerprint(cx.bcx, &unit.pkg)?;
         vec![LocalFingerprint::Precalculated(fingerprint)]
     };
 
@@ -1130,7 +1129,7 @@ fn calculate_normal<'a, 'cfg>(
 
 /// Whether or not the fingerprint should track the dependencies from the
 /// dep-info file for this unit.
-fn use_dep_info(unit: &Unit<'_>) -> bool {
+fn use_dep_info(unit: &Unit) -> bool {
     !unit.mode.is_doc()
 }
 
@@ -1138,7 +1137,7 @@ fn use_dep_info(unit: &Unit<'_>) -> bool {
 /// internal helper of `calculate`, don't call directly.
 fn calculate_run_custom_build<'a, 'cfg>(
     cx: &mut Context<'a, 'cfg>,
-    unit: &Unit<'a>,
+    unit: &Unit,
 ) -> CargoResult<Fingerprint> {
     assert!(unit.mode.is_run_custom_build());
     // Using the `BuildDeps` information we'll have previously parsed and
@@ -1149,7 +1148,7 @@ fn calculate_run_custom_build<'a, 'cfg>(
     // the whole crate.
     let (gen_local, overridden) = build_script_local_fingerprints(cx, unit);
     let deps = &cx.build_explicit_deps[unit];
-    let local = (gen_local)(deps, Some(&|| pkg_fingerprint(cx.bcx, unit.pkg)))?.unwrap();
+    let local = (gen_local)(deps, Some(&|| pkg_fingerprint(cx.bcx, &unit.pkg)))?.unwrap();
     let output = deps.build_script_output.clone();
 
     // Include any dependencies of our execution, which is typically just the
@@ -1216,7 +1215,7 @@ fn calculate_run_custom_build<'a, 'cfg>(
 /// FIXME(#6779) - see all the words above
 fn build_script_local_fingerprints<'a, 'cfg>(
     cx: &mut Context<'a, 'cfg>,
-    unit: &Unit<'a>,
+    unit: &Unit,
 ) -> (
     Box<
         dyn FnOnce(
@@ -1291,7 +1290,7 @@ fn build_script_local_fingerprints<'a, 'cfg>(
 /// Returns None if it is not overridden.
 fn build_script_override_fingerprint<'a, 'cfg>(
     cx: &mut Context<'a, 'cfg>,
-    unit: &Unit<'a>,
+    unit: &Unit,
 ) -> Option<Fingerprint> {
     // Build script output is only populated at this stage when it is
     // overridden.
@@ -1364,7 +1363,7 @@ fn write_fingerprint(loc: &Path, fingerprint: &Fingerprint) -> CargoResult<()> {
 }
 
 /// Prepare for work when a package starts to build
-pub fn prepare_init<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<()> {
+pub fn prepare_init<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit) -> CargoResult<()> {
     let new1 = cx.files().fingerprint_dir(unit);
 
     // Doc tests have no output, thus no fingerprint.
@@ -1377,7 +1376,7 @@ pub fn prepare_init<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> Ca
 
 /// Returns the location that the dep-info file will show up at for the `unit`
 /// specified.
-pub fn dep_info_loc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> PathBuf {
+pub fn dep_info_loc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit) -> PathBuf {
     cx.files()
         .fingerprint_dir(unit)
         .join(&format!("dep-{}", filename(cx, unit)))
@@ -1418,7 +1417,7 @@ fn compare_old_fingerprint(
     result
 }
 
-fn log_compare(unit: &Unit<'_>, compare: &CargoResult<()>) {
+fn log_compare(unit: &Unit, compare: &CargoResult<()>) {
     let ce = match compare {
         Ok(..) => return,
         Err(e) => e,
@@ -1533,7 +1532,7 @@ where
     None
 }
 
-fn filename<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> String {
+fn filename<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit) -> String {
     // file_stem includes metadata hash. Thus we have a different
     // fingerprint for every metadata hash version. This works because
     // even if the package is fresh, we'll still link the fresh target
diff --git a/src/cargo/core/compiler/job_queue.rs b/src/cargo/core/compiler/job_queue.rs
index 4b5ab5c9370..c4f80bc1b13 100644
--- a/src/cargo/core/compiler/job_queue.rs
+++ b/src/cargo/core/compiler/job_queue.rs
@@ -54,6 +54,7 @@ use std::collections::{BTreeMap, HashMap, HashSet};
 use std::io;
 use std::marker;
 use std::mem;
+use std::rc::Rc;
 use std::sync::Arc;
 use std::time::Duration;
 
@@ -81,10 +82,10 @@ use crate::util::{Progress, ProgressStyle};
 /// queueing of compilation steps for each package. Packages enqueue units of
 /// work and then later on the entire graph is converted to DrainState and
 /// executed.
-pub struct JobQueue<'a, 'cfg> {
-    queue: DependencyQueue<Unit<'a>, Artifact, Job>,
+pub struct JobQueue<'cfg> {
+    queue: DependencyQueue<Rc<Unit>, Artifact, Job>,
     counts: HashMap<PackageId, usize>,
-    timings: Timings<'a, 'cfg>,
+    timings: Timings<'cfg>,
 }
 
 /// This structure is backed by the `DependencyQueue` type and manages the
@@ -115,19 +116,19 @@ pub struct JobQueue<'a, 'cfg> {
 /// error, the drop will deadlock. This should be fixed at some point in the
 /// future. The jobserver thread has a similar problem, though it will time
 /// out after 1 second.
-struct DrainState<'a, 'cfg> {
+struct DrainState<'cfg> {
     // This is the length of the DependencyQueue when starting out
     total_units: usize,
 
-    queue: DependencyQueue<Unit<'a>, Artifact, Job>,
+    queue: DependencyQueue<Rc<Unit>, Artifact, Job>,
     messages: Arc<Queue<Message>>,
-    active: HashMap<JobId, Unit<'a>>,
+    active: HashMap<JobId, Rc<Unit>>,
     compiled: HashSet<PackageId>,
     documented: HashSet<PackageId>,
     counts: HashMap<PackageId, usize>,
     progress: Progress<'cfg>,
     next_id: u32,
-    timings: Timings<'a, 'cfg>,
+    timings: Timings<'cfg>,
 
     /// Tokens that are currently owned by this Cargo, and may be "associated"
     /// with a rustc process. They may also be unused, though if so will be
@@ -148,7 +149,7 @@ struct DrainState<'a, 'cfg> {
     /// The list of jobs that we have not yet started executing, but have
     /// retrieved from the `queue`. We eagerly pull jobs off the main queue to
     /// allow us to request jobserver tokens pretty early.
-    pending_queue: Vec<(Unit<'a>, Job)>,
+    pending_queue: Vec<(Rc<Unit>, Job)>,
     print: DiagnosticPrinter<'cfg>,
 
     // How many jobs we've finished
@@ -269,8 +270,8 @@ impl<'a> JobState<'a> {
     }
 }
 
-impl<'a, 'cfg> JobQueue<'a, 'cfg> {
-    pub fn new(bcx: &BuildContext<'a, 'cfg>, root_units: &[Unit<'a>]) -> JobQueue<'a, 'cfg> {
+impl<'cfg> JobQueue<'cfg> {
+    pub fn new(bcx: &BuildContext<'_, 'cfg>, root_units: &[Rc<Unit>]) -> JobQueue<'cfg> {
         JobQueue {
             queue: DependencyQueue::new(),
             counts: HashMap::new(),
@@ -280,8 +281,8 @@ impl<'cfg> JobQueue<'cfg> {
 
     pub fn enqueue(
         &mut self,
-        cx: &Context<'a, 'cfg>,
-        unit: &Unit<'a>,
+        cx: &Context<'_, 'cfg>,
+        unit: &Rc<Unit>,
         job: Job,
     ) -> CargoResult<()> {
         let dependencies = cx.unit_deps(unit);
@@ -302,7 +303,7 @@ impl<'cfg> JobQueue<'cfg> {
                 } else {
                     Artifact::All
                 };
-                (dep.unit, artifact)
+                (Rc::clone(&dep.unit), artifact)
             })
             .collect::<HashMap<_, _>>();
 
@@ -329,23 +330,23 @@ impl<'cfg> JobQueue<'cfg> {
         // transitively contains the `Metadata` edge.
         if unit.requires_upstream_objects() {
             for dep in dependencies {
-                depend_on_deps_of_deps(cx, &mut queue_deps, dep.unit);
+                depend_on_deps_of_deps(cx, &mut queue_deps, &dep.unit);
             }
 
             fn depend_on_deps_of_deps<'a>(
-                cx: &Context<'a, '_>,
-                deps: &mut HashMap<Unit<'a>, Artifact>,
-                unit: Unit<'a>,
+                cx: &'a Context<'a, '_>,
+                deps: &mut HashMap<Rc<Unit>, Artifact>,
+                unit: &Unit,
             ) {
                 for dep in cx.unit_deps(&unit) {
-                    if deps.insert(dep.unit, Artifact::All).is_none() {
-                        depend_on_deps_of_deps(cx, deps, dep.unit);
+                    if deps.insert(Rc::clone(&dep.unit), Artifact::All).is_none() {
+                        depend_on_deps_of_deps(cx, deps, &dep.unit);
                     }
                 }
             }
         }
 
-        self.queue.queue(*unit, job, queue_deps);
+        self.queue.queue(Rc::clone(unit), job, queue_deps);
         *self.counts.entry(unit.pkg.package_id()).or_insert(0) += 1;
         Ok(())
     }
@@ -355,7 +356,7 @@ impl<'cfg> JobQueue<'cfg> {
     /// This function will spawn off `config.jobs()` workers to build all of the
     /// necessary dependencies, in order. Freshness is propagated as far as
     /// possible along each dependency chain.
-    pub fn execute(mut self, cx: &mut Context<'a, '_>, plan: &mut BuildPlan) -> CargoResult<()> {
+    pub fn execute(mut self, cx: &mut Context<'_, '_>, plan: &mut BuildPlan) -> CargoResult<()> {
         let _p = profile::start("executing the job graph");
         self.queue.queue_finished();
 
@@ -412,7 +413,7 @@ impl<'cfg> JobQueue<'cfg> {
     }
 }
 
-impl<'a, 'cfg> DrainState<'a, 'cfg> {
+impl<'a, 'cfg> DrainState<'cfg> {
     fn spawn_work_if_possible(
         &mut self,
         cx: &mut Context<'a, '_>,
@@ -500,7 +501,7 @@ impl<'a, 'cfg> DrainState<'cfg> {
                     .config
                     .shell()
                     .verbose(|c| c.status("Running", &cmd))?;
-                self.timings.unit_start(id, self.active[&id]);
+                self.timings.unit_start(id, Rc::clone(&self.active[&id]));
             }
             Message::BuildPlanMsg(module_name, cmd, filenames) => {
                 plan.update(&module_name, &cmd, &filenames)?;
@@ -542,7 +543,7 @@ impl<'a, 'cfg> DrainState<'cfg> {
                         // in there as we'll get another `Finish` later on.
                         Artifact::Metadata => {
                             info!("end (meta): {:?}", id);
-                            self.active[&id]
+                            Rc::clone(&self.active[&id])
                         }
                     };
                     info!("end ({:?}): {:?}", unit, result);
@@ -746,7 +747,7 @@ impl<'a, 'cfg> DrainState<'cfg> {
         ));
     }
 
-    fn name_for_progress(&self, unit: &Unit<'_>) -> String {
+    fn name_for_progress(&self, unit: &Unit) -> String {
         let pkg_name = unit.pkg.name();
         match unit.mode {
             CompileMode::Doc { .. } => format!("{}(doc)", pkg_name),
@@ -768,7 +769,7 @@ impl<'a, 'cfg> DrainState<'cfg> {
     /// Executes a job, pushing the spawned thread's handled onto `threads`.
     fn run(
         &mut self,
-        unit: &Unit<'a>,
+        unit: &Rc<Unit>,
         job: Job,
         cx: &Context<'a, '_>,
         scope: &Scope<'_>,
@@ -778,7 +779,7 @@ impl<'a, 'cfg> DrainState<'cfg> {
 
         info!("start {}: {:?}", id, unit);
 
-        assert!(self.active.insert(id, *unit).is_none());
+        assert!(self.active.insert(id, Rc::clone(unit)).is_none());
         *self.counts.get_mut(&unit.pkg.package_id()).unwrap() -= 1;
 
         let messages = self.messages.clone();
@@ -852,11 +853,11 @@ impl<'a, 'cfg> DrainState<'cfg> {
     fn emit_warnings(
         &mut self,
         msg: Option<&str>,
-        unit: &Unit<'a>,
+        unit: &Unit,
         cx: &mut Context<'a, '_>,
     ) -> CargoResult<()> {
         let outputs = cx.build_script_outputs.lock().unwrap();
-        let metadata = match cx.find_build_script_metadata(*unit) {
+        let metadata = match cx.find_build_script_metadata(unit) {
             Some(metadata) => metadata,
             None => return Ok(()),
         };
@@ -884,14 +885,19 @@ impl<'a, 'cfg> DrainState<'cfg> {
     fn finish(
         &mut self,
         id: JobId,
-        unit: &Unit<'a>,
+        unit: &Rc<Unit>,
         artifact: Artifact,
         cx: &mut Context<'a, '_>,
     ) -> CargoResult<()> {
         if unit.mode.is_run_custom_build() && cx.bcx.show_warnings(unit.pkg.package_id()) {
             self.emit_warnings(None, unit, cx)?;
         }
-        let unlocked = self.queue.finish(unit, &artifact);
+        let unlocked = self
+            .queue
+            .finish(unit, &artifact)
+            .into_iter()
+            .map(|unit| Rc::clone(unit))
+            .collect();
         match artifact {
             Artifact::All => self.timings.unit_finished(id, unlocked),
             Artifact::Metadata => self.timings.unit_rmeta_finished(id, unlocked),
@@ -911,7 +917,7 @@ impl<'a, 'cfg> DrainState<'cfg> {
     fn note_working_on(
         &mut self,
         config: &Config,
-        unit: &Unit<'a>,
+        unit: &Unit,
         fresh: Freshness,
     ) -> CargoResult<()> {
         if (self.compiled.contains(&unit.pkg.package_id()) && !unit.mode.is_doc())
@@ -926,15 +932,15 @@ impl<'a, 'cfg> DrainState<'cfg> {
             Dirty => {
                 if unit.mode.is_doc() {
                     self.documented.insert(unit.pkg.package_id());
-                    config.shell().status("Documenting", unit.pkg)?;
+                    config.shell().status("Documenting", &*unit.pkg)?;
                 } else if unit.mode.is_doc_test() {
                     // Skip doc test.
                 } else {
                     self.compiled.insert(unit.pkg.package_id());
                     if unit.mode.is_check() {
-                        config.shell().status("Checking", unit.pkg)?;
+                        config.shell().status("Checking", &unit.pkg)?;
                     } else {
-                        config.shell().status("Compiling", unit.pkg)?;
+                        config.shell().status("Compiling", &unit.pkg)?;
                     }
                 }
             }
@@ -944,7 +950,7 @@ impl<'a, 'cfg> DrainState<'cfg> {
                     && !(unit.mode.is_doc_test() && self.compiled.contains(&unit.pkg.package_id()))
                 {
                     self.compiled.insert(unit.pkg.package_id());
-                    config.shell().verbose(|c| c.status("Fresh", unit.pkg))?;
+                    config.shell().verbose(|c| c.status("Fresh", &unit.pkg))?;
                 }
             }
         }
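A small sketch of the ownership pattern `DrainState` uses now that `Unit` is not `Copy`: a unit stored in `active` is shared with other bookkeeping (e.g. the timings tracker) through `Rc::clone`, which copies a pointer and bumps a refcount rather than deep-cloning. Illustrative types only:

use std::collections::HashMap;
use std::rc::Rc;

#[derive(Debug)]
struct MockUnit {
    name: String,
}

fn main() {
    let mut active: HashMap<u32, Rc<MockUnit>> = HashMap::new();
    active.insert(7, Rc::new(MockUnit { name: "foo".into() }));

    // Equivalent of `Rc::clone(&self.active[&id])`: the map keeps its entry
    // while the caller gains shared ownership of the same allocation.
    let for_timings = Rc::clone(&active[&7]);
    assert_eq!(Rc::strong_count(&for_timings), 2);
    assert!(Rc::ptr_eq(&for_timings, &active[&7]));
    assert_eq!(for_timings.name, "foo");
}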
diff --git a/src/cargo/core/compiler/links.rs b/src/cargo/core/compiler/links.rs
index 2fc1d4d28ac..8faa831ef4d 100644
--- a/src/cargo/core/compiler/links.rs
+++ b/src/cargo/core/compiler/links.rs
@@ -5,7 +5,7 @@ use std::collections::{HashMap, HashSet};
 use std::fmt::Write;
 
 /// Validate `links` field does not conflict between packages.
-pub fn validate_links(resolve: &Resolve, unit_graph: &UnitGraph<'_>) -> CargoResult<()> {
+pub fn validate_links(resolve: &Resolve, unit_graph: &UnitGraph) -> CargoResult<()> {
     // NOTE: This is the *old* links validator. Links are usually validated in
     // the resolver. However, the `links` field was added to the index in
     // early 2018 (see https://github.com/rust-lang/cargo/pull/4978). However,
diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs
index c6d03ab3c9f..fe8f91df77b 100644
--- a/src/cargo/core/compiler/mod.rs
+++ b/src/cargo/core/compiler/mod.rs
@@ -22,6 +22,7 @@ use std::ffi::{OsStr, OsString};
 use std::fs::{self, File};
 use std::io::{BufRead, Write};
 use std::path::PathBuf;
+use std::rc::Rc;
 use std::sync::Arc;
 
 use anyhow::Error;
@@ -40,7 +41,7 @@ use self::job::{Job, Work};
 use self::job_queue::{JobQueue, JobState};
 use self::output_depinfo::output_depinfo;
 use self::unit_graph::UnitDep;
-pub use crate::core::compiler::unit::{Unit, UnitInterner};
+pub use crate::core::compiler::unit::Unit;
 use crate::core::manifest::TargetSourcePath;
 use crate::core::profiles::{Lto, PanicStrategy, Profile};
 use crate::core::{Edition, Feature, InternedString, PackageId, Target};
@@ -58,7 +59,7 @@ pub trait Executor: Send + Sync + 'static {
     /// Called after a rustc process invocation is prepared up-front for a given
     /// unit of work (may still be modified for runtime-known dependencies, when
     /// the work is actually executed).
-    fn init<'a, 'cfg>(&self, _cx: &Context<'a, 'cfg>, _unit: &Unit<'a>) {}
+    fn init<'a, 'cfg>(&self, _cx: &Context<'a, 'cfg>, _unit: &Unit) {}
 
     /// In case of an `Err`, Cargo will not continue with the build process for
     /// this package.
@@ -74,7 +75,7 @@ pub trait Executor: Send + Sync + 'static {
 
     /// Queried when queuing each unit of work. If it returns true, then the
     /// unit will always be rebuilt, independent of whether it needs to be.
-    fn force_rebuild(&self, _unit: &Unit<'_>) -> bool {
+    fn force_rebuild(&self, _unit: &Unit) -> bool {
         false
     }
 }
@@ -101,15 +102,15 @@ impl Executor for DefaultExecutor {
 
 fn compile<'a, 'cfg: 'a>(
     cx: &mut Context<'a, 'cfg>,
-    jobs: &mut JobQueue<'a, 'cfg>,
+    jobs: &mut JobQueue<'cfg>,
     plan: &mut BuildPlan,
-    unit: &Unit<'a>,
+    unit: &Rc<Unit>,
     exec: &Arc<dyn Executor>,
     force_rebuild: bool,
 ) -> CargoResult<()> {
     let bcx = cx.bcx;
     let build_plan = bcx.build_config.build_plan;
-    if !cx.compiled.insert(*unit) {
+    if !cx.compiled.insert(Rc::clone(unit)) {
         return Ok(());
     }
 
@@ -139,7 +140,7 @@ fn compile<'a, 'cfg: 'a>(
             let work = if cx.bcx.show_warnings(unit.pkg.package_id()) {
                 replay_output_cache(
                     unit.pkg.package_id(),
-                    unit.target,
+                    &unit.target,
                     cx.files().message_cache_path(unit),
                     cx.bcx.build_config.message_format,
                     cx.bcx.config.shell().supports_color(),
@@ -170,7 +171,7 @@ fn compile<'a, 'cfg: 'a>(
 
 fn rustc<'a, 'cfg>(
     cx: &mut Context<'a, 'cfg>,
-    unit: &Unit<'a>,
+    unit: &Rc<Unit>,
     exec: &Arc<dyn Executor>,
 ) -> CargoResult<Work> {
     let mut rustc = prepare_rustc(cx, &unit.target.rustc_crate_types(), unit)?;
@@ -212,7 +213,7 @@ fn rustc<'a, 'cfg>(
     }
     let mut output_options = OutputOptions::new(cx, unit);
     let package_id = unit.pkg.package_id();
-    let target = unit.target.clone();
+    let target = (*unit.target).clone();
     let mode = unit.mode;
 
     exec.init(cx, unit);
@@ -226,7 +227,7 @@ fn rustc<'a, 'cfg>(
         .unwrap_or_else(|| cx.bcx.config.cwd())
         .to_path_buf();
     let fingerprint_dir = cx.files().fingerprint_dir(unit);
-    let script_metadata = cx.find_build_script_metadata(*unit);
+    let script_metadata = cx.find_build_script_metadata(unit);
 
     return Ok(Work::new(move |state| {
         // Only at runtime have we discovered what the extra -L and -l
@@ -396,7 +397,7 @@ fn rustc<'a, 'cfg>(
 /// final target. This must happen during both "Fresh" and "Compile".
 fn link_targets<'a, 'cfg>(
     cx: &mut Context<'a, 'cfg>,
-    unit: &Unit<'a>,
+    unit: &Unit,
     fresh: bool,
 ) -> CargoResult<Work> {
     let bcx = cx.bcx;
@@ -408,7 +409,7 @@ fn link_targets<'a, 'cfg>(
     let features = unit.features.iter().map(|s| s.to_string()).collect();
     let json_messages = bcx.build_config.emit_json();
     let executable = cx.get_executable(unit)?;
-    let mut target = unit.target.clone();
+    let mut target = (*unit.target).clone();
     if let TargetSourcePath::Metabuild = target.src_path() {
         // Give it something to serialize.
         let path = unit.pkg.manifest().metabuild_path(cx.bcx.ws.target_dir());
@@ -538,19 +539,19 @@ where
 fn prepare_rustc<'a, 'cfg>(
     cx: &mut Context<'a, 'cfg>,
     crate_types: &[&str],
-    unit: &Unit<'a>,
+    unit: &Rc<Unit>,
 ) -> CargoResult<ProcessBuilder> {
     let is_primary = cx.is_primary_package(unit);
-    let is_workspace = cx.bcx.ws.is_member(unit.pkg);
+    let is_workspace = cx.bcx.ws.is_member(&unit.pkg);
 
     let mut base = cx
         .compilation
-        .rustc_process(unit.pkg, is_primary, is_workspace)?;
+        .rustc_process(&unit.pkg, is_primary, is_workspace)?;
 
     if cx.bcx.config.cli_unstable().jobserver_per_rustc {
         let client = cx.new_jobserver()?;
         base.inherit_jobserver(&client);
         base.arg("-Zjobserver-token-requests");
-        assert!(cx.rustc_clients.insert(*unit, client).is_none());
+        assert!(cx.rustc_clients.insert(Rc::clone(unit), client).is_none());
     } else {
         base.inherit_jobserver(&cx.jobserver);
     }
@@ -559,9 +560,9 @@ fn prepare_rustc<'a, 'cfg>(
     Ok(base)
 }
 
-fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<Work> {
+fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit) -> CargoResult<Work> {
     let bcx = cx.bcx;
-    let mut rustdoc = cx.compilation.rustdoc_process(unit.pkg, unit.target)?;
+    let mut rustdoc = cx.compilation.rustdoc_process(&unit.pkg, &unit.target)?;
     rustdoc.inherit_jobserver(&cx.jobserver);
     rustdoc.arg("--crate-name").arg(&unit.target.crate_name());
     add_path_args(bcx, unit, &mut rustdoc);
@@ -599,10 +600,10 @@ fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult
     let name = unit.pkg.name().to_string();
     let build_script_outputs = Arc::clone(&cx.build_script_outputs);
     let package_id = unit.pkg.package_id();
-    let target = unit.target.clone();
+    let target = (*unit.target).clone();
     let mut output_options = OutputOptions::new(cx, unit);
     let pkg_id = unit.pkg.package_id();
-    let script_metadata = cx.find_build_script_metadata(*unit);
+    let script_metadata = cx.find_build_script_metadata(unit);
 
     Ok(Work::new(move |state| {
         if let Some(script_metadata) = script_metadata {
@@ -634,7 +635,7 @@ fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult
 
 fn add_crate_versions_if_requested<'a>(
     bcx: &BuildContext<'a, '_>,
-    unit: &Unit<'a>,
+    unit: &Unit,
     rustdoc: &mut ProcessBuilder,
 ) {
     if bcx.config.cli_unstable().crate_versions && !crate_version_flag_already_present(rustdoc) {
@@ -651,7 +652,7 @@ fn crate_version_flag_already_present(rustdoc: &ProcessBuilder) -> bool {
     })
 }
 
-fn append_crate_version_flag(unit: &Unit<'_>, rustdoc: &mut ProcessBuilder) {
+fn append_crate_version_flag(unit: &Unit, rustdoc: &mut ProcessBuilder) {
     rustdoc
         .arg("-Z")
         .arg("unstable-options")
@@ -673,7 +674,7 @@ fn append_crate_version_flag(unit: &Unit, rustdoc: &mut ProcessBuilder) {
 //
 // The first returned value here is the argument to pass to rustc, and the
 // second is the cwd that rustc should operate in.
-fn path_args(bcx: &BuildContext<'_, '_>, unit: &Unit<'_>) -> (PathBuf, PathBuf) {
+fn path_args(bcx: &BuildContext<'_, '_>, unit: &Unit) -> (PathBuf, PathBuf) {
     let ws_root = bcx.ws.root();
     let src = match unit.target.src_path() {
         TargetSourcePath::Path(path) => path.to_path_buf(),
@@ -688,13 +689,13 @@ fn path_args(bcx: &BuildContext<'_, '_>, unit: &Unit<'_>) -> (PathBuf, PathBuf)
     (src, unit.pkg.root().to_path_buf())
 }
 
-fn add_path_args(bcx: &BuildContext<'_, '_>, unit: &Unit<'_>, cmd: &mut ProcessBuilder) {
+fn add_path_args(bcx: &BuildContext<'_, '_>, unit: &Unit, cmd: &mut ProcessBuilder) {
     let (arg, cwd) = path_args(bcx, unit);
     cmd.arg(arg);
     cmd.cwd(cwd);
 }
 
-fn add_cap_lints(bcx: &BuildContext<'_, '_>, unit: &Unit<'_>, cmd: &mut ProcessBuilder) {
+fn add_cap_lints(bcx: &BuildContext<'_, '_>, unit: &Unit, cmd: &mut ProcessBuilder) {
     // If this is an upstream dep we don't want warnings from, turn off all
     // lints.
     if !bcx.show_warnings(unit.pkg.package_id()) {
@@ -740,7 +741,7 @@ fn add_error_format_and_color(
 fn build_base_args<'a, 'cfg>(
     cx: &mut Context<'a, 'cfg>,
     cmd: &mut ProcessBuilder,
-    unit: &Unit<'a>,
+    unit: &Unit,
     crate_types: &[&str],
 ) -> CargoResult<()> {
     assert!(!unit.mode.is_run_custom_build());
@@ -788,7 +789,7 @@ fn build_base_args<'a, 'cfg>(
     }
 
     let prefer_dynamic = (unit.target.for_host() && !unit.target.is_custom_build())
-        || (crate_types.contains(&"dylib") && bcx.ws.members().any(|p| p != unit.pkg));
+        || (crate_types.contains(&"dylib") && bcx.ws.members().any(|p| p != &*unit.pkg));
     if prefer_dynamic {
         cmd.arg("-C").arg("prefer-dynamic");
     }
@@ -943,7 +944,7 @@ fn build_base_args<'a, 'cfg>(
 fn build_deps_args<'a, 'cfg>(
     cmd: &mut ProcessBuilder,
     cx: &mut Context<'a, 'cfg>,
-    unit: &Unit<'a>,
+    unit: &Unit,
 ) -> CargoResult<()> {
     let bcx = cx.bcx;
     cmd.arg("-L").arg(&{
@@ -1011,68 +1012,66 @@ fn build_deps_args<'a, 'cfg>(
 
 /// Generates a list of `--extern` arguments.
 pub fn extern_args<'a>(
     cx: &Context<'a, '_>,
-    unit: &Unit<'a>,
+    unit: &Unit,
     unstable_opts: &mut bool,
 ) -> CargoResult<Vec<OsString>> {
     let mut result = Vec::new();
     let deps = cx.unit_deps(unit);
 
     // Closure to add one dependency to `result`.
-    let mut link_to = |dep: &UnitDep<'a>,
-                       extern_crate_name: InternedString,
-                       noprelude: bool|
-     -> CargoResult<()> {
-        let mut value = OsString::new();
-        let mut opts = Vec::new();
-        if unit
-            .pkg
-            .manifest()
-            .features()
-            .require(Feature::public_dependency())
-            .is_ok()
-            && !dep.public
-        {
-            opts.push("priv");
-            *unstable_opts = true;
-        }
-        if noprelude {
-            opts.push("noprelude");
-            *unstable_opts = true;
-        }
-        if !opts.is_empty() {
-            value.push(opts.join(","));
-            value.push(":");
-        }
-        value.push(extern_crate_name.as_str());
-        value.push("=");
-
-        let mut pass = |file| {
-            let mut value = value.clone();
-            value.push(file);
-            result.push(OsString::from("--extern"));
-            result.push(value);
-        };
-
-        let outputs = cx.outputs(&dep.unit)?;
-        let mut outputs = outputs.iter().filter_map(|output| match output.flavor {
-            FileFlavor::Linkable { rmeta } => Some((output, rmeta)),
-            _ => None,
-        });
+    let mut link_to =
+        |dep: &UnitDep, extern_crate_name: InternedString, noprelude: bool| -> CargoResult<()> {
+            let mut value = OsString::new();
+            let mut opts = Vec::new();
+            if unit
+                .pkg
+                .manifest()
+                .features()
+                .require(Feature::public_dependency())
+                .is_ok()
+                && !dep.public
+            {
+                opts.push("priv");
+                *unstable_opts = true;
+            }
+            if noprelude {
+                opts.push("noprelude");
+                *unstable_opts = true;
+            }
+            if !opts.is_empty() {
+                value.push(opts.join(","));
+                value.push(":");
+            }
+            value.push(extern_crate_name.as_str());
+            value.push("=");
+
+            let mut pass = |file| {
+                let mut value = value.clone();
+                value.push(file);
+                result.push(OsString::from("--extern"));
+                result.push(value);
+            };
 
-        if cx.only_requires_rmeta(unit, &dep.unit) {
-            let (output, _rmeta) = outputs
-                .find(|(_output, rmeta)| *rmeta)
-                .expect("failed to find rlib dep for pipelined dep");
-            pass(&output.path);
-        } else {
-            for (output, rmeta) in outputs {
-                if !rmeta {
-                    pass(&output.path);
+            let outputs = cx.outputs(&dep.unit)?;
+            let mut outputs = outputs.iter().filter_map(|output| match output.flavor {
+                FileFlavor::Linkable { rmeta } => Some((output, rmeta)),
+                _ => None,
+            });
+
+            if cx.only_requires_rmeta(unit, &dep.unit) {
+                let (output, _rmeta) = outputs
+                    .find(|(_output, rmeta)| *rmeta)
+                    .expect("failed to find rlib dep for pipelined dep");
+                pass(&output.path);
+            } else {
+                for (output, rmeta) in outputs {
+                    if !rmeta {
+                        pass(&output.path);
+                    }
                 }
             }
-        }
-        Ok(())
-    };
+            Ok(())
+        };
 
     for dep in deps {
         if dep.unit.target.linkable() && !dep.unit.mode.is_doc() {
@@ -1111,7 +1110,7 @@ struct OutputOptions {
 }
 
 impl OutputOptions {
-    fn new<'a>(cx: &Context<'a, '_>, unit: &Unit<'a>) -> OutputOptions {
+    fn new<'a>(cx: &Context<'a, '_>, unit: &Unit) -> OutputOptions {
         let look_for_metadata_directive = cx.rmeta_required(unit);
         let color = cx.bcx.config.shell().supports_color();
         let path = cx.files().message_cache_path(unit);
diff --git a/src/cargo/core/compiler/output_depinfo.rs b/src/cargo/core/compiler/output_depinfo.rs
index 44375e7a992..e8d130ac97c 100644
--- a/src/cargo/core/compiler/output_depinfo.rs
+++ b/src/cargo/core/compiler/output_depinfo.rs
@@ -26,6 +26,7 @@ use std::collections::{BTreeSet, HashSet};
 use std::fs::File;
 use std::io::{BufWriter, Write};
 use std::path::{Path, PathBuf};
+use std::rc::Rc;
 
 use log::debug;
 
@@ -48,13 +49,13 @@ fn render_filename<P: AsRef<Path>>(path: P, basedir: Option<&str>) -> CargoResul
         .map(|f| f.replace(" ", "\\ "))
 }
 
-fn add_deps_for_unit<'a, 'b>(
+fn add_deps_for_unit<'a, 'cfg>(
     deps: &mut BTreeSet<String>,
-    cx: &mut Context<'a, 'b>,
-    unit: &Unit<'a>,
-    visited: &mut HashSet<Unit<'a>>,
+    cx: &mut Context<'a, 'cfg>,
+    unit: &Rc<Unit>,
+    visited: &mut HashSet<Rc<Unit>>,
 ) -> CargoResult<()> {
-    if !visited.insert(*unit) {
+    if !visited.insert(Rc::clone(unit)) {
         return Ok(());
     }
 
@@ -80,7 +81,7 @@ fn add_deps_for_unit<'a, 'cfg>(
     }
 
     // Add rerun-if-changed dependencies
-    if let Some(metadata) = cx.find_build_script_metadata(*unit) {
+    if let Some(metadata) = cx.find_build_script_metadata(unit) {
         if let Some(output) = cx
             .build_script_outputs
             .lock()
@@ -107,7 +108,7 @@ fn add_deps_for_unit<'a, 'cfg>(
 /// Save a `.d` dep-info file for the given unit.
 ///
 /// This only saves files for uplifted artifacts.
-pub fn output_depinfo<'a, 'b>(cx: &mut Context<'a, 'b>, unit: &Unit<'a>) -> CargoResult<()> {
+pub fn output_depinfo<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Rc<Unit>) -> CargoResult<()> {
     let bcx = cx.bcx;
     let mut deps = BTreeSet::new();
     let mut visited = HashSet::new();
diff --git a/src/cargo/core/compiler/standard_lib.rs b/src/cargo/core/compiler/standard_lib.rs
index 7acfcff9a1b..c9883967bd4 100644
--- a/src/cargo/core/compiler/standard_lib.rs
+++ b/src/cargo/core/compiler/standard_lib.rs
@@ -10,6 +10,7 @@ use crate::util::errors::CargoResult;
 use std::collections::{HashMap, HashSet};
 use std::env;
 use std::path::PathBuf;
+use std::rc::Rc;
 
 /// Parse the `-Zbuild-std` flag.
 pub fn parse_unstable_flag(value: Option<&str>) -> Vec<String> {
@@ -126,7 +127,7 @@ pub fn generate_std_roots<'a>(
     std_resolve: &'a Resolve,
     std_features: &ResolvedFeatures,
     kind: CompileKind,
-) -> CargoResult<Vec<Unit<'a>>> {
+) -> CargoResult<Vec<Rc<Unit>>> {
     // Generate the root Units for the standard library.
     let std_ids = crates
         .iter()
@@ -156,9 +157,16 @@ pub fn generate_std_roots<'a>(
             );
             let features =
                 std_features.activated_features(pkg.package_id(), FeaturesFor::NormalOrDev);
-            Ok(bcx.units.intern(
-                pkg, lib, profile, kind, mode, features, /*is_std*/ true,
-            ))
+            let unit = Unit {
+                pkg: Rc::clone(pkg),
+                target: Rc::clone(lib),
+                profile,
+                kind,
+                mode,
+                features,
+                is_std: true,
+            };
+            Ok(Rc::new(unit))
         })
         .collect::<CargoResult<Vec<_>>>()
 }
diff --git a/src/cargo/core/compiler/timings.rs b/src/cargo/core/compiler/timings.rs
index 0983ad012ea..b7637c0bc6e 100644
--- a/src/cargo/core/compiler/timings.rs
+++ b/src/cargo/core/compiler/timings.rs
@@ -12,9 +12,10 @@ use crate::util::{paths, CargoResult, Config};
 use std::collections::HashMap;
 use std::fs::File;
 use std::io::{BufWriter, Write};
+use std::rc::Rc;
 use std::time::{Duration, Instant, SystemTime};
 
-pub struct Timings<'a, 'cfg> {
+pub struct Timings<'cfg> {
     config: &'cfg Config,
     /// Whether or not timings should be captured.
     enabled: bool,
@@ -39,10 +40,10 @@ pub struct Timings<'a, 'cfg> {
     /// Total number of dirty units.
     total_dirty: u32,
     /// Time tracking for each individual unit.
-    unit_times: Vec<UnitTime<'a>>,
+    unit_times: Vec<UnitTime>,
     /// Units that are in the process of being built.
     /// When they finished, they are moved to `unit_times`.
-    active: HashMap<JobId, UnitTime<'a>>,
+    active: HashMap<JobId, UnitTime>,
     /// Concurrency-tracking information. This is periodically updated while
     /// compilation progresses.
     concurrency: Vec<Concurrency>,
@@ -56,8 +57,8 @@ pub struct Timings<'a, 'cfg> {
 }
 
 /// Tracking information for an individual unit.
-struct UnitTime<'a> {
-    unit: Unit<'a>,
+struct UnitTime {
+    unit: Rc<Unit>,
     /// A string describing the cargo target.
     target: String,
     /// The time when this unit started as an offset in seconds from `Timings::start`.
@@ -68,9 +69,9 @@ struct UnitTime<'a> {
     /// from `start`.
     rmeta_time: Option<f64>,
     /// Reverse deps that are freed to run after this unit finished.
-    unlocked_units: Vec<Unit<'a>>,
+    unlocked_units: Vec<Rc<Unit>>,
     /// Same as `unlocked_units`, but unlocked by rmeta.
-    unlocked_rmeta_units: Vec<Unit<'a>>,
+    unlocked_rmeta_units: Vec<Rc<Unit>>,
 }
 
 /// Periodic concurrency tracking information.
@@ -91,8 +92,8 @@ struct Concurrency {
     rustc_parallelism: usize,
 }
 
-impl<'a, 'cfg> Timings<'a, 'cfg> {
-    pub fn new(bcx: &BuildContext<'a, 'cfg>, root_units: &[Unit<'_>]) -> Timings<'a, 'cfg> {
+impl<'a, 'cfg> Timings<'cfg> {
+    pub fn new(bcx: &BuildContext<'a, 'cfg>, root_units: &[Rc<Unit>]) -> Timings<'cfg> {
         let has_report = |what| {
             bcx.config
                 .cli_unstable()
@@ -145,7 +146,7 @@ impl<'a, 'cfg> Timings<'cfg> {
     }
 
     /// Mark that a unit has started running.
-    pub fn unit_start(&mut self, id: JobId, unit: Unit<'a>) {
+    pub fn unit_start(&mut self, id: JobId, unit: Rc<Unit>) {
         if !self.enabled {
             return;
         }
@@ -179,7 +180,7 @@ impl<'a, 'cfg> Timings<'cfg> {
     }
 
     /// Mark that the `.rmeta` file as generated.
-    pub fn unit_rmeta_finished(&mut self, id: JobId, unlocked: Vec<&Unit<'a>>) {
+    pub fn unit_rmeta_finished(&mut self, id: JobId, unlocked: Vec<Rc<Unit>>) {
         if !self.enabled {
             return;
         }
@@ -197,7 +198,7 @@ impl<'a, 'cfg> Timings<'cfg> {
     }
 
     /// Mark that a unit has finished running.
-    pub fn unit_finished(&mut self, id: JobId, unlocked: Vec<&Unit<'a>>) {
+    pub fn unit_finished(&mut self, id: JobId, unlocked: Vec<Rc<Unit>>) {
         if !self.enabled {
             return;
         }
@@ -225,7 +226,7 @@ impl<'a, 'cfg> Timings<'cfg> {
         if self.report_json {
             let msg = machine_message::TimingInfo {
                 package_id: unit_time.unit.pkg.package_id(),
-                target: unit_time.unit.target,
+                target: &unit_time.unit.target,
                 mode: unit_time.unit.mode,
                 duration: unit_time.duration,
                 rmeta_time: unit_time.rmeta_time,
@@ -456,11 +457,11 @@ impl<'a, 'cfg> Timings<'cfg> {
     fn write_js_data(&self, f: &mut impl Write) -> CargoResult<()> {
         // Create a map to link indices of unlocked units.
-        let unit_map: HashMap<Unit<'a>, usize> = self
+        let unit_map: HashMap<&Unit, usize> = self
             .unit_times
             .iter()
             .enumerate()
-            .map(|(i, ut)| (ut.unit, i))
+            .map(|(i, ut)| (&*ut.unit, i))
             .collect();
         #[derive(serde::Serialize)]
         struct UnitData {
@@ -494,12 +495,12 @@ impl<'a, 'cfg> Timings<'cfg> {
             let unlocked_units: Vec<usize> = ut
                 .unlocked_units
                 .iter()
-                .filter_map(|unit| unit_map.get(unit).copied())
+                .filter_map(|unit| unit_map.get(&**unit).copied())
                 .collect();
             let unlocked_rmeta_units: Vec<usize> = ut
                 .unlocked_rmeta_units
                 .iter()
-                .filter_map(|unit| unit_map.get(unit).copied())
+                .filter_map(|unit| unit_map.get(&**unit).copied())
                 .collect();
             UnitData {
                 i,
@@ -551,7 +552,7 @@ impl<'a, 'cfg> Timings<'cfg> {
 "#
         )?;
 
-        let mut units: Vec<&UnitTime<'_>> = self.unit_times.iter().collect();
+        let mut units: Vec<&UnitTime> = self.unit_times.iter().collect();
         units.sort_unstable_by(|a, b| b.duration.partial_cmp(&a.duration).unwrap());
         for (i, unit) in units.iter().enumerate() {
             let codegen = match unit.codegen_time() {
@@ -583,7 +584,7 @@ impl<'a, 'cfg> Timings<'cfg> {
     }
 }
 
-impl<'a> UnitTime<'a> {
+impl UnitTime {
     /// Returns the codegen time as (rmeta_time, codegen_time, percent of total)
     fn codegen_time(&self) -> Option<(f64, f64, f64)> {
         self.rmeta_time.map(|rmeta_time| {
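Context for the unit.rs rewrite below: the interner gave `Unit` pointer-identity hashing and equality, while the new derived impls are structural, so equivalent units collapse in hash-based collections without having to come from a shared interner. A sketch of the observable behavior (illustrative `MockUnit`, not cargo's type):

use std::collections::HashSet;
use std::rc::Rc;

#[derive(Debug, Hash, PartialEq, Eq)]
struct MockUnit {
    name: String,
}

fn main() {
    // Two separately allocated but structurally equal units...
    let a = Rc::new(MockUnit { name: "foo".into() });
    let b = Rc::new(MockUnit { name: "foo".into() });
    assert!(!Rc::ptr_eq(&a, &b)); // ...live at different addresses,
    assert_eq!(a, b); // ...yet compare equal through the derived impls,

    // ...so they deduplicate in hash-based collections without interning.
    let set: HashSet<Rc<MockUnit>> = [a, b].into_iter().collect();
    assert_eq!(set.len(), 1);
}

The trade-off is that hashing now walks the unit's fields instead of a single pointer; in exchange the `'a` lifetime and the `unsafe` interner disappear.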
diff --git a/src/cargo/core/compiler/unit.rs b/src/cargo/core/compiler/unit.rs
index dbd66edf8a9..f26aeabe604 100644
--- a/src/cargo/core/compiler/unit.rs
+++ b/src/cargo/core/compiler/unit.rs
@@ -1,11 +1,8 @@
 use crate::core::compiler::{CompileKind, CompileMode};
 use crate::core::{profiles::Profile, InternedString, Package, Target};
 use crate::util::hex::short_hash;
-use std::cell::RefCell;
-use std::collections::HashSet;
-use std::fmt;
-use std::hash::{Hash, Hasher};
-use std::ops::Deref;
+use std::hash::Hash;
+use std::rc::Rc;

 /// All information needed to define a unit.
 ///
@@ -21,21 +18,15 @@ use std::ops::Deref;
 /// example, it needs to know the target architecture (OS, chip arch etc.) and it needs to know
 /// whether you want a debug or release build. There is enough information in this struct to figure
 /// all that out.
-#[derive(Clone, Copy, PartialOrd, Ord)]
-pub struct Unit<'a> {
-    inner: &'a UnitInner<'a>,
-}
-
-/// Internal fields of `Unit` which `Unit` will dereference to.
-#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct UnitInner<'a> {
+#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
+pub struct Unit {
     /// Information about available targets, which files to include/exclude, etc. Basically stuff in
     /// `Cargo.toml`.
-    pub pkg: &'a Package,
+    pub pkg: Rc<Package>,
     /// Information about the specific target to build, out of the possible targets in `pkg`. Not
     /// to be confused with *target-triple* (or *target architecture* ...), the target arch for a
     /// build.
-    pub target: &'a Target,
+    pub target: Rc<Target>,
     /// The profile contains information about *how* the build should be run, including debug
     /// level, etc.
     pub profile: Profile,
@@ -55,7 +46,7 @@ pub struct UnitInner<'a> {
     pub is_std: bool,
 }

-impl UnitInner<'_> {
+impl Unit {
     /// Returns whether compilation of this unit requires all upstream artifacts
     /// to be available.
     ///
@@ -65,129 +56,8 @@ impl UnitInner<'_> {
     pub fn requires_upstream_objects(&self) -> bool {
         self.mode.is_any_test() || self.target.kind().requires_upstream_objects()
     }
-}

-impl<'a> Unit<'a> {
     pub fn buildkey(&self) -> String {
         format!("{}-{}", self.pkg.name(), short_hash(self))
     }
 }
-
-// Just hash the pointer for fast hashing
-impl<'a> Hash for Unit<'a> {
-    fn hash<H: Hasher>(&self, hasher: &mut H) {
-        (self.inner as *const UnitInner<'a>).hash(hasher)
-    }
-}
-
-// Just equate the pointer since these are interned
-impl<'a> PartialEq for Unit<'a> {
-    fn eq(&self, other: &Unit<'a>) -> bool {
-        self.inner as *const UnitInner<'a> == other.inner as *const UnitInner<'a>
-    }
-}
-
-impl<'a> Eq for Unit<'a> {}
-
-impl<'a> Deref for Unit<'a> {
-    type Target = UnitInner<'a>;
-
-    fn deref(&self) -> &UnitInner<'a> {
-        self.inner
-    }
-}
-
-impl<'a> fmt::Debug for Unit<'a> {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_struct("Unit")
-            .field("pkg", &self.pkg)
-            .field("target", &self.target)
-            .field("profile", &self.profile)
-            .field("kind", &self.kind)
-            .field("mode", &self.mode)
-            .field("features", &self.features)
-            .finish()
-    }
-}
-
-/// A small structure used to "intern" `Unit` values.
-///
-/// A `Unit` is just a thin pointer to an internal `UnitInner`. This is done to
-/// ensure that `Unit` itself is quite small as well as enabling a very
-/// efficient hash/equality implementation for `Unit`. All units are
-/// manufactured through an interner which guarantees that each equivalent value
-/// is only produced once.
-pub struct UnitInterner<'a> {
-    state: RefCell<InternerState<'a>>,
-}
-
-struct InternerState<'a> {
-    cache: HashSet<Box<UnitInner<'a>>>,
-}
-
-impl<'a> UnitInterner<'a> {
-    /// Creates a new blank interner
-    pub fn new() -> UnitInterner<'a> {
-        UnitInterner {
-            state: RefCell::new(InternerState {
-                cache: HashSet::new(),
-            }),
-        }
-    }
-
-    /// Creates a new `unit` from its components. The returned `Unit`'s fields
-    /// will all be equivalent to the provided arguments, although they may not
-    /// be the exact same instance.
-    pub fn intern(
-        &'a self,
-        pkg: &'a Package,
-        target: &'a Target,
-        profile: Profile,
-        kind: CompileKind,
-        mode: CompileMode,
-        features: Vec<InternedString>,
-        is_std: bool,
-    ) -> Unit<'a> {
-        let inner = self.intern_inner(&UnitInner {
-            pkg,
-            target,
-            profile,
-            kind,
-            mode,
-            features,
-            is_std,
-        });
-        Unit { inner }
-    }
-
-    // Ok so interning here is a little unsafe, hence the usage of `unsafe`
-    // internally. The primary issue here is that we've got an internal cache of
-    // `UnitInner` instances added so far, but we may need to mutate it to add
-    // it, and the mutation for an interner happens behind a shared borrow.
-    //
-    // Our goal though is to escape the lifetime `borrow_mut` to the same
-    // lifetime as the borrowed passed into this function. That's where `unsafe`
-    // comes into play. What we're subverting here is resizing internally in the
-    // `HashSet` as well as overwriting previous keys in the `HashSet`.
-    //
-    // As a result we store `Box<UnitInner>` internally to have an extra layer
-    // of indirection. That way `*const UnitInner` is a stable address that
-    // doesn't change with `HashSet` resizing. Furthermore we're careful to
-    // never overwrite an entry once inserted.
-    //
-    // Ideally we'd use an off-the-shelf interner from crates.io which avoids a
-    // small amount of unsafety here, but at the time this was written one
-    // wasn't obviously available.
-    fn intern_inner(&'a self, item: &UnitInner<'a>) -> &'a UnitInner<'a> {
-        let mut me = self.state.borrow_mut();
-        if let Some(item) = me.cache.get(item) {
-            // note that `item` has type `&Box<UnitInner>`. Use `&**` to
-            // convert that to `&UnitInner<'a>`, then do some trickery to extend
-            // the lifetime to the `'a` on the function here.
-            return unsafe { &*(&**item as *const UnitInner<'a>) };
-        }
-        me.cache.insert(Box::new(item.clone()));
-        let item = me.cache.get(item).unwrap();
-        unsafe { &*(&**item as *const UnitInner<'a>) }
-    }
-}
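This is the heart of the change: `Unit` stops being a `Copy` wrapper around an interned `&'a UnitInner` with pointer-based `Hash`/`PartialEq`, and becomes an owned struct with derived, structural implementations, shared via `Rc`. The observable difference is that two separately constructed units with equal fields now compare equal even though they are distinct allocations. A small demonstration of that semantic shift (illustrative types only, not cargo's):

use std::rc::Rc;

#[derive(Clone, Hash, PartialEq, Eq, Debug)]
struct Unit {
    pkg: Rc<String>, // stand-in for Rc<Package>
    features: Vec<String>,
}

fn main() {
    let pkg = Rc::new("foo".to_string());
    let a = Rc::new(Unit { pkg: Rc::clone(&pkg), features: vec!["std".into()] });
    let b = Rc::new(Unit { pkg: Rc::clone(&pkg), features: vec!["std".into()] });
    // Interned units were equal only if they were the same pointer; these are
    // equal by value even though they live in two different allocations.
    assert_eq!(a, b);
    assert!(!Rc::ptr_eq(&a, &b));
}

The trade-off runs the other way for hashing: the pointer hash was a single machine word, while the derived hash walks every field, so hot maps keyed by `Unit` pay more per lookup. In exchange, all of the interner's `unsafe` lifetime extension goes away.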
diff --git a/src/cargo/core/compiler/unit_dependencies.rs b/src/cargo/core/compiler/unit_dependencies.rs
index 48ca740d4ef..e18dc6506e0 100644
--- a/src/cargo/core/compiler/unit_dependencies.rs
+++ b/src/cargo/core/compiler/unit_dependencies.rs
@@ -27,14 +27,15 @@ use crate::core::{InternedString, Package, PackageId, Target};
 use crate::CargoResult;
 use log::trace;
 use std::collections::{HashMap, HashSet};
+use std::rc::Rc;

 /// Collection of stuff used while creating the `UnitGraph`.
 struct State<'a, 'cfg> {
     bcx: &'a BuildContext<'a, 'cfg>,
     waiting_on_download: HashSet<PackageId>,
     downloads: Downloads<'a, 'cfg>,
-    unit_dependencies: UnitGraph<'a>,
-    package_cache: HashMap<PackageId, &'a Package>,
+    unit_dependencies: UnitGraph,
+    package_cache: HashMap<PackageId, Rc<Package>>,
     usr_resolve: &'a Resolve,
     usr_features: &'a ResolvedFeatures,
     std_resolve: Option<&'a Resolve>,
@@ -49,9 +50,9 @@ pub fn build_unit_dependencies<'a, 'cfg>(
     resolve: &'a Resolve,
     features: &'a ResolvedFeatures,
     std_resolve: Option<&'a (Resolve, ResolvedFeatures)>,
-    roots: &[Unit<'a>],
-    std_roots: &[Unit<'a>],
-) -> CargoResult<UnitGraph<'a>> {
+    roots: &[Rc<Unit>],
+    std_roots: &[Rc<Unit>],
+) -> CargoResult<UnitGraph> {
     let (std_resolve, std_features) = match std_resolve {
         Some((r, f)) => (Some(r), Some(f)),
         None => (None, None),
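`package_cache` switching from `&'a Package` to `Rc<Package>` is what lets `State` hand out packages without tying them to the `PackageSet`'s lifetime. Roughly the shape of such a cache (a sketch with toy types; the real `get` below also drives downloads):

use std::collections::HashMap;
use std::rc::Rc;

fn get_or_insert(cache: &mut HashMap<u32, Rc<String>>, id: u32) -> Rc<String> {
    // Cache hit or miss, the caller receives a cheap shared handle rather
    // than a borrow whose lifetime is pinned to the cache itself.
    Rc::clone(cache.entry(id).or_insert_with(|| Rc::new(format!("pkg-{}", id))))
}

fn main() {
    let mut cache = HashMap::new();
    let a = get_or_insert(&mut cache, 7);
    let b = get_or_insert(&mut cache, 7);
    assert!(Rc::ptr_eq(&a, &b)); // same allocation both times
}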
@@ -96,8 +97,8 @@ pub fn build_unit_dependencies<'a, 'cfg>(

 /// Compute all the dependencies for the standard library.
 fn calc_deps_of_std<'a, 'cfg>(
     mut state: &mut State<'a, 'cfg>,
-    std_roots: &[Unit<'a>],
-) -> CargoResult<Option<UnitGraph<'a>>> {
+    std_roots: &[Rc<Unit>],
+) -> CargoResult<Option<UnitGraph>> {
     if std_roots.is_empty() {
         return Ok(None);
     }
@@ -114,14 +115,14 @@ fn calc_deps_of_std<'a, 'cfg>(
 /// Add the standard library units to the `unit_dependencies`.
 fn attach_std_deps<'a, 'cfg>(
     state: &mut State<'a, 'cfg>,
-    std_roots: &[Unit<'a>],
-    std_unit_deps: UnitGraph<'a>,
+    std_roots: &[Rc<Unit>],
+    std_unit_deps: UnitGraph,
 ) {
     // Attach the standard library as a dependency of every target unit.
     for (unit, deps) in state.unit_dependencies.iter_mut() {
         if !unit.kind.is_host() && !unit.mode.is_run_custom_build() {
             deps.extend(std_roots.iter().map(|unit| UnitDep {
-                unit: *unit,
+                unit: Rc::clone(unit),
                 unit_for: UnitFor::new_normal(),
                 extern_crate_name: unit.pkg.name(),
                 // TODO: Does this `public` make sense?
@@ -140,7 +141,7 @@ fn attach_std_deps<'a, 'cfg>(

 /// Compute all the dependencies of the given root units.
 /// The result is stored in state.unit_dependencies.
-fn deps_of_roots<'a, 'cfg>(roots: &[Unit<'a>], mut state: &mut State<'a, 'cfg>) -> CargoResult<()> {
+fn deps_of_roots<'a, 'cfg>(roots: &[Rc<Unit>], mut state: &mut State<'a, 'cfg>) -> CargoResult<()> {
     // Loop because we are downloading while building the dependency graph.
     // The partially-built unit graph is discarded through each pass of the
     // loop because it is incomplete because not all required Packages have
@@ -184,7 +185,7 @@ fn deps_of_roots<'a, 'cfg>(roots: &[Unit<'a>], mut state: &mut State<'a, 'cfg>)

 /// Compute the dependencies of a single unit.
 fn deps_of<'a, 'cfg>(
-    unit: &Unit<'a>,
+    unit: &Rc<Unit>,
     state: &mut State<'a, 'cfg>,
     unit_for: UnitFor,
 ) -> CargoResult<()> {
@@ -196,7 +197,9 @@ fn deps_of<'a, 'cfg>(
     // affect anything else in the hierarchy.
     if !state.unit_dependencies.contains_key(unit) {
         let unit_deps = compute_deps(unit, state, unit_for)?;
-        state.unit_dependencies.insert(*unit, unit_deps.clone());
+        state
+            .unit_dependencies
+            .insert(Rc::clone(unit), unit_deps.clone());
         for unit_dep in unit_deps {
             deps_of(&unit_dep.unit, state, unit_dep.unit_for)?;
         }
@@ -209,10 +212,10 @@ fn deps_of<'a, 'cfg>(
 /// This returns a `Vec` of `(Unit, UnitFor)` pairs. The `UnitFor`
 /// is the profile type that should be used for dependencies of the unit.
 fn compute_deps<'a, 'cfg>(
-    unit: &Unit<'a>,
+    unit: &Unit,
     state: &mut State<'a, 'cfg>,
     unit_for: UnitFor,
-) -> CargoResult<Vec<UnitDep<'a>>> {
+) -> CargoResult<Vec<UnitDep>> {
     if unit.mode.is_run_custom_build() {
         return compute_deps_custom_build(unit, unit_for, state);
     } else if unit.mode.is_doc() {
@@ -343,14 +346,14 @@ fn compute_deps<'a, 'cfg>(
                 new_unit_dep(
                     state,
                     unit,
-                    unit.pkg,
+                    &unit.pkg,
                     t,
                     UnitFor::new_normal(),
                     unit.kind.for_target(t),
                     CompileMode::Build,
                 )
             })
-            .collect::<CargoResult<Vec<UnitDep<'a>>>>()?,
+            .collect::<CargoResult<Vec<UnitDep>>>()?,
         );
     }

@@ -362,10 +365,10 @@ fn compute_deps<'a, 'cfg>(
 /// The `unit` provided must represent an execution of a build script, and
 /// the returned set of units must all be run before `unit` is run.
 fn compute_deps_custom_build<'a, 'cfg>(
-    unit: &Unit<'a>,
+    unit: &Unit,
     unit_for: UnitFor,
     state: &mut State<'a, 'cfg>,
-) -> CargoResult<Vec<UnitDep<'a>>> {
+) -> CargoResult<Vec<UnitDep>> {
     if let Some(links) = unit.pkg.manifest().links() {
         if state.bcx.script_override(links, unit.kind).is_some() {
             // Overridden build scripts don't have any dependencies.
@@ -388,8 +391,8 @@ fn compute_deps_custom_build<'a, 'cfg>(
     let unit_dep = new_unit_dep(
         state,
         unit,
-        unit.pkg,
-        unit.target,
+        &unit.pkg,
+        &unit.target,
         script_unit_for,
         // Build scripts always compiled for the host.
         CompileKind::Host,
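`deps_of` keeps its memoization structure: it only recurses the first time a unit is seen, and the owned `Rc` key is what gets stored in the graph. The skeleton, reduced to its control flow (stand-in types, no error handling):

use std::collections::HashMap;
use std::rc::Rc;

type Graph = HashMap<Rc<String>, Vec<Rc<String>>>;

// Stand-in for compute_deps(): only "root" has a dependency.
fn compute_deps(unit: &str) -> Vec<Rc<String>> {
    if unit == "root" { vec![Rc::new("leaf".to_string())] } else { Vec::new() }
}

// Mirrors deps_of() above: memoize on the owned Rc key, recurse once per unit.
fn deps_of(unit: &Rc<String>, graph: &mut Graph) {
    if !graph.contains_key(unit) {
        let deps = compute_deps(unit.as_str());
        graph.insert(Rc::clone(unit), deps.clone());
        for dep in deps {
            deps_of(&dep, graph);
        }
    }
}

fn main() {
    let mut graph = Graph::new();
    deps_of(&Rc::new("root".to_string()), &mut graph);
    assert_eq!(graph.len(), 2); // "root" and "leaf" each computed once
}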
@@ -400,9 +403,9 @@ fn compute_deps_custom_build<'a, 'cfg>(

 /// Returns the dependencies necessary to document a package.
 fn compute_deps_doc<'a, 'cfg>(
-    unit: &Unit<'a>,
+    unit: &Unit,
     state: &mut State<'a, 'cfg>,
-) -> CargoResult<Vec<UnitDep<'a>>> {
+) -> CargoResult<Vec<UnitDep>> {
     let bcx = state.bcx;
     let deps = state
         .resolve()
@@ -469,10 +472,10 @@ fn compute_deps_doc<'a, 'cfg>(
 }

 fn maybe_lib<'a>(
-    unit: &Unit<'a>,
+    unit: &Unit,
     state: &mut State<'a, '_>,
     unit_for: UnitFor,
-) -> CargoResult<Option<UnitDep<'a>>> {
+) -> CargoResult<Option<UnitDep>> {
     unit.pkg
         .targets()
         .iter()
@@ -482,7 +485,7 @@ fn maybe_lib<'a>(
             new_unit_dep(
                 state,
                 unit,
-                unit.pkg,
+                &unit.pkg,
                 t,
                 unit_for,
                 unit.kind.for_target(t),
@@ -500,10 +503,10 @@ fn maybe_lib<'a>(
 /// of work is still returned. `None` is only returned if the package has no
 /// build script.
 fn dep_build_script<'a>(
-    unit: &Unit<'a>,
+    unit: &Unit,
     unit_for: UnitFor,
     state: &State<'a, '_>,
-) -> CargoResult<Option<UnitDep<'a>>> {
+) -> CargoResult<Option<UnitDep>> {
     unit.pkg
         .targets()
         .iter()
@@ -544,7 +547,7 @@ fn dep_build_script<'a>(
             new_unit_dep_with_profile(
                 state,
                 unit,
-                unit.pkg,
+                &unit.pkg,
                 t,
                 script_unit_for,
                 unit.kind,
@@ -576,13 +579,13 @@ fn check_or_build_mode(mode: CompileMode, target: &Target) -> CompileMode {
 /// Create a new Unit for a dependency from `parent` to `pkg` and `target`.
 fn new_unit_dep<'a>(
     state: &State<'a, '_>,
-    parent: &Unit<'a>,
-    pkg: &'a Package,
-    target: &'a Target,
+    parent: &Unit,
+    pkg: &Rc<Package>,
+    target: &Rc<Target>,
     unit_for: UnitFor,
     kind: CompileKind,
     mode: CompileMode,
-) -> CargoResult<UnitDep<'a>> {
+) -> CargoResult<UnitDep> {
     let profile = state.bcx.profiles.get_profile(
         pkg.package_id(),
         state.bcx.ws.is_member(pkg),
@@ -594,14 +597,14 @@ fn new_unit_dep<'a>(

 fn new_unit_dep_with_profile<'a>(
     state: &State<'a, '_>,
-    parent: &Unit<'a>,
-    pkg: &'a Package,
-    target: &'a Target,
+    parent: &Unit,
+    pkg: &Rc<Package>,
+    target: &Rc<Target>,
     unit_for: UnitFor,
     kind: CompileKind,
     mode: CompileMode,
     profile: Profile,
-) -> CargoResult<UnitDep<'a>> {
+) -> CargoResult<UnitDep> {
     // TODO: consider making extern_crate_name return InternedString?
     let extern_crate_name = InternedString::new(&state.resolve().extern_crate_name(
         parent.pkg.package_id(),
@@ -613,10 +616,15 @@ fn new_unit_dep_with_profile<'a>(
         .is_public_dep(parent.pkg.package_id(), pkg.package_id());
     let features_for = unit_for.map_to_features_for();
     let features = state.activated_features(pkg.package_id(), features_for);
-    let unit = state
-        .bcx
-        .units
-        .intern(pkg, target, profile, kind, mode, features, state.is_std);
+    let unit = Rc::new(Unit {
+        pkg: Rc::clone(pkg),
+        target: Rc::clone(target),
+        profile,
+        kind,
+        mode,
+        features,
+        is_std: state.is_std,
+    });
     Ok(UnitDep {
         unit,
         unit_for,
@@ -636,7 +644,7 @@ fn new_unit_dep_with_profile<'a>(
 ///
 /// Here we take the entire `deps` map and add more dependencies from execution
 /// of one build script to execution of another build script.
-fn connect_run_custom_build_deps(unit_dependencies: &mut UnitGraph<'_>) {
+fn connect_run_custom_build_deps(unit_dependencies: &mut UnitGraph) {
     let mut new_deps = Vec::new();

     {
@@ -650,7 +658,7 @@ fn connect_run_custom_build_deps(unit_dependencies: &mut UnitGraph<'_>) {
         for dep in deps {
             if dep.unit.mode == CompileMode::RunCustomBuild {
                 reverse_deps_map
-                    .entry(dep.unit)
+                    .entry(Rc::clone(&dep.unit))
                     .or_insert_with(HashSet::new)
                     .insert(unit);
             }
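Where `new_unit_dep_with_profile` used to ask the interner for a canonical `&UnitInner`, it now simply allocates: every dependency edge gets a fresh `Rc<Unit>`. Equal units are no longer guaranteed to be a single allocation, but value-based `Hash`/`Eq` still collapses them in any set or map. A short illustration of that dedup-by-value behavior (toy key type):

use std::collections::HashSet;
use std::rc::Rc;

fn main() {
    let mut set: HashSet<Rc<(String, bool)>> = HashSet::new();
    // Two allocations with equal contents...
    set.insert(Rc::new(("build".to_string(), true)));
    set.insert(Rc::new(("build".to_string(), true)));
    // ...still collapse to one entry, because Hash/Eq go through the Rc.
    assert_eq!(set.len(), 1);
}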
@@ -679,7 +687,7 @@ fn connect_run_custom_build_deps(unit_dependencies: &mut UnitGraph<'_>) {
             let to_add = reverse_deps
                 .iter()
                 // Get all deps for lib.
-                .flat_map(|reverse_dep| unit_dependencies[reverse_dep].iter())
+                .flat_map(|reverse_dep| unit_dependencies[*reverse_dep].iter())
                 // Only deps with `links`.
                 .filter(|other| {
                     other.unit.pkg != unit.pkg
@@ -697,7 +705,7 @@ fn connect_run_custom_build_deps(unit_dependencies: &mut UnitGraph<'_>) {

             if !to_add.is_empty() {
                 // (RunCustomBuild, set(other RunCustomBuild))
-                new_deps.push((*unit, to_add));
+                new_deps.push((Rc::clone(unit), to_add));
             }
         }
     }
@@ -730,7 +738,7 @@ impl<'a, 'cfg> State<'a, 'cfg> {
         features.activated_features(pkg_id, features_for)
     }

-    fn get(&mut self, id: PackageId) -> CargoResult<Option<&'a Package>> {
+    fn get(&mut self, id: PackageId) -> CargoResult<Option<&Rc<Package>>> {
         if let Some(pkg) = self.package_cache.get(&id) {
             return Ok(Some(pkg));
         }
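The graph type itself drops its lifetime next: keys become owned `Rc<Unit>` handles and the edge list holds `Rc` clones of the same nodes. A compilable miniature of the new shape (stand-in `Unit`, same `Rc` topology):

use std::collections::HashMap;
use std::rc::Rc;

#[derive(Hash, PartialEq, Eq)]
struct Unit {
    name: String,
}

struct UnitDep {
    unit: Rc<Unit>,
}

type UnitGraph = HashMap<Rc<Unit>, Vec<UnitDep>>;

fn main() {
    let a = Rc::new(Unit { name: "a".into() });
    let b = Rc::new(Unit { name: "b".into() });
    let mut graph: UnitGraph = HashMap::new();
    // A node and its edges share ownership of the same Rc allocations.
    graph.insert(Rc::clone(&a), vec![UnitDep { unit: Rc::clone(&b) }]);
    graph.insert(Rc::clone(&b), Vec::new());
    assert_eq!(graph[&a].len(), 1);
}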
diff --git a/src/cargo/core/compiler/unit_graph.rs b/src/cargo/core/compiler/unit_graph.rs
index 6119c439e5a..70c0a48c7e9 100644
--- a/src/cargo/core/compiler/unit_graph.rs
+++ b/src/cargo/core/compiler/unit_graph.rs
@@ -5,15 +5,16 @@ use crate::core::{nightly_features_allowed, InternedString, PackageId, Target};
 use crate::util::CargoResult;
 use std::collections::HashMap;
 use std::io::Write;
+use std::rc::Rc;

 /// The dependency graph of Units.
-pub type UnitGraph<'a> = HashMap<Unit<'a>, Vec<UnitDep<'a>>>;
+pub type UnitGraph = HashMap<Rc<Unit>, Vec<UnitDep>>;

 /// A unit dependency.
 #[derive(Debug, Clone, Hash, Eq, PartialEq, PartialOrd, Ord)]
-pub struct UnitDep<'a> {
+pub struct UnitDep {
     /// The dependency unit.
-    pub unit: Unit<'a>,
+    pub unit: Rc<Unit>,
     /// The purpose of this dependency (a dependency for a test, or a build
     /// script, etc.).
     pub unit_for: UnitFor,
@@ -62,14 +63,14 @@ struct SerializedUnitDep {
 }

 pub fn emit_serialized_unit_graph(
-    root_units: &[Unit<'_>],
-    unit_graph: &UnitGraph<'_>,
+    root_units: &[Rc<Unit>],
+    unit_graph: &UnitGraph,
 ) -> CargoResult<()> {
     let is_nightly = nightly_features_allowed();
-    let mut units: Vec<(&Unit<'_>, &Vec<UnitDep<'_>>)> = unit_graph.iter().collect();
+    let mut units: Vec<(&Rc<Unit>, &Vec<UnitDep>)> = unit_graph.iter().collect();
     units.sort_unstable();
     // Create a map for quick lookup for dependencies.
-    let indices: HashMap<&Unit<'_>, usize> = units
+    let indices: HashMap<&Rc<Unit>, usize> = units
         .iter()
         .enumerate()
         .map(|(i, val)| (val.0, i))
         .collect();
@@ -97,7 +98,7 @@ pub fn emit_serialized_unit_graph(
                 .collect();
             SerializedUnit {
                 pkg_id: unit.pkg.package_id(),
-                target: unit.target,
+                target: &unit.target,
                 profile: &unit.profile,
                 platform: unit.kind,
                 mode: unit.mode,
diff --git a/src/cargo/core/manifest.rs b/src/cargo/core/manifest.rs
index 27f5b62fe3a..09f31ef48a3 100644
--- a/src/cargo/core/manifest.rs
+++ b/src/cargo/core/manifest.rs
@@ -25,7 +25,7 @@ pub enum EitherManifest {
 #[derive(Clone, Debug)]
 pub struct Manifest {
     summary: Summary,
-    targets: Vec<Target>,
+    targets: Vec<Rc<Target>>,
     links: Option<String>,
     warnings: Warnings,
     exclude: Vec<String>,
@@ -392,7 +392,7 @@ compact_debug! {
 impl Manifest {
     pub fn new(
         summary: Summary,
-        targets: Vec<Target>,
+        targets: Vec<Rc<Target>>,
         exclude: Vec<String>,
         include: Vec<String>,
         links: Option<String>,
@@ -459,10 +459,10 @@ impl Manifest {
     pub fn summary_mut(&mut self) -> &mut Summary {
         &mut self.summary
     }
-    pub fn targets(&self) -> &[Target] {
+    pub fn targets(&self) -> &[Rc<Target>] {
         &self.targets
     }
-    pub fn targets_mut(&mut self) -> &mut [Target] {
+    pub fn targets_mut(&mut self) -> &mut [Rc<Target>] {
         &mut self.targets
     }
     pub fn version(&self) -> &Version {
diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs
index 77910c979b7..ed28a45f760 100644
--- a/src/cargo/core/package.rs
+++ b/src/cargo/core/package.rs
@@ -5,6 +5,7 @@ use std::fmt;
 use std::hash;
 use std::mem;
 use std::path::{Path, PathBuf};
+use std::rc::Rc;
 use std::time::{Duration, Instant};

 use anyhow::Context;
@@ -103,6 +104,7 @@ impl ser::Serialize for Package {
             .targets()
             .iter()
             .filter(|t| t.src_path().is_path())
+            .map(|t| &**t) // consider making &Rc<Target> to remove this
             .collect();

         SerializedPackage {
@@ -174,7 +176,7 @@ impl Package {
         self.manifest.summary()
     }
     /// Gets the targets specified in the manifest.
-    pub fn targets(&self) -> &[Target] {
+    pub fn targets(&self) -> &[Rc<Target>] {
         self.manifest.targets()
     }
     /// Gets the current package version.
@@ -267,7 +269,7 @@ impl hash::Hash for Package {
 /// This is primarily used to convert a set of `PackageId`s to `Package`s. It
 /// will download as needed, or used the cached download if available.
 pub struct PackageSet<'cfg> {
-    packages: HashMap<PackageId, LazyCell<Package>>,
+    packages: HashMap<PackageId, LazyCell<Rc<Package>>>,
     sources: RefCell<SourceMap<'cfg>>,
     config: &'cfg Config,
     multi: Multi,
@@ -431,11 +433,14 @@ impl<'cfg> PackageSet<'cfg> {
         })
     }

-    pub fn get_one(&self, id: PackageId) -> CargoResult<&Package> {
+    pub fn get_one(&self, id: PackageId) -> CargoResult<&Rc<Package>> {
         Ok(self.get_many(Some(id))?.remove(0))
     }

-    pub fn get_many(&self, ids: impl IntoIterator<Item = PackageId>) -> CargoResult<Vec<&Package>> {
+    pub fn get_many(
+        &self,
+        ids: impl IntoIterator<Item = PackageId>,
+    ) -> CargoResult<Vec<&Rc<Package>>> {
         let mut pkgs = Vec::new();
         let mut downloads = self.enable_download()?;
         for id in ids {
@@ -469,9 +474,9 @@ impl<'cfg> PackageSet<'cfg> {
     }

     /// Get mutable access to an already downloaded package, if it's already
-    /// downoaded and it's part of this set. Does not actually attempt to
+    /// downloaded and it's part of this set. Does not actually attempt to
     /// download anything if it's not already downloaded.
-    pub fn lookup_mut(&mut self, id: PackageId) -> Option<&mut Package> {
+    pub fn lookup_mut(&mut self, id: PackageId) -> Option<&mut Rc<Package>> {
         self.packages
             .get_mut(&id)
             .and_then(|cell| cell.borrow_mut())
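`PackageSet` keeps its fill-once `LazyCell` slots; only the payload changes from `Package` to `Rc<Package>`, so every borrower of a downloaded package now shares one allocation. The std `OnceCell` behaves analogously (`LazyCell` here is cargo's own internal type, not this one), as a sketch:

use std::cell::OnceCell;
use std::rc::Rc;

fn main() {
    let slot: OnceCell<Rc<String>> = OnceCell::new();
    // The slot can be filled exactly once...
    assert!(slot.set(Rc::new("downloaded package".to_string())).is_ok());
    // ...and every later access shares the same allocation.
    let first = Rc::clone(slot.get().unwrap());
    let second = Rc::clone(slot.get().unwrap());
    assert!(Rc::ptr_eq(&first, &second));
}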
@@ -501,13 +506,13 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
     /// Returns `None` if the package is queued up for download and will
     /// eventually be returned from `wait_for_download`. Returns `Some(pkg)` if
     /// the package is ready and doesn't need to be downloaded.
-    pub fn start(&mut self, id: PackageId) -> CargoResult<Option<&'a Package>> {
+    pub fn start(&mut self, id: PackageId) -> CargoResult<Option<&'a Rc<Package>>> {
         Ok(self
             .start_inner(id)
             .chain_err(|| format!("failed to download `{}`", id))?)
     }

-    fn start_inner(&mut self, id: PackageId) -> CargoResult<Option<&'a Package>> {
+    fn start_inner(&mut self, id: PackageId) -> CargoResult<Option<&'a Rc<Package>>> {
         // First up see if we've already cached this package, in which case
         // there's nothing to do.
         let slot = self
@@ -532,7 +537,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
         let (url, descriptor) = match pkg {
             MaybePackage::Ready(pkg) => {
                 debug!("{} doesn't need a download", id);
-                assert!(slot.fill(pkg).is_ok());
+                assert!(slot.fill(Rc::new(pkg)).is_ok());
                 return Ok(Some(slot.borrow().unwrap()));
             }
             MaybePackage::Download { url, descriptor } => (url, descriptor),
@@ -645,7 +650,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
     /// # Panics
     ///
     /// This function will panic if there are no remaining downloads.
-    pub fn wait(&mut self) -> CargoResult<&'a Package> {
+    pub fn wait(&mut self) -> CargoResult<&'a Rc<Package>> {
         let (dl, data) = loop {
             assert_eq!(self.pending.len(), self.pending_ids.len());
             let (token, result) = self.wait_for_curl()?;
@@ -758,7 +763,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> {
             .set(self.next_speed_check.get() + finish_dur);

         let slot = &self.set.packages[&dl.id];
-        assert!(slot.fill(pkg).is_ok());
+        assert!(slot.fill(Rc::new(pkg)).is_ok());
         Ok(slot.borrow().unwrap())
     }
diff --git a/src/cargo/core/source/mod.rs b/src/cargo/core/source/mod.rs
index c06ff1f5e45..356f0197208 100644
--- a/src/cargo/core/source/mod.rs
+++ b/src/cargo/core/source/mod.rs
@@ -55,8 +55,10 @@ pub trait Source {
     {
         let mut sources = SourceMap::new();
         sources.insert(self);
+        // TODO: add something to PackageSet to get owned value to avoid the clone here (who uses this method?)
         let pkg_set = PackageSet::new(&[package], sources, config)?;
-        Ok(pkg_set.get_one(package)?.clone())
+        let pkg = pkg_set.get_one(package)?;
+        Ok((**pkg).clone())
     }

     fn finish_download(&mut self, package: PackageId, contents: Vec<u8>) -> CargoResult<Package>;
diff --git a/src/cargo/ops/cargo_clean.rs b/src/cargo/ops/cargo_clean.rs
index 1e13d5a2f0d..33af81d6667 100644
--- a/src/cargo/ops/cargo_clean.rs
+++ b/src/cargo/ops/cargo_clean.rs
@@ -2,10 +2,11 @@ use crate::core::InternedString;
 use std::collections::HashMap;
 use std::fs;
 use std::path::Path;
+use std::rc::Rc;

 use crate::core::compiler::unit_dependencies;
 use crate::core::compiler::{BuildConfig, BuildContext, CompileKind, CompileMode, Context};
-use crate::core::compiler::{RustcTargetData, UnitInterner};
+use crate::core::compiler::{RustcTargetData, Unit};
 use crate::core::profiles::{Profiles, UnitFor};
 use crate::core::resolver::features::{FeatureResolver, HasDevUnits, RequestedFeatures};
 use crate::core::{PackageIdSpec, Workspace};
@@ -59,7 +60,6 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {
     }

     let (packages, resolve) = ops::resolve_ws(ws)?;
-    let interner = UnitInterner::new();
     let mut build_config = BuildConfig::new(config, Some(1), &opts.target, CompileMode::Build)?;
     build_config.requested_profile = opts.requested_profile;
     let target_data = RustcTargetData::new(ws, build_config.requested_kind)?;
@@ -69,7 +69,6 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {
         opts.config,
         &build_config,
         profiles,
-        &interner,
         HashMap::new(),
         target_data,
     )?;
@@ -121,9 +120,16 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {
                     let features_for = unit_for.map_to_features_for();
                     let features =
                         features.activated_features_unverified(pkg.package_id(), features_for);
-                    units.push(bcx.units.intern(
-                        pkg, target, profile, *kind, *mode, features, /*is_std*/ false,
-                    ));
+                    let unit = Unit {
+                        pkg: Rc::clone(pkg),
+                        target: Rc::clone(target),
+                        profile,
+                        kind: *kind,
+                        mode: *mode,
+                        features,
+                        is_std: false,
+                    };
+                    units.push(Rc::new(unit));
                 }
             }
         }
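Both `download_now` above and `build_resolve_graph` further down need an owned `Package` out of a `&Rc<Package>`, hence the `(**pkg).clone()` spelling: one `*` removes the reference, the second removes the `Rc`, and `clone` then copies the `Package` itself. Plain method-call `.clone()` would resolve to `Rc::clone` and only bump the refcount. In miniature:

use std::rc::Rc;

fn main() {
    let shared: Rc<String> = Rc::new("pkg".to_string());
    let handle: &Rc<String> = &shared;
    // `(**handle).clone()` is a deep copy of the String payload...
    let owned: String = (**handle).clone();
    // ...while plain `.clone()` resolves to Rc::clone and only bumps the refcount.
    let alias: Rc<String> = handle.clone();
    assert_eq!(owned, *alias);
    assert_eq!(Rc::strong_count(&shared), 2);
}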
diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs
index e698f3a3a25..df423bb7771 100644
--- a/src/cargo/ops/cargo_compile.rs
+++ b/src/cargo/ops/cargo_compile.rs
@@ -26,6 +26,7 @@
 use std::collections::{BTreeSet, HashMap, HashSet};
 use std::iter::FromIterator;
 use std::path::PathBuf;
+use std::rc::Rc;
 use std::sync::Arc;

 use crate::core::compiler::standard_lib;
@@ -33,11 +34,11 @@
 use crate::core::compiler::unit_dependencies::build_unit_dependencies;
 use crate::core::compiler::unit_graph;
 use crate::core::compiler::{BuildConfig, BuildContext, Compilation, Context};
 use crate::core::compiler::{CompileKind, CompileMode, RustcTargetData, Unit};
-use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner};
+use crate::core::compiler::{DefaultExecutor, Executor};
 use crate::core::profiles::{Profiles, UnitFor};
 use crate::core::resolver::features::{self, FeaturesFor};
 use crate::core::resolver::{HasDevUnits, Resolve, ResolveOpts};
-use crate::core::{LibKind, Package, PackageSet, Target};
+use crate::core::{Package, PackageSet, Target};
 use crate::core::{PackageId, PackageIdSpec, TargetKind, Workspace};
 use crate::ops;
 use crate::ops::resolve::WorkspaceResolve;
@@ -405,14 +406,12 @@ pub fn compile_ws<'a>(
         workspace_resolve.as_ref().unwrap_or(&resolve),
     )?;

-    let interner = UnitInterner::new();
     let mut bcx = BuildContext::new(
         ws,
         &pkg_set,
         config,
         build_config,
         profiles,
-        &interner,
         HashMap::new(),
         target_data,
     )?;
@@ -461,7 +460,7 @@ pub fn compile_ws<'a>(
                 extra_args_name
            );
         }
-        bcx.extra_compiler_args.insert(units[0], args);
+        bcx.extra_compiler_args.insert(Rc::clone(&units[0]), args);
     }
     for unit in &units {
         if unit.mode.is_doc() || unit.mode.is_doc_test() {
@@ -477,7 +476,8 @@ pub fn compile_ws<'a>(
             }

             if let Some(args) = extra_args {
-                bcx.extra_compiler_args.insert(*unit, args.clone());
+                bcx.extra_compiler_args
+                    .insert(Rc::clone(unit), args.clone());
             }
         }
     }
@@ -673,8 +673,8 @@ impl CompileFilter {
 /// not the target requires its features to be present.
 #[derive(Debug)]
 struct Proposal<'a> {
-    pkg: &'a Package,
-    target: &'a Target,
+    pkg: &'a Rc<Package>,
+    target: &'a Rc<Target>,
     /// Indicates whether or not all required features *must* be present. If
     /// false, and the features are not available, then it will be silently
     /// skipped. Generally, targets specified by name (`--bin foo`) are
@@ -687,16 +687,16 @@ struct Proposal<'a> {
 /// compile. Dependencies for these targets are computed later in `unit_dependencies`.
 fn generate_targets<'a>(
     ws: &Workspace<'_>,
-    packages: &[&'a Package],
+    packages: &[&Rc<Package>],
     filter: &CompileFilter,
     default_arch_kind: CompileKind,
     resolve: &'a Resolve,
     resolved_features: &features::ResolvedFeatures,
     bcx: &BuildContext<'a, '_>,
-) -> CargoResult<Vec<Unit<'a>>> {
+) -> CargoResult<Vec<Rc<Unit>>> {
     // Helper for creating a `Unit` struct.
-    let new_unit = |pkg: &'a Package, target: &'a Target, target_mode: CompileMode| {
-        let unit_for = if target_mode.is_any_test() {
+    let new_unit = |pkg: &Rc<Package>, target: &Rc<Target>, mode: CompileMode| {
+        let unit_for = if mode.is_any_test() {
             // NOTE: the `UnitFor` here is subtle. If you have a profile
             // with `panic` set, the `panic` flag is cleared for
             // tests/benchmarks and their dependencies. If this
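With `extra_compiler_args` keyed by owned units, registering args means cloning a handle (`Rc::clone(&units[0])`) rather than copying a small interned pointer. Shape-wise (stand-in key type, mirroring the inserts above):

use std::collections::HashMap;
use std::rc::Rc;

fn main() {
    let unit = Rc::new("unit-key".to_string());
    let mut extra: HashMap<Rc<String>, Vec<String>> = HashMap::new();
    // Rc::clone keeps the key alive in the map while the caller keeps its handle.
    extra.insert(Rc::clone(&unit), vec!["--cfg".into(), "demo".into()]);
    assert_eq!(extra[&unit].len(), 2);
}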
@@ -724,7 +724,7 @@ fn generate_targets<'a>(
        };

         // Custom build units are added in `build_unit_dependencies`.
         assert!(!target.is_custom_build());
-        let target_mode = match target_mode {
+        let mode = match mode {
             CompileMode::Test => {
                 if target.is_example() && !filter.is_specific() && !target.tested() {
                     // Examples are included as regular binaries to verify
@@ -748,12 +748,12 @@ fn generate_targets<'a>(
             // and since these are the same, we want them to be de-duplicated in
             // `unit_dependencies`.
             CompileMode::Bench => CompileMode::Test,
-            _ => target_mode,
+            _ => mode,
         };
         let kind = default_arch_kind.for_target(target);
-        let profile =
-            bcx.profiles
-                .get_profile(pkg.package_id(), ws.is_member(pkg), unit_for, target_mode);
+        let profile = bcx
+            .profiles
+            .get_profile(pkg.package_id(), ws.is_member(pkg), unit_for, mode);

         let features_for = if target.proc_macro() {
             FeaturesFor::HostDep
@@ -763,15 +763,16 @@ fn generate_targets<'a>(
         };
         let features =
             Vec::from(resolved_features.activated_features(pkg.package_id(), features_for));
-        bcx.units.intern(
-            pkg,
-            target,
+        let unit = Unit {
+            pkg: Rc::clone(pkg),
+            target: Rc::clone(target),
             profile,
             kind,
-            target_mode,
+            mode,
             features,
-            /*is_std*/ false,
-        )
+            is_std: false,
+        };
+        Rc::new(unit)
     };

     // Create a list of proposed targets.
@@ -785,7 +786,7 @@ fn generate_targets<'a>(
             let default = filter_default_targets(pkg.targets(), bcx.build_config.mode);
             proposals.extend(default.into_iter().map(|target| Proposal {
                 pkg,
-                target,
+                target: &target,
                 requires_features: !required_features_filterable,
                 mode: bcx.build_config.mode,
             }));
@@ -919,7 +920,7 @@ fn generate_targets<'a>(
             None => Vec::new(),
         };
         if target.is_lib() || unavailable_features.is_empty() {
-            let unit = new_unit(pkg, target, mode);
+            let unit = new_unit(pkg, &target, mode);
             units.insert(unit);
         } else if requires_features {
             let required_features = target.required_features().unwrap();
@@ -978,7 +979,7 @@ fn resolve_all_features(

 /// Given a list of all targets for a package, filters out only the targets
 /// that are automatically included when the user doesn't specify any targets.
-fn filter_default_targets(targets: &[Target], mode: CompileMode) -> Vec<&Target> {
+fn filter_default_targets(targets: &[Rc<Target>], mode: CompileMode) -> Vec<&Rc<Target>> {
     match mode {
         CompileMode::Bench => targets.iter().filter(|t| t.benched()).collect(),
         CompileMode::Test => targets
@@ -1006,7 +1007,7 @@ fn filter_default_targets(targets: &[Target], mode: CompileMode) -> Vec<&Target>

 /// Returns a list of proposed targets based on command-line target selection flags.
 fn list_rule_targets<'a>(
-    packages: &[&'a Package],
+    packages: &[&'a Rc<Package>],
     rule: &FilterRule,
     target_desc: &'static str,
     is_expected_kind: fn(&Target) -> bool,
@@ -1034,7 +1035,7 @@ fn list_rule_targets<'a>(

 /// Finds the targets for a specifically named target.
 fn find_named_targets<'a>(
-    packages: &[&'a Package],
+    packages: &[&'a Rc<Package>],
     target_name: &str,
     target_desc: &'static str,
     is_expected_kind: fn(&Target) -> bool,
@@ -1060,7 +1061,7 @@ fn find_named_targets<'a>(
 }

 fn filter_targets<'a>(
-    packages: &[&'a Package],
+    packages: &[&'a Rc<Package>],
     predicate: impl Fn(&Target) -> bool,
     requires_features: bool,
     mode: CompileMode,
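The one casualty shows up next: `remove_dylib_crate_type` mutated `Target`s in place, but with targets behind shared `Rc`s, in-place mutation needs `Rc::get_mut`, which only succeeds while no other handle exists. The commented-out body below records exactly that problem (note its `Rc::get_mut(...).expect(...)` line). The semantics in isolation:

use std::rc::Rc;

fn main() {
    let mut target = Rc::new(vec!["dylib".to_string(), "rlib".to_string()]);
    // With a unique handle, in-place mutation is allowed...
    Rc::get_mut(&mut target).expect("unique").truncate(1);
    // ...but the moment the Rc is shared, get_mut refuses.
    let alias = Rc::clone(&target);
    assert!(Rc::get_mut(&mut target).is_none());
    drop(alias);
    assert!(Rc::get_mut(&mut target).is_some());
}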
@@ -1090,23 +1091,24 @@ fn filter_targets<'a>(
 /// `PackageSet` here to rewrite downloaded packages. We iterate over all `path`
 /// packages (which should download immediately and not actually cause blocking
 /// here) and edit their manifests to only list one `LibKind` for an `Rlib`.
-fn remove_dylib_crate_type(set: &mut PackageSet<'_>) -> CargoResult<()> {
-    let ids = set
-        .package_ids()
-        .filter(|p| p.source_id().is_path())
-        .collect::<Vec<_>>();
-    set.get_many(ids.iter().cloned())?;
-
-    for id in ids {
-        let pkg = set.lookup_mut(id).expect("should be downloaded now");
-
-        for target in pkg.manifest_mut().targets_mut() {
-            if let TargetKind::Lib(crate_types) = target.kind_mut() {
-                crate_types.truncate(0);
-                crate_types.push(LibKind::Rlib);
-            }
-        }
-    }
+fn remove_dylib_crate_type(_set: &mut PackageSet<'_>) -> CargoResult<()> {
+    // let ids = set
+    //     .package_ids()
+    //     .filter(|p| p.source_id().is_path())
+    //     .collect::<Vec<_>>();
+    // set.get_many(ids.iter().cloned())?;
+
+    // for id in ids {
+    //     let pkg = set.lookup_mut(id).expect("should be downloaded now");
+
+    //     for target in pkg.manifest_mut().targets_mut() {
+    //         let target = Rc::get_mut(target).expect("should only be one reference");
+    //         if let TargetKind::Lib(crate_types) = target.kind_mut() {
+    //             crate_types.truncate(0);
+    //             crate_types.push(LibKind::Rlib);
+    //         }
+    //     }
+    // }
     Ok(())
 }
diff --git a/src/cargo/ops/cargo_output_metadata.rs b/src/cargo/ops/cargo_output_metadata.rs
index 7892c2cc4ae..276ee74288c 100644
--- a/src/cargo/ops/cargo_output_metadata.rs
+++ b/src/cargo/ops/cargo_output_metadata.rs
@@ -133,7 +133,7 @@ fn build_resolve_graph(
         .pkg_set
         .get_many(ws_resolve.pkg_set.package_ids())?
         .into_iter()
-        .map(|pkg| (pkg.package_id(), pkg.clone()))
+        .map(|pkg| (pkg.package_id(), (**pkg).clone())) // TODO: explain clone
         .collect();

     // Start from the workspace roots, and recurse through filling out the
diff --git a/src/cargo/util/toml/mod.rs b/src/cargo/util/toml/mod.rs
index 13adf093fec..49bb2488007 100644
--- a/src/cargo/util/toml/mod.rs
+++ b/src/cargo/util/toml/mod.rs
@@ -1238,6 +1238,7 @@ impl TomlManifest {
         }

         let custom_metadata = project.metadata.clone();
+        let targets = targets.into_iter().map(Rc::new).collect();
         let mut manifest = Manifest::new(
             summary,
             targets,
diff --git a/src/cargo/util/workspace.rs b/src/cargo/util/workspace.rs
index b3c4966cf33..c3aa5b1086c 100644
--- a/src/cargo/util/workspace.rs
+++ b/src/cargo/util/workspace.rs
@@ -3,12 +3,13 @@
 use crate::ops::CompileOptions;
 use crate::util::CargoResult;
 use anyhow::bail;
 use std::fmt::Write;
+use std::rc::Rc;

 fn get_available_targets<'a>(
     filter_fn: fn(&Target) -> bool,
     ws: &'a Workspace<'_>,
     options: &'a CompileOptions<'_>,
-) -> CargoResult<Vec<&'a Target>> {
+) -> CargoResult<Vec<&'a Rc<Target>>> {
     let packages = options.spec.get_packages(ws)?;

     let mut targets: Vec<_> = packages