diff --git a/src/cargo/ops/cargo_rustc/context.rs b/src/cargo/ops/cargo_rustc/context.rs index 906a5155a..56bbe9470 100755 --- a/src/cargo/ops/cargo_rustc/context.rs +++ b/src/cargo/ops/cargo_rustc/context.rs @@ -40,6 +40,7 @@ pub struct Context<'a, 'cfg: 'a> { pub compilation: Compilation<'cfg>, pub packages: &'a PackageSet<'cfg>, pub build_state: Arc<BuildState>, + pub build_script_overridden: HashSet<(PackageId, Kind)>, pub build_explicit_deps: HashMap<Unit<'a>, BuildDeps>, pub fingerprints: HashMap<Unit<'a>, Arc<Fingerprint>>, pub compiled: HashSet<Unit<'a>>, @@ -55,7 +56,9 @@ pub struct Context<'a, 'cfg: 'a> { host_info: TargetInfo, profiles: &'a Profiles, incremental_enabled: bool, + target_filenames: HashMap<Unit<'a>, Arc<Vec<(String, Option<PathBuf>, bool)>>>, + target_metadatas: HashMap<Unit<'a>, Option<Metadata>>, } #[derive(Clone, Default)] @@ -82,7 +85,7 @@ impl TargetInfo { } } -#[derive(Clone)] +#[derive(Clone, Hash, Eq, PartialEq, Ord, PartialOrd)] pub struct Metadata(u64); impl<'a, 'cfg> Context<'a, 'cfg> { @@ -154,7 +157,11 @@ impl<'a, 'cfg> Context<'a, 'cfg> { used_in_plugin: HashSet::new(), incremental_enabled: incremental_enabled, jobserver: jobserver, + build_script_overridden: HashSet::new(), + + // TODO: Pre-Calculate these with a topo-sort, rather than lazy-calculating target_filenames: HashMap::new(), + target_metadatas: HashMap::new(), }) } @@ -362,13 +369,13 @@ impl<'a, 'cfg> Context<'a, 'cfg> { /// Returns the directory for the specified unit where fingerprint /// information is stored. - pub fn fingerprint_dir(&mut self, unit: &Unit) -> PathBuf { + pub fn fingerprint_dir(&mut self, unit: &Unit<'a>) -> PathBuf { let dir = self.pkg_dir(unit); self.layout(unit.kind).fingerprint().join(dir) } /// Returns the appropriate directory layout for either a plugin or not. 
- pub fn build_script_dir(&mut self, unit: &Unit) -> PathBuf { + pub fn build_script_dir(&mut self, unit: &Unit<'a>) -> PathBuf { assert!(unit.target.is_custom_build()); assert!(!unit.profile.run_custom_build); let dir = self.pkg_dir(unit); @@ -376,7 +383,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { } /// Returns the appropriate directory layout for either a plugin or not. - pub fn build_script_out_dir(&mut self, unit: &Unit) -> PathBuf { + pub fn build_script_out_dir(&mut self, unit: &Unit<'a>) -> PathBuf { assert!(unit.target.is_custom_build()); assert!(unit.profile.run_custom_build); let dir = self.pkg_dir(unit); @@ -394,7 +401,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { /// Returns the appropriate output directory for the specified package and /// target. - pub fn out_dir(&mut self, unit: &Unit) -> PathBuf { + pub fn out_dir(&mut self, unit: &Unit<'a>) -> PathBuf { if unit.profile.doc { self.layout(unit.kind).root().parent().unwrap().join("doc") } else if unit.target.is_custom_build() { @@ -406,7 +413,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { } } - fn pkg_dir(&mut self, unit: &Unit) -> String { + fn pkg_dir(&mut self, unit: &Unit<'a>) -> String { let name = unit.pkg.package_id().name(); match self.target_metadata(unit) { Some(meta) => format!("{}-{}", name, meta), @@ -440,7 +447,17 @@ impl<'a, 'cfg> Context<'a, 'cfg> { /// We build to the path: "{filename}-{target_metadata}" /// We use a linking step to link/copy to a predictable filename /// like `target/debug/libfoo.{a,so,rlib}` and such. 
- pub fn target_metadata(&mut self, unit: &Unit) -> Option<Metadata> { + pub fn target_metadata(&mut self, unit: &Unit<'a>) -> Option<Metadata> { + if let Some(cache) = self.target_metadatas.get(unit) { + return cache.clone() + } + + let metadata = self.calc_target_metadata(unit); + self.target_metadatas.insert(*unit, metadata.clone()); + metadata + } + + fn calc_target_metadata(&mut self, unit: &Unit<'a>) -> Option<Metadata> { // No metadata for dylibs because of a couple issues // - OSX encodes the dylib name in the executable // - Windows rustc multiple files of which we can't easily link all of them @@ -483,6 +500,15 @@ impl<'a, 'cfg> Context<'a, 'cfg> { // when changing feature sets each lib is separately cached. self.resolve.features_sorted(unit.pkg.package_id()).hash(&mut hasher); + // Mix in the target-metadata of all the dependencies of this target + if let Ok(deps) = self.dep_targets(unit) { + let mut deps_metadata = deps.into_iter().map(|dep_unit| { + self.target_metadata(&dep_unit) + }).collect::<Vec<_>>(); + deps_metadata.sort(); + deps_metadata.hash(&mut hasher); + } + // Throw in the profile we're compiling with. This helps caching // panic=abort and panic=unwind artifacts, additionally with various // settings like debuginfo and whatnot. 
@@ -512,7 +538,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { } /// Returns the file stem for a given target/profile combo (with metadata) - pub fn file_stem(&mut self, unit: &Unit) -> String { + pub fn file_stem(&mut self, unit: &Unit<'a>) -> String { match self.target_metadata(unit) { Some(ref metadata) => format!("{}-{}", unit.target.crate_name(), metadata), @@ -537,7 +563,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { /// Returns an Option because in some cases we don't want to link /// (eg a dependent lib) - pub fn link_stem(&mut self, unit: &Unit) -> Option<(PathBuf, String)> { + pub fn link_stem(&mut self, unit: &Unit<'a>) -> Option<(PathBuf, String)> { let src_dir = self.out_dir(unit); let bin_stem = self.bin_stem(unit); let file_stem = self.file_stem(unit); @@ -578,6 +604,15 @@ impl<'a, 'cfg> Context<'a, 'cfg> { return Ok(cache.clone()) } + let result = self.calc_target_filenames(unit); + if let Ok(ref ret) = result { + self.target_filenames.insert(*unit, ret.clone()); + } + result + } + + fn calc_target_filenames(&mut self, unit: &Unit<'a>) + -> CargoResult<Arc<Vec<(String, Option<PathBuf>, bool)>>> { let out_dir = self.out_dir(unit); let stem = self.file_stem(unit); let link_stem = self.link_stem(unit); @@ -659,9 +694,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { } info!("Target filenames: {:?}", ret); - let ret = Arc::new(ret); - self.target_filenames.insert(*unit, ret.clone()); - Ok(ret) + Ok(Arc::new(ret)) } /// For a package, return all targets which are registered as dependencies @@ -776,7 +809,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { // actually depend on anything, we've reached the end of the dependency // chain as we've got all the info we're gonna get. 
let key = (unit.pkg.package_id().clone(), unit.kind); - if self.build_state.outputs.lock().unwrap().contains_key(&key) { + if self.build_script_overridden.contains(&key) { return Ok(Vec::new()) } diff --git a/src/cargo/ops/cargo_rustc/custom_build.rs b/src/cargo/ops/cargo_rustc/custom_build.rs index 727f0d324..ec78ce4e4 100644 --- a/src/cargo/ops/cargo_rustc/custom_build.rs +++ b/src/cargo/ops/cargo_rustc/custom_build.rs @@ -77,7 +77,9 @@ pub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<(Work, Work, Freshness)> { let _p = profile::start(format!("build script prepare: {}/{}", unit.pkg, unit.target.name())); - let overridden = cx.build_state.has_override(unit); + + let key = (unit.pkg.package_id().clone(), unit.kind); + let overridden = cx.build_script_overridden.contains(&key); let (work_dirty, work_fresh) = if overridden { (Work::noop(), Work::noop()) } else { @@ -314,18 +316,6 @@ impl BuildState { fn insert(&self, id: PackageId, kind: Kind, output: BuildOutput) { self.outputs.lock().unwrap().insert((id, kind), output); } - - fn has_override(&self, unit: &Unit) -> bool { - let key = unit.pkg.manifest().links().map(|l| (l.to_string(), unit.kind)); - match key.and_then(|k| self.overrides.get(&k)) { - Some(output) => { - self.insert(unit.pkg.package_id().clone(), unit.kind, - output.clone()); - true - } - None => false, - } - } } impl BuildOutput { @@ -483,7 +473,7 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, // Recursive function to build up the map we're constructing. This function // memoizes all of its return values as it goes along. 
fn build<'a, 'b, 'cfg>(out: &'a mut HashMap<Unit<'b>, BuildScripts>, - cx: &Context<'b, 'cfg>, + cx: &mut Context<'b, 'cfg>, unit: &Unit<'b>) -> CargoResult<&'a BuildScripts> { // Do a quick pre-flight check to see if we've already calculated the @@ -492,6 +482,20 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, return Ok(&out[unit]) } + { + let key = unit.pkg.manifest().links().map(|l| (l.to_string(), unit.kind)); + let build_state = &cx.build_state; + if let Some(output) = key.and_then(|k| build_state.overrides.get(&k)) { + let key = (unit.pkg.package_id().clone(), unit.kind); + cx.build_script_overridden.insert(key.clone()); + build_state + .outputs + .lock() + .unwrap() + .insert(key, output.clone()); + } + } + let mut ret = BuildScripts::default(); if !unit.target.is_custom_build() && unit.pkg.has_custom_build() { diff --git a/src/cargo/ops/cargo_rustc/fingerprint.rs b/src/cargo/ops/cargo_rustc/fingerprint.rs index fd32f2b9c..38cef8f1c 100644 --- a/src/cargo/ops/cargo_rustc/fingerprint.rs +++ b/src/cargo/ops/cargo_rustc/fingerprint.rs @@ -538,7 +538,7 @@ fn write_fingerprint(loc: &Path, fingerprint: &Fingerprint) -> CargoResult<()> { } /// Prepare for work when a package starts to build -pub fn prepare_init(cx: &mut Context, unit: &Unit) -> CargoResult<()> { +pub fn prepare_init<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<()> { let new1 = cx.fingerprint_dir(unit); if fs::metadata(&new1).is_err() { @@ -548,7 +548,7 @@ pub fn prepare_init(cx: &mut Context, unit: &Unit) -> CargoResult<()> { Ok(()) } -pub fn dep_info_loc(cx: &mut Context, unit: &Unit) -> PathBuf { +pub fn dep_info_loc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> PathBuf { cx.fingerprint_dir(unit).join(&format!("dep-{}", filename(cx, unit))) } @@ -670,7 +670,7 @@ fn mtime_if_fresh<I>(output: &Path, paths: I) -> Option<FileTime> } } -fn filename(cx: &mut Context, unit: &Unit) -> String { +fn filename<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> String { // 
file_stem includes metadata hash. Thus we have a different // fingerprint for every metadata hash version. This works because // even if the package is fresh, we'll still link the fresh target diff --git a/src/cargo/ops/cargo_rustc/mod.rs b/src/cargo/ops/cargo_rustc/mod.rs index febeb1c54..02fa67727 100644 --- a/src/cargo/ops/cargo_rustc/mod.rs +++ b/src/cargo/ops/cargo_rustc/mod.rs @@ -684,10 +684,10 @@ fn root_path(cx: &Context, unit: &Unit) -> PathBuf { } } -fn build_base_args(cx: &mut Context, - cmd: &mut ProcessBuilder, - unit: &Unit, - crate_types: &[&str]) { +fn build_base_args<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, + cmd: &mut ProcessBuilder, + unit: &Unit<'a>, + crate_types: &[&str]) { let Profile { ref opt_level, lto, codegen_units, ref rustc_args, debuginfo, debug_assertions, overflow_checks, rpath, test, doc: _doc, diff --git a/tests/workspaces.rs b/tests/workspaces.rs index 1228ae282..d7eabcf7a 100644 --- a/tests/workspaces.rs +++ b/tests/workspaces.rs @@ -1465,3 +1465,96 @@ Caused by: ")); } +/// This is a freshness test for feature use with workspaces +/// +/// feat_lib is used by caller1 and caller2, but with different features enabled. +/// This test ensures that alternating building caller1, caller2 doesn't force +/// recompile of feat_lib. +/// +/// Ideally once we solve https://github.com/rust-lang/cargo/issues/3620, then +/// a single cargo build at the top level will be enough. 
+#[test] +fn dep_used_with_separate_features() { + let p = project("foo") + .file("Cargo.toml", r#" + [workspace] + members = ["feat_lib", "caller1", "caller2"] + "#) + .file("feat_lib/Cargo.toml", r#" + [project] + name = "feat_lib" + version = "0.1.0" + authors = [] + + [features] + myfeature = [] + "#) + .file("feat_lib/src/lib.rs", "") + .file("caller1/Cargo.toml", r#" + [project] + name = "caller1" + version = "0.1.0" + authors = [] + + [dependencies] + feat_lib = { path = "../feat_lib" } + "#) + .file("caller1/src/main.rs", "fn main() {}") + .file("caller1/src/lib.rs", "") + .file("caller2/Cargo.toml", r#" + [project] + name = "caller2" + version = "0.1.0" + authors = [] + + [dependencies] + feat_lib = { path = "../feat_lib", features = ["myfeature"] } + caller1 = { path = "../caller1" } + "#) + .file("caller2/src/main.rs", "fn main() {}") + .file("caller2/src/lib.rs", ""); + p.build(); + + // Build the entire workspace + assert_that(p.cargo("build").arg("--all"), + execs().with_status(0) + .with_stderr("\ +[..]Compiling feat_lib v0.1.0 ([..]) +[..]Compiling caller1 v0.1.0 ([..]) +[..]Compiling caller2 v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + assert_that(&p.bin("caller1"), existing_file()); + assert_that(&p.bin("caller2"), existing_file()); + + + // Build caller1. should build the dep library. Because the features + // are different than the full workspace, it rebuilds. + // Ideally once we solve https://github.com/rust-lang/cargo/issues/3620, then + // a single cargo build at the top level will be enough. + assert_that(p.cargo("build").cwd(p.root().join("caller1")), + execs().with_status(0) + .with_stderr("\ +[..]Compiling feat_lib v0.1.0 ([..]) +[..]Compiling caller1 v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + + // Alternate building caller2/caller1 a few times, just to make sure + // features are being built separately. 
Should not rebuild anything + assert_that(p.cargo("build").cwd(p.root().join("caller2")), + execs().with_status(0) + .with_stderr("\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + assert_that(p.cargo("build").cwd(p.root().join("caller1")), + execs().with_status(0) + .with_stderr("\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); + assert_that(p.cargo("build").cwd(p.root().join("caller2")), + execs().with_status(0) + .with_stderr("\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +")); +}