Auto merge of #4469 - nipunn1313:workspace_features, r=alexcrichton

Hashed the metadata of a lib's dependencies into the metadata of that lib

This fixes one part of #3620. To my understanding, the more fundamental fix is more challenging.
This commit is contained in:
bors 2017-09-09 21:39:44 +00:00
commit 921c4a5b05
5 changed files with 164 additions and 34 deletions

View file

@ -40,6 +40,7 @@ pub struct Context<'a, 'cfg: 'a> {
pub compilation: Compilation<'cfg>,
pub packages: &'a PackageSet<'cfg>,
pub build_state: Arc<BuildState>,
pub build_script_overridden: HashSet<(PackageId, Kind)>,
pub build_explicit_deps: HashMap<Unit<'a>, BuildDeps>,
pub fingerprints: HashMap<Unit<'a>, Arc<Fingerprint>>,
pub compiled: HashSet<Unit<'a>>,
@ -55,7 +56,9 @@ pub struct Context<'a, 'cfg: 'a> {
host_info: TargetInfo,
profiles: &'a Profiles,
incremental_enabled: bool,
target_filenames: HashMap<Unit<'a>, Arc<Vec<(PathBuf, Option<PathBuf>, bool)>>>,
target_metadatas: HashMap<Unit<'a>, Option<Metadata>>,
}
#[derive(Clone, Default)]
@ -82,7 +85,7 @@ impl TargetInfo {
}
}
#[derive(Clone)]
#[derive(Clone, Hash, Eq, PartialEq, Ord, PartialOrd)]
pub struct Metadata(u64);
impl<'a, 'cfg> Context<'a, 'cfg> {
@ -154,7 +157,11 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
used_in_plugin: HashSet::new(),
incremental_enabled: incremental_enabled,
jobserver: jobserver,
build_script_overridden: HashSet::new(),
// TODO: Pre-Calculate these with a topo-sort, rather than lazy-calculating
target_filenames: HashMap::new(),
target_metadatas: HashMap::new(),
})
}
@ -362,13 +369,13 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
/// Returns the directory for the specified unit where fingerprint
/// information is stored.
pub fn fingerprint_dir(&mut self, unit: &Unit) -> PathBuf {
pub fn fingerprint_dir(&mut self, unit: &Unit<'a>) -> PathBuf {
let dir = self.pkg_dir(unit);
self.layout(unit.kind).fingerprint().join(dir)
}
/// Returns the appropriate directory layout for either a plugin or not.
pub fn build_script_dir(&mut self, unit: &Unit) -> PathBuf {
pub fn build_script_dir(&mut self, unit: &Unit<'a>) -> PathBuf {
assert!(unit.target.is_custom_build());
assert!(!unit.profile.run_custom_build);
let dir = self.pkg_dir(unit);
@ -376,7 +383,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
}
/// Returns the appropriate directory layout for either a plugin or not.
pub fn build_script_out_dir(&mut self, unit: &Unit) -> PathBuf {
pub fn build_script_out_dir(&mut self, unit: &Unit<'a>) -> PathBuf {
assert!(unit.target.is_custom_build());
assert!(unit.profile.run_custom_build);
let dir = self.pkg_dir(unit);
@ -394,7 +401,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
/// Returns the appropriate output directory for the specified package and
/// target.
pub fn out_dir(&mut self, unit: &Unit) -> PathBuf {
pub fn out_dir(&mut self, unit: &Unit<'a>) -> PathBuf {
if unit.profile.doc {
self.layout(unit.kind).root().parent().unwrap().join("doc")
} else if unit.target.is_custom_build() {
@ -406,7 +413,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
}
}
fn pkg_dir(&mut self, unit: &Unit) -> String {
fn pkg_dir(&mut self, unit: &Unit<'a>) -> String {
let name = unit.pkg.package_id().name();
match self.target_metadata(unit) {
Some(meta) => format!("{}-{}", name, meta),
@ -440,7 +447,17 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
/// We build to the path: "{crate_name}-{target_metadata}"
/// We use a linking step to link/copy to a predictable filename
/// like `target/debug/libfoo.{a,so,rlib}` and such.
pub fn target_metadata(&mut self, unit: &Unit) -> Option<Metadata> {
/// Returns the metadata hash used to disambiguate this unit's output
/// filenames, memoized per `Unit`.
///
/// Delegates the actual computation to `calc_target_metadata` and caches
/// the result (including a `None` result) in `self.target_metadatas`, so
/// the hash is computed at most once per unit.
pub fn target_metadata(&mut self, unit: &Unit<'a>) -> Option<Metadata> {
// Fast path: this unit's metadata has already been calculated.
if let Some(cache) = self.target_metadatas.get(unit) {
return cache.clone()
}
let metadata = self.calc_target_metadata(unit);
// Cache the result — even `None` — so recursive callers (the hash mixes
// in dependency metadata) don't recompute it.
self.target_metadatas.insert(*unit, metadata.clone());
metadata
}
fn calc_target_metadata(&mut self, unit: &Unit<'a>) -> Option<Metadata> {
// No metadata for dylibs because of a couple issues
// - OSX encodes the dylib name in the executable
// - Windows rustc emits multiple files, and we can't easily link all of them
@ -483,6 +500,15 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
// when changing feature sets each lib is separately cached.
self.resolve.features_sorted(unit.pkg.package_id()).hash(&mut hasher);
// Mix in the target-metadata of all the dependencies of this target
if let Ok(deps) = self.dep_targets(unit) {
let mut deps_metadata = deps.into_iter().map(|dep_unit| {
self.target_metadata(&dep_unit)
}).collect::<Vec<_>>();
deps_metadata.sort();
deps_metadata.hash(&mut hasher);
}
// Throw in the profile we're compiling with. This helps caching
// panic=abort and panic=unwind artifacts, additionally with various
// settings like debuginfo and whatnot.
@ -512,7 +538,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
}
/// Returns the file stem for a given target/profile combo (with metadata)
pub fn file_stem(&mut self, unit: &Unit) -> String {
pub fn file_stem(&mut self, unit: &Unit<'a>) -> String {
match self.target_metadata(unit) {
Some(ref metadata) => format!("{}-{}", unit.target.crate_name(),
metadata),
@ -537,7 +563,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
/// Returns an Option because in some cases we don't want to link
/// (eg a dependent lib)
pub fn link_stem(&mut self, unit: &Unit) -> Option<(PathBuf, String)> {
pub fn link_stem(&mut self, unit: &Unit<'a>) -> Option<(PathBuf, String)> {
let src_dir = self.out_dir(unit);
let bin_stem = self.bin_stem(unit);
let file_stem = self.file_stem(unit);
@ -578,6 +604,15 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
return Ok(cache.clone())
}
let result = self.calc_target_filenames(unit);
if let Ok(ref ret) = result {
self.target_filenames.insert(*unit, ret.clone());
}
result
}
fn calc_target_filenames(&mut self, unit: &Unit<'a>)
-> CargoResult<Arc<Vec<(PathBuf, Option<PathBuf>, bool)>>> {
let out_dir = self.out_dir(unit);
let stem = self.file_stem(unit);
let link_stem = self.link_stem(unit);
@ -659,9 +694,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
}
info!("Target filenames: {:?}", ret);
let ret = Arc::new(ret);
self.target_filenames.insert(*unit, ret.clone());
Ok(ret)
Ok(Arc::new(ret))
}
/// For a package, return all targets which are registered as dependencies
@ -776,7 +809,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
// actually depend on anything, we've reached the end of the dependency
// chain as we've got all the info we're gonna get.
let key = (unit.pkg.package_id().clone(), unit.kind);
if self.build_state.outputs.lock().unwrap().contains_key(&key) {
if self.build_script_overridden.contains(&key) {
return Ok(Vec::new())
}

View file

@ -77,7 +77,9 @@ pub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
-> CargoResult<(Work, Work, Freshness)> {
let _p = profile::start(format!("build script prepare: {}/{}",
unit.pkg, unit.target.name()));
let overridden = cx.build_state.has_override(unit);
let key = (unit.pkg.package_id().clone(), unit.kind);
let overridden = cx.build_script_overridden.contains(&key);
let (work_dirty, work_fresh) = if overridden {
(Work::noop(), Work::noop())
} else {
@ -314,18 +316,6 @@ impl BuildState {
/// Records `output` as the build-script result for the `(package, kind)`
/// pair in the shared `outputs` map, replacing any previous entry.
fn insert(&self, id: PackageId, kind: Kind, output: BuildOutput) {
self.outputs.lock().unwrap().insert((id, kind), output);
}
/// Returns whether this unit's build script is overridden via a
/// `links`-keyed override entry.
///
/// NOTE(review): despite the query-style name, this has a side effect —
/// on a hit it copies the override output into `self.outputs`. This diff
/// removes the method in favor of pre-populating
/// `Context::build_script_overridden` during `build_map`.
fn has_override(&self, unit: &Unit) -> bool {
// Overrides are keyed by the package's `links` value plus target kind;
// a package without `links` can never be overridden.
let key = unit.pkg.manifest().links().map(|l| (l.to_string(), unit.kind));
match key.and_then(|k| self.overrides.get(&k)) {
Some(output) => {
self.insert(unit.pkg.package_id().clone(), unit.kind,
output.clone());
true
}
None => false,
}
}
}
impl BuildOutput {
@ -483,7 +473,7 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,
// Recursive function to build up the map we're constructing. This function
// memoizes all of its return values as it goes along.
fn build<'a, 'b, 'cfg>(out: &'a mut HashMap<Unit<'b>, BuildScripts>,
cx: &Context<'b, 'cfg>,
cx: &mut Context<'b, 'cfg>,
unit: &Unit<'b>)
-> CargoResult<&'a BuildScripts> {
// Do a quick pre-flight check to see if we've already calculated the
@ -492,6 +482,20 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,
return Ok(&out[unit])
}
{
let key = unit.pkg.manifest().links().map(|l| (l.to_string(), unit.kind));
let build_state = &cx.build_state;
if let Some(output) = key.and_then(|k| build_state.overrides.get(&k)) {
let key = (unit.pkg.package_id().clone(), unit.kind);
cx.build_script_overridden.insert(key.clone());
build_state
.outputs
.lock()
.unwrap()
.insert(key, output.clone());
}
}
let mut ret = BuildScripts::default();
if !unit.target.is_custom_build() && unit.pkg.has_custom_build() {

View file

@ -538,7 +538,7 @@ fn write_fingerprint(loc: &Path, fingerprint: &Fingerprint) -> CargoResult<()> {
}
/// Prepare for work when a package starts to build
pub fn prepare_init(cx: &mut Context, unit: &Unit) -> CargoResult<()> {
pub fn prepare_init<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<()> {
let new1 = cx.fingerprint_dir(unit);
if fs::metadata(&new1).is_err() {
@ -548,7 +548,7 @@ pub fn prepare_init(cx: &mut Context, unit: &Unit) -> CargoResult<()> {
Ok(())
}
pub fn dep_info_loc(cx: &mut Context, unit: &Unit) -> PathBuf {
pub fn dep_info_loc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> PathBuf {
cx.fingerprint_dir(unit).join(&format!("dep-{}", filename(cx, unit)))
}
@ -670,7 +670,7 @@ fn mtime_if_fresh<I>(output: &Path, paths: I) -> Option<FileTime>
}
}
fn filename(cx: &mut Context, unit: &Unit) -> String {
fn filename<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> String {
// file_stem includes metadata hash. Thus we have a different
// fingerprint for every metadata hash version. This works because
// even if the package is fresh, we'll still link the fresh target

View file

@ -684,10 +684,10 @@ fn root_path(cx: &Context, unit: &Unit) -> PathBuf {
}
}
fn build_base_args(cx: &mut Context,
cmd: &mut ProcessBuilder,
unit: &Unit,
crate_types: &[&str]) {
fn build_base_args<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
cmd: &mut ProcessBuilder,
unit: &Unit<'a>,
crate_types: &[&str]) {
let Profile {
ref opt_level, lto, codegen_units, ref rustc_args, debuginfo,
debug_assertions, overflow_checks, rpath, test, doc: _doc,

View file

@ -1465,3 +1465,96 @@ Caused by:
"));
}
/// This is a freshness test for feature use with workspaces
///
/// feat_lib is used by caller1 and caller2, but with different features enabled.
/// This test ensures that alternating building caller1, caller2 doesn't force
/// recompile of feat_lib.
///
/// Ideally once we solve https://github.com/rust-lang/cargo/issues/3620, then
/// a single cargo build at the top level will be enough.
#[test]
fn dep_used_with_separate_features() {
// Workspace layout: feat_lib is depended on by caller1 (no features)
// and caller2 (with "myfeature"), so the two feature sets must produce
// separately cached artifacts of feat_lib.
let p = project("foo")
.file("Cargo.toml", r#"
[workspace]
members = ["feat_lib", "caller1", "caller2"]
"#)
.file("feat_lib/Cargo.toml", r#"
[project]
name = "feat_lib"
version = "0.1.0"
authors = []
[features]
myfeature = []
"#)
.file("feat_lib/src/lib.rs", "")
.file("caller1/Cargo.toml", r#"
[project]
name = "caller1"
version = "0.1.0"
authors = []
[dependencies]
feat_lib = { path = "../feat_lib" }
"#)
.file("caller1/src/main.rs", "fn main() {}")
.file("caller1/src/lib.rs", "")
.file("caller2/Cargo.toml", r#"
[project]
name = "caller2"
version = "0.1.0"
authors = []
[dependencies]
feat_lib = { path = "../feat_lib", features = ["myfeature"] }
caller1 = { path = "../caller1" }
"#)
.file("caller2/src/main.rs", "fn main() {}")
.file("caller2/src/lib.rs", "");
p.build();
// Build the entire workspace
assert_that(p.cargo("build").arg("--all"),
execs().with_status(0)
.with_stderr("\
[..]Compiling feat_lib v0.1.0 ([..])
[..]Compiling caller1 v0.1.0 ([..])
[..]Compiling caller2 v0.1.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
"));
assert_that(&p.bin("caller1"), existing_file());
assert_that(&p.bin("caller2"), existing_file());
// Build caller1. should build the dep library. Because the features
// are different than the full workspace, it rebuilds.
// Ideally once we solve https://github.com/rust-lang/cargo/issues/3620, then
// a single cargo build at the top level will be enough.
assert_that(p.cargo("build").cwd(p.root().join("caller1")),
execs().with_status(0)
.with_stderr("\
[..]Compiling feat_lib v0.1.0 ([..])
[..]Compiling caller1 v0.1.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
"));
// Alternate building caller2/caller1 a few times, just to make sure
// features are being built separately. Should not rebuild anything
// (each subsequent build must report only [FINISHED] — any Compiling
// line here means the feature-distinct artifacts are clobbering each
// other, which is the regression this test guards against).
assert_that(p.cargo("build").cwd(p.root().join("caller2")),
execs().with_status(0)
.with_stderr("\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
"));
assert_that(p.cargo("build").cwd(p.root().join("caller1")),
execs().with_status(0)
.with_stderr("\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
"));
assert_that(p.cargo("build").cwd(p.root().join("caller2")),
execs().with_status(0)
.with_stderr("\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
"));
}