Fix fingerprint handling in pipelining mode

This commit fixes an issue when pipelining mode is used in handling
recompilations. Previously a sequence of compilations could look like:

* Crate A starts to build
* Crate A produces metadata
* Crate B, which depends on A, starts
* Crate B finishes
* Crate A finishes

In this case the mtime for B is before that of A, which fooled Cargo
into thinking that B needed to be recompiled. In this case, however, B
doesn't actually need to be recompiled because it only depends on the
metadata of A, not the final artifacts.

This unfortunately resulted in some duplication in a few places, but not
really much more so than already exists between fingerprinting and compilation.
This commit is contained in:
Alex Crichton 2019-05-08 11:28:05 -07:00
parent c2152f0805
commit 6b28a0c050
10 changed files with 135 additions and 141 deletions

View file

@ -25,7 +25,7 @@ pub enum FileFlavor {
/// Not a special file type.
Normal,
/// Something you can link against (e.g., a library).
Linkable { rmeta: PathBuf },
Linkable { rmeta: bool },
/// Piece of external debug information (e.g., `.dSYM`/`.pdb` file).
DebugInfo,
}

View file

@ -305,10 +305,10 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
// for both libraries and binaries.
let path = out_dir.join(format!("lib{}.rmeta", file_stem));
ret.push(OutputFile {
path: path.clone(),
path,
hardlink: None,
export_path: None,
flavor: FileFlavor::Linkable { rmeta: path },
flavor: FileFlavor::Linkable { rmeta: false },
});
} else {
let mut add = |crate_type: &str, flavor: FileFlavor| -> CargoResult<()> {
@ -372,13 +372,21 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
add(
kind.crate_type(),
if kind.linkable() {
let rmeta = out_dir.join(format!("lib{}.rmeta", file_stem));
FileFlavor::Linkable { rmeta }
FileFlavor::Linkable { rmeta: false }
} else {
FileFlavor::Normal
},
)?;
}
let path = out_dir.join(format!("lib{}.rmeta", file_stem));
if !unit.target.requires_upstream_objects() {
ret.push(OutputFile {
path,
hardlink: None,
export_path: None,
flavor: FileFlavor::Linkable { rmeta: true },
});
}
}
}
}

View file

@ -187,6 +187,7 @@
//! See the `A-rebuild-detection` flag on the issue tracker for more:
//! <https://github.com/rust-lang/cargo/issues?q=is%3Aissue+is%3Aopen+label%3AA-rebuild-detection>
use std::collections::HashMap;
use std::env;
use std::fs;
use std::hash::{self, Hasher};
@ -322,6 +323,7 @@ struct DepFingerprint {
pkg_id: u64,
name: String,
public: bool,
only_requires_rmeta: bool,
fingerprint: Arc<Fingerprint>,
}
@ -395,17 +397,15 @@ enum FsStatus {
/// unit needs to subsequently be recompiled.
Stale,
/// This unit is up-to-date, it does not need to be recompiled. If there are
/// any outputs then the `FileTime` listed here is the minimum of all their
/// mtimes. This is then later used to see if a unit is newer than one of
/// its dependants, causing the dependant to be recompiled.
UpToDate(Option<FileTime>),
/// This unit is up-to-date. All outputs and their corresponding mtime are
/// listed in the payload here for other dependencies to compare against.
UpToDate { mtimes: HashMap<PathBuf, FileTime> },
}
impl FsStatus {
fn up_to_date(&self) -> bool {
match self {
FsStatus::UpToDate(_) => true,
FsStatus::UpToDate { .. } => true,
FsStatus::Stale => false,
}
}
@ -442,6 +442,7 @@ impl<'de> Deserialize<'de> for DepFingerprint {
pkg_id,
name,
public,
only_requires_rmeta: false,
fingerprint: Arc::new(Fingerprint {
memoized_hash: Mutex::new(Some(hash)),
..Fingerprint::new()
@ -753,51 +754,71 @@ impl Fingerprint {
) -> CargoResult<()> {
assert!(!self.fs_status.up_to_date());
let mut mtimes = HashMap::new();
// Get the `mtime` of all outputs. Optionally update their mtime
// afterwards based on the `mtime_on_use` flag. Afterwards we want the
// minimum mtime as it's the one we'll be comparing to inputs and
// dependencies.
let status = self
.outputs
.iter()
.map(|f| {
let mtime = paths::mtime(f).ok();
if mtime_on_use {
let t = FileTime::from_system_time(SystemTime::now());
drop(filetime::set_file_times(f, t, t));
}
mtime
})
.min();
for output in self.outputs.iter() {
let mtime = match paths::mtime(output) {
Ok(mtime) => mtime,
// This path failed to report its `mtime`. It probably doesn't
// exist, so leave ourselves as stale and bail out.
Err(e) => {
log::debug!("failed to get mtime of {:?}: {}", output, e);
return Ok(());
}
};
if mtime_on_use {
let t = FileTime::from_system_time(SystemTime::now());
filetime::set_file_times(output, t, t)?;
}
assert!(mtimes.insert(output.clone(), mtime).is_none());
}
let max_mtime = match mtimes.values().max() {
Some(mtime) => mtime,
let mtime = match status {
// We had no output files. This means we're an overridden build
// script and we're just always up to date because we aren't
// watching the filesystem.
None => {
self.fs_status = FsStatus::UpToDate(None);
self.fs_status = FsStatus::UpToDate { mtimes };
return Ok(());
}
// At least one path failed to report its `mtime`. It probably
// doesn't exist, so leave ourselves as stale and bail out.
Some(None) => return Ok(()),
// All files successfully reported an `mtime`, and we've got the
// minimum one, so let's keep going with that.
Some(Some(mtime)) => mtime,
};
for dep in self.deps.iter() {
let dep_mtime = match dep.fingerprint.fs_status {
let dep_mtimes = match &dep.fingerprint.fs_status {
FsStatus::UpToDate { mtimes } => mtimes,
// If our dependency is stale, so are we, so bail out.
FsStatus::Stale => return Ok(()),
};
// If our dependency is up to date and has no filesystem
// interactions, then we can move on to the next dependency.
FsStatus::UpToDate(None) => continue,
FsStatus::UpToDate(Some(mtime)) => mtime,
// If our dependency edge only requires the rmeta file to be present
// then we only need to look at that one output file, otherwise we
// need to consider all output files to see if we're out of date.
let dep_mtime = if dep.only_requires_rmeta {
dep_mtimes
.iter()
.filter_map(|(path, mtime)| {
if path.extension().and_then(|s| s.to_str()) == Some("rmeta") {
Some(mtime)
} else {
None
}
})
.next()
.expect("failed to find rmeta")
} else {
match dep_mtimes.values().max() {
Some(mtime) => mtime,
// If our dependency is up to date and has no filesystem
// interactions, then we can move on to the next dependency.
None => continue,
}
};
// If the dependency is newer than our own output then it was
@ -807,7 +828,8 @@ impl Fingerprint {
// Note that this comparison should probably be `>=`, not `>`, but
// for a discussion of why it's `>` see the discussion about #5918
// below in `find_stale`.
if dep_mtime > mtime {
if dep_mtime > max_mtime {
log::info!("dependency on `{}` is newer than we are", dep.name);
return Ok(());
}
}
@ -824,7 +846,7 @@ impl Fingerprint {
}
// Everything was up to date! Record such.
self.fs_status = FsStatus::UpToDate(Some(mtime));
self.fs_status = FsStatus::UpToDate { mtimes };
Ok(())
}
@ -856,6 +878,7 @@ impl hash::Hash for Fingerprint {
name,
public,
fingerprint,
only_requires_rmeta: _,
} in deps
{
pkg_id.hash(h);
@ -929,6 +952,7 @@ impl DepFingerprint {
name,
public,
fingerprint,
only_requires_rmeta: cx.only_requires_rmeta(parent, dep),
})
}
}

View file

@ -39,10 +39,10 @@ use crate::core::profiles::{Lto, PanicStrategy, Profile};
use crate::core::Feature;
use crate::core::{PackageId, Target};
use crate::util::errors::{CargoResult, CargoResultExt, Internal, ProcessError};
use crate::util::machine_message::Message;
use crate::util::paths;
use crate::util::{self, machine_message, process, ProcessBuilder};
use crate::util::{internal, join_paths, profile};
use crate::util::machine_message::Message;
/// Indicates whether an object is for the host architecture or the target architecture.
///
@ -498,7 +498,8 @@ fn link_targets<'a, 'cfg>(
filenames: destinations,
executable,
fresh,
}.to_json_string();
}
.to_json_string();
state.stdout(&msg);
}
Ok(())
@ -1016,20 +1017,14 @@ fn build_deps_args<'a, 'cfg>(
need_unstable_opts: &mut bool,
) -> CargoResult<()> {
let bcx = cx.bcx;
for output in cx.outputs(dep)?.iter() {
let rmeta = match &output.flavor {
FileFlavor::Linkable { rmeta } => rmeta,
_ => continue,
};
let mut v = OsString::new();
let name = bcx.extern_crate_name(current, dep)?;
v.push(name);
v.push("=");
if cx.only_requires_rmeta(current, dep) {
v.push(&rmeta);
} else {
v.push(&output.path);
}
let mut value = OsString::new();
value.push(bcx.extern_crate_name(current, dep)?);
value.push("=");
let mut pass = |file| {
let mut value = value.clone();
value.push(file);
if current
.pkg
@ -1045,7 +1040,26 @@ fn build_deps_args<'a, 'cfg>(
cmd.arg("--extern");
}
cmd.arg(&v);
cmd.arg(&value);
};
let outputs = cx.outputs(dep)?;
let mut outputs = outputs.iter().filter_map(|output| match output.flavor {
FileFlavor::Linkable { rmeta } => Some((output, rmeta)),
_ => None,
});
if cx.only_requires_rmeta(current, dep) {
let (output, _rmeta) = outputs
.find(|(_output, rmeta)| *rmeta)
.expect("failed to find rlib dep for pipelined dep");
pass(&output.path);
} else {
for (output, rmeta) in outputs {
if !rmeta {
pass(&output.path);
}
}
}
Ok(())
}
@ -1146,7 +1160,7 @@ fn on_stderr_line(
log::debug!("looks like metadata finished early!");
state.rmeta_produced();
}
return Ok(())
return Ok(());
}
}
@ -1157,7 +1171,8 @@ fn on_stderr_line(
package_id,
target,
message: compiler_message,
}.to_json_string();
}
.to_json_string();
// Switch json lines from rustc/rustdoc that appear on stderr to stdout
// instead. We want the stdout of Cargo to always be machine parseable as

View file

@ -2,7 +2,7 @@ use std::collections::HashMap;
use std::fs;
use std::path::Path;
use crate::core::compiler::{UnitInterner, FileFlavor};
use crate::core::compiler::UnitInterner;
use crate::core::compiler::{BuildConfig, BuildContext, CompileMode, Context, Kind};
use crate::core::profiles::UnitFor;
use crate::core::Workspace;
@ -119,9 +119,6 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {
if let Some(ref dst) = output.hardlink {
rm_rf(dst, config)?;
}
if let FileFlavor::Linkable { rmeta } = &output.flavor {
rm_rf(rmeta, config)?;
}
}
}

View file

@ -3101,7 +3101,10 @@ fn compiler_json_error_format() {
"name":"bar",
"src_path":"[..]lib.rs"
},
"filenames":["[..].rlib"],
"filenames":[
"[..].rlib",
"[..].rmeta"
],
"fresh": false
}
@ -3200,7 +3203,10 @@ fn compiler_json_error_format() {
"name":"bar",
"src_path":"[..]lib.rs"
},
"filenames":["[..].rlib"],
"filenames":[
"[..].rlib",
"[..].rmeta"
],
"fresh": true
}
@ -4502,74 +4508,6 @@ Caused by:
.run();
}
#[test]
fn json_parse_fail() {
// Ensure when JSON parsing fails, and rustc exits with non-zero exit
// code, a useful error message is displayed.
let foo = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.1.0"
[dependencies]
pm = { path = "pm" }
"#,
)
.file(
"src/lib.rs",
r#"
#[macro_use]
extern crate pm;
#[derive(Foo)]
pub struct S;
"#,
)
.file(
"pm/Cargo.toml",
r#"
[package]
name = "pm"
version = "0.1.0"
[lib]
proc-macro = true
"#,
)
.file(
"pm/src/lib.rs",
r#"
extern crate proc_macro;
use proc_macro::TokenStream;
#[proc_macro_derive(Foo)]
pub fn derive(_input: TokenStream) -> TokenStream {
eprintln!("{{evil proc macro}}");
panic!("something went wrong");
}
"#,
)
.build();
foo.cargo("build --message-format=json")
.with_stderr(
"\
[COMPILING] pm [..]
[COMPILING] foo [..]
[ERROR] Could not compile `foo`.
Caused by:
compiler produced invalid json: `{evil proc macro}`
Caused by:
failed to parse process output: `rustc [..]
",
)
.with_status(101)
.run();
}
#[test]
fn tricky_pipelining() {
if !crate::support::is_nightly() {

View file

@ -85,7 +85,8 @@ fn cargo_build_plan_single_dep() {
"kind": "Host",
"links": "{...}",
"outputs": [
"[..]/foo/target/debug/deps/libbar-[..].rlib"
"[..]/foo/target/debug/deps/libbar-[..].rlib",
"[..]/foo/target/debug/deps/libbar-[..].rmeta"
],
"package_name": "bar",
"package_version": "0.0.1",
@ -101,7 +102,8 @@ fn cargo_build_plan_single_dep() {
"kind": "Host",
"links": "{...}",
"outputs": [
"[..]/foo/target/debug/deps/libfoo-[..].rlib"
"[..]/foo/target/debug/deps/libfoo-[..].rlib",
"[..]/foo/target/debug/deps/libfoo-[..].rmeta"
],
"package_name": "foo",
"package_version": "0.5.0",

View file

@ -190,8 +190,8 @@ fn build_check() {
.file("src/lib.rs", "pub fn baz() {}")
.build();
foo.cargo("build").run();
foo.cargo("check").run();
foo.cargo("build -v").run();
foo.cargo("check -v").run();
}
// Checks that where a project has both a lib and a bin, the lib is only checked

View file

@ -289,6 +289,7 @@ fn clean_verbose() {
"\
[REMOVING] [..]
[REMOVING] [..]
[REMOVING] [..]
",
)
.run();

View file

@ -461,7 +461,10 @@ fn metabuild_build_plan() {
"compile_mode": "build",
"kind": "Host",
"deps": [],
"outputs": ["[..]/target/debug/deps/libmb-[..].rlib"],
"outputs": [
"[..]/target/debug/deps/libmb-[..].rlib",
"[..]/target/debug/deps/libmb-[..].rmeta"
],
"links": {},
"program": "rustc",
"args": "{...}",
@ -475,7 +478,10 @@ fn metabuild_build_plan() {
"compile_mode": "build",
"kind": "Host",
"deps": [],
"outputs": ["[..]/target/debug/deps/libmb_other-[..].rlib"],
"outputs": [
"[..]/target/debug/deps/libmb_other-[..].rlib",
"[..]/target/debug/deps/libmb_other-[..].rmeta"
],
"links": {},
"program": "rustc",
"args": "{...}",
@ -517,7 +523,10 @@ fn metabuild_build_plan() {
"compile_mode": "build",
"kind": "Host",
"deps": [3],
"outputs": ["[..]/foo/target/debug/deps/libfoo-[..].rlib"],
"outputs": [
"[..]/foo/target/debug/deps/libfoo-[..].rlib",
"[..]/foo/target/debug/deps/libfoo-[..].rmeta"
],
"links": "{...}",
"program": "rustc",
"args": "{...}",