cargo fmt

Alex Crichton 2018-03-14 08:17:44 -07:00
parent a13a33c33b
commit 1e6828485e
186 changed files with 31579 additions and 16776 deletions

View File

@ -1,14 +1,14 @@
extern crate cargo;
extern crate clap;
extern crate env_logger;
#[macro_use]
extern crate failure;
extern crate git2_curl;
extern crate toml;
extern crate log;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
extern crate clap;
extern crate toml;
use std::env;
use std::fs;
@ -16,14 +16,13 @@ use std::path::{Path, PathBuf};
use std::collections::BTreeSet;
use cargo::core::shell::Shell;
use cargo::util::{self, CliResult, lev_distance, Config, CargoResult};
use cargo::util::{self, lev_distance, CargoResult, CliResult, Config};
use cargo::util::{CliError, ProcessError};
mod cli;
mod command_prelude;
mod commands;
fn main() {
env_logger::init();
@ -53,7 +52,8 @@ fn aliased_command(config: &Config, command: &str) -> CargoResult<Option<Vec<Str
match config.get_string(&alias_name) {
Ok(value) => {
if let Some(record) = value {
let alias_commands = record.val
let alias_commands = record
.val
.split_whitespace()
.map(|s| s.to_string())
.collect();
@ -63,10 +63,8 @@ fn aliased_command(config: &Config, command: &str) -> CargoResult<Option<Vec<Str
Err(_) => {
let value = config.get_list(&alias_name)?;
if let Some(record) = value {
let alias_commands: Vec<String> = record.val
.iter()
.map(|s| s.0.to_string())
.collect();
let alias_commands: Vec<String> =
record.val.iter().map(|s| s.0.to_string()).collect();
result = Ok(Some(alias_commands));
}
}
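
Editor's note on the two branches above: a cargo alias comes from config either as a single string (`alias.b = "build --release"`), which is split on whitespace, or as a list of already-tokenized arguments. A minimal sketch of that resolution, with a simplified enum standing in for cargo's Config API:

    enum AliasValue {
        Single(String),      // alias.b = "build --release"
        List(Vec<String>),   // alias.b = ["build", "--release"]
    }

    fn alias_to_args(value: AliasValue) -> Vec<String> {
        match value {
            // A string alias is split on whitespace into separate arguments.
            AliasValue::Single(s) => s.split_whitespace().map(|s| s.to_string()).collect(),
            // A list alias is already tokenized.
            AliasValue::List(v) => v,
        }
    }

    fn main() {
        let args = alias_to_args(AliasValue::Single("build --release".to_string()));
        assert_eq!(args, vec!["build", "--release"]);
    }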
@ -95,10 +93,10 @@ fn list_commands(config: &Config) -> BTreeSet<(String, Option<String>)> {
}
if is_executable(entry.path()) {
let end = filename.len() - suffix.len();
commands.insert(
(filename[prefix.len()..end].to_string(),
Some(path.display().to_string()))
);
commands.insert((
filename[prefix.len()..end].to_string(),
Some(path.display().to_string()),
));
}
}
}
@ -110,7 +108,6 @@ fn list_commands(config: &Config) -> BTreeSet<(String, Option<String>)> {
commands
}
fn find_closest(config: &Config, cmd: &str) -> Option<String> {
let cmds = list_commands(config);
// Only consider candidates with a lev_distance of 3 or less so we don't
@ -133,14 +130,14 @@ fn execute_external_subcommand(config: &Config, cmd: &str, args: &[&str]) -> Cli
Some(command) => command,
None => {
let err = match find_closest(config, cmd) {
Some(closest) => {
format_err!("no such subcommand: `{}`\n\n\tDid you mean `{}`?\n",
cmd,
closest)
}
Some(closest) => format_err!(
"no such subcommand: `{}`\n\n\tDid you mean `{}`?\n",
cmd,
closest
),
None => format_err!("no such subcommand: `{}`", cmd),
};
return Err(CliError::new(err, 101))
return Err(CliError::new(err, 101));
}
};
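
Editor's note: the `find_closest` lookup behind the "Did you mean" hint ranks known subcommands by edit distance and only suggests matches within distance 3, per the comment above. A self-contained sketch of that rule (edit distance reimplemented here; cargo uses its own `lev_distance` from `cargo::util`):

    fn lev_distance(a: &str, b: &str) -> usize {
        let b: Vec<char> = b.chars().collect();
        // prev[j] holds the distance between the first i chars of `a`
        // and the first j chars of `b` (classic dynamic programming).
        let mut prev: Vec<usize> = (0..=b.len()).collect();
        for (i, ca) in a.chars().enumerate() {
            let mut cur = vec![i + 1];
            for (j, &cb) in b.iter().enumerate() {
                let cost = if ca == cb { 0 } else { 1 };
                let best = (prev[j] + cost).min(prev[j + 1] + 1).min(cur[j] + 1);
                cur.push(best);
            }
            prev = cur;
        }
        prev[b.len()]
    }

    fn find_closest<'a>(cmds: &[&'a str], cmd: &str) -> Option<&'a str> {
        cmds.iter()
            .map(|&c| (lev_distance(c, cmd), c))
            // Only consider candidates with a distance of 3 or less.
            .filter(|&(d, _)| d <= 3)
            .min_by_key(|&(d, _)| d)
            .map(|(_, c)| c)
    }

    fn main() {
        let cmds = ["build", "bench", "test"];
        assert_eq!(find_closest(&cmds, "biuld"), Some("build"));
        assert_eq!(find_closest(&cmds, "zzz"), None);
    }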
@ -148,7 +145,8 @@ fn execute_external_subcommand(config: &Config, cmd: &str, args: &[&str]) -> Cli
let err = match util::process(&command)
.env(cargo::CARGO_ENV, cargo_exe)
.args(&args[1..])
.exec_replace() {
.exec_replace()
{
Ok(()) => return Ok(()),
Err(e) => e,
};
@ -170,7 +168,9 @@ fn is_executable<P: AsRef<Path>>(path: P) -> bool {
}
#[cfg(windows)]
fn is_executable<P: AsRef<Path>>(path: P) -> bool {
fs::metadata(path).map(|metadata| metadata.is_file()).unwrap_or(false)
fs::metadata(path)
.map(|metadata| metadata.is_file())
.unwrap_or(false)
}
fn search_directories(config: &Config) -> Vec<PathBuf> {

View File

@ -2,7 +2,7 @@ extern crate clap;
use clap::{AppSettings, Arg, ArgMatches};
use cargo::{self, Config, CliResult};
use cargo::{self, CliResult, Config};
use super::list_commands;
use super::commands;
@ -15,10 +15,10 @@ pub fn main(config: &mut Config) -> CliResult {
let version = cargo::version();
println!("{}", version);
if is_verbose {
println!("release: {}.{}.{}",
version.major,
version.minor,
version.patch);
println!(
"release: {}.{}.{}",
version.major, version.minor, version.patch
);
if let Some(ref cfg) = version.cfg_info {
if let Some(ref ci) = cfg.commit_info {
println!("commit-hash: {}", ci.commit_hash);
@ -51,8 +51,7 @@ pub fn main(config: &mut Config) -> CliResult {
return Ok(());
}
if args.subcommand_name().is_none() {
}
if args.subcommand_name().is_none() {}
execute_subcommand(config, args)
}
@ -60,11 +59,16 @@ pub fn main(config: &mut Config) -> CliResult {
fn execute_subcommand(config: &mut Config, args: ArgMatches) -> CliResult {
config.configure(
args.occurrences_of("verbose") as u32,
if args.is_present("quiet") { Some(true) } else { None },
if args.is_present("quiet") {
Some(true)
} else {
None
},
&args.value_of("color").map(|s| s.to_string()),
args.is_present("frozen"),
args.is_present("locked"),
&args.values_of_lossy("unstable-features").unwrap_or_default(),
&args.values_of_lossy("unstable-features")
.unwrap_or_default(),
)?;
let (cmd, args) = match args.subcommand() {
@ -80,7 +84,11 @@ fn execute_subcommand(config: &mut Config, args: ArgMatches) -> CliResult {
}
if let Some(mut alias) = super::aliased_command(config, cmd)? {
alias.extend(args.values_of("").unwrap_or_default().map(|s| s.to_string()));
alias.extend(
args.values_of("")
.unwrap_or_default()
.map(|s| s.to_string()),
);
let args = cli()
.setting(AppSettings::NoBinaryName)
.get_matches_from_safe(alias)?;
@ -91,7 +99,6 @@ fn execute_subcommand(config: &mut Config, args: ArgMatches) -> CliResult {
super::execute_external_subcommand(config, cmd, &ext_args)
}
fn cli() -> App {
let app = App::new("cargo")
.settings(&[
@ -101,7 +108,8 @@ fn cli() -> App {
AppSettings::AllowExternalSubcommands,
])
.about("")
.template("\
.template(
"\
Rust's package manager
USAGE:
@ -126,44 +134,39 @@ Some common cargo commands are (see all commands with --list):
install Install a Rust binary
uninstall Uninstall a Rust binary
See 'cargo help <command>' for more information on a specific command."
See 'cargo help <command>' for more information on a specific command.",
)
.arg(opt("version", "Print version info and exit").short("V"))
.arg(opt("list", "List installed commands"))
.arg(opt("explain", "Run `rustc --explain CODE`").value_name("CODE"))
.arg(
opt("version", "Print version info and exit")
.short("V")
)
.arg(
opt("list", "List installed commands")
)
.arg(
opt("explain", "Run `rustc --explain CODE`")
.value_name("CODE")
)
.arg(
opt("verbose", "Use verbose output (-vv very verbose/build.rs output)")
.short("v").multiple(true).global(true)
opt(
"verbose",
"Use verbose output (-vv very verbose/build.rs output)",
).short("v")
.multiple(true)
.global(true),
)
.arg(
opt("quiet", "No output printed to stdout")
.short("q").global(true)
.short("q")
.global(true),
)
.arg(
opt("color", "Coloring: auto, always, never")
.value_name("WHEN").global(true)
.value_name("WHEN")
.global(true),
)
.arg(opt("frozen", "Require Cargo.lock and cache are up to date").global(true))
.arg(opt("locked", "Require Cargo.lock is up to date").global(true))
.arg(
opt("frozen", "Require Cargo.lock and cache are up to date")
.global(true)
Arg::with_name("unstable-features")
.help("Unstable (nightly-only) flags to Cargo")
.short("Z")
.value_name("FLAG")
.multiple(true)
.global(true),
)
.arg(
opt("locked", "Require Cargo.lock is up to date")
.global(true)
)
.arg(
Arg::with_name("unstable-features").help("Unstable (nightly-only) flags to Cargo")
.short("Z").value_name("FLAG").multiple(true).global(true)
)
.subcommands(commands::builtin())
;
.subcommands(commands::builtin());
app
}
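
Editor's note: the long builder chain above leans on the small `opt` helper from cargo's command_prelude, which turns a name/help pair into a clap long option whose name doubles as the lookup key. A standalone sketch of the pattern against the clap 2.x API (hypothetical `demo` app and flag set):

    extern crate clap;
    use clap::{App, Arg};

    fn opt(name: &'static str, help: &'static str) -> Arg<'static, 'static> {
        // Every cargo flag is a long option; the name is also the match key.
        Arg::with_name(name).long(name).help(help)
    }

    fn main() {
        let matches = App::new("demo")
            .arg(opt("verbose", "Use verbose output").short("v").multiple(true))
            .arg(opt("quiet", "No output printed to stdout").short("q"))
            .get_matches_from(vec!["demo", "-vv"]);
        // -vv counts as two occurrences, which cargo maps to verbosity levels.
        assert_eq!(matches.occurrences_of("verbose"), 2);
        assert!(!matches.is_present("quiet"));
    }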

View File

@ -3,12 +3,12 @@ use std::path::PathBuf;
use clap::{self, SubCommand};
use cargo::CargoResult;
use cargo::core::Workspace;
use cargo::ops::{CompileMode, CompileOptions, CompileFilter, Packages, MessageFormat,
VersionControl, NewOptions};
use cargo::ops::{CompileFilter, CompileMode, CompileOptions, MessageFormat, NewOptions, Packages,
VersionControl};
use cargo::util::important_paths::find_root_manifest_for_wd;
pub use clap::{Arg, ArgMatches, AppSettings};
pub use cargo::{Config, CliResult, CliError};
pub use clap::{AppSettings, Arg, ArgMatches};
pub use cargo::{CliError, CliResult, Config};
pub type App = clap::App<'static, 'static>;
@ -16,8 +16,12 @@ pub trait AppExt: Sized {
fn _arg(self, arg: Arg<'static, 'static>) -> Self;
fn arg_package(self, package: &'static str, all: &'static str, exclude: &'static str) -> Self {
self._arg(opt("package", package).short("p").value_name("SPEC").multiple(true))
._arg(opt("all", all))
self._arg(
opt("package", package)
.short("p")
.value_name("SPEC")
.multiple(true),
)._arg(opt("all", all))
._arg(opt("exclude", exclude).value_name("SPEC").multiple(true))
}
@ -28,7 +32,8 @@ pub trait AppExt: Sized {
fn arg_jobs(self) -> Self {
self._arg(
opt("jobs", "Number of parallel jobs, defaults to # of CPUs")
.short("j").value_name("N")
.short("j")
.value_name("N"),
)
}
@ -55,12 +60,7 @@ pub trait AppExt: Sized {
._arg(opt("all-targets", all))
}
fn arg_targets_lib_bin(
self,
lib: &'static str,
bin: &'static str,
bins: &'static str,
) -> Self {
fn arg_targets_lib_bin(self, lib: &'static str, bin: &'static str, bins: &'static str) -> Self {
self._arg(opt("lib", lib))
._arg(opt("bin", bin).value_name("NAME").multiple(true))
._arg(opt("bins", bins))
@ -79,23 +79,19 @@ pub trait AppExt: Sized {
._arg(opt("examples", examples))
}
fn arg_targets_bin_example(
self,
bin: &'static str,
example: &'static str,
) -> Self {
fn arg_targets_bin_example(self, bin: &'static str, example: &'static str) -> Self {
self._arg(opt("bin", bin).value_name("NAME").multiple(true))
._arg(opt("example", example).value_name("NAME").multiple(true))
}
fn arg_features(self) -> Self {
self
._arg(
opt("features", "Space-separated list of features to activate")
.value_name("FEATURES")
)
._arg(opt("all-features", "Activate all available features"))
._arg(opt("no-default-features", "Do not activate the `default` feature"))
self._arg(
opt("features", "Space-separated list of features to activate").value_name("FEATURES"),
)._arg(opt("all-features", "Activate all available features"))
._arg(opt(
"no-default-features",
"Do not activate the `default` feature",
))
}
fn arg_release(self, release: &'static str) -> Self {
@ -115,38 +111,38 @@ pub trait AppExt: Sized {
opt("message-format", "Error format")
.value_name("FMT")
.case_insensitive(true)
.possible_values(&["human", "json"]).default_value("human")
.possible_values(&["human", "json"])
.default_value("human"),
)
}
fn arg_new_opts(self) -> Self {
self._arg(
opt("vcs", "\
Initialize a new repository for the given version \
control system (git, hg, pijul, or fossil) or do not \
initialize any version control at all (none), overriding \
a global configuration.")
.value_name("VCS")
.possible_values(&["git", "hg", "pijul", "fossil", "none"])
)
._arg(opt("bin", "Use a binary (application) template [default]"))
opt(
"vcs",
"\
Initialize a new repository for the given version \
control system (git, hg, pijul, or fossil) or do not \
initialize any version control at all (none), overriding \
a global configuration.",
).value_name("VCS")
.possible_values(&["git", "hg", "pijul", "fossil", "none"]),
)._arg(opt("bin", "Use a binary (application) template [default]"))
._arg(opt("lib", "Use a library template"))
._arg(
opt("name", "Set the resulting package name, defaults to the directory name")
.value_name("NAME")
opt(
"name",
"Set the resulting package name, defaults to the directory name",
).value_name("NAME"),
)
}
fn arg_index(self) -> Self {
self
._arg(
opt("index", "Registry index to upload the package to")
.value_name("INDEX")
)
self._arg(opt("index", "Registry index to upload the package to").value_name("INDEX"))
._arg(
opt("host", "DEPRECATED, renamed to '--index'")
.value_name("HOST")
.hidden(true)
.hidden(true),
)
}
}
@ -162,24 +158,20 @@ pub fn opt(name: &'static str, help: &'static str) -> Arg<'static, 'static> {
}
pub fn subcommand(name: &'static str) -> App {
SubCommand::with_name(name)
.settings(&[
AppSettings::UnifiedHelpMessage,
AppSettings::DeriveDisplayOrder,
AppSettings::DontCollapseArgsInUsage,
])
SubCommand::with_name(name).settings(&[
AppSettings::UnifiedHelpMessage,
AppSettings::DeriveDisplayOrder,
AppSettings::DontCollapseArgsInUsage,
])
}
pub trait ArgMatchesExt {
fn value_of_u32(&self, name: &str) -> CargoResult<Option<u32>> {
let arg = match self._value_of(name) {
None => None,
Some(arg) => Some(arg.parse::<u32>().map_err(|_| {
clap::Error::value_validation_auto(
format!("could not parse `{}` as a number", arg)
)
})?)
clap::Error::value_validation_auto(format!("could not parse `{}` as a number", arg))
})?),
};
Ok(arg)
}
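
Editor's note: a minimal sketch of the `value_of_u32` pattern above, with plain `String` errors standing in for clap's validation error:

    fn value_of_u32(raw: Option<&str>) -> Result<Option<u32>, String> {
        match raw {
            // Flag absent: not an error, just no value.
            None => Ok(None),
            // Flag present: a parse failure becomes a user-facing message.
            Some(arg) => arg
                .parse::<u32>()
                .map(Some)
                .map_err(|_| format!("could not parse `{}` as a number", arg)),
        }
    }

    fn main() {
        assert_eq!(value_of_u32(Some("4")), Ok(Some(4)));
        assert_eq!(value_of_u32(None), Ok(None));
        assert!(value_of_u32(Some("many")).is_err());
    }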
@ -209,7 +201,7 @@ pub trait ArgMatchesExt {
fn compile_options<'a>(
&self,
config: &'a Config,
mode: CompileMode
mode: CompileMode,
) -> CargoResult<CompileOptions<'a>> {
let spec = Packages::from_flags(
self._is_present("all"),
@ -240,12 +232,18 @@ pub trait ArgMatchesExt {
spec,
mode,
release: self._is_present("release"),
filter: CompileFilter::new(self._is_present("lib"),
self._values_of("bin"), self._is_present("bins"),
self._values_of("test"), self._is_present("tests"),
self._values_of("example"), self._is_present("examples"),
self._values_of("bench"), self._is_present("benches"),
self._is_present("all-targets")),
filter: CompileFilter::new(
self._is_present("lib"),
self._values_of("bin"),
self._is_present("bins"),
self._values_of("test"),
self._is_present("tests"),
self._values_of("example"),
self._is_present("examples"),
self._values_of("bench"),
self._is_present("benches"),
self._is_present("all-targets"),
),
message_format,
target_rustdoc_args: None,
target_rustc_args: None,
@ -256,7 +254,7 @@ pub trait ArgMatchesExt {
fn compile_options_for_single_package<'a>(
&self,
config: &'a Config,
mode: CompileMode
mode: CompileMode,
) -> CargoResult<CompileOptions<'a>> {
let mut compile_opts = self.compile_options(config, mode)?;
compile_opts.spec = Packages::Packages(self._values_of("package"));
@ -272,19 +270,23 @@ pub trait ArgMatchesExt {
"none" => VersionControl::NoVcs,
vcs => panic!("Impossible vcs: {:?}", vcs),
});
NewOptions::new(vcs,
self._is_present("bin"),
self._is_present("lib"),
self._value_of("path").unwrap().to_string(),
self._value_of("name").map(|s| s.to_string()))
NewOptions::new(
vcs,
self._is_present("bin"),
self._is_present("lib"),
self._value_of("path").unwrap().to_string(),
self._value_of("name").map(|s| s.to_string()),
)
}
fn registry(&self, config: &Config) -> CargoResult<Option<String>> {
match self._value_of("registry") {
Some(registry) => {
if !config.cli_unstable().unstable_options {
return Err(format_err!("registry option is an unstable feature and \
requires -Zunstable-options to use.").into());
return Err(format_err!(
"registry option is an unstable feature and \
requires -Zunstable-options to use."
).into());
}
Ok(Some(registry.to_string()))
}
@ -313,7 +315,7 @@ about this warning.";
config.shell().warn(&msg)?;
Some(host.to_string())
}
None => self._value_of("index").map(|s| s.to_string())
None => self._value_of("index").map(|s| s.to_string()),
};
Ok(index)
}
@ -331,7 +333,8 @@ impl<'a> ArgMatchesExt for ArgMatches<'a> {
}
fn _values_of(&self, name: &str) -> Vec<String> {
self.values_of(name).unwrap_or_default()
self.values_of(name)
.unwrap_or_default()
.map(|s| s.to_string())
.collect()
}
@ -342,7 +345,8 @@ impl<'a> ArgMatchesExt for ArgMatches<'a> {
}
pub fn values(args: &ArgMatches, name: &str) -> Vec<String> {
args.values_of(name).unwrap_or_default()
args.values_of(name)
.unwrap_or_default()
.map(|s| s.to_string())
.collect()
}

View File

@ -7,16 +7,15 @@ pub fn cli() -> App {
.setting(AppSettings::TrailingVarArg)
.about("Execute all benchmarks of a local package")
.arg(
Arg::with_name("BENCHNAME").help(
"If specified, only run benches containing this string in their names"
)
Arg::with_name("BENCHNAME")
.help("If specified, only run benches containing this string in their names"),
)
.arg(
Arg::with_name("args").help(
"Arguments for the bench binary"
).multiple(true).last(true)
Arg::with_name("args")
.help("Arguments for the bench binary")
.multiple(true)
.last(true),
)
.arg_targets_all(
"Benchmark only this package's library",
"Benchmark only the specified binary",
@ -29,10 +28,7 @@ pub fn cli() -> App {
"Benchmark all benches",
"Benchmark all targets (default)",
)
.arg(
opt("no-run", "Compile, but don't run benchmarks")
)
.arg(opt("no-run", "Compile, but don't run benchmarks"))
.arg_package(
"Package to run benchmarks for",
"Benchmark all packages in the workspace",
@ -43,10 +39,12 @@ pub fn cli() -> App {
.arg_target_triple("Build for the target triple")
.arg_manifest_path()
.arg_message_format()
.arg(
opt("no-fail-fast", "Run all benchmarks regardless of failure")
)
.after_help("\
.arg(opt(
"no-fail-fast",
"Run all benchmarks regardless of failure",
))
.after_help(
"\
All of the trailing arguments are passed to the benchmark binaries generated
for filtering benchmarks and generally providing options configuring how they
run.
@ -64,7 +62,8 @@ The --jobs argument affects the building of the benchmark executable but does
not affect how many jobs are used when running the benchmarks.
Compilation can be customized with the `bench` profile in the manifest.
")
",
)
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
@ -80,17 +79,23 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
};
let mut bench_args = vec![];
bench_args.extend(args.value_of("BENCHNAME").into_iter().map(|s| s.to_string()));
bench_args.extend(args.values_of("args").unwrap_or_default().map(|s| s.to_string()));
bench_args.extend(
args.value_of("BENCHNAME")
.into_iter()
.map(|s| s.to_string()),
);
bench_args.extend(
args.values_of("args")
.unwrap_or_default()
.map(|s| s.to_string()),
);
let err = ops::run_benches(&ws, &ops, &bench_args)?;
match err {
None => Ok(()),
Some(err) => {
Err(match err.exit.as_ref().and_then(|e| e.code()) {
Some(i) => CliError::new(format_err!("bench failed"), i),
None => CliError::new(err.into(), 101)
})
}
Some(err) => Err(match err.exit.as_ref().and_then(|e| e.code()) {
Some(i) => CliError::new(format_err!("bench failed"), i),
None => CliError::new(err.into(), 101),
}),
}
}

View File

@ -3,7 +3,8 @@ use command_prelude::*;
use cargo::ops::{self, CompileMode};
pub fn cli() -> App {
subcommand("build").alias("b")
subcommand("build")
.alias("b")
.about("Compile a local package and all of its dependencies")
.arg_package(
"Package to build",
@ -28,7 +29,8 @@ pub fn cli() -> App {
.arg_target_triple("Build for the target triple")
.arg_manifest_path()
.arg_message_format()
.after_help("\
.after_help(
"\
If the --package argument is given, then SPEC is a package id specification
which indicates which package should be built. If it is not given, then the
current package is built. For more information on SPEC and its format, see the
@ -41,8 +43,8 @@ Note that `--exclude` has to be specified in conjunction with the `--all` flag.
Compilation can be configured via the use of profiles which are configured in
the manifest. The default profile for this command is `dev`, but passing
the --release flag will use the `release` profile instead.
")
",
)
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {

View File

@ -24,15 +24,13 @@ pub fn cli() -> App {
"Check all targets (lib and bin targets by default)",
)
.arg_release("Check artifacts in release mode, with optimizations")
.arg(
opt("profile", "Profile to build the selected target for")
.value_name("PROFILE")
)
.arg(opt("profile", "Profile to build the selected target for").value_name("PROFILE"))
.arg_features()
.arg_target_triple("Check for the target triple")
.arg_manifest_path()
.arg_message_format()
.after_help("\
.after_help(
"\
If the --package argument is given, then SPEC is a package id specification
which indicates which package should be built. If it is not given, then the
current package is built. For more information on SPEC and its format, see the
@ -48,7 +46,8 @@ the --release flag will use the `release` profile instead.
The `--profile test` flag can be used to check unit tests with the
`#[cfg(test)]` attribute.
")
",
)
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
@ -57,8 +56,11 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
Some("test") => true,
None => false,
Some(profile) => {
let err = format_err!("unknown profile: `{}`, only `test` is \
currently supported", profile);
let err = format_err!(
"unknown profile: `{}`, only `test` is \
currently supported",
profile
);
return Err(CliError::new(err, 101));
}
};

View File

@ -7,17 +7,21 @@ pub fn cli() -> App {
.about("Remove artifacts that cargo has generated in the past")
.arg(
opt("package", "Package to clean artifacts for")
.short("p").value_name("SPEC").multiple(true)
.short("p")
.value_name("SPEC")
.multiple(true),
)
.arg_manifest_path()
.arg_target_triple("Target triple to clean output for (default all)")
.arg_release("Whether or not to clean release artifacts")
.after_help("\
.after_help(
"\
If the --package argument is given, then SPEC is a package id specification
which indicates which package's artifacts should be cleaned out. If it is not
given, then all packages' artifacts are removed. For more information on SPEC
and its format, see the `cargo help pkgid` command.
")
",
)
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {

View File

@ -5,17 +5,16 @@ use cargo::ops::{self, CompileMode, DocOptions};
pub fn cli() -> App {
subcommand("doc")
.about("Build a package's documentation")
.arg(
opt("open", "Opens the docs in a browser after the operation")
)
.arg(opt(
"open",
"Opens the docs in a browser after the operation",
))
.arg_package(
"Package to document",
"Document all packages in the workspace",
"Exclude packages from the build",
)
.arg(
opt("no-deps", "Don't build documentation for dependencies")
)
.arg(opt("no-deps", "Don't build documentation for dependencies"))
.arg_jobs()
.arg_targets_lib_bin(
"Document only this package's library",
@ -27,7 +26,8 @@ pub fn cli() -> App {
.arg_target_triple("Build for the target triple")
.arg_manifest_path()
.arg_message_format()
.after_help("\
.after_help(
"\
By default the documentation for the local package and all dependencies is
built. The output is all placed in `target/doc` in rustdoc's usual format.
@ -39,12 +39,15 @@ If the --package argument is given, then SPEC is a package id specification
which indicates which package should be documented. If it is not given, then the
current package is documented. For more information on SPEC and its format, see
the `cargo help pkgid` command.
")
",
)
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let ws = args.workspace(config)?;
let mode = CompileMode::Doc { deps: !args.is_present("no-deps") };
let mode = CompileMode::Doc {
deps: !args.is_present("no-deps"),
};
let compile_opts = args.compile_options(config, mode)?;
let doc_opts = DocOptions {
open_result: args.is_present("open"),

View File

@ -6,7 +6,8 @@ pub fn cli() -> App {
subcommand("fetch")
.about("Fetch dependencies of a package from the network")
.arg_manifest_path()
.after_help("\
.after_help(
"\
If a lockfile is available, this command will ensure that all of the git
dependencies and/or registries dependencies are downloaded and locally
available. The network is never touched after a `cargo fetch` unless
@ -15,7 +16,8 @@ the lockfile changes.
If the lockfile is not available, then this is the equivalent of
`cargo generate-lockfile`. A lockfile is generated and dependencies are also
all updated.
")
",
)
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {

View File

@ -6,7 +6,8 @@ pub fn cli() -> App {
subcommand("generate-lockfile")
.about("Generate the lockfile for a project")
.arg_manifest_path()
.after_help("\
.after_help(
"\
If a lockfile is available, this command will ensure that all of the git
dependencies and/or registries dependencies are downloaded and locally
available. The network is never touched after a `cargo fetch` unless
@ -15,7 +16,8 @@ the lockfile changes.
If the lockfile is not available, then this is the equivalent of
`cargo generate-lockfile`. A lockfile is generated and dependencies are also
all updated.
")
",
)
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {

View File

@ -1,14 +1,24 @@
use command_prelude::*;
use cargo::core::{GitReference, SourceId, Source};
use cargo::core::{GitReference, Source, SourceId};
use cargo::sources::GitSource;
use cargo::util::ToUrl;
pub fn cli() -> App {
subcommand("git-checkout")
.about("Checkout a copy of a Git repository")
.arg(Arg::with_name("url").long("url").value_name("URL").required(true))
.arg(Arg::with_name("reference").long("reference").value_name("REF").required(true))
.arg(
Arg::with_name("url")
.long("url")
.value_name("URL")
.required(true),
)
.arg(
Arg::with_name("reference")
.long("reference")
.value_name("REF")
.required(true),
)
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {

View File

@ -12,6 +12,8 @@ pub fn cli() -> App {
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let opts = args.new_options()?;
ops::init(&opts, config)?;
config.shell().status("Created", format!("{} project", opts.kind))?;
config
.shell()
.status("Created", format!("{} project", opts.kind))?;
Ok(())
}

View File

@ -8,39 +8,22 @@ pub fn cli() -> App {
subcommand("install")
.about("Install a Rust binary")
.arg(Arg::with_name("crate").multiple(true))
.arg(
opt("version", "Specify a version to install from crates.io")
.alias("vers").value_name("VERSION")
.alias("vers")
.value_name("VERSION"),
)
.arg(
opt("git", "Git URL to install the specified crate from")
.value_name("URL")
)
.arg(
opt("branch", "Branch to use when installing from git")
.value_name("BRANCH")
)
.arg(
opt("tag", "Tag to use when installing from git")
.value_name("TAG")
)
.arg(
opt("rev", "Specific commit to use when installing from git")
.value_name("SHA")
)
.arg(
opt("path", "Filesystem path to local crate to install")
.value_name("PATH")
)
.arg(opt("list", "list all installed packages and their versions"))
.arg(opt("git", "Git URL to install the specified crate from").value_name("URL"))
.arg(opt("branch", "Branch to use when installing from git").value_name("BRANCH"))
.arg(opt("tag", "Tag to use when installing from git").value_name("TAG"))
.arg(opt("rev", "Specific commit to use when installing from git").value_name("SHA"))
.arg(opt("path", "Filesystem path to local crate to install").value_name("PATH"))
.arg(opt(
"list",
"list all installed packages and their versions",
))
.arg_jobs()
.arg(
opt("force", "Force overwriting existing crates or binaries")
.short("f")
)
.arg(opt("force", "Force overwriting existing crates or binaries").short("f"))
.arg_features()
.arg(opt("debug", "Build in debug mode instead of release mode"))
.arg_targets_bins_examples(
@ -49,11 +32,9 @@ pub fn cli() -> App {
"Install only the specified example",
"Install all examples",
)
.arg(
opt("root", "Directory to install packages into")
.value_name("DIR")
)
.after_help("\
.arg(opt("root", "Directory to install packages into").value_name("DIR"))
.after_help(
"\
This command manages Cargo's local set of installed binary crates. Only packages
which have [[bin]] targets can be installed, and all binaries are installed into
the installation root's `bin` folder. The installation root is determined, in
@ -86,14 +67,17 @@ If the source is crates.io or `--git` then by default the crate will be built
in a temporary target directory. To avoid this, the target directory can be
specified by setting the `CARGO_TARGET_DIR` environment variable to a relative
path. In particular, this can be useful for caching build artifacts on
continuous integration systems.")
continuous integration systems.",
)
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let mut compile_opts = args.compile_options(config, CompileMode::Build)?;
compile_opts.release = !args.is_present("debug");
let krates = args.values_of("crate").unwrap_or_default().collect::<Vec<_>>();
let krates = args.values_of("crate")
.unwrap_or_default()
.collect::<Vec<_>>();
let source = if let Some(url) = args.value_of("git") {
let url = url.to_url()?;
@ -121,7 +105,14 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
if args.is_present("list") {
ops::install_list(root, config)?;
} else {
ops::install(root, krates, &source, version, &compile_opts, args.is_present("force"))?;
ops::install(
root,
krates,
&source,
version,
&compile_opts,
args.is_present("force"),
)?;
}
Ok(())
}

View File

@ -10,15 +10,19 @@ pub fn cli() -> App {
#[derive(Serialize)]
pub struct ProjectLocation {
root: String
root: String,
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let root = args.root_manifest(config)?;
let root = root.to_str()
.ok_or_else(|| format_err!("your project path contains characters \
not representable in Unicode"))
.ok_or_else(|| {
format_err!(
"your project path contains characters \
not representable in Unicode"
)
})
.map_err(|e| CliError::new(e, 1))?
.to_string();
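
Editor's note: the conversion above fails when the project path is not valid Unicode, because `Path::to_str` returns `None` in that case. A small sketch of the same rule with a plain `String` error:

    use std::path::Path;

    fn root_string(path: &Path) -> Result<String, String> {
        path.to_str()
            .map(|s| s.to_string())
            // None means the OS path has no lossless UTF-8 representation.
            .ok_or_else(|| {
                "your project path contains characters not representable in Unicode".to_string()
            })
    }

    fn main() {
        assert_eq!(root_string(Path::new("/tmp/proj")).unwrap(), "/tmp/proj");
    }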

View File

@ -2,15 +2,17 @@ use command_prelude::*;
use std::io::{self, BufRead};
use cargo::core::{SourceId, Source};
use cargo::core::{Source, SourceId};
use cargo::sources::RegistrySource;
use cargo::util::{CargoError, CargoResultExt};
use cargo::ops;
pub fn cli() -> App {
subcommand("login")
.about("Save an api token from the registry locally. \
If token is not specified, it will be read from stdin.")
.about(
"Save an api token from the registry locally. \
If token is not specified, it will be read from stdin.",
)
.arg(Arg::with_name("token"))
.arg(opt("host", "Host to set the token for").value_name("HOST"))
.arg(opt("registry", "Registry to use").value_name("REGISTRY"))
@ -24,24 +26,29 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
None => {
let host = match registry {
Some(ref _registry) => {
return Err(format_err!("token must be provided when \
--registry is provided.").into());
return Err(format_err!(
"token must be provided when \
--registry is provided."
).into());
}
None => {
let src = SourceId::crates_io(config)?;
let mut src = RegistrySource::remote(&src, config);
src.update()?;
let config = src.config()?.unwrap();
args.value_of("host").map(|s| s.to_string())
args.value_of("host")
.map(|s| s.to_string())
.unwrap_or(config.api.unwrap())
}
};
println!("please visit {}me and paste the API Token below", host);
let mut line = String::new();
let input = io::stdin();
input.lock().read_line(&mut line).chain_err(|| {
"failed to read stdin"
}).map_err(CargoError::from)?;
input
.lock()
.read_line(&mut line)
.chain_err(|| "failed to read stdin")
.map_err(CargoError::from)?;
line.trim().to_string()
}
};

View File

@ -5,18 +5,22 @@ use cargo::print_json;
pub fn cli() -> App {
subcommand("metadata")
.about("Output the resolved dependencies of a project, \
the concrete used versions including overrides, \
in machine-readable format")
.arg_features()
.arg(
opt("no-deps", "Output information only about the root package \
and don't fetch dependencies")
.about(
"Output the resolved dependencies of a project, \
the concrete used versions including overrides, \
in machine-readable format",
)
.arg_features()
.arg(opt(
"no-deps",
"Output information only about the root package \
and don't fetch dependencies",
))
.arg_manifest_path()
.arg(
opt("format-version", "Format version")
.value_name("VERSION").possible_value("1")
.value_name("VERSION")
.possible_value("1"),
)
}
@ -25,9 +29,10 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let version = match args.value_of("format-version") {
None => {
config.shell().warn("\
please specify `--format-version` flag explicitly \
to avoid compatibility problems"
config.shell().warn(
"\
please specify `--format-version` flag explicitly \
to avoid compatibility problems",
)?;
1
}

View File

@ -34,7 +34,7 @@ pub fn builtin() -> Vec<App> {
]
}
pub fn builtin_exec(cmd: & str) -> Option<fn(&mut Config, &ArgMatches) -> CliResult> {
pub fn builtin_exec(cmd: &str) -> Option<fn(&mut Config, &ArgMatches) -> CliResult> {
let f = match cmd {
"bench" => bench::exec,
"build" => build::exec,

View File

@ -13,6 +13,8 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let opts = args.new_options()?;
ops::new(&opts, config)?;
let path = args.value_of("path").unwrap();
config.shell().status("Created", format!("{} `{}` project", opts.kind, path))?;
config
.shell()
.status("Created", format!("{} `{}` project", opts.kind, path))?;
Ok(())
}

View File

@ -8,17 +8,22 @@ pub fn cli() -> App {
.arg(Arg::with_name("crate"))
.arg(
opt("add", "Name of a user or team to add as an owner")
.short("a").value_name("LOGIN").multiple(true)
.short("a")
.value_name("LOGIN")
.multiple(true),
)
.arg(
opt("remove", "Name of a user or team to remove as an owner")
.short("r").value_name("LOGIN").multiple(true)
.short("r")
.value_name("LOGIN")
.multiple(true),
)
.arg(opt("list", "List owners of a crate").short("l"))
.arg(opt("index", "Registry index to modify owners for").value_name("INDEX"))
.arg(opt("token", "API token to use when authenticating").value_name("TOKEN"))
.arg(opt("registry", "Registry to use").value_name("REGISTRY"))
.after_help("\
.after_help(
"\
This command will modify the owners for a package
on the specified registry (or
default). Note that owners of a package can upload new versions, yank old
@ -26,7 +31,8 @@ pub fn cli() -> App {
caution!
See http://doc.crates.io/crates-io.html#cargo-owner for detailed documentation
and troubleshooting.")
and troubleshooting.",
)
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {

View File

@ -2,14 +2,27 @@ use command_prelude::*;
use cargo::ops::{self, PackageOpts};
pub fn cli() -> App {
subcommand("package")
.about("Assemble the local package into a distributable tarball")
.arg(opt("list", "Print files included in a package without making one").short("l"))
.arg(opt("no-verify", "Don't verify the contents by building them"))
.arg(opt("no-metadata", "Ignore warnings about a lack of human-usable metadata"))
.arg(opt("allow-dirty", "Allow dirty working directories to be packaged"))
.arg(
opt(
"list",
"Print files included in a package without making one",
).short("l"),
)
.arg(opt(
"no-verify",
"Don't verify the contents by building them",
))
.arg(opt(
"no-metadata",
"Ignore warnings about a lack of human-usable metadata",
))
.arg(opt(
"allow-dirty",
"Allow dirty working directories to be packaged",
))
.arg_target_triple("Build for the target triple")
.arg_manifest_path()
.arg_jobs()
@ -17,15 +30,18 @@ pub fn cli() -> App {
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let ws = args.workspace(config)?;
ops::package(&ws, &PackageOpts {
config,
verify: !args.is_present("no-verify"),
list: args.is_present("list"),
check_metadata: !args.is_present("no-metadata"),
allow_dirty: args.is_present("allow-dirty"),
target: args.target(),
jobs: args.jobs()?,
registry: None,
})?;
ops::package(
&ws,
&PackageOpts {
config,
verify: !args.is_present("no-verify"),
list: args.is_present("list"),
check_metadata: !args.is_present("no-metadata"),
allow_dirty: args.is_present("allow-dirty"),
target: args.target(),
jobs: args.jobs()?,
registry: None,
},
)?;
Ok(())
}

View File

@ -8,7 +8,8 @@ pub fn cli() -> App {
.arg(Arg::with_name("spec"))
.arg_single_package("Argument to get the package id specifier for")
.arg_manifest_path()
.after_help("\
.after_help(
"\
Given a <spec> argument, print out the fully qualified package id specifier.
This command will generate an error if <spec> is ambiguous as to which package
it refers to in the dependency graph. If no <spec> is given, then the pkgid for
@ -27,7 +28,8 @@ Example Package IDs
crates.io/foo#1.2.3 | foo | 1.2.3 | *://crates.io/foo
crates.io/bar#foo:1.2.3 | foo | 1.2.3 | *://crates.io/bar
http://crates.io/foo#1.2.3 | foo | 1.2.3 | http://crates.io/foo
")
",
)
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {

View File

@ -7,14 +7,18 @@ pub fn cli() -> App {
.about("Upload a package to the registry")
.arg_index()
.arg(opt("token", "Token to use when uploading").value_name("TOKEN"))
.arg(opt("no-verify", "Don't verify the contents by building them"))
.arg(opt("allow-dirty", "Allow dirty working directories to be packaged"))
.arg(opt(
"no-verify",
"Don't verify the contents by building them",
))
.arg(opt(
"allow-dirty",
"Allow dirty working directories to be packaged",
))
.arg_target_triple("Build for the target triple")
.arg_manifest_path()
.arg_jobs()
.arg(
opt("dry-run", "Perform all checks without uploading")
)
.arg(opt("dry-run", "Perform all checks without uploading"))
.arg(opt("registry", "Registry to publish to").value_name("REGISTRY"))
}
@ -23,16 +27,19 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let ws = args.workspace(config)?;
let index = args.index(config)?;
ops::publish(&ws, &PublishOpts {
config,
token: args.value_of("token").map(|s| s.to_string()),
index,
verify: !args.is_present("no-verify"),
allow_dirty: args.is_present("allow-dirty"),
target: args.target(),
jobs: args.jobs()?,
dry_run: args.is_present("dry-run"),
registry,
})?;
ops::publish(
&ws,
&PublishOpts {
config,
token: args.value_of("token").map(|s| s.to_string()),
index,
verify: !args.is_present("no-verify"),
allow_dirty: args.is_present("allow-dirty"),
target: args.target(),
jobs: args.jobs()?,
dry_run: args.is_present("dry-run"),
registry,
},
)?;
Ok(())
}

View File

@ -5,8 +5,10 @@ use cargo::print_json;
pub fn cli() -> App {
subcommand("read-manifest")
.about("Deprecated, use `cargo metadata --no-deps` instead.
Print a JSON representation of a Cargo.toml manifest.")
.about(
"Deprecated, use `cargo metadata --no-deps` instead.
Print a JSON representation of a Cargo.toml manifest.",
)
.arg_manifest_path()
}

View File

@ -1,10 +1,11 @@
use command_prelude::*;
use cargo::core::Verbosity;
use cargo::ops::{self, CompileMode, CompileFilter};
use cargo::ops::{self, CompileFilter, CompileMode};
pub fn cli() -> App {
subcommand("run").alias("r")
subcommand("run")
.alias("r")
.setting(AppSettings::TrailingVarArg)
.about("Run the main binary of the local package (src/main.rs)")
.arg(Arg::with_name("args").multiple(true))
@ -19,7 +20,8 @@ pub fn cli() -> App {
.arg_target_triple("Build for the target triple")
.arg_manifest_path()
.arg_message_format()
.after_help("\
.after_help(
"\
If neither `--bin` nor `--example` are given, then if the project only has one
bin target it will be run. Otherwise `--bin` specifies the bin target to run,
and `--example` specifies the example target to run. At most one of `--bin` or
@ -28,15 +30,14 @@ and `--example` specifies the example target to run. At most one of `--bin` or
All of the trailing arguments are passed to the binary to run. If you're passing
arguments to both Cargo and the binary, the ones after `--` go to the binary,
the ones before go to Cargo.
")
",
)
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let ws = args.workspace(config)?;
let mut compile_opts = args.compile_options_for_single_package(
config, CompileMode::Build,
)?;
let mut compile_opts = args.compile_options_for_single_package(config, CompileMode::Build)?;
if !args.is_present("example") && !args.is_present("bin") {
compile_opts.filter = CompileFilter::Default {
required_features_filterable: false,

View File

@ -22,15 +22,13 @@ pub fn cli() -> App {
"Build all targets (lib and bin targets by default)",
)
.arg_release("Build artifacts in release mode, with optimizations")
.arg(
opt("profile", "Profile to build the selected target for")
.value_name("PROFILE")
)
.arg(opt("profile", "Profile to build the selected target for").value_name("PROFILE"))
.arg_features()
.arg_target_triple("Target triple which compiles will be for")
.arg_manifest_path()
.arg_message_format()
.after_help("\
.after_help(
"\
The specified target for the current package (or package specified by SPEC if
provided) will be compiled along with all of its dependencies. The specified
<args>... will all be passed to the final compiler invocation, not any of the
@ -43,7 +41,8 @@ target is available for the current package the filters of --lib, --bin, etc,
must be used to select which target is compiled. To pass flags to all compiler
processes spawned by Cargo, use the $RUSTFLAGS environment variable or the
`build.rustflags` configuration option.
")
",
)
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
@ -54,14 +53,15 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
Some("bench") => CompileMode::Bench,
Some("check") => CompileMode::Check { test: false },
Some(mode) => {
let err = format_err!("unknown profile: `{}`, use dev,
test, or bench", mode);
let err = format_err!(
"unknown profile: `{}`, use dev,
test, or bench",
mode
);
return Err(CliError::new(err, 101));
}
};
let mut compile_opts = args.compile_options_for_single_package(
config, mode,
)?;
let mut compile_opts = args.compile_options_for_single_package(config, mode)?;
compile_opts.target_rustc_args = Some(values(args, "args"));
ops::compile(&ws, &compile_opts)?;
Ok(())

View File

@ -7,7 +7,10 @@ pub fn cli() -> App {
.setting(AppSettings::TrailingVarArg)
.about("Build a package's documentation, using specified custom flags.")
.arg(Arg::with_name("args").multiple(true))
.arg(opt("open", "Opens the docs in a browser after the operation"))
.arg(opt(
"open",
"Opens the docs in a browser after the operation",
))
.arg_single_package("Package to document")
.arg_jobs()
.arg_targets_all(
@ -25,7 +28,8 @@ pub fn cli() -> App {
.arg_release("Build artifacts in release mode, with optimizations")
.arg_manifest_path()
.arg_message_format()
.after_help("\
.after_help(
"\
The specified target for the current package (or package specified by SPEC if
provided) will be documented with the specified <opts>... being passed to the
final rustdoc invocation. Dependencies will not be documented as part of this
@ -37,14 +41,14 @@ If the --package argument is given, then SPEC is a package id specification
which indicates which package should be documented. If it is not given, then the
current package is documented. For more information on SPEC and its format, see
the `cargo help pkgid` command.
")
",
)
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let ws = args.workspace(config)?;
let mut compile_opts = args.compile_options_for_single_package(
config, CompileMode::Doc { deps: false },
)?;
let mut compile_opts =
args.compile_options_for_single_package(config, CompileMode::Doc { deps: false })?;
compile_opts.target_rustdoc_args = Some(values(args, "args"));
let doc_opts = DocOptions {
open_result: args.is_present("open"),

View File

@ -10,8 +10,10 @@ pub fn cli() -> App {
.arg(Arg::with_name("query").multiple(true))
.arg_index()
.arg(
opt("limit", "Limit the number of results (default: 10, max: 100)")
.value_name("LIMIT")
opt(
"limit",
"Limit the number of results (default: 10, max: 100)",
).value_name("LIMIT"),
)
.arg(opt("registry", "Registry to use").value_name("REGISTRY"))
}

View File

@ -3,18 +3,19 @@ use command_prelude::*;
use cargo::ops::{self, CompileMode};
pub fn cli() -> App {
subcommand("test").alias("t")
subcommand("test")
.alias("t")
.setting(AppSettings::TrailingVarArg)
.about("Execute all unit and integration tests of a local package")
.arg(
Arg::with_name("TESTNAME").help(
"If specified, only run tests containing this string in their names"
)
Arg::with_name("TESTNAME")
.help("If specified, only run tests containing this string in their names"),
)
.arg(
Arg::with_name("args").help(
"Arguments for the test binary"
).multiple(true).last(true)
Arg::with_name("args")
.help("Arguments for the test binary")
.multiple(true)
.last(true),
)
.arg_targets_all(
"Test only this package's library",
@ -29,12 +30,8 @@ pub fn cli() -> App {
"Test all targets (default)",
)
.arg(opt("doc", "Test only this library's documentation"))
.arg(
opt("no-run", "Compile, but don't run tests")
)
.arg(
opt("no-fail-fast", "Run all tests regardless of failure")
)
.arg(opt("no-run", "Compile, but don't run tests"))
.arg(opt("no-fail-fast", "Run all tests regardless of failure"))
.arg_package(
"Package to run tests for",
"Test all packages in the workspace",
@ -46,7 +43,8 @@ pub fn cli() -> App {
.arg_target_triple("Build for the target triple")
.arg_manifest_path()
.arg_message_format()
.after_help("\
.after_help(
"\
All of the trailing arguments are passed to the test binaries generated for
filtering tests and generally providing options configuring how they run. For
example, this will run all tests with the name `foo` in their name:
@ -81,7 +79,8 @@ by passing `--nocapture` to the test binaries:
To get the list of all options available for the test binaries use this:
cargo test -- --help
")
",
)
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
@ -91,12 +90,18 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let doc = args.is_present("doc");
if doc {
compile_opts.mode = ops::CompileMode::Doctest;
compile_opts.filter = ops::CompileFilter::new(true,
Vec::new(), false,
Vec::new(), false,
Vec::new(), false,
Vec::new(), false,
false);
compile_opts.filter = ops::CompileFilter::new(
true,
Vec::new(),
false,
Vec::new(),
false,
Vec::new(),
false,
Vec::new(),
false,
false,
);
}
let ops = ops::TestOptions {
@ -110,16 +115,18 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
// important so we explicitly mention it and reconfigure
let mut test_args = vec![];
test_args.extend(args.value_of("TESTNAME").into_iter().map(|s| s.to_string()));
test_args.extend(args.values_of("args").unwrap_or_default().map(|s| s.to_string()));
test_args.extend(
args.values_of("args")
.unwrap_or_default()
.map(|s| s.to_string()),
);
let err = ops::run_tests(&ws, &ops, &test_args)?;
return match err {
None => Ok(()),
Some(err) => {
Err(match err.exit.as_ref().and_then(|e| e.code()) {
Some(i) => CliError::new(format_err!("{}", err.hint(&ws)), i),
None => CliError::new(err.into(), 101),
})
}
Some(err) => Err(match err.exit.as_ref().and_then(|e| e.code()) {
Some(i) => CliError::new(format_err!("{}", err.hint(&ws)), i),
None => CliError::new(err.into(), 101),
}),
};
}
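
Editor's note: the error mapping above preserves the test binary's own exit status when one is available and falls back to cargo's generic failure code 101 otherwise. A tiny sketch of that rule (hypothetical simplified error type):

    struct CliError {
        code: i32,
    }

    fn to_cli_error(test_exit: Option<i32>) -> CliError {
        match test_exit {
            // The test binary exited with a real status: forward it unchanged.
            Some(i) => CliError { code: i },
            // Killed by a signal, or no status available: generic failure.
            None => CliError { code: 101 },
        }
    }

    fn main() {
        assert_eq!(to_cli_error(Some(1)).code, 1);
        assert_eq!(to_cli_error(None).code, 101);
    }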

View File

@ -8,18 +8,18 @@ pub fn cli() -> App {
.arg(Arg::with_name("spec").multiple(true))
.arg(
opt("bin", "Only uninstall the binary NAME")
.value_name("NAME").multiple(true)
.value_name("NAME")
.multiple(true),
)
.arg(
opt("root", "Directory to uninstall packages from")
.value_name("DIR")
)
.after_help("\
.arg(opt("root", "Directory to uninstall packages from").value_name("DIR"))
.after_help(
"\
The argument SPEC is a package id specification (see `cargo help pkgid`) to
specify which crate should be uninstalled. By default all binaries are
uninstalled for a crate but the `--bin` and `--example` flags can be used to
only uninstall particular binaries.
")
",
)
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {

View File

@ -7,15 +7,18 @@ pub fn cli() -> App {
.about("Update dependencies as recorded in the local lock file")
.arg(
opt("package", "Package to update")
.short("p").value_name("SPEC").multiple(true)
)
.arg(opt("aggressive", "Force updating all dependencies of <name> as well"))
.arg(
opt("precise", "Update a single dependency to exactly PRECISE")
.value_name("PRECISE")
.short("p")
.value_name("SPEC")
.multiple(true),
)
.arg(opt(
"aggressive",
"Force updating all dependencies of <name> as well",
))
.arg(opt("precise", "Update a single dependency to exactly PRECISE").value_name("PRECISE"))
.arg_manifest_path()
.after_help("\
.after_help(
"\
This command requires that a `Cargo.lock` already exists as generated by
`cargo build` or related commands.
@ -35,7 +38,8 @@ If SPEC is not given, then all dependencies will be re-resolved and
updated.
For more information about package id specifications, see `cargo help pkgid`.
")
",
)
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {

View File

@ -32,7 +32,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let file = File::open(&filename);
match file.and_then(|mut f| f.read_to_string(&mut contents)) {
Ok(_) => {}
Err(e) => fail("invalid", &format!("error reading file: {}", e))
Err(e) => fail("invalid", &format!("error reading file: {}", e)),
};
if contents.parse::<toml::Value>().is_err() {
fail("invalid", "invalid-format");

View File

@ -3,8 +3,7 @@ use command_prelude::*;
use cargo;
pub fn cli() -> App {
subcommand("version")
.about("Show version information")
subcommand("version").about("Show version information")
}
pub fn exec(_config: &mut Config, _args: &ArgMatches) -> CliResult {

View File

@ -6,14 +6,16 @@ pub fn cli() -> App {
subcommand("yank")
.about("Remove a pushed crate from the index")
.arg(Arg::with_name("crate"))
.arg(
opt("vers", "The version to yank or un-yank").value_name("VERSION")
)
.arg(opt("undo", "Undo a yank, putting a version back into the index"))
.arg(opt("vers", "The version to yank or un-yank").value_name("VERSION"))
.arg(opt(
"undo",
"Undo a yank, putting a version back into the index",
))
.arg(opt("index", "Registry index to yank from").value_name("INDEX"))
.arg(opt("token", "API token to use when authenticating").value_name("TOKEN"))
.arg(opt("registry", "Registry to use").value_name("REGISTRY"))
.after_help("\
.after_help(
"\
The yank command removes a previously pushed crate's version from the server's
index. This command does not delete any data, and the crate will still be
available for download via the registry's download link.
@ -21,18 +23,21 @@ available for download via the registry's download link.
Note that existing crates locked to a yanked version will still be able to
download the yanked version to use it. Cargo will, however, not allow any new
crates to be locked to any yanked version.
")
",
)
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let registry = args.registry(config)?;
ops::yank(config,
args.value_of("crate").map(|s| s.to_string()),
args.value_of("vers").map(|s| s.to_string()),
args.value_of("token").map(|s| s.to_string()),
args.value_of("index").map(|s| s.to_string()),
args.is_present("undo"),
registry)?;
ops::yank(
config,
args.value_of("crate").map(|s| s.to_string()),
args.value_of("vers").map(|s| s.to_string()),
args.value_of("token").map(|s| s.to_string()),
args.value_of("index").map(|s| s.to_string()),
args.is_present("undo"),
registry,
)?;
Ok(())
}

View File

@ -6,10 +6,10 @@ use semver::VersionReq;
use semver::ReqParseError;
use serde::ser;
use core::{SourceId, Summary, PackageId};
use core::{PackageId, SourceId, Summary};
use core::interning::InternedString;
use util::{Cfg, CfgExpr, Config};
use util::errors::{CargoResult, CargoResultExt, CargoError};
use util::errors::{CargoError, CargoResult, CargoResultExt};
/// Information about a dependency requested by a Cargo manifest.
/// Cheap to copy.
@ -61,7 +61,8 @@ struct SerializedDependency<'a> {
impl ser::Serialize for Dependency {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer,
where
S: ser::Serializer,
{
SerializedDependency {
name: &*self.name(),
@ -84,9 +85,10 @@ pub enum Kind {
Build,
}
fn parse_req_with_deprecated(req: &str,
extra: Option<(&PackageId, &Config)>)
-> CargoResult<VersionReq> {
fn parse_req_with_deprecated(
req: &str,
extra: Option<(&PackageId, &Config)>,
) -> CargoResult<VersionReq> {
match VersionReq::parse(req) {
Err(e) => {
let (inside, config) = match extra {
@ -95,7 +97,8 @@ fn parse_req_with_deprecated(req: &str,
};
match e {
ReqParseError::DeprecatedVersionRequirement(requirement) => {
let msg = format!("\
let msg = format!(
"\
parsed version requirement `{}` is no longer valid
Previous versions of Cargo accepted this malformed requirement,
@ -106,21 +109,26 @@ This will soon become a hard error, so it's either recommended to
update to a fixed version or contact the upstream maintainer about
this warning.
",
req, inside.name(), inside.version(), requirement);
req,
inside.name(),
inside.version(),
requirement
);
config.shell().warn(&msg)?;
Ok(requirement)
}
e => Err(e.into()),
}
},
}
Ok(v) => Ok(v),
}
}
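
Editor's note: the deprecated-requirement fallback above only applies to strings that old Cargo mis-accepted; the normal path is ordinary semver parsing. A short usage example with the `semver` crate, which cargo uses (note that a bare version requirement behaves like a caret requirement):

    extern crate semver;
    use semver::{Version, VersionReq};

    fn main() {
        let req = VersionReq::parse("^1.2").unwrap();
        assert!(req.matches(&Version::parse("1.4.0").unwrap()));
        assert!(!req.matches(&Version::parse("2.0.0").unwrap()));
        // A bare version is treated like a caret requirement.
        let bare = VersionReq::parse("1.2.3").unwrap();
        assert!(bare.matches(&Version::parse("1.2.9").unwrap()));
    }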
impl ser::Serialize for Kind {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer,
where
S: ser::Serializer,
{
match *self {
Kind::Normal => None,
@ -132,15 +140,17 @@ impl ser::Serialize for Kind {
impl Dependency {
/// Attempt to create a `Dependency` from an entry in the manifest.
pub fn parse(name: &str,
version: Option<&str>,
source_id: &SourceId,
inside: &PackageId,
config: &Config) -> CargoResult<Dependency> {
pub fn parse(
name: &str,
version: Option<&str>,
source_id: &SourceId,
inside: &PackageId,
config: &Config,
) -> CargoResult<Dependency> {
let arg = Some((inside, config));
let (specified_req, version_req) = match version {
Some(v) => (true, parse_req_with_deprecated(v, arg)?),
None => (false, VersionReq::any())
None => (false, VersionReq::any()),
};
let mut ret = Dependency::new_override(name, source_id);
@ -154,12 +164,14 @@ impl Dependency {
}
/// Attempt to create a `Dependency` from an entry in the manifest.
pub fn parse_no_deprecated(name: &str,
version: Option<&str>,
source_id: &SourceId) -> CargoResult<Dependency> {
pub fn parse_no_deprecated(
name: &str,
version: Option<&str>,
source_id: &SourceId,
) -> CargoResult<Dependency> {
let (specified_req, version_req) = match version {
Some(v) => (true, parse_req_with_deprecated(v, None)?),
None => (false, VersionReq::any())
None => (false, VersionReq::any()),
};
let mut ret = Dependency::new_override(name, source_id);
@ -279,11 +291,13 @@ impl Dependency {
pub fn lock_to(&mut self, id: &PackageId) -> &mut Dependency {
assert_eq!(self.inner.source_id, *id.source_id());
assert!(self.inner.req.matches(id.version()));
trace!("locking dep from `{}` with `{}` at {} to {}",
self.name(),
self.version_req(),
self.source_id(),
id);
trace!(
"locking dep from `{}` with `{}` at {} to {}",
self.name(),
self.version_req(),
self.source_id(),
id
);
self.set_version_req(VersionReq::exact(id.version()))
.set_source_id(id.source_id().clone())
}
@ -330,19 +344,19 @@ impl Dependency {
/// Returns true if the package (`sum`) can fulfill this dependency request.
pub fn matches_ignoring_source(&self, sum: &Summary) -> bool {
self.name() == sum.package_id().name() &&
self.version_req().matches(sum.package_id().version())
self.name() == sum.package_id().name()
&& self.version_req().matches(sum.package_id().version())
}
/// Returns true if the package (`id`) can fulfill this dependency request.
pub fn matches_id(&self, id: &PackageId) -> bool {
self.inner.name == id.name() &&
(self.inner.only_match_name || (self.inner.req.matches(id.version()) &&
&self.inner.source_id == id.source_id()))
self.inner.name == id.name()
&& (self.inner.only_match_name
|| (self.inner.req.matches(id.version())
&& &self.inner.source_id == id.source_id()))
}
pub fn map_source(mut self, to_replace: &SourceId, replace_with: &SourceId)
-> Dependency {
pub fn map_source(mut self, to_replace: &SourceId, replace_with: &SourceId) -> Dependency {
if self.source_id() != to_replace {
self
} else {
@ -356,19 +370,18 @@ impl Platform {
pub fn matches(&self, name: &str, cfg: Option<&[Cfg]>) -> bool {
match *self {
Platform::Name(ref p) => p == name,
Platform::Cfg(ref p) => {
match cfg {
Some(cfg) => p.matches(cfg),
None => false,
}
}
Platform::Cfg(ref p) => match cfg {
Some(cfg) => p.matches(cfg),
None => false,
},
}
}
}
impl ser::Serialize for Platform {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer,
where
S: ser::Serializer,
{
self.to_string().serialize(s)
}
@ -379,10 +392,10 @@ impl FromStr for Platform {
fn from_str(s: &str) -> CargoResult<Platform> {
if s.starts_with("cfg(") && s.ends_with(')') {
let s = &s[4..s.len()-1];
let p = s.parse().map(Platform::Cfg).chain_err(|| {
format_err!("failed to parse `{}` as a cfg expression", s)
})?;
let s = &s[4..s.len() - 1];
let p = s.parse()
.map(Platform::Cfg)
.chain_err(|| format_err!("failed to parse `{}` as a cfg expression", s))?;
Ok(p)
} else {
Ok(Platform::Name(s.to_string()))
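
Editor's note: `Platform::from_str` above distinguishes a `cfg(...)` expression from a plain target name by its delimiters. A simplified sketch of that split (a raw string stands in for cargo's parsed `CfgExpr`):

    #[derive(Debug, PartialEq)]
    enum Platform {
        Name(String),
        Cfg(String), // cargo stores a parsed CfgExpr; a raw string stands in here
    }

    fn parse_platform(s: &str) -> Platform {
        if s.starts_with("cfg(") && s.ends_with(')') {
            // Strip the `cfg(` prefix and `)` suffix, as in from_str above.
            Platform::Cfg(s[4..s.len() - 1].to_string())
        } else {
            Platform::Name(s.to_string())
        }
    }

    fn main() {
        assert_eq!(
            parse_platform("x86_64-unknown-linux-gnu"),
            Platform::Name("x86_64-unknown-linux-gnu".to_string())
        );
        assert_eq!(parse_platform("cfg(windows)"), Platform::Cfg("windows".to_string()));
    }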

View File

@ -46,8 +46,7 @@ use std::str::FromStr;
use util::errors::CargoResult;
/// The epoch of the compiler (RFC 2052)
#[derive(Clone, Copy, Debug, Hash, PartialOrd, Ord, Eq, PartialEq)]
#[derive(Serialize, Deserialize)]
#[derive(Clone, Copy, Debug, Hash, PartialOrd, Ord, Eq, PartialEq, Serialize, Deserialize)]
pub enum Epoch {
/// The 2015 epoch
Epoch2015,
@ -69,7 +68,7 @@ impl FromStr for Epoch {
match s {
"2015" => Ok(Epoch::Epoch2015),
"2018" => Ok(Epoch::Epoch2018),
_ => Err(())
_ => Err(()),
}
}
}
@ -174,8 +173,7 @@ pub struct Feature {
}
impl Features {
pub fn new(features: &[String],
warnings: &mut Vec<String>) -> CargoResult<Features> {
pub fn new(features: &[String], warnings: &mut Vec<String>) -> CargoResult<Features> {
let mut ret = Features::default();
for feature in features {
ret.add(feature, warnings)?;
@ -196,17 +194,20 @@ impl Features {
match status {
Status::Stable => {
let warning = format!("the cargo feature `{}` is now stable \
and is no longer necessary to be listed \
in the manifest", feature);
let warning = format!(
"the cargo feature `{}` is now stable \
and is no longer necessary to be listed \
in the manifest",
feature
);
warnings.push(warning);
}
Status::Unstable if !nightly_features_allowed() => {
bail!("the cargo feature `{}` requires a nightly version of \
Cargo, but this is the `{}` channel",
feature,
channel())
}
Status::Unstable if !nightly_features_allowed() => bail!(
"the cargo feature `{}` requires a nightly version of \
Cargo, but this is the `{}` channel",
feature,
channel()
),
Status::Unstable => {}
}
@ -227,15 +228,20 @@ impl Features {
let mut msg = format!("feature `{}` is required", feature);
if nightly_features_allowed() {
let s = format!("\n\nconsider adding `cargo-features = [\"{0}\"]` \
to the manifest", feature);
let s = format!(
"\n\nconsider adding `cargo-features = [\"{0}\"]` \
to the manifest",
feature
);
msg.push_str(&s);
} else {
let s = format!("\n\n\
this Cargo does not support nightly features, but if you\n\
switch to nightly channel you can add\n\
`cargo-features = [\"{}\"]` to enable this feature",
feature);
let s = format!(
"\n\n\
this Cargo does not support nightly features, but if you\n\
switch to nightly channel you can add\n\
`cargo-features = [\"{}\"]` to enable this feature",
feature
);
msg.push_str(&s);
}
bail!("{}", msg);
@ -299,8 +305,7 @@ impl CliUnstable {
fn parse_bool(value: Option<&str>) -> CargoResult<bool> {
match value {
None |
Some("yes") => Ok(true),
None | Some("yes") => Ok(true),
Some("no") => Ok(false),
Some(s) => bail!("expected `no` or `yes`, found: {}", s),
}
@ -321,7 +326,9 @@ impl CliUnstable {
fn channel() -> String {
env::var("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS").unwrap_or_else(|_| {
::version().cfg_info.map(|c| c.release_channel)
::version()
.cfg_info
.map(|c| c.release_channel)
.unwrap_or_else(|| String::from("dev"))
})
}
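
The `parse_bool` hunk above is a compact example of what this reformat does to multi-pattern arms: `None` (a bare `-Z` flag) and `Some("yes")` merge onto a single line. A self-contained mirror of that function, assuming a plain `String` error in place of cargo's `CargoResult`:

fn parse_bool(value: Option<&str>) -> Result<bool, String> {
    match value {
        // A bare flag and an explicit "yes" both turn the feature on.
        None | Some("yes") => Ok(true),
        Some("no") => Ok(false),
        Some(s) => Err(format!("expected `no` or `yes`, found: {}", s)),
    }
}

fn main() {
    assert_eq!(parse_bool(None), Ok(true));
    assert_eq!(parse_bool(Some("no")), Ok(false));
    assert!(parse_bool(Some("maybe")).is_err());
}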

View File

@ -34,11 +34,17 @@ impl InternedString {
pub fn new(str: &str) -> InternedString {
let mut cache = STRING_CASHE.write().unwrap();
if let Some(&s) = cache.get(str) {
return InternedString { ptr: s.as_ptr(), len: s.len() };
return InternedString {
ptr: s.as_ptr(),
len: s.len(),
};
}
let s = leek(str.to_string());
cache.insert(s);
InternedString { ptr: s.as_ptr(), len: s.len() }
InternedString {
ptr: s.as_ptr(),
len: s.len(),
}
}
pub fn to_inner(&self) -> &'static str {
unsafe {
@ -87,4 +93,4 @@ impl PartialOrd for InternedString {
}
unsafe impl Send for InternedString {}
unsafe impl Sync for InternedString {}
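
The interner above caches leaked strings so that every equal name shares one `'static` allocation (the deliberately misspelled `leek` does the leaking; cargo additionally guards the cache with a `RwLock` and stores a raw pointer plus length). A minimal std-only sketch of the same idea, not cargo's actual code:

use std::collections::HashSet;

fn intern(cache: &mut HashSet<&'static str>, s: &str) -> &'static str {
    if let Some(&hit) = cache.get(s) {
        return hit; // cache hit: reuse the previously leaked allocation
    }
    let leaked: &'static str = Box::leak(s.to_string().into_boxed_str());
    cache.insert(leaked);
    leaked
}

fn main() {
    let mut cache = HashSet::new();
    let a = intern(&mut cache, "serde");
    let b = intern(&mut cache, "serde");
    assert!(std::ptr::eq(a, b)); // both point at the same leaked string
}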

View File

@ -1,6 +1,6 @@
use std::collections::{HashMap, BTreeMap};
use std::collections::{BTreeMap, HashMap};
use std::fmt;
use std::path::{PathBuf, Path};
use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::hash::{Hash, Hasher};
@ -8,8 +8,8 @@ use semver::Version;
use serde::ser;
use url::Url;
use core::{Dependency, PackageId, Summary, SourceId, PackageIdSpec};
use core::{WorkspaceConfig, Epoch, Features, Feature};
use core::{Dependency, PackageId, PackageIdSpec, SourceId, Summary};
use core::{Epoch, Feature, Features, WorkspaceConfig};
use core::interning::InternedString;
use util::Config;
use util::toml::TomlManifest;
@ -48,7 +48,7 @@ pub struct Manifest {
#[derive(Clone, Debug)]
pub struct DelayedWarning {
pub message: String,
pub is_critical: bool
pub is_critical: bool,
}
#[derive(Clone, Debug)]
@ -74,11 +74,11 @@ pub struct ManifestMetadata {
pub categories: Vec<String>,
pub license: Option<String>,
pub license_file: Option<String>,
pub description: Option<String>, // not markdown
pub readme: Option<String>, // file, not contents
pub homepage: Option<String>, // url
pub repository: Option<String>, // url
pub documentation: Option<String>, // url
pub description: Option<String>, // not markdown
pub readme: Option<String>, // file, not contents
pub homepage: Option<String>, // url
pub repository: Option<String>, // url
pub documentation: Option<String>, // url
pub badges: BTreeMap<String, BTreeMap<String, String>>,
pub links: Option<String>,
}
@ -116,10 +116,7 @@ impl LibKind {
pub fn linkable(&self) -> bool {
match *self {
LibKind::Lib |
LibKind::Rlib |
LibKind::Dylib |
LibKind::ProcMacro => true,
LibKind::Lib | LibKind::Rlib | LibKind::Dylib | LibKind::ProcMacro => true,
LibKind::Other(..) => false,
}
}
@ -138,7 +135,8 @@ pub enum TargetKind {
impl ser::Serialize for TargetKind {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer,
where
S: ser::Serializer,
{
use self::TargetKind::*;
match *self {
@ -147,42 +145,31 @@ impl ser::Serialize for TargetKind {
ExampleBin | ExampleLib(_) => vec!["example"],
Test => vec!["test"],
CustomBuild => vec!["custom-build"],
Bench => vec!["bench"]
Bench => vec!["bench"],
}.serialize(s)
}
}
// Note that most of the fields here are skipped when serializing because we
// don't want to export them just yet (becomes a public API of Cargo). Others
// though are definitely needed!
#[derive(Clone, PartialEq, Eq, Debug, Hash, Serialize)]
pub struct Profile {
pub opt_level: String,
#[serde(skip_serializing)]
pub lto: Lto,
#[serde(skip_serializing)]
pub codegen_units: Option<u32>, // None = use rustc default
#[serde(skip_serializing)]
pub rustc_args: Option<Vec<String>>,
#[serde(skip_serializing)]
pub rustdoc_args: Option<Vec<String>>,
#[serde(skip_serializing)] pub lto: Lto,
#[serde(skip_serializing)] pub codegen_units: Option<u32>, // None = use rustc default
#[serde(skip_serializing)] pub rustc_args: Option<Vec<String>>,
#[serde(skip_serializing)] pub rustdoc_args: Option<Vec<String>>,
pub debuginfo: Option<u32>,
pub debug_assertions: bool,
pub overflow_checks: bool,
#[serde(skip_serializing)]
pub rpath: bool,
#[serde(skip_serializing)] pub rpath: bool,
pub test: bool,
#[serde(skip_serializing)]
pub doc: bool,
#[serde(skip_serializing)]
pub run_custom_build: bool,
#[serde(skip_serializing)]
pub check: bool,
#[serde(skip_serializing)]
pub panic: Option<String>,
#[serde(skip_serializing)]
pub incremental: bool,
#[serde(skip_serializing)] pub doc: bool,
#[serde(skip_serializing)] pub run_custom_build: bool,
#[serde(skip_serializing)] pub check: bool,
#[serde(skip_serializing)] pub panic: Option<String>,
#[serde(skip_serializing)] pub incremental: bool,
}
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
@ -261,22 +248,24 @@ impl ser::Serialize for Target {
}
impl Manifest {
pub fn new(summary: Summary,
targets: Vec<Target>,
exclude: Vec<String>,
include: Vec<String>,
links: Option<String>,
metadata: ManifestMetadata,
profiles: Profiles,
publish: Option<Vec<String>>,
publish_lockfile: bool,
replace: Vec<(PackageIdSpec, Dependency)>,
patch: HashMap<Url, Vec<Dependency>>,
workspace: WorkspaceConfig,
features: Features,
epoch: Epoch,
im_a_teapot: Option<bool>,
original: Rc<TomlManifest>) -> Manifest {
pub fn new(
summary: Summary,
targets: Vec<Target>,
exclude: Vec<String>,
include: Vec<String>,
links: Option<String>,
metadata: ManifestMetadata,
profiles: Profiles,
publish: Option<Vec<String>>,
publish_lockfile: bool,
replace: Vec<(PackageIdSpec, Dependency)>,
patch: HashMap<Url, Vec<Dependency>>,
workspace: WorkspaceConfig,
features: Features,
epoch: Epoch,
im_a_teapot: Option<bool>,
original: Rc<TomlManifest>,
) -> Manifest {
Manifest {
summary,
targets,
@ -298,22 +287,54 @@ impl Manifest {
}
}
pub fn dependencies(&self) -> &[Dependency] { self.summary.dependencies() }
pub fn exclude(&self) -> &[String] { &self.exclude }
pub fn include(&self) -> &[String] { &self.include }
pub fn metadata(&self) -> &ManifestMetadata { &self.metadata }
pub fn name(&self) -> InternedString { self.package_id().name() }
pub fn package_id(&self) -> &PackageId { self.summary.package_id() }
pub fn summary(&self) -> &Summary { &self.summary }
pub fn targets(&self) -> &[Target] { &self.targets }
pub fn version(&self) -> &Version { self.package_id().version() }
pub fn warnings(&self) -> &[DelayedWarning] { &self.warnings }
pub fn profiles(&self) -> &Profiles { &self.profiles }
pub fn publish(&self) -> &Option<Vec<String>> { &self.publish }
pub fn publish_lockfile(&self) -> bool { self.publish_lockfile }
pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] { &self.replace }
pub fn original(&self) -> &TomlManifest { &self.original }
pub fn patch(&self) -> &HashMap<Url, Vec<Dependency>> { &self.patch }
pub fn dependencies(&self) -> &[Dependency] {
self.summary.dependencies()
}
pub fn exclude(&self) -> &[String] {
&self.exclude
}
pub fn include(&self) -> &[String] {
&self.include
}
pub fn metadata(&self) -> &ManifestMetadata {
&self.metadata
}
pub fn name(&self) -> InternedString {
self.package_id().name()
}
pub fn package_id(&self) -> &PackageId {
self.summary.package_id()
}
pub fn summary(&self) -> &Summary {
&self.summary
}
pub fn targets(&self) -> &[Target] {
&self.targets
}
pub fn version(&self) -> &Version {
self.package_id().version()
}
pub fn warnings(&self) -> &[DelayedWarning] {
&self.warnings
}
pub fn profiles(&self) -> &Profiles {
&self.profiles
}
pub fn publish(&self) -> &Option<Vec<String>> {
&self.publish
}
pub fn publish_lockfile(&self) -> bool {
self.publish_lockfile
}
pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] {
&self.replace
}
pub fn original(&self) -> &TomlManifest {
&self.original
}
pub fn patch(&self) -> &HashMap<Url, Vec<Dependency>> {
&self.patch
}
pub fn links(&self) -> Option<&str> {
self.links.as_ref().map(|s| &s[..])
}
@ -327,19 +348,24 @@ impl Manifest {
}
pub fn add_warning(&mut self, s: String) {
self.warnings.push(DelayedWarning { message: s, is_critical: false })
self.warnings.push(DelayedWarning {
message: s,
is_critical: false,
})
}
pub fn add_critical_warning(&mut self, s: String) {
self.warnings.push(DelayedWarning { message: s, is_critical: true })
self.warnings.push(DelayedWarning {
message: s,
is_critical: true,
})
}
pub fn set_summary(&mut self, summary: Summary) {
self.summary = summary;
}
pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId)
-> Manifest {
pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) -> Manifest {
Manifest {
summary: self.summary.map_source(to_replace, replace_with),
..self
@ -348,10 +374,14 @@ impl Manifest {
pub fn feature_gate(&self) -> CargoResult<()> {
if self.im_a_teapot.is_some() {
self.features.require(Feature::test_dummy_unstable()).chain_err(|| {
format_err!("the `im-a-teapot` manifest key is unstable and may \
not work properly in England")
})?;
self.features
.require(Feature::test_dummy_unstable())
.chain_err(|| {
format_err!(
"the `im-a-teapot` manifest key is unstable and may \
not work properly in England"
)
})?;
}
Ok(())
@ -372,10 +402,12 @@ impl Manifest {
}
impl VirtualManifest {
pub fn new(replace: Vec<(PackageIdSpec, Dependency)>,
patch: HashMap<Url, Vec<Dependency>>,
workspace: WorkspaceConfig,
profiles: Profiles) -> VirtualManifest {
pub fn new(
replace: Vec<(PackageIdSpec, Dependency)>,
patch: HashMap<Url, Vec<Dependency>>,
workspace: WorkspaceConfig,
profiles: Profiles,
) -> VirtualManifest {
VirtualManifest {
replace,
patch,
@ -418,9 +450,7 @@ impl Target {
}
}
pub fn lib_target(name: &str,
crate_targets: Vec<LibKind>,
src_path: PathBuf) -> Target {
pub fn lib_target(name: &str, crate_targets: Vec<LibKind>, src_path: PathBuf) -> Target {
Target {
kind: TargetKind::Lib(crate_targets),
name: name.to_string(),
@ -430,8 +460,11 @@ impl Target {
}
}
pub fn bin_target(name: &str, src_path: PathBuf,
required_features: Option<Vec<String>>) -> Target {
pub fn bin_target(
name: &str,
src_path: PathBuf,
required_features: Option<Vec<String>>,
) -> Target {
Target {
kind: TargetKind::Bin,
name: name.to_string(),
@ -453,10 +486,12 @@ impl Target {
}
}
pub fn example_target(name: &str,
crate_targets: Vec<LibKind>,
src_path: PathBuf,
required_features: Option<Vec<String>>) -> Target {
pub fn example_target(
name: &str,
crate_targets: Vec<LibKind>,
src_path: PathBuf,
required_features: Option<Vec<String>>,
) -> Target {
let kind = if crate_targets.is_empty() {
TargetKind::ExampleBin
} else {
@ -472,8 +507,11 @@ impl Target {
}
}
pub fn test_target(name: &str, src_path: PathBuf,
required_features: Option<Vec<String>>) -> Target {
pub fn test_target(
name: &str,
src_path: PathBuf,
required_features: Option<Vec<String>>,
) -> Target {
Target {
kind: TargetKind::Test,
name: name.to_string(),
@ -483,8 +521,11 @@ impl Target {
}
}
pub fn bench_target(name: &str, src_path: PathBuf,
required_features: Option<Vec<String>>) -> Target {
pub fn bench_target(
name: &str,
src_path: PathBuf,
required_features: Option<Vec<String>>,
) -> Target {
Target {
kind: TargetKind::Bench,
name: name.to_string(),
@ -494,26 +535,42 @@ impl Target {
}
}
pub fn name(&self) -> &str { &self.name }
pub fn crate_name(&self) -> String { self.name.replace("-", "_") }
pub fn src_path(&self) -> &Path { &self.src_path.path }
pub fn required_features(&self) -> Option<&Vec<String>> { self.required_features.as_ref() }
pub fn kind(&self) -> &TargetKind { &self.kind }
pub fn tested(&self) -> bool { self.tested }
pub fn harness(&self) -> bool { self.harness }
pub fn documented(&self) -> bool { self.doc }
pub fn for_host(&self) -> bool { self.for_host }
pub fn benched(&self) -> bool { self.benched }
pub fn name(&self) -> &str {
&self.name
}
pub fn crate_name(&self) -> String {
self.name.replace("-", "_")
}
pub fn src_path(&self) -> &Path {
&self.src_path.path
}
pub fn required_features(&self) -> Option<&Vec<String>> {
self.required_features.as_ref()
}
pub fn kind(&self) -> &TargetKind {
&self.kind
}
pub fn tested(&self) -> bool {
self.tested
}
pub fn harness(&self) -> bool {
self.harness
}
pub fn documented(&self) -> bool {
self.doc
}
pub fn for_host(&self) -> bool {
self.for_host
}
pub fn benched(&self) -> bool {
self.benched
}
pub fn doctested(&self) -> bool {
self.doctest && match self.kind {
TargetKind::Lib(ref kinds) => {
kinds.iter().any(|k| {
*k == LibKind::Rlib ||
*k == LibKind::Lib ||
*k == LibKind::ProcMacro
})
}
TargetKind::Lib(ref kinds) => kinds
.iter()
.any(|k| *k == LibKind::Rlib || *k == LibKind::Lib || *k == LibKind::ProcMacro),
_ => false,
}
}
@ -525,46 +582,43 @@ impl Target {
pub fn is_lib(&self) -> bool {
match self.kind {
TargetKind::Lib(_) => true,
_ => false
_ => false,
}
}
pub fn is_dylib(&self) -> bool {
match self.kind {
TargetKind::Lib(ref libs) => libs.iter().any(|l| *l == LibKind::Dylib),
_ => false
_ => false,
}
}
pub fn is_cdylib(&self) -> bool {
let libs = match self.kind {
TargetKind::Lib(ref libs) => libs,
_ => return false
_ => return false,
};
libs.iter().any(|l| {
match *l {
LibKind::Other(ref s) => s == "cdylib",
_ => false,
}
libs.iter().any(|l| match *l {
LibKind::Other(ref s) => s == "cdylib",
_ => false,
})
}
pub fn linkable(&self) -> bool {
match self.kind {
TargetKind::Lib(ref kinds) => {
kinds.iter().any(|k| k.linkable())
}
_ => false
TargetKind::Lib(ref kinds) => kinds.iter().any(|k| k.linkable()),
_ => false,
}
}
pub fn is_bin(&self) -> bool { self.kind == TargetKind::Bin }
pub fn is_bin(&self) -> bool {
self.kind == TargetKind::Bin
}
pub fn is_example(&self) -> bool {
match self.kind {
TargetKind::ExampleBin |
TargetKind::ExampleLib(..) => true,
_ => false
TargetKind::ExampleBin | TargetKind::ExampleLib(..) => true,
_ => false,
}
}
@ -572,35 +626,39 @@ impl Target {
// Needed for --all-examples in contexts where only runnable examples make sense
match self.kind {
TargetKind::ExampleBin => true,
_ => false
_ => false,
}
}
pub fn is_test(&self) -> bool { self.kind == TargetKind::Test }
pub fn is_bench(&self) -> bool { self.kind == TargetKind::Bench }
pub fn is_custom_build(&self) -> bool { self.kind == TargetKind::CustomBuild }
pub fn is_test(&self) -> bool {
self.kind == TargetKind::Test
}
pub fn is_bench(&self) -> bool {
self.kind == TargetKind::Bench
}
pub fn is_custom_build(&self) -> bool {
self.kind == TargetKind::CustomBuild
}
/// Returns the arguments suitable for `--crate-type` to pass to rustc.
pub fn rustc_crate_types(&self) -> Vec<&str> {
match self.kind {
TargetKind::Lib(ref kinds) |
TargetKind::ExampleLib(ref kinds) => {
TargetKind::Lib(ref kinds) | TargetKind::ExampleLib(ref kinds) => {
kinds.iter().map(LibKind::crate_type).collect()
}
TargetKind::CustomBuild |
TargetKind::Bench |
TargetKind::Test |
TargetKind::ExampleBin |
TargetKind::Bin => vec!["bin"],
TargetKind::CustomBuild
| TargetKind::Bench
| TargetKind::Test
| TargetKind::ExampleBin
| TargetKind::Bin => vec!["bin"],
}
}
pub fn can_lto(&self) -> bool {
match self.kind {
TargetKind::Lib(ref v) => {
!v.contains(&LibKind::Rlib) &&
!v.contains(&LibKind::Dylib) &&
!v.contains(&LibKind::Lib)
!v.contains(&LibKind::Rlib) && !v.contains(&LibKind::Dylib)
&& !v.contains(&LibKind::Lib)
}
_ => true,
}
@ -639,8 +697,9 @@ impl fmt::Display for Target {
TargetKind::Bin => write!(f, "Target(bin: {})", self.name),
TargetKind::Test => write!(f, "Target(test: {})", self.name),
TargetKind::Bench => write!(f, "Target(bench: {})", self.name),
TargetKind::ExampleBin |
TargetKind::ExampleLib(..) => write!(f, "Target(example: {})", self.name),
TargetKind::ExampleBin | TargetKind::ExampleLib(..) => {
write!(f, "Target(example: {})", self.name)
}
TargetKind::CustomBuild => write!(f, "Target(script)"),
}
}
@ -752,6 +811,5 @@ impl fmt::Display for Profile {
} else {
write!(f, "Profile(build)")
}
}
}
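
Among the `manifest.rs` hunks, `rustc_crate_types` shows the new convention for `|`-joined match patterns: once an arm wraps, each variant gets its own line with a leading pipe. A toy standalone version of that mapping (its `Kind` enum is illustrative, not cargo's `TargetKind`): lib-like targets report their own crate types and everything else compiles as a `bin`:

enum Kind {
    Lib(Vec<&'static str>),
    Bin,
    Test,
}

fn crate_types(kind: &Kind) -> Vec<&'static str> {
    match *kind {
        Kind::Lib(ref kinds) => kinds.clone(),
        // Every non-library target is built as a plain binary.
        Kind::Bin | Kind::Test => vec!["bin"],
    }
}

fn main() {
    assert_eq!(crate_types(&Kind::Lib(vec!["rlib", "dylib"])), ["rlib", "dylib"]);
    assert_eq!(crate_types(&Kind::Bin), ["bin"]);
}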

View File

@ -1,14 +1,14 @@
pub use self::dependency::Dependency;
pub use self::features::{Epoch, Features, Feature, CliUnstable};
pub use self::features::{CliUnstable, Epoch, Feature, Features};
pub use self::manifest::{EitherManifest, VirtualManifest};
pub use self::manifest::{Manifest, Target, TargetKind, Profile, LibKind, Profiles};
pub use self::manifest::{LibKind, Manifest, Profile, Profiles, Target, TargetKind};
pub use self::package::{Package, PackageSet};
pub use self::package_id::PackageId;
pub use self::package_id_spec::PackageIdSpec;
pub use self::registry::Registry;
pub use self::resolver::Resolve;
pub use self::shell::{Shell, Verbosity};
pub use self::source::{Source, SourceId, SourceMap, GitReference};
pub use self::source::{GitReference, Source, SourceId, SourceMap};
pub use self::summary::Summary;
pub use self::workspace::{Members, Workspace, WorkspaceConfig, WorkspaceRootConfig};

View File

@ -1,5 +1,5 @@
use std::cell::{Ref, RefCell};
use std::collections::{HashMap, BTreeMap};
use std::collections::{BTreeMap, HashMap};
use std::fmt;
use std::hash;
use std::path::{Path, PathBuf};
@ -10,10 +10,10 @@ use toml;
use lazycell::LazyCell;
use core::{Dependency, Manifest, PackageId, SourceId, Target};
use core::{Summary, SourceMap};
use core::{SourceMap, Summary};
use core::interning::InternedString;
use ops;
use util::{Config, internal, lev_distance};
use util::{internal, lev_distance, Config};
use util::errors::{CargoResult, CargoResultExt};
/// Information about a package that is available somewhere in the file system.
@ -46,7 +46,8 @@ struct SerializedPackage<'a> {
impl ser::Serialize for Package {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer,
where
S: ser::Serializer,
{
let summary = self.manifest.summary();
let package_id = summary.package_id();
@ -73,8 +74,7 @@ impl ser::Serialize for Package {
impl Package {
/// Create a package from a manifest and its location
pub fn new(manifest: Manifest,
manifest_path: &Path) -> Package {
pub fn new(manifest: Manifest, manifest_path: &Path) -> Package {
Package {
manifest,
manifest_path: manifest_path.to_path_buf(),
@ -90,46 +90,71 @@ impl Package {
}
/// Get the manifest dependencies
pub fn dependencies(&self) -> &[Dependency] { self.manifest.dependencies() }
pub fn dependencies(&self) -> &[Dependency] {
self.manifest.dependencies()
}
/// Get the manifest
pub fn manifest(&self) -> &Manifest { &self.manifest }
pub fn manifest(&self) -> &Manifest {
&self.manifest
}
/// Get the path to the manifest
pub fn manifest_path(&self) -> &Path { &self.manifest_path }
pub fn manifest_path(&self) -> &Path {
&self.manifest_path
}
/// Get the name of the package
pub fn name(&self) -> InternedString { self.package_id().name() }
pub fn name(&self) -> InternedString {
self.package_id().name()
}
/// Get the PackageId object for the package (fully defines a package)
pub fn package_id(&self) -> &PackageId { self.manifest.package_id() }
pub fn package_id(&self) -> &PackageId {
self.manifest.package_id()
}
/// Get the root folder of the package
pub fn root(&self) -> &Path { self.manifest_path.parent().unwrap() }
pub fn root(&self) -> &Path {
self.manifest_path.parent().unwrap()
}
/// Get the summary for the package
pub fn summary(&self) -> &Summary { self.manifest.summary() }
pub fn summary(&self) -> &Summary {
self.manifest.summary()
}
/// Get the targets specified in the manifest
pub fn targets(&self) -> &[Target] { self.manifest.targets() }
pub fn targets(&self) -> &[Target] {
self.manifest.targets()
}
/// Get the current package version
pub fn version(&self) -> &Version { self.package_id().version() }
pub fn version(&self) -> &Version {
self.package_id().version()
}
/// Get the package authors
pub fn authors(&self) -> &Vec<String> { &self.manifest.metadata().authors }
pub fn authors(&self) -> &Vec<String> {
&self.manifest.metadata().authors
}
/// Whether the package is set to publish
pub fn publish(&self) -> &Option<Vec<String>> { self.manifest.publish() }
pub fn publish(&self) -> &Option<Vec<String>> {
self.manifest.publish()
}
/// Whether the package uses a custom build script for any target
pub fn has_custom_build(&self) -> bool {
self.targets().iter().any(|t| t.is_custom_build())
}
pub fn find_closest_target(&self,
target: &str,
is_expected_kind: fn(&Target)-> bool) -> Option<&Target> {
pub fn find_closest_target(
&self,
target: &str,
is_expected_kind: fn(&Target) -> bool,
) -> Option<&Target> {
let targets = self.targets();
let matches = targets.iter().filter(|t| is_expected_kind(t))
.map(|t| (lev_distance(target, t.name()), t))
.filter(|&(d, _)| d < 4);
let matches = targets
.iter()
.filter(|t| is_expected_kind(t))
.map(|t| (lev_distance(target, t.name()), t))
.filter(|&(d, _)| d < 4);
matches.min_by_key(|t| t.0).map(|t| t.1)
}
pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId)
-> Package {
pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) -> Package {
Package {
manifest: self.manifest.map_source(to_replace, replace_with),
manifest_path: self.manifest_path,
@ -139,21 +164,24 @@ impl Package {
pub fn to_registry_toml(&self, config: &Config) -> CargoResult<String> {
let manifest = self.manifest().original().prepare_for_publish(config)?;
let toml = toml::to_string(&manifest)?;
Ok(format!("\
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO\n\
#\n\
# When uploading crates to the registry Cargo will automatically\n\
# \"normalize\" Cargo.toml files for maximal compatibility\n\
# with all versions of Cargo and also rewrite `path` dependencies\n\
# to registry (e.g. crates.io) dependencies\n\
#\n\
# If you believe there's an error in this file please file an\n\
# issue against the rust-lang/cargo repository. If you're\n\
# editing this file be aware that the upstream Cargo.toml\n\
# will likely look very different (and much more reasonable)\n\
\n\
{}\
", toml))
Ok(format!(
"\
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO\n\
#\n\
# When uploading crates to the registry Cargo will automatically\n\
# \"normalize\" Cargo.toml files for maximal compatibility\n\
# with all versions of Cargo and also rewrite `path` dependencies\n\
# to registry (e.g. crates.io) dependencies\n\
#\n\
# If you believe there's an error in this file please file an\n\
# issue against the rust-lang/cargo repository. If you're\n\
# editing this file be aware that the upstream Cargo.toml\n\
# will likely look very different (and much more reasonable)\n\
\n\
{}\
",
toml
))
}
}
@ -183,34 +211,34 @@ pub struct PackageSet<'cfg> {
}
impl<'cfg> PackageSet<'cfg> {
pub fn new(package_ids: &[PackageId],
sources: SourceMap<'cfg>) -> PackageSet<'cfg> {
pub fn new(package_ids: &[PackageId], sources: SourceMap<'cfg>) -> PackageSet<'cfg> {
PackageSet {
packages: package_ids.iter().map(|id| {
(id.clone(), LazyCell::new())
}).collect(),
packages: package_ids
.iter()
.map(|id| (id.clone(), LazyCell::new()))
.collect(),
sources: RefCell::new(sources),
}
}
pub fn package_ids<'a>(&'a self) -> Box<Iterator<Item=&'a PackageId> + 'a> {
pub fn package_ids<'a>(&'a self) -> Box<Iterator<Item = &'a PackageId> + 'a> {
Box::new(self.packages.keys())
}
pub fn get(&self, id: &PackageId) -> CargoResult<&Package> {
let slot = self.packages.get(id).ok_or_else(|| {
internal(format!("couldn't find `{}` in package set", id))
})?;
let slot = self.packages
.get(id)
.ok_or_else(|| internal(format!("couldn't find `{}` in package set", id)))?;
if let Some(pkg) = slot.borrow() {
return Ok(pkg)
return Ok(pkg);
}
let mut sources = self.sources.borrow_mut();
let source = sources.get_mut(id.source_id()).ok_or_else(|| {
internal(format!("couldn't find source for `{}`", id))
})?;
let pkg = source.download(id).chain_err(|| {
format_err!("unable to get packages from source")
})?;
let source = sources
.get_mut(id.source_id())
.ok_or_else(|| internal(format!("couldn't find source for `{}`", id)))?;
let pkg = source
.download(id)
.chain_err(|| format_err!("unable to get packages from source"))?;
assert!(slot.fill(pkg).is_ok());
Ok(slot.borrow().unwrap())
}
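
`find_closest_target` above implements the usual "did you mean" lookup: rank candidates by edit distance, drop anything 4 or more edits away, and take the minimum. A self-contained sketch of that pipeline, with a plain Levenshtein routine standing in for cargo's `lev_distance` helper:

fn edit_distance(a: &str, b: &str) -> usize {
    // Classic one-row dynamic programming formulation of Levenshtein distance.
    let b: Vec<char> = b.chars().collect();
    let mut prev: Vec<usize> = (0..b.len() + 1).collect();
    for (i, ca) in a.chars().enumerate() {
        let mut cur = vec![i + 1];
        for (j, &cb) in b.iter().enumerate() {
            let cost = if ca == cb { 0 } else { 1 };
            cur.push((prev[j] + cost).min(prev[j + 1] + 1).min(cur[j] + 1));
        }
        prev = cur;
    }
    prev[b.len()]
}

fn find_closest<'a>(wanted: &str, names: &[&'a str]) -> Option<&'a str> {
    names.iter()
        .map(|n| (edit_distance(wanted, n), *n))
        .filter(|&(d, _)| d < 4) // same cutoff the code above uses
        .min_by_key(|&(d, _)| d)
        .map(|(_, n)| n)
}

fn main() {
    let targets = ["build", "bench", "test"];
    assert_eq!(find_closest("bulid", &targets), Some("build"));
    assert_eq!(find_closest("zzzzzzzz", &targets), None);
}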

View File

@ -28,18 +28,22 @@ struct PackageIdInner {
impl ser::Serialize for PackageId {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer
where
S: ser::Serializer,
{
s.collect_str(&format_args!("{} {} ({})",
self.inner.name,
self.inner.version,
self.inner.source_id.to_url()))
s.collect_str(&format_args!(
"{} {} ({})",
self.inner.name,
self.inner.version,
self.inner.source_id.to_url()
))
}
}
impl<'de> de::Deserialize<'de> for PackageId {
fn deserialize<D>(d: D) -> Result<PackageId, D::Error>
where D: de::Deserializer<'de>
where
D: de::Deserializer<'de>,
{
let string = String::deserialize(d)?;
let mut s = string.splitn(3, ' ');
@ -48,8 +52,7 @@ impl<'de> de::Deserialize<'de> for PackageId {
Some(s) => s,
None => return Err(de::Error::custom("invalid serialized PackageId")),
};
let version = semver::Version::parse(version)
.map_err(de::Error::custom)?;
let version = semver::Version::parse(version).map_err(de::Error::custom)?;
let url = match s.next() {
Some(s) => s,
None => return Err(de::Error::custom("invalid serialized PackageId")),
@ -57,8 +60,7 @@ impl<'de> de::Deserialize<'de> for PackageId {
let url = if url.starts_with('(') && url.ends_with(')') {
&url[1..url.len() - 1]
} else {
return Err(de::Error::custom("invalid serialized PackageId"))
return Err(de::Error::custom("invalid serialized PackageId"));
};
let source_id = SourceId::from_url(url).map_err(de::Error::custom)?;
@ -98,8 +100,7 @@ impl Ord for PackageId {
}
impl PackageId {
pub fn new<T: ToSemver>(name: &str, version: T,
sid: &SourceId) -> CargoResult<PackageId> {
pub fn new<T: ToSemver>(name: &str, version: T, sid: &SourceId) -> CargoResult<PackageId> {
let v = version.to_semver()?;
Ok(PackageId {
inner: Arc::new(PackageIdInner {
@ -110,9 +111,15 @@ impl PackageId {
})
}
pub fn name(&self) -> InternedString { self.inner.name }
pub fn version(&self) -> &semver::Version { &self.inner.version }
pub fn source_id(&self) -> &SourceId { &self.inner.source_id }
pub fn name(&self) -> InternedString {
self.inner.name
}
pub fn version(&self) -> &semver::Version {
&self.inner.version
}
pub fn source_id(&self) -> &SourceId {
&self.inner.source_id
}
pub fn with_precise(&self, precise: Option<String>) -> PackageId {
PackageId {
@ -164,10 +171,10 @@ impl fmt::Display for PackageId {
impl fmt::Debug for PackageId {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
f.debug_struct("PackageId")
.field("name", &self.inner.name)
.field("version", &self.inner.version.to_string())
.field("source", &self.inner.source_id.to_string())
.finish()
.field("name", &self.inner.name)
.field("version", &self.inner.version.to_string())
.field("source", &self.inner.source_id.to_string())
.finish()
}
}
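
The serialize/deserialize pair above round-trips a package id through the display form `<name> <version> (<source-url>)`, splitting on at most two spaces so the URL itself may contain anything. A standalone sketch of the parsing side (the id string below is illustrative):

fn parse_package_id(s: &str) -> Option<(&str, &str, &str)> {
    let mut parts = s.splitn(3, ' ');
    let name = parts.next()?;
    let version = parts.next()?;
    let url = parts.next()?;
    // The third field must be parenthesized, mirroring the check above.
    if url.starts_with('(') && url.ends_with(')') {
        Some((name, version, &url[1..url.len() - 1]))
    } else {
        None
    }
}

fn main() {
    let s = "foo 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)";
    assert_eq!(
        parse_package_id(s),
        Some(("foo", "1.2.3", "registry+https://github.com/rust-lang/crates.io-index"))
    );
    assert_eq!(parse_package_id("foo 1.2.3"), None);
}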

View File

@ -5,7 +5,7 @@ use semver::Version;
use url::Url;
use core::PackageId;
use util::{ToUrl, ToSemver};
use util::{ToSemver, ToUrl};
use util::errors::{CargoResult, CargoResultExt};
#[derive(Clone, PartialEq, Eq, Debug)]
@ -46,11 +46,11 @@ impl PackageIdSpec {
}
pub fn query_str<'a, I>(spec: &str, i: I) -> CargoResult<&'a PackageId>
where I: IntoIterator<Item=&'a PackageId>
where
I: IntoIterator<Item = &'a PackageId>,
{
let spec = PackageIdSpec::parse(spec).chain_err(|| {
format_err!("invalid package id specification: `{}`", spec)
})?;
let spec = PackageIdSpec::parse(spec)
.chain_err(|| format_err!("invalid package id specification: `{}`", spec))?;
spec.query(i)
}
@ -69,12 +69,14 @@ impl PackageIdSpec {
let frag = url.fragment().map(|s| s.to_owned());
url.set_fragment(None);
let (name, version) = {
let mut path = url.path_segments().ok_or_else(|| {
format_err!("pkgid urls must have a path: {}", url)
})?;
let mut path = url.path_segments()
.ok_or_else(|| format_err!("pkgid urls must have a path: {}", url))?;
let path_name = path.next_back().ok_or_else(|| {
format_err!("pkgid urls must have at least one path \
component: {}", url)
format_err!(
"pkgid urls must have at least one path \
component: {}",
url
)
})?;
match frag {
Some(fragment) => {
@ -86,8 +88,7 @@ impl PackageIdSpec {
(name_or_version.to_string(), Some(version))
}
None => {
if name_or_version.chars().next().unwrap()
.is_alphabetic() {
if name_or_version.chars().next().unwrap().is_alphabetic() {
(name_or_version.to_string(), None)
} else {
let version = name_or_version.to_semver()?;
@ -106,16 +107,24 @@ impl PackageIdSpec {
})
}
pub fn name(&self) -> &str { &self.name }
pub fn version(&self) -> Option<&Version> { self.version.as_ref() }
pub fn url(&self) -> Option<&Url> { self.url.as_ref() }
pub fn name(&self) -> &str {
&self.name
}
pub fn version(&self) -> Option<&Version> {
self.version.as_ref()
}
pub fn url(&self) -> Option<&Url> {
self.url.as_ref()
}
pub fn set_url(&mut self, url: Url) {
self.url = Some(url);
}
pub fn matches(&self, package_id: &PackageId) -> bool {
if self.name() != &*package_id.name() { return false }
if self.name() != &*package_id.name() {
return false;
}
if let Some(ref v) = self.version {
if v != package_id.version() {
@ -125,50 +134,53 @@ impl PackageIdSpec {
match self.url {
Some(ref u) => u == package_id.source_id().url(),
None => true
None => true,
}
}
pub fn query<'a, I>(&self, i: I) -> CargoResult<&'a PackageId>
where I: IntoIterator<Item=&'a PackageId>
where
I: IntoIterator<Item = &'a PackageId>,
{
let mut ids = i.into_iter().filter(|p| self.matches(*p));
let ret = match ids.next() {
Some(id) => id,
None => bail!("package id specification `{}` \
matched no packages", self),
None => bail!(
"package id specification `{}` \
matched no packages",
self
),
};
return match ids.next() {
Some(other) => {
let mut msg = format!("There are multiple `{}` packages in \
your project, and the specification \
`{}` is ambiguous.\n\
Please re-run this command \
with `-p <spec>` where `<spec>` is one \
of the following:",
self.name(), self);
let mut msg = format!(
"There are multiple `{}` packages in \
your project, and the specification \
`{}` is ambiguous.\n\
Please re-run this command \
with `-p <spec>` where `<spec>` is one \
of the following:",
self.name(),
self
);
let mut vec = vec![ret, other];
vec.extend(ids);
minimize(&mut msg, &vec, self);
Err(format_err!("{}", msg))
}
None => Ok(ret)
None => Ok(ret),
};
fn minimize(msg: &mut String,
ids: &[&PackageId],
spec: &PackageIdSpec) {
fn minimize(msg: &mut String, ids: &[&PackageId], spec: &PackageIdSpec) {
let mut version_cnt = HashMap::new();
for id in ids {
*version_cnt.entry(id.version()).or_insert(0) += 1;
}
for id in ids {
if version_cnt[id.version()] == 1 {
msg.push_str(&format!("\n {}:{}", spec.name(),
id.version()));
msg.push_str(&format!("\n {}:{}", spec.name(), id.version()));
} else {
msg.push_str(&format!("\n {}",
PackageIdSpec::from_package_id(*id)));
msg.push_str(&format!("\n {}", PackageIdSpec::from_package_id(*id)));
}
}
}
@ -190,10 +202,13 @@ impl fmt::Display for PackageIdSpec {
write!(f, "#{}", self.name)?;
}
}
None => { printed_name = true; write!(f, "{}", self.name)? }
None => {
printed_name = true;
write!(f, "{}", self.name)?
}
}
if let Some(ref v) = self.version {
write!(f, "{}{}", if printed_name {":"} else {"#"}, v)?;
write!(f, "{}{}", if printed_name { ":" } else { "#" }, v)?;
}
Ok(())
}
@ -214,46 +229,70 @@ mod tests {
assert_eq!(parsed.to_string(), spec);
}
ok("http://crates.io/foo#1.2.3", PackageIdSpec {
name: "foo".to_string(),
version: Some(Version::parse("1.2.3").unwrap()),
url: Some(Url::parse("http://crates.io/foo").unwrap()),
});
ok("http://crates.io/foo#bar:1.2.3", PackageIdSpec {
name: "bar".to_string(),
version: Some(Version::parse("1.2.3").unwrap()),
url: Some(Url::parse("http://crates.io/foo").unwrap()),
});
ok("crates.io/foo", PackageIdSpec {
name: "foo".to_string(),
version: None,
url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
});
ok("crates.io/foo#1.2.3", PackageIdSpec {
name: "foo".to_string(),
version: Some(Version::parse("1.2.3").unwrap()),
url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
});
ok("crates.io/foo#bar", PackageIdSpec {
name: "bar".to_string(),
version: None,
url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
});
ok("crates.io/foo#bar:1.2.3", PackageIdSpec {
name: "bar".to_string(),
version: Some(Version::parse("1.2.3").unwrap()),
url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
});
ok("foo", PackageIdSpec {
name: "foo".to_string(),
version: None,
url: None,
});
ok("foo:1.2.3", PackageIdSpec {
name: "foo".to_string(),
version: Some(Version::parse("1.2.3").unwrap()),
url: None,
});
ok(
"http://crates.io/foo#1.2.3",
PackageIdSpec {
name: "foo".to_string(),
version: Some(Version::parse("1.2.3").unwrap()),
url: Some(Url::parse("http://crates.io/foo").unwrap()),
},
);
ok(
"http://crates.io/foo#bar:1.2.3",
PackageIdSpec {
name: "bar".to_string(),
version: Some(Version::parse("1.2.3").unwrap()),
url: Some(Url::parse("http://crates.io/foo").unwrap()),
},
);
ok(
"crates.io/foo",
PackageIdSpec {
name: "foo".to_string(),
version: None,
url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
},
);
ok(
"crates.io/foo#1.2.3",
PackageIdSpec {
name: "foo".to_string(),
version: Some(Version::parse("1.2.3").unwrap()),
url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
},
);
ok(
"crates.io/foo#bar",
PackageIdSpec {
name: "bar".to_string(),
version: None,
url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
},
);
ok(
"crates.io/foo#bar:1.2.3",
PackageIdSpec {
name: "bar".to_string(),
version: Some(Version::parse("1.2.3").unwrap()),
url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
},
);
ok(
"foo",
PackageIdSpec {
name: "foo".to_string(),
version: None,
url: None,
},
);
ok(
"foo:1.2.3",
PackageIdSpec {
name: "foo".to_string(),
version: Some(Version::parse("1.2.3").unwrap()),
url: None,
},
);
}
#[test]
@ -272,9 +311,9 @@ mod tests {
let foo = PackageId::new("foo", "1.2.3", &sid).unwrap();
let bar = PackageId::new("bar", "1.2.3", &sid).unwrap();
assert!( PackageIdSpec::parse("foo").unwrap().matches(&foo));
assert!(PackageIdSpec::parse("foo").unwrap().matches(&foo));
assert!(!PackageIdSpec::parse("foo").unwrap().matches(&bar));
assert!( PackageIdSpec::parse("foo:1.2.3").unwrap().matches(&foo));
assert!(PackageIdSpec::parse("foo:1.2.3").unwrap().matches(&foo));
assert!(!PackageIdSpec::parse("foo:1.2.2").unwrap().matches(&foo));
}
}

View File

@ -3,9 +3,9 @@ use std::collections::HashMap;
use semver::VersionReq;
use url::Url;
use core::{Source, SourceId, SourceMap, Summary, Dependency, PackageId};
use core::{Dependency, PackageId, Source, SourceId, SourceMap, Summary};
use core::PackageSet;
use util::{Config, profile};
use util::{profile, Config};
use util::errors::{CargoResult, CargoResultExt};
use sources::config::SourceConfigMap;
@ -14,9 +14,7 @@ use sources::config::SourceConfigMap;
/// See also `core::Source`.
pub trait Registry {
/// Attempt to find the packages that match a dependency request.
fn query(&mut self,
dep: &Dependency,
f: &mut FnMut(Summary)) -> CargoResult<()>;
fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()>;
fn query_vec(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
let mut ret = Vec::new();
@ -34,9 +32,7 @@ pub trait Registry {
}
impl<'a, T: ?Sized + Registry + 'a> Registry for Box<T> {
fn query(&mut self,
dep: &Dependency,
f: &mut FnMut(Summary)) -> CargoResult<()> {
fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
(**self).query(dep, f)
}
@ -130,14 +126,14 @@ impl<'cfg> PackageRegistry<'cfg> {
// slightly different precise version listed.
Some(&(_, Kind::Locked)) => {
debug!("load/locked {}", namespace);
return Ok(())
return Ok(());
}
// If the previous source was not a precise source, then we can be
// sure that it's already been updated if we've already loaded it.
Some(&(ref previous, _)) if previous.precise().is_none() => {
debug!("load/precise {}", namespace);
return Ok(())
return Ok(());
}
// If the previous source has the same precise version as we do,
@ -146,7 +142,7 @@ impl<'cfg> PackageRegistry<'cfg> {
Some(&(ref previous, _)) => {
if previous.precise() == namespace.precise() {
debug!("load/match {}", namespace);
return Ok(())
return Ok(());
}
debug!("load/mismatch {}", namespace);
}
@ -186,10 +182,12 @@ impl<'cfg> PackageRegistry<'cfg> {
for dep in deps.iter() {
trace!("\t-> {}", dep);
}
let sub_map = self.locked.entry(id.source_id().clone())
.or_insert_with(HashMap::new);
let sub_vec = sub_map.entry(id.name().to_string())
.or_insert_with(Vec::new);
let sub_map = self.locked
.entry(id.source_id().clone())
.or_insert_with(HashMap::new);
let sub_vec = sub_map
.entry(id.name().to_string())
.or_insert_with(Vec::new);
sub_vec.push((id, deps));
}
@ -219,53 +217,65 @@ impl<'cfg> PackageRegistry<'cfg> {
// Remember that each dependency listed in `[patch]` has to resolve to
// precisely one package, so that's why we're just creating a flat list
// of summaries which should be the same length as `deps` above.
let unlocked_summaries = deps.iter().map(|dep| {
debug!("registring a patch for `{}` with `{}`",
url,
dep.name());
let unlocked_summaries = deps.iter()
.map(|dep| {
debug!("registring a patch for `{}` with `{}`", url, dep.name());
// Go straight to the source for resolving `dep`. Load it as we
// normally would and then ask it directly for the list of summaries
// corresponding to this `dep`.
self.ensure_loaded(dep.source_id(), Kind::Normal).chain_err(|| {
format_err!("failed to load source for a dependency \
on `{}`", dep.name())
})?;
// Go straight to the source for resolving `dep`. Load it as we
// normally would and then ask it directly for the list of summaries
// corresponding to this `dep`.
self.ensure_loaded(dep.source_id(), Kind::Normal)
.chain_err(|| {
format_err!(
"failed to load source for a dependency \
on `{}`",
dep.name()
)
})?;
let mut summaries = self.sources.get_mut(dep.source_id())
.expect("loaded source not present")
.query_vec(dep)?
.into_iter();
let mut summaries = self.sources
.get_mut(dep.source_id())
.expect("loaded source not present")
.query_vec(dep)?
.into_iter();
let summary = match summaries.next() {
Some(summary) => summary,
None => {
bail!("patch for `{}` in `{}` did not resolve to any crates. If this is \
unexpected, you may wish to consult: \
https://github.com/rust-lang/cargo/issues/4678",
dep.name(), url)
let summary = match summaries.next() {
Some(summary) => summary,
None => bail!(
"patch for `{}` in `{}` did not resolve to any crates. If this is \
unexpected, you may wish to consult: \
https://github.com/rust-lang/cargo/issues/4678",
dep.name(),
url
),
};
if summaries.next().is_some() {
bail!(
"patch for `{}` in `{}` resolved to more than one candidate",
dep.name(),
url
)
}
};
if summaries.next().is_some() {
bail!("patch for `{}` in `{}` resolved to more than one candidate",
dep.name(), url)
}
if summary.package_id().source_id().url() == url {
bail!("patch for `{}` in `{}` points to the same source, but \
patches must point to different sources",
dep.name(), url);
}
Ok(summary)
}).collect::<CargoResult<Vec<_>>>().chain_err(|| {
format_err!("failed to resolve patches for `{}`", url)
})?;
if summary.package_id().source_id().url() == url {
bail!(
"patch for `{}` in `{}` points to the same source, but \
patches must point to different sources",
dep.name(),
url
);
}
Ok(summary)
})
.collect::<CargoResult<Vec<_>>>()
.chain_err(|| format_err!("failed to resolve patches for `{}`", url))?;
// Note that we do not use `lock` here to lock summaries! That step
// happens later once `lock_patches` is invoked. In the meantime though
// we want to fill in the `patches_available` map (later used in the
// `lock` method) and otherwise store the unlocked summaries in
// `patches` to get locked in a future call to `lock_patches`.
let ids = unlocked_summaries.iter()
let ids = unlocked_summaries
.iter()
.map(|s| s.package_id())
.cloned()
.collect();
@ -309,18 +319,18 @@ impl<'cfg> PackageRegistry<'cfg> {
// Ensure the source has fetched all necessary remote data.
let _p = profile::start(format!("updating: {}", source_id));
self.sources.get_mut(source_id).unwrap().update()
})().chain_err(|| format_err!("Unable to update {}", source_id))?;
})()
.chain_err(|| format_err!("Unable to update {}", source_id))?;
Ok(())
}
fn query_overrides(&mut self, dep: &Dependency)
-> CargoResult<Option<Summary>> {
fn query_overrides(&mut self, dep: &Dependency) -> CargoResult<Option<Summary>> {
for s in self.overrides.iter() {
let src = self.sources.get_mut(s).unwrap();
let dep = Dependency::new_override(&*dep.name(), s);
let mut results = src.query_vec(&dep)?;
if !results.is_empty() {
return Ok(Some(results.remove(0)))
return Ok(Some(results.remove(0)));
}
}
Ok(None)
@ -348,9 +358,11 @@ impl<'cfg> PackageRegistry<'cfg> {
lock(&self.locked, &self.patches_available, summary)
}
fn warn_bad_override(&self,
override_summary: &Summary,
real_summary: &Summary) -> CargoResult<()> {
fn warn_bad_override(
&self,
override_summary: &Summary,
real_summary: &Summary,
) -> CargoResult<()> {
let mut real_deps = real_summary.dependencies().iter().collect::<Vec<_>>();
let boilerplate = "\
@ -369,24 +381,34 @@ http://doc.crates.io/specifying-dependencies.html#overriding-dependencies
for dep in override_summary.dependencies() {
if let Some(i) = real_deps.iter().position(|d| dep == *d) {
real_deps.remove(i);
continue
continue;
}
let msg = format!("\
path override for crate `{}` has altered the original list of\n\
dependencies; the dependency on `{}` was either added or\n\
modified to not match the previously resolved version\n\n\
{}", override_summary.package_id().name(), dep.name(), boilerplate);
let msg = format!(
"\
path override for crate `{}` has altered the original list of\n\
dependencies; the dependency on `{}` was either added or\n\
modified to not match the previously resolved version\n\n\
{}",
override_summary.package_id().name(),
dep.name(),
boilerplate
);
self.source_config.config().shell().warn(&msg)?;
return Ok(())
return Ok(());
}
if let Some(id) = real_deps.get(0) {
let msg = format!("\
let msg = format!(
"\
path override for crate `{}` has altered the original list of
dependencies; the dependency on `{}` was removed\n\n
{}", override_summary.package_id().name(), id.name(), boilerplate);
{}",
override_summary.package_id().name(),
id.name(),
boilerplate
);
self.source_config.config().shell().warn(&msg)?;
return Ok(())
return Ok(());
}
Ok(())
@ -394,9 +416,7 @@ http://doc.crates.io/specifying-dependencies.html#overriding-dependencies
}
impl<'cfg> Registry for PackageRegistry<'cfg> {
fn query(&mut self,
dep: &Dependency,
f: &mut FnMut(Summary)) -> CargoResult<()> {
fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
assert!(self.patches_locked);
let (override_summary, n, to_warn) = {
// Look for an override and get ready to query the real source.
@ -411,9 +431,12 @@ impl<'cfg> Registry for PackageRegistry<'cfg> {
// what we really care about is the name/version match.
let mut patches = Vec::<Summary>::new();
if let Some(extra) = self.patches.get(dep.source_id().url()) {
patches.extend(extra.iter().filter(|s| {
dep.matches_ignoring_source(s)
}).cloned());
patches.extend(
extra
.iter()
.filter(|s| dep.matches_ignoring_source(s))
.cloned(),
);
}
// A crucial feature of the `[patch]` feature is that we *don't*
@ -427,24 +450,31 @@ impl<'cfg> Registry for PackageRegistry<'cfg> {
Some(summary) => (summary, 1, Some(patch)),
None => {
f(patch);
return Ok(())
return Ok(());
}
}
} else {
if !patches.is_empty() {
debug!("found {} patches with an unlocked dep on `{}` at {} \
with `{}`, \
looking at sources", patches.len(),
dep.name(),
dep.source_id(),
dep.version_req());
debug!(
"found {} patches with an unlocked dep on `{}` at {} \
with `{}`, \
looking at sources",
patches.len(),
dep.name(),
dep.source_id(),
dep.version_req()
);
}
// Ensure the requested source_id is loaded
self.ensure_loaded(dep.source_id(), Kind::Normal).chain_err(|| {
format_err!("failed to load source for a dependency \
on `{}`", dep.name())
})?;
self.ensure_loaded(dep.source_id(), Kind::Normal)
.chain_err(|| {
format_err!(
"failed to load source for a dependency \
on `{}`",
dep.name()
)
})?;
let source = self.sources.get_mut(dep.source_id());
match (override_summary, source) {
@ -471,11 +501,11 @@ impl<'cfg> Registry for PackageRegistry<'cfg> {
for patch in patches.iter() {
let patch = patch.package_id().version();
if summary.package_id().version() == patch {
return
return;
}
}
f(lock(locked, all_patches, summary))
})
});
}
// If we have an override summary then we query the source
@ -515,14 +545,11 @@ impl<'cfg> Registry for PackageRegistry<'cfg> {
}
}
fn lock(locked: &LockedMap,
patches: &HashMap<Url, Vec<PackageId>>,
summary: Summary) -> Summary {
let pair = locked.get(summary.source_id()).and_then(|map| {
map.get(&*summary.name())
}).and_then(|vec| {
vec.iter().find(|&&(ref id, _)| id == summary.package_id())
});
fn lock(locked: &LockedMap, patches: &HashMap<Url, Vec<PackageId>>, summary: Summary) -> Summary {
let pair = locked
.get(summary.source_id())
.and_then(|map| map.get(&*summary.name()))
.and_then(|vec| vec.iter().find(|&&(ref id, _)| id == summary.package_id()));
trace!("locking summary of {}", summary.package_id());
@ -532,8 +559,7 @@ fn lock(locked: &LockedMap,
None => summary,
};
summary.map_dependencies(|dep| {
trace!("\t{}/{}/{}", dep.name(), dep.version_req(),
dep.source_id());
trace!("\t{}/{}/{}", dep.name(), dep.version_req(), dep.source_id());
// If we've got a known set of overrides for this summary, then
// one of a few cases can arise:
@ -560,23 +586,22 @@ fn lock(locked: &LockedMap,
trace!("\tfirst hit on {}", locked);
let mut dep = dep.clone();
dep.lock_to(locked);
return dep
return dep;
}
}
// If this dependency did not have a locked version, then we query
// all known locked packages to see if they match this dependency.
// If anything does then we lock it to that and move on.
let v = locked.get(dep.source_id()).and_then(|map| {
map.get(&*dep.name())
}).and_then(|vec| {
vec.iter().find(|&&(ref id, _)| dep.matches_id(id))
});
let v = locked
.get(dep.source_id())
.and_then(|map| map.get(&*dep.name()))
.and_then(|vec| vec.iter().find(|&&(ref id, _)| dep.matches_id(id)));
if let Some(&(ref id, _)) = v {
trace!("\tsecond hit on {}", id);
let mut dep = dep.clone();
dep.lock_to(id);
return dep
return dep;
}
// Finally we check to see if any registered patches correspond to
@ -584,26 +609,25 @@ fn lock(locked: &LockedMap,
let v = patches.get(dep.source_id().url()).map(|vec| {
let dep2 = dep.clone();
let mut iter = vec.iter().filter(move |p| {
dep2.name() == p.name() &&
dep2.version_req().matches(p.version())
dep2.name() == p.name() && dep2.version_req().matches(p.version())
});
(iter.next(), iter)
});
if let Some((Some(patch_id), mut remaining)) = v {
assert!(remaining.next().is_none());
let patch_source = patch_id.source_id();
let patch_locked = locked.get(patch_source).and_then(|m| {
m.get(&*patch_id.name())
}).map(|list| {
list.iter().any(|&(ref id, _)| id == patch_id)
}).unwrap_or(false);
let patch_locked = locked
.get(patch_source)
.and_then(|m| m.get(&*patch_id.name()))
.map(|list| list.iter().any(|&(ref id, _)| id == patch_id))
.unwrap_or(false);
if patch_locked {
trace!("\tthird hit on {}", patch_id);
let req = VersionReq::exact(patch_id.version());
let mut dep = dep.clone();
dep.set_version_req(req);
return dep
return dep;
}
}
@ -614,17 +638,20 @@ fn lock(locked: &LockedMap,
#[cfg(test)]
pub mod test {
use core::{Summary, Registry, Dependency};
use core::{Dependency, Registry, Summary};
use util::CargoResult;
pub struct RegistryBuilder {
summaries: Vec<Summary>,
overrides: Vec<Summary>
overrides: Vec<Summary>,
}
impl RegistryBuilder {
pub fn new() -> RegistryBuilder {
RegistryBuilder { summaries: vec![], overrides: vec![] }
RegistryBuilder {
summaries: vec![],
overrides: vec![],
}
}
pub fn summary(mut self, summary: Summary) -> RegistryBuilder {
@ -648,7 +675,8 @@ pub mod test {
}
fn query_overrides(&self, dep: &Dependency) -> Vec<Summary> {
self.overrides.iter()
self.overrides
.iter()
.filter(|s| s.name() == dep.name())
.map(|s| s.clone())
.collect()
@ -656,9 +684,7 @@ pub mod test {
}
impl Registry for RegistryBuilder {
fn query(&mut self,
dep: &Dependency,
f: &mut FnMut(Summary)) -> CargoResult<()> {
fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
debug!("querying; dep={:?}", dep);
let overrides = self.query_overrides(dep);
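
`query_overrides` above returns the first match produced by any override source, in registration order, so earlier overrides shadow later ones. A sketch of that first-hit-wins walk, shrinking each source to a name-to-summary map (cargo queries real `Source` objects and takes the first `Summary`):

use std::collections::HashMap;

fn query_overrides<'a>(
    overrides: &[&'a HashMap<&'a str, &'a str>],
    name: &str,
) -> Option<&'a str> {
    for source in overrides {
        if let Some(summary) = source.get(name) {
            return Some(*summary); // first source wins; later ones are ignored
        }
    }
    None
}

fn main() {
    let mut a = HashMap::new();
    a.insert("foo", "foo 1.0.0 (override A)");
    let mut b = HashMap::new();
    b.insert("foo", "foo 2.0.0 (override B)");
    b.insert("bar", "bar 0.3.0 (override B)");
    assert_eq!(query_overrides(&[&a, &b], "foo"), Some("foo 1.0.0 (override A)"));
    assert_eq!(query_overrides(&[&a, &b], "bar"), Some("bar 0.3.0 (override B)"));
}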

View File

@ -1,13 +1,13 @@
use std::collections::{HashMap, HashSet, BTreeMap};
use std::collections::{BTreeMap, HashMap, HashSet};
use std::fmt;
use std::str::FromStr;
use serde::ser;
use serde::de;
use core::{Package, PackageId, SourceId, Workspace, Dependency};
use util::{Graph, Config, internal};
use util::errors::{CargoResult, CargoResultExt, CargoError};
use core::{Dependency, Package, PackageId, SourceId, Workspace};
use util::{internal, Config, Graph};
use util::errors::{CargoError, CargoResult, CargoResultExt};
use super::Resolve;
@ -18,8 +18,7 @@ pub struct EncodableResolve {
root: Option<EncodableDependency>,
metadata: Option<Metadata>,
#[serde(default, skip_serializing_if = "Patch::is_empty")]
patch: Patch,
#[serde(default, skip_serializing_if = "Patch::is_empty")] patch: Patch,
}
#[derive(Serialize, Deserialize, Debug, Default)]
@ -54,19 +53,19 @@ impl EncodableResolve {
};
if !all_pkgs.insert(enc_id.clone()) {
return Err(internal(format!("package `{}` is specified twice in the lockfile",
pkg.name)));
return Err(internal(format!(
"package `{}` is specified twice in the lockfile",
pkg.name
)));
}
let id = match pkg.source.as_ref().or_else(|| path_deps.get(&pkg.name)) {
// We failed to find a local package in the workspace.
// It must have been removed and should be ignored.
None => {
debug!("path dependency now missing {} v{}",
pkg.name,
pkg.version);
continue
debug!("path dependency now missing {} v{}", pkg.name, pkg.version);
continue;
}
Some(source) => PackageId::new(&pkg.name, &pkg.version, source)?
Some(source) => PackageId::new(&pkg.name, &pkg.version, source)?,
};
assert!(live_pkgs.insert(enc_id, (id, pkg)).is_none())
@ -82,9 +81,12 @@ impl EncodableResolve {
// no longer a member of the workspace.
Ok(None)
} else {
Err(internal(format!("package `{}` is specified as a dependency, \
but is missing from the package list", enc_id)))
}
Err(internal(format!(
"package `{}` is specified as a dependency, \
but is missing from the package list",
enc_id
)))
},
}
};
@ -98,7 +100,7 @@ impl EncodableResolve {
for &(ref id, pkg) in live_pkgs.values() {
let deps = match pkg.dependencies {
Some(ref deps) => deps,
None => continue
None => continue,
};
for edge in deps.iter() {
@ -146,9 +148,8 @@ impl EncodableResolve {
for (k, v) in metadata.iter().filter(|p| p.0.starts_with(prefix)) {
to_remove.push(k.to_string());
let k = &k[prefix.len()..];
let enc_id: EncodablePackageId = k.parse().chain_err(|| {
internal("invalid encoding of checksum in lockfile")
})?;
let enc_id: EncodablePackageId = k.parse()
.chain_err(|| internal("invalid encoding of checksum in lockfile"))?;
let id = match lookup_id(&enc_id) {
Ok(Some(id)) => id,
_ => continue,
@ -192,21 +193,23 @@ fn build_path_deps(ws: &Workspace) -> HashMap<String, SourceId> {
// such as `cargo install` with a lock file from a remote dependency. In
// that case we don't need to fixup any path dependencies (as they're not
// actually path dependencies any more), so we ignore them.
let members = ws.members().filter(|p| {
p.package_id().source_id().is_path()
}).collect::<Vec<_>>();
let members = ws.members()
.filter(|p| p.package_id().source_id().is_path())
.collect::<Vec<_>>();
let mut ret = HashMap::new();
let mut visited = HashSet::new();
for member in members.iter() {
ret.insert(member.package_id().name().to_string(),
member.package_id().source_id().clone());
ret.insert(
member.package_id().name().to_string(),
member.package_id().source_id().clone(),
);
visited.insert(member.package_id().source_id().clone());
}
for member in members.iter() {
build_pkg(member, ws.config(), &mut ret, &mut visited);
}
for deps in ws.root_patch().values() {
for dep in deps {
build_dep(dep, ws.config(), &mut ret, &mut visited);
}
@ -217,22 +220,26 @@ fn build_path_deps(ws: &Workspace) -> HashMap<String, SourceId> {
return ret;
fn build_pkg(pkg: &Package,
config: &Config,
ret: &mut HashMap<String, SourceId>,
visited: &mut HashSet<SourceId>) {
fn build_pkg(
pkg: &Package,
config: &Config,
ret: &mut HashMap<String, SourceId>,
visited: &mut HashSet<SourceId>,
) {
for dep in pkg.dependencies() {
build_dep(dep, config, ret, visited);
}
}
fn build_dep(dep: &Dependency,
config: &Config,
ret: &mut HashMap<String, SourceId>,
visited: &mut HashSet<SourceId>) {
fn build_dep(
dep: &Dependency,
config: &Config,
ret: &mut HashMap<String, SourceId>,
visited: &mut HashSet<SourceId>,
) {
let id = dep.source_id();
if visited.contains(id) || !id.is_path() {
return
return;
}
let path = match id.url().to_file_path() {
Ok(p) => p.join("Cargo.toml"),
@ -242,8 +249,7 @@ fn build_path_deps(ws: &Workspace) -> HashMap<String, SourceId> {
Ok(p) => p,
Err(_) => return,
};
ret.insert(pkg.name().to_string(),
pkg.package_id().source_id().clone());
ret.insert(pkg.name().to_string(), pkg.package_id().source_id().clone());
visited.insert(pkg.package_id().source_id().clone());
build_pkg(&pkg, config, ret, visited);
}
@ -268,7 +274,7 @@ pub struct EncodableDependency {
pub struct EncodablePackageId {
name: String,
version: String,
source: Option<SourceId>
source: Option<SourceId>,
}
impl fmt::Display for EncodablePackageId {
@ -287,9 +293,8 @@ impl FromStr for EncodablePackageId {
fn from_str(s: &str) -> CargoResult<EncodablePackageId> {
let mut s = s.splitn(3, ' ');
let name = s.next().unwrap();
let version = s.next().ok_or_else(|| {
internal("invalid serialized PackageId")
})?;
let version = s.next()
.ok_or_else(|| internal("invalid serialized PackageId"))?;
let source_id = match s.next() {
Some(s) => {
if s.starts_with('(') && s.ends_with(')') {
@ -304,14 +309,15 @@ impl FromStr for EncodablePackageId {
Ok(EncodablePackageId {
name: name.to_string(),
version: version.to_string(),
source: source_id
source: source_id,
})
}
}
impl ser::Serialize for EncodablePackageId {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer,
where
S: ser::Serializer,
{
s.collect_str(self)
}
@ -319,11 +325,13 @@ impl ser::Serialize for EncodablePackageId {
impl<'de> de::Deserialize<'de> for EncodablePackageId {
fn deserialize<D>(d: D) -> Result<EncodablePackageId, D::Error>
where D: de::Deserializer<'de>,
where
D: de::Deserializer<'de>,
{
String::deserialize(d).and_then(|string| {
string.parse::<EncodablePackageId>()
.map_err(de::Error::custom)
string
.parse::<EncodablePackageId>()
.map_err(de::Error::custom)
})
}
}
@ -335,14 +343,15 @@ pub struct WorkspaceResolve<'a, 'cfg: 'a> {
impl<'a, 'cfg> ser::Serialize for WorkspaceResolve<'a, 'cfg> {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer,
where
S: ser::Serializer,
{
let mut ids: Vec<&PackageId> = self.resolve.graph.iter().collect();
ids.sort();
let encodable = ids.iter().filter_map(|&id| {
Some(encodable_resolve_node(id, self.resolve))
}).collect::<Vec<_>>();
let encodable = ids.iter()
.filter_map(|&id| Some(encodable_resolve_node(id, self.resolve)))
.collect::<Vec<_>>();
let mut metadata = self.resolve.metadata.clone();
@ -352,22 +361,27 @@ impl<'a, 'cfg> ser::Serialize for WorkspaceResolve<'a, 'cfg> {
None => "<none>",
};
let id = encodable_package_id(id);
metadata.insert(format!("checksum {}", id.to_string()),
checksum.to_string());
metadata.insert(format!("checksum {}", id.to_string()), checksum.to_string());
}
let metadata = if metadata.is_empty() { None } else { Some(metadata) };
let metadata = if metadata.is_empty() {
None
} else {
Some(metadata)
};
let patch = Patch {
unused: self.resolve.unused_patches().iter().map(|id| {
EncodableDependency {
unused: self.resolve
.unused_patches()
.iter()
.map(|id| EncodableDependency {
name: id.name().to_string(),
version: id.version().to_string(),
source: encode_source(id.source_id()),
dependencies: None,
replace: None,
}
}).collect(),
})
.collect(),
};
EncodableResolve {
package: Some(encodable),
@ -378,17 +392,17 @@ impl<'a, 'cfg> ser::Serialize for WorkspaceResolve<'a, 'cfg> {
}
}
fn encodable_resolve_node(id: &PackageId, resolve: &Resolve)
-> EncodableDependency {
fn encodable_resolve_node(id: &PackageId, resolve: &Resolve) -> EncodableDependency {
let (replace, deps) = match resolve.replacement(id) {
Some(id) => {
(Some(encodable_package_id(id)), None)
}
Some(id) => (Some(encodable_package_id(id)), None),
None => {
let mut deps = resolve.graph.edges(id)
.into_iter().flat_map(|a| a)
.map(encodable_package_id)
.collect::<Vec<_>>();
let mut deps = resolve
.graph
.edges(id)
.into_iter()
.flat_map(|a| a)
.map(encodable_package_id)
.collect::<Vec<_>>();
deps.sort();
(None, Some(deps))
}

File diff suppressed because it is too large

View File

@ -2,8 +2,8 @@ use std::fmt;
use std::io::prelude::*;
use atty;
use termcolor::Color::{Green, Red, Yellow, Cyan};
use termcolor::{self, StandardStream, Color, ColorSpec, WriteColor};
use termcolor::Color::{Cyan, Green, Red, Yellow};
use termcolor::{self, Color, ColorSpec, StandardStream, WriteColor};
use util::errors::CargoResult;
@ -12,7 +12,7 @@ use util::errors::CargoResult;
pub enum Verbosity {
Verbose,
Normal,
Quiet
Quiet,
}
/// An abstraction around a `Write`able object that remembers preferences for output verbosity and
@ -28,17 +28,13 @@ pub struct Shell {
impl fmt::Debug for Shell {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self.err {
ShellOut::Write(_) => {
f.debug_struct("Shell")
.field("verbosity", &self.verbosity)
.finish()
}
ShellOut::Stream { color_choice, .. } => {
f.debug_struct("Shell")
.field("verbosity", &self.verbosity)
.field("color_choice", &color_choice)
.finish()
}
ShellOut::Write(_) => f.debug_struct("Shell")
.field("verbosity", &self.verbosity)
.finish(),
ShellOut::Stream { color_choice, .. } => f.debug_struct("Shell")
.field("verbosity", &self.verbosity)
.field("color_choice", &color_choice)
.finish(),
}
}
}
@ -90,16 +86,16 @@ impl Shell {
/// Print a message, where the status will have `color` color, and can be justified. The
/// message follows without color.
fn print(&mut self,
status: &fmt::Display,
message: Option<&fmt::Display>,
color: Color,
justified: bool) -> CargoResult<()> {
fn print(
&mut self,
status: &fmt::Display,
message: Option<&fmt::Display>,
color: Color,
justified: bool,
) -> CargoResult<()> {
match self.verbosity {
Verbosity::Quiet => Ok(()),
_ => {
self.err.print(status, message, color, justified)
}
_ => self.err.print(status, message, color, justified),
}
}
@ -126,44 +122,53 @@ impl Shell {
/// Shortcut to right-align and color green a status message.
pub fn status<T, U>(&mut self, status: T, message: U) -> CargoResult<()>
where T: fmt::Display, U: fmt::Display
where
T: fmt::Display,
U: fmt::Display,
{
self.print(&status, Some(&message), Green, true)
}
pub fn status_header<T>(&mut self, status: T) -> CargoResult<()>
where T: fmt::Display,
where
T: fmt::Display,
{
self.print(&status, None, Cyan, true)
}
/// Shortcut to right-align a status message.
pub fn status_with_color<T, U>(&mut self,
status: T,
message: U,
color: Color) -> CargoResult<()>
where T: fmt::Display, U: fmt::Display
pub fn status_with_color<T, U>(
&mut self,
status: T,
message: U,
color: Color,
) -> CargoResult<()>
where
T: fmt::Display,
U: fmt::Display,
{
self.print(&status, Some(&message), color, true)
}
/// Run the callback only if we are in verbose mode
pub fn verbose<F>(&mut self, mut callback: F) -> CargoResult<()>
where F: FnMut(&mut Shell) -> CargoResult<()>
where
F: FnMut(&mut Shell) -> CargoResult<()>,
{
match self.verbosity {
Verbosity::Verbose => callback(self),
_ => Ok(())
_ => Ok(()),
}
}
/// Run the callback if we are not in verbose mode.
pub fn concise<F>(&mut self, mut callback: F) -> CargoResult<()>
where F: FnMut(&mut Shell) -> CargoResult<()>
where
F: FnMut(&mut Shell) -> CargoResult<()>,
{
match self.verbosity {
Verbosity::Verbose => Ok(()),
_ => callback(self)
_ => callback(self),
}
}
@ -192,16 +197,23 @@ impl Shell {
/// Update the color choice (always, never, or auto) from a string.
pub fn set_color_choice(&mut self, color: Option<&str>) -> CargoResult<()> {
if let ShellOut::Stream { ref mut stream, ref mut color_choice, .. } = self.err {
if let ShellOut::Stream {
ref mut stream,
ref mut color_choice,
..
} = self.err
{
let cfg = match color {
Some("always") => ColorChoice::Always,
Some("never") => ColorChoice::Never,
Some("auto") |
None => ColorChoice::CargoAuto,
Some("auto") | None => ColorChoice::CargoAuto,
Some(arg) => bail!("argument for --color must be auto, always, or \
never, but found `{}`", arg),
Some(arg) => bail!(
"argument for --color must be auto, always, or \
never, but found `{}`",
arg
),
};
*color_choice = cfg;
*stream = StandardStream::stderr(cfg.to_termcolor_color_choice());
@ -230,17 +242,17 @@ impl Default for Shell {
impl ShellOut {
/// Print out a message with a status. The status comes first and is bold + the given color.
/// The status can be justified, in which case the max width that will right align is 12 chars.
fn print(&mut self,
status: &fmt::Display,
message: Option<&fmt::Display>,
color: Color,
justified: bool) -> CargoResult<()> {
fn print(
&mut self,
status: &fmt::Display,
message: Option<&fmt::Display>,
color: Color,
justified: bool,
) -> CargoResult<()> {
match *self {
ShellOut::Stream { ref mut stream, .. } => {
stream.reset()?;
stream.set_color(ColorSpec::new()
.set_bold(true)
.set_fg(Some(color)))?;
stream.set_color(ColorSpec::new().set_bold(true).set_fg(Some(color)))?;
if justified {
write!(stream, "{:>12}", status)?;
} else {
@ -303,7 +315,7 @@ mod imp {
unsafe {
let mut winsize: libc::winsize = mem::zeroed();
if libc::ioctl(libc::STDERR_FILENO, libc::TIOCGWINSZ, &mut winsize) < 0 {
return None
return None;
}
if winsize.ws_col > 0 {
Some(winsize.ws_col as usize)
@ -335,7 +347,7 @@ mod imp {
let stdout = GetStdHandle(STD_ERROR_HANDLE);
let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed();
if GetConsoleScreenBufferInfo(stdout, &mut csbi) == 0 {
return None
return None;
}
Some((csbi.srWindow.Right - csbi.srWindow.Left) as usize)
}

View File

@ -1,11 +1,11 @@
use std::collections::hash_map::{HashMap, Values, IterMut};
use std::collections::hash_map::{HashMap, IterMut, Values};
use core::{Package, PackageId, Registry};
use util::CargoResult;
mod source_id;
pub use self::source_id::{SourceId, GitReference};
pub use self::source_id::{GitReference, SourceId};
/// A Source finds and downloads remote packages based on names and
/// versions.
@ -89,7 +89,9 @@ pub struct SourcesMut<'a, 'src: 'a> {
impl<'src> SourceMap<'src> {
/// Create an empty map
pub fn new() -> SourceMap<'src> {
SourceMap { map: HashMap::new() }
SourceMap {
map: HashMap::new(),
}
}
/// Like `HashMap::contains_key`
@ -144,7 +146,9 @@ impl<'src> SourceMap<'src> {
/// Like `HashMap::iter_mut`
pub fn sources_mut<'a>(&'a mut self) -> SourcesMut<'a, 'src> {
SourcesMut { inner: self.map.iter_mut() }
SourcesMut {
inner: self.map.iter_mut(),
}
}
}
@ -154,4 +158,3 @@ impl<'a, 'src> Iterator for SourcesMut<'a, 'src> {
self.inner.next().map(|(a, b)| (a, &mut **b))
}
}
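
The `SourcesMut` adapter shown above maps `&mut Box<Source>` entries to `&mut Source` via `&mut **b`. A minimal self-contained sketch of that pattern, with illustrative names rather than Cargo's actual types:

use std::collections::hash_map::{HashMap, IterMut};

trait Source {
    fn name(&self) -> &str;
}

struct SourceMap {
    map: HashMap<String, Box<dyn Source>>,
}

struct SourcesMut<'a> {
    inner: IterMut<'a, String, Box<dyn Source>>,
}

impl<'a> Iterator for SourcesMut<'a> {
    type Item = (&'a String, &'a mut dyn Source);
    fn next(&mut self) -> Option<Self::Item> {
        // Reborrow through the Box to the trait object, as in the diff.
        self.inner.next().map(|(a, b)| (a, &mut **b))
    }
}

struct Dir;
impl Source for Dir {
    fn name(&self) -> &str {
        "directory"
    }
}

fn main() {
    let mut sources = SourceMap { map: HashMap::new() };
    sources.map.insert("dir".to_string(), Box::new(Dir));
    let iter = SourcesMut { inner: sources.map.iter_mut() };
    for (id, src) in iter {
        println!("{} -> {}", id, src.name());
    }
}
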

View File

@ -12,9 +12,9 @@ use url::Url;
use ops;
use sources::git;
use sources::{PathSource, GitSource, RegistrySource, CRATES_IO};
use sources::{GitSource, PathSource, RegistrySource, CRATES_IO};
use sources::DirectorySource;
use util::{Config, CargoResult, ToUrl};
use util::{CargoResult, Config, ToUrl};
/// Unique identifier for a source of packages.
#[derive(Clone, Eq, Debug)]
@ -93,7 +93,9 @@ impl SourceId {
pub fn from_url(string: &str) -> CargoResult<SourceId> {
let mut parts = string.splitn(2, '+');
let kind = parts.next().unwrap();
let url = parts.next().ok_or_else(|| format_err!("invalid source `{}`", string))?;
let url = parts
.next()
.ok_or_else(|| format_err!("invalid source `{}`", string))?;
match kind {
"git" => {
@ -102,8 +104,7 @@ impl SourceId {
for (k, v) in url.query_pairs() {
match &k[..] {
// map older 'ref' to branch
"branch" |
"ref" => reference = GitReference::Branch(v.into_owned()),
"branch" | "ref" => reference = GitReference::Branch(v.into_owned()),
"rev" => reference = GitReference::Rev(v.into_owned()),
"tag" => reference = GitReference::Tag(v.into_owned()),
@ -114,23 +115,24 @@ impl SourceId {
url.set_fragment(None);
url.set_query(None);
Ok(SourceId::for_git(&url, reference)?.with_precise(precise))
},
}
"registry" => {
let url = url.to_url()?;
Ok(SourceId::new(Kind::Registry, url)?
.with_precise(Some("locked".to_string())))
Ok(SourceId::new(Kind::Registry, url)?.with_precise(Some("locked".to_string())))
}
"path" => {
let url = url.to_url()?;
SourceId::new(Kind::Path, url)
}
kind => Err(format_err!("unsupported source protocol: {}", kind))
kind => Err(format_err!("unsupported source protocol: {}", kind)),
}
}
/// A view of the `SourceId` that can be `Display`ed as a URL
pub fn to_url(&self) -> SourceIdToUrl {
SourceIdToUrl { inner: &*self.inner }
SourceIdToUrl {
inner: &*self.inner,
}
}
/// Create a SourceId from a filesystem path.
@ -173,10 +175,12 @@ impl SourceId {
let url = if let Some(ref index) = cfg.index {
static WARNED: AtomicBool = ATOMIC_BOOL_INIT;
if !WARNED.swap(true, SeqCst) {
config.shell().warn("custom registry support via \
the `registry.index` configuration is \
being removed, this functionality \
will not work in the future")?;
config.shell().warn(
"custom registry support via \
the `registry.index` configuration is \
being removed, this functionality \
will not work in the future",
)?;
}
&index[..]
} else {
@ -218,7 +222,7 @@ impl SourceId {
pub fn is_registry(&self) -> bool {
match self.inner.kind {
Kind::Registry | Kind::LocalRegistry => true,
_ => false,
_ => false,
}
}
@ -284,7 +288,7 @@ impl SourceId {
inner: Arc::new(SourceIdInner {
precise: v,
..(*self.inner).clone()
})
}),
}
}
@ -303,10 +307,15 @@ impl SourceId {
/// same hash in different locations.
pub fn stable_hash<S: hash::Hasher>(&self, workspace: &Path, into: &mut S) {
if self.is_path() {
if let Ok(p) = self.inner.url.to_file_path().unwrap().strip_prefix(workspace) {
if let Ok(p) = self.inner
.url
.to_file_path()
.unwrap()
.strip_prefix(workspace)
{
self.inner.kind.hash(into);
p.to_str().unwrap().hash(into);
return
return;
}
}
self.hash(into)
@ -333,7 +342,8 @@ impl Ord for SourceId {
impl ser::Serialize for SourceId {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer,
where
S: ser::Serializer,
{
if self.is_path() {
None::<String>.serialize(s)
@ -345,7 +355,8 @@ impl ser::Serialize for SourceId {
impl<'de> de::Deserialize<'de> for SourceId {
fn deserialize<D>(d: D) -> Result<SourceId, D::Error>
where D: de::Deserializer<'de>,
where
D: de::Deserializer<'de>,
{
let string = String::deserialize(d)?;
SourceId::from_url(&string).map_err(de::Error::custom)
@ -355,11 +366,17 @@ impl<'de> de::Deserialize<'de> for SourceId {
impl fmt::Display for SourceId {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match *self.inner {
SourceIdInner { kind: Kind::Path, ref url, .. } => {
fmt::Display::fmt(url, f)
}
SourceIdInner { kind: Kind::Git(ref reference), ref url,
ref precise, .. } => {
SourceIdInner {
kind: Kind::Path,
ref url,
..
} => fmt::Display::fmt(url, f),
SourceIdInner {
kind: Kind::Git(ref reference),
ref url,
ref precise,
..
} => {
write!(f, "{}", url)?;
if let Some(pretty) = reference.pretty_ref() {
write!(f, "?{}", pretty)?;
@ -371,13 +388,21 @@ impl fmt::Display for SourceId {
}
Ok(())
}
SourceIdInner { kind: Kind::Registry, ref url, .. } |
SourceIdInner { kind: Kind::LocalRegistry, ref url, .. } => {
write!(f, "registry `{}`", url)
}
SourceIdInner { kind: Kind::Directory, ref url, .. } => {
write!(f, "dir {}", url)
SourceIdInner {
kind: Kind::Registry,
ref url,
..
}
| SourceIdInner {
kind: Kind::LocalRegistry,
ref url,
..
} => write!(f, "registry `{}`", url),
SourceIdInner {
kind: Kind::Directory,
ref url,
..
} => write!(f, "dir {}", url),
}
}
}
@ -438,9 +463,11 @@ impl Hash for SourceId {
fn hash<S: hash::Hasher>(&self, into: &mut S) {
self.inner.kind.hash(into);
match *self.inner {
SourceIdInner { kind: Kind::Git(..), ref canonical_url, .. } => {
canonical_url.as_str().hash(into)
}
SourceIdInner {
kind: Kind::Git(..),
ref canonical_url,
..
} => canonical_url.as_str().hash(into),
_ => self.inner.url.as_str().hash(into),
}
}
@ -454,11 +481,16 @@ pub struct SourceIdToUrl<'a> {
impl<'a> fmt::Display for SourceIdToUrl<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self.inner {
SourceIdInner { kind: Kind::Path, ref url, .. } => {
write!(f, "path+{}", url)
}
SourceIdInner {
kind: Kind::Git(ref reference), ref url, ref precise, ..
kind: Kind::Path,
ref url,
..
} => write!(f, "path+{}", url),
SourceIdInner {
kind: Kind::Git(ref reference),
ref url,
ref precise,
..
} => {
write!(f, "git+{}", url)?;
if let Some(pretty) = reference.pretty_ref() {
@ -469,15 +501,21 @@ impl<'a> fmt::Display for SourceIdToUrl<'a> {
}
Ok(())
}
SourceIdInner { kind: Kind::Registry, ref url, .. } => {
write!(f, "registry+{}", url)
}
SourceIdInner { kind: Kind::LocalRegistry, ref url, .. } => {
write!(f, "local-registry+{}", url)
}
SourceIdInner { kind: Kind::Directory, ref url, .. } => {
write!(f, "directory+{}", url)
}
SourceIdInner {
kind: Kind::Registry,
ref url,
..
} => write!(f, "registry+{}", url),
SourceIdInner {
kind: Kind::LocalRegistry,
ref url,
..
} => write!(f, "local-registry+{}", url),
SourceIdInner {
kind: Kind::Directory,
ref url,
..
} => write!(f, "directory+{}", url),
}
}
}
@ -510,7 +548,7 @@ impl<'a> fmt::Display for PrettyRef<'a> {
#[cfg(test)]
mod tests {
use super::{SourceId, Kind, GitReference};
use super::{GitReference, Kind, SourceId};
use util::ToUrl;
#[test]
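
For context on the `from_url` hunk above, the scheme it parses is `kind+url`, split once on `+`. A simplified sketch of just that split, with strings in place of Cargo's `Url`/`SourceId` types:

// Sketch of the `kind+url` split; real Cargo goes on to build a
// SourceId (and, for git, to read query pairs like ?branch=...).
fn split_source(string: &str) -> Result<(&str, &str), String> {
    let mut parts = string.splitn(2, '+');
    let kind = parts.next().unwrap(); // always at least one part
    let url = parts
        .next()
        .ok_or_else(|| format!("invalid source `{}`", string))?;
    match kind {
        "git" | "registry" | "path" => Ok((kind, url)),
        kind => Err(format!("unsupported source protocol: {}", kind)),
    }
}

fn main() {
    let (kind, url) = split_source("registry+https://github.com/rust-lang/crates.io-index").unwrap();
    assert_eq!(kind, "registry");
    assert!(url.starts_with("https://"));
    assert!(split_source("no-plus-sign").is_err());
}
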

View File

@ -27,18 +27,25 @@ struct Inner {
}
impl Summary {
pub fn new(pkg_id: PackageId,
dependencies: Vec<Dependency>,
features: BTreeMap<String, Vec<String>>,
links: Option<String>) -> CargoResult<Summary> {
pub fn new(
pkg_id: PackageId,
dependencies: Vec<Dependency>,
features: BTreeMap<String, Vec<String>>,
links: Option<String>,
) -> CargoResult<Summary> {
for dep in dependencies.iter() {
if features.get(&*dep.name()).is_some() {
bail!("Features and dependencies cannot have the \
same name: `{}`", dep.name())
bail!(
"Features and dependencies cannot have the \
same name: `{}`",
dep.name()
)
}
if dep.is_optional() && !dep.is_transitive() {
bail!("Dev-dependencies are not allowed to be optional: `{}`",
dep.name())
bail!(
"Dev-dependencies are not allowed to be optional: `{}`",
dep.name()
)
}
}
for (feature, list) in features.iter() {
@ -46,23 +53,34 @@ impl Summary {
let mut parts = dep.splitn(2, '/');
let dep = parts.next().unwrap();
let is_reexport = parts.next().is_some();
if !is_reexport && features.get(dep).is_some() { continue }
if !is_reexport && features.get(dep).is_some() {
continue;
}
match dependencies.iter().find(|d| &*d.name() == dep) {
Some(d) => {
if d.is_optional() || is_reexport { continue }
bail!("Feature `{}` depends on `{}` which is not an \
optional dependency.\nConsider adding \
`optional = true` to the dependency",
feature, dep)
}
None if is_reexport => {
bail!("Feature `{}` requires a feature of `{}` which is not a \
dependency", feature, dep)
}
None => {
bail!("Feature `{}` includes `{}` which is neither \
a dependency nor another feature", feature, dep)
if d.is_optional() || is_reexport {
continue;
}
bail!(
"Feature `{}` depends on `{}` which is not an \
optional dependency.\nConsider adding \
`optional = true` to the dependency",
feature,
dep
)
}
None if is_reexport => bail!(
"Feature `{}` requires a feature of `{}` which is not a \
dependency",
feature,
dep
),
None => bail!(
"Feature `{}` includes `{}` which is neither \
a dependency nor another feature",
feature,
dep
),
}
}
}
@ -77,12 +95,24 @@ impl Summary {
})
}
pub fn package_id(&self) -> &PackageId { &self.inner.package_id }
pub fn name(&self) -> InternedString { self.package_id().name() }
pub fn version(&self) -> &Version { self.package_id().version() }
pub fn source_id(&self) -> &SourceId { self.package_id().source_id() }
pub fn dependencies(&self) -> &[Dependency] { &self.inner.dependencies }
pub fn features(&self) -> &BTreeMap<String, Vec<String>> { &self.inner.features }
pub fn package_id(&self) -> &PackageId {
&self.inner.package_id
}
pub fn name(&self) -> InternedString {
self.package_id().name()
}
pub fn version(&self) -> &Version {
self.package_id().version()
}
pub fn source_id(&self) -> &SourceId {
self.package_id().source_id()
}
pub fn dependencies(&self) -> &[Dependency] {
&self.inner.dependencies
}
pub fn features(&self) -> &BTreeMap<String, Vec<String>> {
&self.inner.features
}
pub fn checksum(&self) -> Option<&str> {
self.inner.checksum.as_ref().map(|s| &s[..])
}
@ -101,7 +131,9 @@ impl Summary {
}
pub fn map_dependencies<F>(mut self, f: F) -> Summary
where F: FnMut(Dependency) -> Dependency {
where
F: FnMut(Dependency) -> Dependency,
{
{
let slot = &mut Rc::make_mut(&mut self.inner).dependencies;
let deps = mem::replace(slot, Vec::new());
@ -110,17 +142,14 @@ impl Summary {
self
}
pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId)
-> Summary {
pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) -> Summary {
let me = if self.package_id().source_id() == to_replace {
let new_id = self.package_id().with_source_id(replace_with);
self.override_id(new_id)
} else {
self
};
me.map_dependencies(|dep| {
dep.map_source(to_replace, replace_with)
})
me.map_dependencies(|dep| dep.map_source(to_replace, replace_with))
}
}
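
The large `Summary::new` hunk only rewraps its `bail!` messages; the two validations themselves are unchanged. A standalone sketch of those checks (`Dep` is a hypothetical stand-in for Cargo's `Dependency`):

use std::collections::BTreeMap;

struct Dep {
    name: String,
    optional: bool,
    transitive: bool, // false for dev-dependencies
}

fn validate(deps: &[Dep], features: &BTreeMap<String, Vec<String>>) -> Result<(), String> {
    for dep in deps {
        // A feature and a dependency may not share a name.
        if features.contains_key(&dep.name) {
            return Err(format!(
                "Features and dependencies cannot have the same name: `{}`",
                dep.name
            ));
        }
        // Dev-dependencies (non-transitive) may not be optional.
        if dep.optional && !dep.transitive {
            return Err(format!(
                "Dev-dependencies are not allowed to be optional: `{}`",
                dep.name
            ));
        }
    }
    Ok(())
}

fn main() {
    let deps = vec![Dep { name: "serde".into(), optional: false, transitive: true }];
    let mut features = BTreeMap::new();
    features.insert("serde".to_string(), vec![]);
    assert!(validate(&deps, &features).is_err());
}
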

View File

@ -1,4 +1,4 @@
use std::collections::hash_map::{HashMap, Entry};
use std::collections::hash_map::{Entry, HashMap};
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
use std::slice;
@ -6,8 +6,8 @@ use std::slice;
use glob::glob;
use url::Url;
use core::{Package, VirtualManifest, EitherManifest, SourceId};
use core::{PackageIdSpec, Dependency, Profile, Profiles};
use core::{EitherManifest, Package, SourceId, VirtualManifest};
use core::{Dependency, PackageIdSpec, Profile, Profiles};
use util::{Config, Filesystem};
use util::errors::{CargoResult, CargoResultExt};
use util::paths;
@ -121,8 +121,7 @@ impl<'cfg> Workspace<'cfg> {
/// This function will construct the entire workspace by determining the
/// root and all member packages. It will then validate the workspace
/// before returning it, so `Ok` is only returned for valid workspaces.
pub fn new(manifest_path: &Path, config: &'cfg Config)
-> CargoResult<Workspace<'cfg>> {
pub fn new(manifest_path: &Path, config: &'cfg Config) -> CargoResult<Workspace<'cfg>> {
let target_dir = config.target_dir()?;
let mut ws = Workspace {
@ -154,10 +153,12 @@ impl<'cfg> Workspace<'cfg> {
///
/// This is currently only used in niche situations like `cargo install` or
/// `cargo package`.
pub fn ephemeral(package: Package,
config: &'cfg Config,
target_dir: Option<Filesystem>,
require_optional_deps: bool) -> CargoResult<Workspace<'cfg>> {
pub fn ephemeral(
package: Package,
config: &'cfg Config,
target_dir: Option<Filesystem>,
require_optional_deps: bool,
) -> CargoResult<Workspace<'cfg>> {
let mut ws = Workspace {
config,
current_manifest: package.manifest_path().to_path_buf(),
@ -194,9 +195,12 @@ impl<'cfg> Workspace<'cfg> {
/// indicating that something else should be passed.
pub fn current(&self) -> CargoResult<&Package> {
let pkg = self.current_opt().ok_or_else(|| {
format_err!("manifest path `{}` is a virtual manifest, but this \
command requires running against an actual package in \
this workspace", self.current_manifest.display())
format_err!(
"manifest path `{}` is a virtual manifest, but this \
command requires running against an actual package in \
this workspace",
self.current_manifest.display()
)
})?;
Ok(pkg)
}
@ -204,14 +208,14 @@ impl<'cfg> Workspace<'cfg> {
pub fn current_opt(&self) -> Option<&Package> {
match *self.packages.get(&self.current_manifest) {
MaybePackage::Package(ref p) => Some(p),
MaybePackage::Virtual(..) => None
MaybePackage::Virtual(..) => None,
}
}
pub fn is_virtual(&self) -> bool {
match *self.packages.get(&self.current_manifest) {
MaybePackage::Package(..) => false,
MaybePackage::Virtual(..) => true
MaybePackage::Virtual(..) => true,
}
}
@ -221,7 +225,9 @@ impl<'cfg> Workspace<'cfg> {
}
pub fn profiles(&self) -> &Profiles {
let root = self.root_manifest.as_ref().unwrap_or(&self.current_manifest);
let root = self.root_manifest
.as_ref()
.unwrap_or(&self.current_manifest);
match *self.packages.get(root) {
MaybePackage::Package(ref p) => p.manifest().profiles(),
MaybePackage::Virtual(ref vm) => vm.profiles(),
@ -235,14 +241,15 @@ impl<'cfg> Workspace<'cfg> {
pub fn root(&self) -> &Path {
match self.root_manifest {
Some(ref p) => p,
None => &self.current_manifest
}.parent().unwrap()
None => &self.current_manifest,
}.parent()
.unwrap()
}
pub fn target_dir(&self) -> Filesystem {
self.target_dir.clone().unwrap_or_else(|| {
Filesystem::new(self.root().join("target"))
})
self.target_dir
.clone()
.unwrap_or_else(|| Filesystem::new(self.root().join("target")))
}
/// Returns the root [replace] section of this workspace.
@ -297,7 +304,10 @@ impl<'cfg> Workspace<'cfg> {
self.require_optional_deps
}
pub fn set_require_optional_deps<'a>(&'a mut self, require_optional_deps: bool) -> &mut Workspace<'cfg> {
pub fn set_require_optional_deps<'a>(
&'a mut self,
require_optional_deps: bool,
) -> &mut Workspace<'cfg> {
self.require_optional_deps = require_optional_deps;
self
}
@ -311,10 +321,11 @@ impl<'cfg> Workspace<'cfg> {
///
/// Returns an error if `manifest_path` isn't actually a valid manifest or
/// if some other transient error happens.
fn find_root(&mut self, manifest_path: &Path)
-> CargoResult<Option<PathBuf>> {
fn find_root(&mut self, manifest_path: &Path) -> CargoResult<Option<PathBuf>> {
fn read_root_pointer(member_manifest: &Path, root_link: &str) -> CargoResult<PathBuf> {
let path = member_manifest.parent().unwrap()
let path = member_manifest
.parent()
.unwrap()
.join(root_link)
.join("Cargo.toml");
debug!("find_root - pointer {}", path.display());
@ -326,11 +337,11 @@ impl<'cfg> Workspace<'cfg> {
match *current.workspace_config() {
WorkspaceConfig::Root(_) => {
debug!("find_root - is root {}", manifest_path.display());
return Ok(Some(manifest_path.to_path_buf()))
}
WorkspaceConfig::Member { root: Some(ref path_to_root) } => {
return Ok(Some(read_root_pointer(manifest_path, path_to_root)?))
return Ok(Some(manifest_path.to_path_buf()));
}
WorkspaceConfig::Member {
root: Some(ref path_to_root),
} => return Ok(Some(read_root_pointer(manifest_path, path_to_root)?)),
WorkspaceConfig::Member { root: None } => {}
}
}
@ -344,12 +355,14 @@ impl<'cfg> Workspace<'cfg> {
debug!("find_root - found a root checking exclusion");
if !ances_root_config.is_excluded(manifest_path) {
debug!("find_root - found!");
return Ok(Some(ances_manifest_path))
return Ok(Some(ances_manifest_path));
}
}
WorkspaceConfig::Member { root: Some(ref path_to_root) } => {
WorkspaceConfig::Member {
root: Some(ref path_to_root),
} => {
debug!("find_root - found pointer");
return Ok(Some(read_root_pointer(&ances_manifest_path, path_to_root)?))
return Ok(Some(read_root_pointer(&ances_manifest_path, path_to_root)?));
}
WorkspaceConfig::Member { .. } => {}
}
@ -361,7 +374,7 @@ impl<'cfg> Workspace<'cfg> {
// current project, but we don't want to mistakenly try to put
// crates.io crates into the workspace by accident.
if self.config.home() == path {
break
break;
}
}
@ -382,7 +395,7 @@ impl<'cfg> Workspace<'cfg> {
debug!("find_members - only me as a member");
self.members.push(self.current_manifest.clone());
self.default_members.push(self.current_manifest.clone());
return Ok(())
return Ok(());
}
};
@ -392,17 +405,18 @@ impl<'cfg> Workspace<'cfg> {
let root_package = self.packages.load(&root_manifest_path)?;
match *root_package.workspace_config() {
WorkspaceConfig::Root(ref root_config) => {
members_paths = root_config.members_paths(
root_config.members.as_ref().unwrap_or(&vec![])
)?;
members_paths =
root_config.members_paths(root_config.members.as_ref().unwrap_or(&vec![]))?;
default_members_paths = if let Some(ref default) = root_config.default_members {
Some(root_config.members_paths(default)?)
} else {
None
}
}
_ => bail!("root of a workspace inferred but wasn't a root: {}",
root_manifest_path.display()),
_ => bail!(
"root of a workspace inferred but wasn't a root: {}",
root_manifest_path.display()
),
}
}
@ -414,9 +428,11 @@ impl<'cfg> Workspace<'cfg> {
for path in default {
let manifest_path = paths::normalize_path(&path.join("Cargo.toml"));
if !self.members.contains(&manifest_path) {
bail!("package `{}` is listed in workspaces default-members \
but is not a member.",
path.display())
bail!(
"package `{}` is listed in workspaces default-members \
but is not a member.",
path.display()
)
}
self.default_members.push(manifest_path)
}
@ -429,25 +445,29 @@ impl<'cfg> Workspace<'cfg> {
self.find_path_deps(&root_manifest_path, &root_manifest_path, false)
}
fn find_path_deps(&mut self,
manifest_path: &Path,
root_manifest: &Path,
is_path_dep: bool) -> CargoResult<()> {
fn find_path_deps(
&mut self,
manifest_path: &Path,
root_manifest: &Path,
is_path_dep: bool,
) -> CargoResult<()> {
let manifest_path = paths::normalize_path(manifest_path);
if self.members.contains(&manifest_path) {
return Ok(())
return Ok(());
}
if is_path_dep
&& !manifest_path.parent().unwrap().starts_with(self.root())
&& self.find_root(&manifest_path)? != self.root_manifest {
if is_path_dep && !manifest_path.parent().unwrap().starts_with(self.root())
&& self.find_root(&manifest_path)? != self.root_manifest
{
// If `manifest_path` is a path dependency outside of the workspace,
// don't add it, or any of its dependencies, as members.
return Ok(())
return Ok(());
}
if let WorkspaceConfig::Root(ref root_config) = *self.packages.load(root_manifest)?.workspace_config() {
if let WorkspaceConfig::Root(ref root_config) =
*self.packages.load(root_manifest)?.workspace_config()
{
if root_config.is_excluded(&manifest_path) {
return Ok(())
return Ok(());
}
}
@ -460,12 +480,12 @@ impl<'cfg> Workspace<'cfg> {
MaybePackage::Virtual(_) => return Ok(()),
};
pkg.dependencies()
.iter()
.map(|d| d.source_id())
.filter(|d| d.is_path())
.filter_map(|d| d.url().to_file_path().ok())
.map(|p| p.join("Cargo.toml"))
.collect::<Vec<_>>()
.iter()
.map(|d| d.source_id())
.filter(|d| d.is_path())
.filter_map(|d| d.url().to_file_path().ok())
.map(|p| p.join("Cargo.toml"))
.collect::<Vec<_>>()
};
for candidate in candidates {
self.find_path_deps(&candidate, root_manifest, true)?;
@ -480,7 +500,7 @@ impl<'cfg> Workspace<'cfg> {
/// 3. The current crate is a member of this workspace.
fn validate(&mut self) -> CargoResult<()> {
if self.root_manifest.is_none() {
return Ok(())
return Ok(());
}
let mut roots = Vec::new();
@ -499,52 +519,64 @@ impl<'cfg> Workspace<'cfg> {
MaybePackage::Virtual(_) => continue,
};
if let Some(prev) = names.insert(name, member) {
bail!("two packages named `{}` in this workspace:\n\
- {}\n\
- {}", name, prev.display(), member.display());
bail!(
"two packages named `{}` in this workspace:\n\
- {}\n\
- {}",
name,
prev.display(),
member.display()
);
}
}
}
match roots.len() {
0 => {
bail!("`package.workspace` configuration points to a crate \
which is not configured with [workspace]: \n\
configuration at: {}\n\
points to: {}",
self.current_manifest.display(),
self.root_manifest.as_ref().unwrap().display())
}
0 => bail!(
"`package.workspace` configuration points to a crate \
which is not configured with [workspace]: \n\
configuration at: {}\n\
points to: {}",
self.current_manifest.display(),
self.root_manifest.as_ref().unwrap().display()
),
1 => {}
_ => {
bail!("multiple workspace roots found in the same workspace:\n{}",
roots.iter()
.map(|r| format!(" {}", r.display()))
.collect::<Vec<_>>()
.join("\n"));
bail!(
"multiple workspace roots found in the same workspace:\n{}",
roots
.iter()
.map(|r| format!(" {}", r.display()))
.collect::<Vec<_>>()
.join("\n")
);
}
}
for member in self.members.clone() {
let root = self.find_root(&member)?;
if root == self.root_manifest {
continue
continue;
}
match root {
Some(root) => {
bail!("package `{}` is a member of the wrong workspace\n\
expected: {}\n\
actual: {}",
member.display(),
self.root_manifest.as_ref().unwrap().display(),
root.display());
bail!(
"package `{}` is a member of the wrong workspace\n\
expected: {}\n\
actual: {}",
member.display(),
self.root_manifest.as_ref().unwrap().display(),
root.display()
);
}
None => {
bail!("workspace member `{}` is not hierarchically below \
the workspace root `{}`",
member.display(),
self.root_manifest.as_ref().unwrap().display());
bail!(
"workspace member `{}` is not hierarchically below \
the workspace root `{}`",
member.display(),
self.root_manifest.as_ref().unwrap().display()
);
}
}
}
@ -558,18 +590,19 @@ impl<'cfg> Workspace<'cfg> {
// FIXME: Make this more generic by using a relative path resolver between member and
// root.
let members_msg = match current_dir.strip_prefix(root_dir) {
Ok(rel) => {
format!("this may be fixable by adding `{}` to the \
`workspace.members` array of the manifest \
located at: {}",
rel.display(),
root.display())
}
Err(_) => {
format!("this may be fixable by adding a member to \
the `workspace.members` array of the \
manifest located at: {}", root.display())
}
Ok(rel) => format!(
"this may be fixable by adding `{}` to the \
`workspace.members` array of the manifest \
located at: {}",
rel.display(),
root.display()
),
Err(_) => format!(
"this may be fixable by adding a member to \
the `workspace.members` array of the \
manifest located at: {}",
root.display()
),
};
let extra = match *root_pkg {
MaybePackage::Virtual(_) => members_msg,
@ -579,20 +612,25 @@ impl<'cfg> Workspace<'cfg> {
WorkspaceConfig::Member { .. } => unreachable!(),
};
if !has_members_list {
format!("this may be fixable by ensuring that this \
crate is depended on by the workspace \
root: {}", root.display())
format!(
"this may be fixable by ensuring that this \
crate is depended on by the workspace \
root: {}",
root.display()
)
} else {
members_msg
}
}
};
bail!("current package believes it's in a workspace when it's not:\n\
current: {}\n\
workspace: {}\n\n{}",
self.current_manifest.display(),
root.display(),
extra);
bail!(
"current package believes it's in a workspace when it's not:\n\
current: {}\n\
workspace: {}\n\n{}",
self.current_manifest.display(),
root.display(),
extra
);
}
if let Some(ref root_manifest) = self.root_manifest {
@ -610,14 +648,18 @@ impl<'cfg> Workspace<'cfg> {
doctest: Profile::default_doctest(),
};
for pkg in self.members().filter(|p| p.manifest_path() != root_manifest) {
for pkg in self.members()
.filter(|p| p.manifest_path() != root_manifest)
{
if pkg.manifest().profiles() != &default_profiles {
let message = &format!("profiles for the non root package will be ignored, \
specify profiles at the workspace root:\n\
package: {}\n\
workspace: {}",
pkg.manifest_path().display(),
root_manifest.display());
let message = &format!(
"profiles for the non root package will be ignored, \
specify profiles at the workspace root:\n\
package: {}\n\
workspace: {}",
pkg.manifest_path().display(),
root_manifest.display()
);
//TODO: remove `Eq` bound from `Profiles` when the warning is removed.
self.config.shell().warn(&message)?;
@ -629,7 +671,6 @@ impl<'cfg> Workspace<'cfg> {
}
}
impl<'cfg> Packages<'cfg> {
fn get(&self, manifest_path: &Path) -> &MaybePackage {
&self.packages[manifest_path.parent().unwrap()]
@ -647,9 +688,7 @@ impl<'cfg> Packages<'cfg> {
EitherManifest::Real(manifest) => {
MaybePackage::Package(Package::new(manifest, manifest_path))
}
EitherManifest::Virtual(vm) => {
MaybePackage::Virtual(vm)
}
EitherManifest::Virtual(vm) => MaybePackage::Virtual(vm),
}))
}
}
@ -667,9 +706,7 @@ impl<'a, 'cfg> Iterator for Members<'a, 'cfg> {
fn next(&mut self) -> Option<&'a Package> {
loop {
let next = self.iter.next().map(|path| {
self.ws.packages.get(path)
});
let next = self.iter.next().map(|path| self.ws.packages.get(path));
match next {
Some(&MaybePackage::Package(ref p)) => return Some(p),
Some(&MaybePackage::Virtual(_)) => {}
@ -708,16 +745,14 @@ impl WorkspaceRootConfig {
///
/// This method does NOT consider the `members` list.
fn is_excluded(&self, manifest_path: &Path) -> bool {
let excluded = self.exclude.iter().any(|ex| {
manifest_path.starts_with(self.root_dir.join(ex))
});
let excluded = self.exclude
.iter()
.any(|ex| manifest_path.starts_with(self.root_dir.join(ex)));
let explicit_member = match self.members {
Some(ref members) => {
members.iter().any(|mem| {
manifest_path.starts_with(self.root_dir.join(mem))
})
}
Some(ref members) => members
.iter()
.any(|mem| manifest_path.starts_with(self.root_dir.join(mem))),
None => false,
};
@ -752,13 +787,9 @@ impl WorkspaceRootConfig {
Some(p) => p,
None => return Ok(Vec::new()),
};
let res = glob(path).chain_err(|| {
format_err!("could not parse pattern `{}`", &path)
})?;
let res = glob(path).chain_err(|| format_err!("could not parse pattern `{}`", &path))?;
let res = res.map(|p| {
p.chain_err(|| {
format_err!("unable to match path to pattern `{}`", &path)
})
p.chain_err(|| format_err!("unable to match path to pattern `{}`", &path))
}).collect::<Result<Vec<_>, _>>()?;
Ok(res)
}
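
`find_root` above walks ancestor directories looking for a manifest. A std-only sketch of the traversal alone, leaving Cargo's real `[workspace]` and exclusion checks out of scope (the `stop_at` guard mirrors the `config.home() == path { break }` above):

use std::path::{Path, PathBuf};

// Starting from a member's directory, look upward for a Cargo.toml.
fn find_root(manifest_path: &Path, stop_at: &Path) -> Option<PathBuf> {
    // skip(1): the member's own manifest is the starting point.
    for dir in manifest_path.parent()?.ancestors().skip(1) {
        let candidate = dir.join("Cargo.toml");
        if candidate.exists() {
            return Some(candidate);
        }
        // Don't wander into unrelated directories such as $CARGO_HOME.
        if dir == stop_at {
            break;
        }
    }
    None
}

fn main() {
    let root = find_root(Path::new("/ws/member/Cargo.toml"), Path::new("/"));
    println!("{:?}", root);
}
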

View File

@ -1,20 +1,18 @@
#![cfg_attr(test, deny(warnings))]
// Currently, Cargo does not use clippy for its source code.
// But if someone runs it they should know that
// @alexcrichton disagrees with clippy on some style things
#![cfg_attr(feature = "cargo-clippy", allow(explicit_iter_loop))]
#[macro_use] extern crate failure;
#[macro_use] extern crate log;
#[macro_use] extern crate scoped_tls;
#[macro_use] extern crate serde_derive;
#[macro_use] extern crate serde_json;
extern crate atty;
extern crate clap;
#[cfg(target_os = "macos")]
extern crate core_foundation;
extern crate crates_io as registry;
extern crate crossbeam;
extern crate curl;
#[macro_use]
extern crate failure;
extern crate filetime;
extern crate flate2;
extern crate fs2;
@ -24,23 +22,30 @@ extern crate hex;
extern crate home;
extern crate ignore;
extern crate jobserver;
#[macro_use]
extern crate lazy_static;
extern crate lazycell;
#[macro_use] extern crate lazy_static;
extern crate libc;
extern crate libgit2_sys;
#[macro_use]
extern crate log;
extern crate num_cpus;
extern crate same_file;
#[macro_use]
extern crate scoped_tls;
extern crate semver;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate serde_ignored;
#[macro_use]
extern crate serde_json;
extern crate shell_escape;
extern crate tar;
extern crate tempdir;
extern crate termcolor;
extern crate toml;
extern crate url;
#[cfg(target_os = "macos")]
extern crate core_foundation;
use std::fmt;
@ -85,8 +90,7 @@ pub struct VersionInfo {
impl fmt::Display for VersionInfo {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "cargo {}.{}.{}",
self.major, self.minor, self.patch)?;
write!(f, "cargo {}.{}.{}", self.major, self.minor, self.patch)?;
if let Some(channel) = self.cfg_info.as_ref().map(|ci| &ci.release_channel) {
if channel != "stable" {
write!(f, "-{}", channel)?;
@ -97,8 +101,7 @@ impl fmt::Display for VersionInfo {
if let Some(ref cfg) = self.cfg_info {
if let Some(ref ci) = cfg.commit_info {
write!(f, " ({} {})",
ci.short_commit_hash, ci.commit_date)?;
write!(f, " ({} {})", ci.short_commit_hash, ci.commit_date)?;
}
};
Ok(())
@ -118,7 +121,11 @@ pub fn exit_with_error(err: CliError, shell: &mut Shell) -> ! {
}
}
let CliError { error, exit_code, unknown } = err;
let CliError {
error,
exit_code,
unknown,
} = err;
// exit_code == 0 is non-fatal error, e.g. docopt version info
let fatal = exit_code != 0;
@ -134,8 +141,11 @@ pub fn exit_with_error(err: CliError, shell: &mut Shell) -> ! {
}
if !handle_cause(&error, shell) || hide {
drop(writeln!(shell.err(), "\nTo learn more, run the command again \
with --verbose."));
drop(writeln!(
shell.err(),
"\nTo learn more, run the command again \
with --verbose."
));
}
}
@ -204,14 +214,11 @@ pub fn version() -> VersionInfo {
match option_env!("CFG_RELEASE_CHANNEL") {
// We have environment variables set up from configure/make.
Some(_) => {
let commit_info =
option_env!("CFG_COMMIT_HASH").map(|s| {
CommitInfo {
commit_hash: s.to_string(),
short_commit_hash: option_env_str!("CFG_SHORT_COMMIT_HASH").unwrap(),
commit_date: option_env_str!("CFG_COMMIT_DATE").unwrap(),
}
});
let commit_info = option_env!("CFG_COMMIT_HASH").map(|s| CommitInfo {
commit_hash: s.to_string(),
short_commit_hash: option_env_str!("CFG_SHORT_COMMIT_HASH").unwrap(),
commit_date: option_env_str!("CFG_COMMIT_DATE").unwrap(),
});
VersionInfo {
major,
minor,
@ -222,16 +229,14 @@ pub fn version() -> VersionInfo {
commit_info,
}),
}
},
// We are being compiled by Cargo itself.
None => {
VersionInfo {
major,
minor,
patch,
pre_release: option_env_str!("CARGO_PKG_VERSION_PRE"),
cfg_info: None,
}
}
// We are being compiled by Cargo itself.
None => VersionInfo {
major,
minor,
patch,
pre_release: option_env_str!("CARGO_PKG_VERSION_PRE"),
cfg_info: None,
},
}
}
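
The `version()` rewrap above hinges on `option_env!`, which reads an environment variable at compile time and yields `Option<&'static str>`, letting the configure/make build be told apart from a build by Cargo itself. A small sketch:

// CARGO_PKG_VERSION is set by Cargo for every crate it compiles; the
// CFG_* variable below mirrors the names in the diff and is only an
// assumption about Rust's release infrastructure.
fn main() {
    let pkg_version = option_env!("CARGO_PKG_VERSION");
    let channel = option_env!("CFG_RELEASE_CHANNEL");

    match channel {
        Some(c) => println!("release channel: {}", c),
        None => println!("compiled by Cargo itself, version {:?}", pkg_version),
    }
}
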

View File

@ -6,7 +6,7 @@ use core::{Profiles, Workspace};
use util::Config;
use util::errors::{CargoResult, CargoResultExt};
use util::paths;
use ops::{self, Context, BuildConfig, Kind, Unit};
use ops::{self, BuildConfig, Context, Kind, Unit};
pub struct CleanOptions<'a> {
pub config: &'a Config,
@ -34,15 +34,20 @@ pub fn clean(ws: &Workspace, opts: &CleanOptions) -> CargoResult<()> {
let profiles = ws.profiles();
let host_triple = opts.config.rustc()?.host.clone();
let mut cx = Context::new(ws, &resolve, &packages, opts.config,
BuildConfig {
host_triple,
requested_target: opts.target.clone(),
release: opts.release,
jobs: 1,
..BuildConfig::default()
},
profiles)?;
let mut cx = Context::new(
ws,
&resolve,
&packages,
opts.config,
BuildConfig {
host_triple,
requested_target: opts.target.clone(),
release: opts.release,
jobs: 1,
..BuildConfig::default()
},
profiles,
)?;
let mut units = Vec::new();
for spec in opts.spec.iter() {
@ -54,12 +59,31 @@ pub fn clean(ws: &Workspace, opts: &CleanOptions) -> CargoResult<()> {
for target in pkg.targets() {
for kind in [Kind::Host, Kind::Target].iter() {
let Profiles {
ref release, ref dev, ref test, ref bench, ref doc,
ref custom_build, ref test_deps, ref bench_deps, ref check,
ref check_test, ref doctest,
ref release,
ref dev,
ref test,
ref bench,
ref doc,
ref custom_build,
ref test_deps,
ref bench_deps,
ref check,
ref check_test,
ref doctest,
} = *profiles;
let profiles = [release, dev, test, bench, doc, custom_build,
test_deps, bench_deps, check, check_test, doctest];
let profiles = [
release,
dev,
test,
bench,
doc,
custom_build,
test_deps,
bench_deps,
check,
check_test,
doctest,
];
for profile in profiles.iter() {
units.push(Unit {
pkg,
@ -82,7 +106,7 @@ pub fn clean(ws: &Workspace, opts: &CleanOptions) -> CargoResult<()> {
} else {
rm_rf(&cx.build_script_dir(unit), config)?;
}
continue
continue;
}
for &(ref src, ref link_dst, _) in cx.target_filenames(unit)?.iter() {
@ -99,15 +123,15 @@ pub fn clean(ws: &Workspace, opts: &CleanOptions) -> CargoResult<()> {
fn rm_rf(path: &Path, config: &Config) -> CargoResult<()> {
let m = fs::metadata(path);
if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) {
config.shell().verbose(|shell| {shell.status("Removing", path.display())})?;
paths::remove_dir_all(path).chain_err(|| {
format_err!("could not remove build directory")
})?;
config
.shell()
.verbose(|shell| shell.status("Removing", path.display()))?;
paths::remove_dir_all(path).chain_err(|| format_err!("could not remove build directory"))?;
} else if m.is_ok() {
config.shell().verbose(|shell| {shell.status("Removing", path.display())})?;
paths::remove_file(path).chain_err(|| {
format_err!("failed to remove build artifact")
})?;
config
.shell()
.verbose(|shell| shell.status("Removing", path.display()))?;
paths::remove_file(path).chain_err(|| format_err!("failed to remove build artifact"))?;
}
Ok(())
}
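
`rm_rf` above chooses `remove_dir_all` or `remove_file` based on the path's metadata and attaches context to failures. A std-only sketch using `map_err` where Cargo uses failure's `chain_err`:

use std::fs;
use std::io;
use std::path::Path;

fn rm_rf(path: &Path) -> io::Result<()> {
    let m = fs::metadata(path);
    if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) {
        println!("Removing {}", path.display());
        fs::remove_dir_all(path)
            .map_err(|e| io::Error::new(e.kind(), "could not remove build directory"))?;
    } else if m.is_ok() {
        println!("Removing {}", path.display());
        fs::remove_file(path)
            .map_err(|e| io::Error::new(e.kind(), "failed to remove build artifact"))?;
    }
    // A missing path is not an error; nothing to remove.
    Ok(())
}

fn main() -> io::Result<()> {
    rm_rf(&std::env::temp_dir().join("cargo-rm-rf-demo"))
}
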

View File

@ -27,12 +27,12 @@ use std::default::Default;
use std::path::PathBuf;
use std::sync::Arc;
use core::{Source, Package, Target};
use core::{Profile, TargetKind, Profiles, Workspace, PackageId, PackageIdSpec};
use core::resolver::{Resolve, Method};
use ops::{self, BuildOutput, Executor, DefaultExecutor};
use core::{Package, Source, Target};
use core::{PackageId, PackageIdSpec, Profile, Profiles, TargetKind, Workspace};
use core::resolver::{Method, Resolve};
use ops::{self, BuildOutput, DefaultExecutor, Executor};
use util::config::Config;
use util::{CargoResult, profile};
use util::{profile, CargoResult};
/// Contains information about how a package should be compiled.
#[derive(Debug)]
@ -67,8 +67,7 @@ pub struct CompileOptions<'a> {
}
impl<'a> CompileOptions<'a> {
pub fn default(config: &'a Config, mode: CompileMode) -> CompileOptions<'a>
{
pub fn default(config: &'a Config, mode: CompileMode) -> CompileOptions<'a> {
CompileOptions {
config,
jobs: None,
@ -79,7 +78,9 @@ impl<'a> CompileOptions<'a> {
spec: ops::Packages::Packages(Vec::new()),
mode,
release: false,
filter: CompileFilter::Default { required_features_filterable: false },
filter: CompileFilter::Default {
required_features_filterable: false,
},
message_format: MessageFormat::Human,
target_rustdoc_args: None,
target_rustc_args: None,
@ -100,7 +101,7 @@ pub enum CompileMode {
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum MessageFormat {
Human,
Json
Json,
}
#[derive(Clone, PartialEq, Eq, Debug)]
@ -112,9 +113,7 @@ pub enum Packages {
}
impl Packages {
pub fn from_flags(all: bool, exclude: Vec<String>, package: Vec<String>)
-> CargoResult<Self>
{
pub fn from_flags(all: bool, exclude: Vec<String>, package: Vec<String>) -> CargoResult<Self> {
Ok(match (all, exclude.len(), package.len()) {
(false, 0, 0) => Packages::Default,
(false, 0, _) => Packages::Packages(package),
@ -126,39 +125,36 @@ impl Packages {
pub fn into_package_id_specs(&self, ws: &Workspace) -> CargoResult<Vec<PackageIdSpec>> {
let specs = match *self {
Packages::All => {
ws.members()
.map(Package::package_id)
.map(PackageIdSpec::from_package_id)
.collect()
}
Packages::OptOut(ref opt_out) => {
ws.members()
.map(Package::package_id)
.map(PackageIdSpec::from_package_id)
.filter(|p| opt_out.iter().position(|x| *x == p.name()).is_none())
.collect()
}
Packages::Packages(ref packages) if packages.is_empty() => {
ws.current_opt()
.map(Package::package_id)
.map(PackageIdSpec::from_package_id)
.into_iter().collect()
}
Packages::Packages(ref packages) => {
packages.iter().map(|p| PackageIdSpec::parse(p)).collect::<CargoResult<Vec<_>>>()?
}
Packages::Default => {
ws.default_members()
.map(Package::package_id)
.map(PackageIdSpec::from_package_id)
.collect()
}
Packages::All => ws.members()
.map(Package::package_id)
.map(PackageIdSpec::from_package_id)
.collect(),
Packages::OptOut(ref opt_out) => ws.members()
.map(Package::package_id)
.map(PackageIdSpec::from_package_id)
.filter(|p| opt_out.iter().position(|x| *x == p.name()).is_none())
.collect(),
Packages::Packages(ref packages) if packages.is_empty() => ws.current_opt()
.map(Package::package_id)
.map(PackageIdSpec::from_package_id)
.into_iter()
.collect(),
Packages::Packages(ref packages) => packages
.iter()
.map(|p| PackageIdSpec::parse(p))
.collect::<CargoResult<Vec<_>>>()?,
Packages::Default => ws.default_members()
.map(Package::package_id)
.map(PackageIdSpec::from_package_id)
.collect(),
};
if specs.is_empty() {
if ws.is_virtual() {
bail!("manifest path `{}` contains no package: The manifest is virtual, \
and the workspace has no members.", ws.root().display())
bail!(
"manifest path `{}` contains no package: The manifest is virtual, \
and the workspace has no members.",
ws.root().display()
)
}
bail!("no packages to compile")
}
@ -185,25 +181,30 @@ pub enum CompileFilter {
examples: FilterRule,
tests: FilterRule,
benches: FilterRule,
}
},
}
pub fn compile<'a>(ws: &Workspace<'a>, options: &CompileOptions<'a>)
-> CargoResult<ops::Compilation<'a>> {
pub fn compile<'a>(
ws: &Workspace<'a>,
options: &CompileOptions<'a>,
) -> CargoResult<ops::Compilation<'a>> {
compile_with_exec(ws, options, Arc::new(DefaultExecutor))
}
pub fn compile_with_exec<'a>(ws: &Workspace<'a>,
options: &CompileOptions<'a>,
exec: Arc<Executor>)
-> CargoResult<ops::Compilation<'a>> {
pub fn compile_with_exec<'a>(
ws: &Workspace<'a>,
options: &CompileOptions<'a>,
exec: Arc<Executor>,
) -> CargoResult<ops::Compilation<'a>> {
for member in ws.members() {
for warning in member.manifest().warnings().iter() {
if warning.is_critical {
let err = format_err!("{}", warning.message);
let cx = format_err!("failed to parse manifest at `{}`",
member.manifest_path().display());
return Err(err.context(cx).into())
let cx = format_err!(
"failed to parse manifest at `{}`",
member.manifest_path().display()
);
return Err(err.context(cx).into());
} else {
options.config.shell().warn(&warning.message)?
}
@ -212,17 +213,27 @@ pub fn compile_with_exec<'a>(ws: &Workspace<'a>,
compile_ws(ws, None, options, exec)
}
pub fn compile_ws<'a>(ws: &Workspace<'a>,
source: Option<Box<Source + 'a>>,
options: &CompileOptions<'a>,
exec: Arc<Executor>)
-> CargoResult<ops::Compilation<'a>> {
let CompileOptions { config, jobs, ref target, ref spec, ref features,
all_features, no_default_features,
release, mode, message_format,
ref filter,
ref target_rustdoc_args,
ref target_rustc_args } = *options;
pub fn compile_ws<'a>(
ws: &Workspace<'a>,
source: Option<Box<Source + 'a>>,
options: &CompileOptions<'a>,
exec: Arc<Executor>,
) -> CargoResult<ops::Compilation<'a>> {
let CompileOptions {
config,
jobs,
ref target,
ref spec,
ref features,
all_features,
no_default_features,
release,
mode,
message_format,
ref filter,
ref target_rustdoc_args,
ref target_rustc_args,
} = *options;
let target = target.clone();
@ -240,69 +251,68 @@ pub fn compile_ws<'a>(ws: &Workspace<'a>,
all_features,
uses_default_features: !no_default_features,
};
let resolve = ops::resolve_ws_with_method(ws,
source,
method,
&specs,
)?;
let resolve = ops::resolve_ws_with_method(ws, source, method, &specs)?;
let (packages, resolve_with_overrides) = resolve;
let to_builds = specs.iter().map(|p| {
let pkgid = p.query(resolve_with_overrides.iter())?;
let p = packages.get(pkgid)?;
p.manifest().print_teapot(ws.config());
Ok(p)
}).collect::<CargoResult<Vec<_>>>()?;
let to_builds = specs
.iter()
.map(|p| {
let pkgid = p.query(resolve_with_overrides.iter())?;
let p = packages.get(pkgid)?;
p.manifest().print_teapot(ws.config());
Ok(p)
})
.collect::<CargoResult<Vec<_>>>()?;
let mut general_targets = Vec::new();
let mut package_targets = Vec::new();
match (target_rustc_args, target_rustdoc_args) {
(&Some(..), _) |
(_, &Some(..)) if to_builds.len() != 1 => {
(&Some(..), _) | (_, &Some(..)) if to_builds.len() != 1 => {
panic!("`rustc` and `rustdoc` should not accept multiple `-p` flags")
}
(&Some(ref args), _) => {
let all_features = resolve_all_features(&resolve_with_overrides,
to_builds[0].package_id());
let targets = generate_targets(to_builds[0], profiles,
mode, filter, &all_features, release)?;
let all_features =
resolve_all_features(&resolve_with_overrides, to_builds[0].package_id());
let targets =
generate_targets(to_builds[0], profiles, mode, filter, &all_features, release)?;
if targets.len() == 1 {
let (target, profile) = targets[0];
let mut profile = profile.clone();
profile.rustc_args = Some(args.to_vec());
general_targets.push((target, profile));
} else {
bail!("extra arguments to `rustc` can only be passed to one \
target, consider filtering\nthe package by passing \
e.g. `--lib` or `--bin NAME` to specify a single target")
bail!(
"extra arguments to `rustc` can only be passed to one \
target, consider filtering\nthe package by passing \
e.g. `--lib` or `--bin NAME` to specify a single target"
)
}
}
(&None, &Some(ref args)) => {
let all_features = resolve_all_features(&resolve_with_overrides,
to_builds[0].package_id());
let targets = generate_targets(to_builds[0], profiles,
mode, filter, &all_features, release)?;
let all_features =
resolve_all_features(&resolve_with_overrides, to_builds[0].package_id());
let targets =
generate_targets(to_builds[0], profiles, mode, filter, &all_features, release)?;
if targets.len() == 1 {
let (target, profile) = targets[0];
let mut profile = profile.clone();
profile.rustdoc_args = Some(args.to_vec());
general_targets.push((target, profile));
} else {
bail!("extra arguments to `rustdoc` can only be passed to one \
target, consider filtering\nthe package by passing e.g. \
`--lib` or `--bin NAME` to specify a single target")
}
}
(&None, &None) => {
for &to_build in to_builds.iter() {
let all_features = resolve_all_features(&resolve_with_overrides,
to_build.package_id());
let targets = generate_targets(to_build, profiles, mode,
filter, &all_features, release)?;
package_targets.push((to_build, targets));
bail!(
"extra arguments to `rustdoc` can only be passed to one \
target, consider filtering\nthe package by passing e.g. \
`--lib` or `--bin NAME` to specify a single target"
)
}
}
(&None, &None) => for &to_build in to_builds.iter() {
let all_features = resolve_all_features(&resolve_with_overrides, to_build.package_id());
let targets =
generate_targets(to_build, profiles, mode, filter, &all_features, release)?;
package_targets.push((to_build, targets));
},
};
for &(target, ref profile) in &general_targets {
@ -321,23 +331,26 @@ pub fn compile_ws<'a>(ws: &Workspace<'a>,
build_config.doc_all = deps;
}
ops::compile_targets(ws,
&package_targets,
&packages,
&resolve_with_overrides,
config,
build_config,
profiles,
&exec)?
ops::compile_targets(
ws,
&package_targets,
&packages,
&resolve_with_overrides,
config,
build_config,
profiles,
&exec,
)?
};
ret.to_doc_test = to_builds.into_iter().cloned().collect();
return Ok(ret);
fn resolve_all_features(resolve_with_overrides: &Resolve,
package_id: &PackageId)
-> HashSet<String> {
fn resolve_all_features(
resolve_with_overrides: &Resolve,
package_id: &PackageId,
) -> HashSet<String> {
let mut features = resolve_with_overrides.features(package_id).clone();
// Include features enabled for use by dependencies so targets can also use them with the
@ -365,9 +378,7 @@ impl FilterRule {
fn matches(&self, target: &Target) -> bool {
match *self {
FilterRule::All => true,
FilterRule::Just(ref targets) => {
targets.iter().any(|x| *x == target.name())
},
FilterRule::Just(ref targets) => targets.iter().any(|x| *x == target.name()),
}
}
@ -387,12 +398,18 @@ impl FilterRule {
}
impl CompileFilter {
pub fn new(lib_only: bool,
bins: Vec<String>, all_bins: bool,
tsts: Vec<String>, all_tsts: bool,
exms: Vec<String>, all_exms: bool,
bens: Vec<String>, all_bens: bool,
all_targets: bool) -> CompileFilter {
pub fn new(
lib_only: bool,
bins: Vec<String>,
all_bins: bool,
tsts: Vec<String>,
all_tsts: bool,
exms: Vec<String>,
all_exms: bool,
bens: Vec<String>,
all_bens: bool,
all_targets: bool,
) -> CompileFilter {
let rule_bins = FilterRule::new(bins, all_bins);
let rule_tsts = FilterRule::new(tsts, all_tsts);
let rule_exms = FilterRule::new(exms, all_exms);
@ -401,16 +418,21 @@ impl CompileFilter {
if all_targets {
CompileFilter::Only {
all_targets: true,
lib: true, bins: FilterRule::All,
examples: FilterRule::All, benches: FilterRule::All,
lib: true,
bins: FilterRule::All,
examples: FilterRule::All,
benches: FilterRule::All,
tests: FilterRule::All,
}
} else if lib_only || rule_bins.is_specific() || rule_tsts.is_specific()
|| rule_exms.is_specific() || rule_bens.is_specific() {
|| rule_exms.is_specific() || rule_bens.is_specific()
{
CompileFilter::Only {
all_targets: false,
lib: lib_only, bins: rule_bins,
examples: rule_exms, benches: rule_bens,
lib: lib_only,
bins: rule_bins,
examples: rule_exms,
benches: rule_bens,
tests: rule_tsts,
}
} else {
@ -423,21 +445,31 @@ impl CompileFilter {
pub fn need_dev_deps(&self) -> bool {
match *self {
CompileFilter::Default { .. } => true,
CompileFilter::Only { ref examples, ref tests, ref benches, .. } =>
examples.is_specific() || tests.is_specific() || benches.is_specific()
CompileFilter::Only {
ref examples,
ref tests,
ref benches,
..
} => examples.is_specific() || tests.is_specific() || benches.is_specific(),
}
}
pub fn matches(&self, target: &Target) -> bool {
match *self {
CompileFilter::Default { .. } => true,
CompileFilter::Only { lib, ref bins, ref examples, ref tests, ref benches, .. } => {
CompileFilter::Only {
lib,
ref bins,
ref examples,
ref tests,
ref benches,
..
} => {
let rule = match *target.kind() {
TargetKind::Bin => bins,
TargetKind::Test => tests,
TargetKind::Bench => benches,
TargetKind::ExampleBin |
TargetKind::ExampleLib(..) => examples,
TargetKind::ExampleBin | TargetKind::ExampleLib(..) => examples,
TargetKind::Lib(..) => return lib,
TargetKind::CustomBuild => return false,
};
@ -461,30 +493,33 @@ struct BuildProposal<'a> {
required: bool,
}
fn generate_auto_targets<'a>(mode: CompileMode, targets: &'a [Target],
profile: &'a Profile,
dep: &'a Profile,
required_features_filterable: bool) -> Vec<BuildProposal<'a>> {
fn generate_auto_targets<'a>(
mode: CompileMode,
targets: &'a [Target],
profile: &'a Profile,
dep: &'a Profile,
required_features_filterable: bool,
) -> Vec<BuildProposal<'a>> {
match mode {
CompileMode::Bench => {
targets.iter().filter(|t| t.benched()).map(|t| {
BuildProposal {
target: t,
profile,
required: !required_features_filterable,
}
}).collect::<Vec<_>>()
}
CompileMode::Bench => targets
.iter()
.filter(|t| t.benched())
.map(|t| BuildProposal {
target: t,
profile,
required: !required_features_filterable,
})
.collect::<Vec<_>>(),
CompileMode::Test => {
let mut base = targets.iter().filter(|t| {
t.tested()
}).map(|t| {
BuildProposal {
let mut base = targets
.iter()
.filter(|t| t.tested())
.map(|t| BuildProposal {
target: t,
profile: if t.is_example() {dep} else {profile},
profile: if t.is_example() { dep } else { profile },
required: !required_features_filterable,
}
}).collect::<Vec<_>>();
})
.collect::<Vec<_>>();
// Always compile the library if we're testing everything as
// it'll be needed for doctests
@ -499,35 +534,37 @@ fn generate_auto_targets<'a>(mode: CompileMode, targets: &'a [Target],
}
base
}
CompileMode::Build | CompileMode::Check{..} => {
targets.iter().filter(|t| {
t.is_bin() || t.is_lib()
}).map(|t| BuildProposal {
CompileMode::Build | CompileMode::Check { .. } => targets
.iter()
.filter(|t| t.is_bin() || t.is_lib())
.map(|t| BuildProposal {
target: t,
profile,
required: !required_features_filterable,
}).collect()
}
CompileMode::Doc { .. } => {
targets.iter().filter(|t| {
t.documented() && (
!t.is_bin() ||
!targets.iter().any(|l| l.is_lib() && l.name() == t.name())
)
}).map(|t| BuildProposal {
})
.collect(),
CompileMode::Doc { .. } => targets
.iter()
.filter(|t| {
t.documented()
&& (!t.is_bin() || !targets.iter().any(|l| l.is_lib() && l.name() == t.name()))
})
.map(|t| BuildProposal {
target: t,
profile,
required: !required_features_filterable,
}).collect()
}
})
.collect(),
CompileMode::Doctest => {
if let Some(t) = targets.iter().find(|t| t.is_lib()) {
if t.doctested() {
return vec![BuildProposal {
target: t,
profile,
required: !required_features_filterable,
}];
return vec![
BuildProposal {
target: t,
profile,
required: !required_features_filterable,
},
];
}
}
@ -537,28 +574,31 @@ fn generate_auto_targets<'a>(mode: CompileMode, targets: &'a [Target],
}
/// Given a filter rule and some context, propose a list of targets
fn propose_indicated_targets<'a>(pkg: &'a Package,
rule: &FilterRule,
desc: &'static str,
is_expected_kind: fn(&Target) -> bool,
profile: &'a Profile) -> CargoResult<Vec<BuildProposal<'a>>> {
fn propose_indicated_targets<'a>(
pkg: &'a Package,
rule: &FilterRule,
desc: &'static str,
is_expected_kind: fn(&Target) -> bool,
profile: &'a Profile,
) -> CargoResult<Vec<BuildProposal<'a>>> {
match *rule {
FilterRule::All => {
let result = pkg.targets().iter().filter(|t| is_expected_kind(t)).map(|t| {
BuildProposal {
let result = pkg.targets()
.iter()
.filter(|t| is_expected_kind(t))
.map(|t| BuildProposal {
target: t,
profile,
required: false,
}
});
});
Ok(result.collect())
}
FilterRule::Just(ref names) => {
let mut targets = Vec::new();
for name in names {
let target = pkg.targets().iter().find(|t| {
t.name() == *name && is_expected_kind(t)
});
let target = pkg.targets()
.iter()
.find(|t| t.name() == *name && is_expected_kind(t));
let t = match target {
Some(t) => t,
None => {
@ -566,8 +606,12 @@ fn propose_indicated_targets<'a>(pkg: &'a Package,
match suggestion {
Some(s) => {
let suggested_name = s.name();
bail!("no {} target named `{}`\n\nDid you mean `{}`?",
desc, name, suggested_name)
bail!(
"no {} target named `{}`\n\nDid you mean `{}`?",
desc,
name,
suggested_name
)
}
None => bail!("no {} target named `{}`", desc, name),
}
@ -586,9 +630,10 @@ fn propose_indicated_targets<'a>(pkg: &'a Package,
}
/// Collect the targets that are libraries or have all required features available.
fn filter_compatible_targets<'a>(
    mut proposals: Vec<BuildProposal<'a>>,
    features: &HashSet<String>,
) -> CargoResult<Vec<(&'a Target, &'a Profile)>> {
let mut compatible = Vec::with_capacity(proposals.len());
for proposal in proposals.drain(..) {
let unavailable_features = match proposal.target.required_features() {
@ -599,14 +644,17 @@ fn filter_compatible_targets<'a>(mut proposals: Vec<BuildProposal<'a>>,
compatible.push((proposal.target, proposal.profile));
} else if proposal.required {
let required_features = proposal.target.required_features().unwrap();
            let quoted_required_features: Vec<String> = required_features
                .iter()
                .map(|s| format!("`{}`", s))
                .collect();
            bail!(
                "target `{}` requires the features: {}\n\
                 Consider enabling them by passing e.g. `--features=\"{}\"`",
                proposal.target.name(),
                quoted_required_features.join(", "),
                required_features.join(" ")
            );
}
}
Ok(compatible)
@ -614,21 +662,30 @@ fn filter_compatible_targets<'a>(mut proposals: Vec<BuildProposal<'a>>,
/// Given the configuration for a build, this function will generate all
/// target/profile combinations needed to be built.
fn generate_targets<'a>(
    pkg: &'a Package,
    profiles: &'a Profiles,
    mode: CompileMode,
    filter: &CompileFilter,
    features: &HashSet<String>,
    release: bool,
) -> CargoResult<Vec<(&'a Target, &'a Profile)>> {
    let build = if release {
        &profiles.release
    } else {
        &profiles.dev
    };
    let test = if release {
        &profiles.bench
    } else {
        &profiles.test
    };
let profile = match mode {
CompileMode::Test => test,
CompileMode::Bench => &profiles.bench,
CompileMode::Build => build,
        CompileMode::Check { test: false } => &profiles.check,
        CompileMode::Check { test: true } => &profiles.check_test,
CompileMode::Doc { .. } => &profiles.doc,
CompileMode::Doctest => &profiles.doctest,
};
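    // A rough reading of the match above (not new behavior): `--release`
    // swaps the dev/test profiles for their release/bench counterparts, and
    // `check` keeps separate profiles for plain checking versus checking
    // test targets.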
@ -650,15 +707,30 @@ fn generate_targets<'a>(pkg: &'a Package,
};
let targets = match *filter {
        CompileFilter::Default {
            required_features_filterable,
        } => {
let deps = if release {
&profiles.bench_deps
} else {
&profiles.test_deps
};
            generate_auto_targets(
                mode,
                pkg.targets(),
                profile,
                deps,
                required_features_filterable,
            )
}
        CompileFilter::Only {
            all_targets,
            lib,
            ref bins,
            ref examples,
            ref tests,
            ref benches,
        } => {
let mut targets = Vec::new();
if lib {
@ -673,25 +745,45 @@ fn generate_targets<'a>(pkg: &'a Package,
}
}
targets.append(&mut propose_indicated_targets(
                pkg,
                bins,
                "bin",
                Target::is_bin,
                profile,
            )?);
targets.append(&mut propose_indicated_targets(
                pkg,
                examples,
                "example",
                Target::is_example,
                profile,
            )?);
// If --tests was specified, add all targets that would be
// generated by `cargo test`.
let test_filter = match *tests {
FilterRule::All => Target::tested,
                FilterRule::Just(_) => Target::is_test,
};
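            // In other words: a bare `--tests` accepts anything built with
            // `test = true`, while `--test <name>` only matches actual test
            // targets.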
targets.append(&mut propose_indicated_targets(
                pkg,
                tests,
                "test",
                test_filter,
                test_profile,
            )?);
// If --benches was specified, add all targets that would be
// generated by `cargo bench`.
let bench_filter = match *benches {
FilterRule::All => Target::benched,
                FilterRule::Just(_) => Target::is_bench,
};
targets.append(&mut propose_indicated_targets(
                pkg,
                benches,
                "bench",
                bench_filter,
                bench_profile,
            )?);
targets
}
};
@ -707,23 +799,32 @@ fn generate_targets<'a>(pkg: &'a Package,
/// * target.$target.ar
/// * target.$target.linker
/// * target.$target.libfoo.metadata
fn scrape_build_config(
    config: &Config,
    jobs: Option<u32>,
    target: Option<String>,
) -> CargoResult<ops::BuildConfig> {
if jobs.is_some() && config.jobserver_from_env().is_some() {
config.shell().warn("a `-j` argument was passed to Cargo but Cargo is \
also configured with an external jobserver in \
its environment, ignoring the `-j` parameter")?;
config.shell().warn(
"a `-j` argument was passed to Cargo but Cargo is \
also configured with an external jobserver in \
its environment, ignoring the `-j` parameter",
)?;
}
let cfg_jobs = match config.get_i64("build.jobs")? {
Some(v) => {
if v.val <= 0 {
bail!("build.jobs must be positive, but found {} in {}",
v.val, v.definition)
bail!(
"build.jobs must be positive, but found {} in {}",
v.val,
v.definition
)
} else if v.val >= i64::from(u32::max_value()) {
bail!("build.jobs is too large: found {} in {}", v.val,
v.definition)
bail!(
"build.jobs is too large: found {} in {}",
v.val,
v.definition
)
} else {
Some(v.val as u32)
}
@ -747,9 +848,7 @@ fn scrape_build_config(config: &Config,
Ok(base)
}
fn scrape_target_config(config: &Config, triple: &str) -> CargoResult<ops::TargetConfig> {
let key = format!("target.{}", triple);
let mut ret = ops::TargetConfig {
ar: config.get_path(&format!("{}.ar", key))?.map(|v| v.val),
@ -762,9 +861,7 @@ fn scrape_target_config(config: &Config, triple: &str)
};
for (lib_name, value) in table {
match lib_name.as_str() {
"ar" | "linker" | "runner" | "rustflags" => {
continue
},
"ar" | "linker" | "runner" | "rustflags" => continue,
_ => {}
}
@ -781,46 +878,40 @@ fn scrape_target_config(config: &Config, triple: &str)
// We require deterministic order of evaluation, so we must sort the pairs by key first.
let mut pairs = Vec::new();
for (k, value) in value.table(&lib_name)?.0 {
            pairs.push((k, value));
}
        pairs.sort_by_key(|p| p.0);
        for (k, value) in pairs {
let key = format!("{}.{}", key, k);
match &k[..] {
"rustc-flags" => {
let (flags, definition) = value.string(k)?;
let whence = format!("in `{}` (in {})", key,
definition.display());
let (paths, links) =
BuildOutput::parse_rustc_flags(flags, &whence)
?;
let whence = format!("in `{}` (in {})", key, definition.display());
let (paths, links) = BuildOutput::parse_rustc_flags(flags, &whence)?;
output.library_paths.extend(paths);
output.library_links.extend(links);
}
"rustc-link-lib" => {
let list = value.list(k)?;
                    output
                        .library_links
                        .extend(list.iter().map(|v| v.0.clone()));
}
"rustc-link-search" => {
let list = value.list(k)?;
                    output
                        .library_paths
                        .extend(list.iter().map(|v| PathBuf::from(&v.0)));
}
"rustc-cfg" => {
let list = value.list(k)?;
output.cfgs.extend(list.iter().map(|v| v.0.clone()));
}
"rustc-env" => {
for (name, val) in value.table(k)?.0 {
let val = val.string(name)?.0;
output.env.push((name.clone(), val.to_string()));
}
}
"warning" |
"rerun-if-changed" |
"rerun-if-env-changed" => {
"rustc-env" => for (name, val) in value.table(k)?.0 {
let val = val.string(name)?.0;
output.env.push((name.clone(), val.to_string()));
},
"warning" | "rerun-if-changed" | "rerun-if-env-changed" => {
bail!("`{}` is not supported in build script overrides", k);
}
_ => {

View File

@ -14,18 +14,23 @@ pub struct DocOptions<'a> {
pub fn doc(ws: &Workspace, options: &DocOptions) -> CargoResult<()> {
let specs = options.compile_opts.spec.into_package_id_specs(ws)?;
    let resolve = ops::resolve_ws_precisely(
        ws,
        None,
        &options.compile_opts.features,
        options.compile_opts.all_features,
        options.compile_opts.no_default_features,
        &specs,
    )?;
let (packages, resolve_with_overrides) = resolve;
    let pkgs = specs
        .iter()
        .map(|p| {
            let pkgid = p.query(resolve_with_overrides.iter())?;
            packages.get(pkgid)
        })
        .collect::<CargoResult<Vec<_>>>()?;
let mut lib_names = HashMap::new();
let mut bin_names = HashMap::new();
@ -33,16 +38,24 @@ pub fn doc(ws: &Workspace, options: &DocOptions) -> CargoResult<()> {
for target in package.targets().iter().filter(|t| t.documented()) {
if target.is_lib() {
if let Some(prev) = lib_names.insert(target.crate_name(), package) {
bail!("The library `{}` is specified by packages `{}` and \
`{}` but can only be documented once. Consider renaming \
or marking one of the targets as `doc = false`.",
target.crate_name(), prev, package);
bail!(
"The library `{}` is specified by packages `{}` and \
`{}` but can only be documented once. Consider renaming \
or marking one of the targets as `doc = false`.",
target.crate_name(),
prev,
package
);
}
} else if let Some(prev) = bin_names.insert(target.crate_name(), package) {
bail!("The binary `{}` is specified by packages `{}` and \
`{}` but can be documented only once. Consider renaming \
or marking one of the targets as `doc = false`.",
target.crate_name(), prev, package);
bail!(
"The binary `{}` is specified by packages `{}` and \
`{}` but can be documented only once. Consider renaming \
or marking one of the targets as `doc = false`.",
target.crate_name(),
prev,
package
);
}
}
}
@ -51,10 +64,15 @@ pub fn doc(ws: &Workspace, options: &DocOptions) -> CargoResult<()> {
if options.open_result {
let name = if pkgs.len() > 1 {
bail!("Passing multiple packages and `open` is not supported.\n\
Please re-run this command with `-p <spec>` where `<spec>` \
is one of the following:\n {}",
pkgs.iter().map(|p| p.name().to_inner()).collect::<Vec<_>>().join("\n "));
bail!(
"Passing multiple packages and `open` is not supported.\n\
Please re-run this command with `-p <spec>` where `<spec>` \
is one of the following:\n {}",
pkgs.iter()
.map(|p| p.name().to_inner())
.collect::<Vec<_>>()
.join("\n ")
);
} else if pkgs.len() == 1 {
pkgs[0].name().replace("-", "_")
} else {
@ -79,8 +97,7 @@ pub fn doc(ws: &Workspace, options: &DocOptions) -> CargoResult<()> {
match open_docs(&path) {
Ok(m) => shell.status("Launching", m)?,
Err(e) => {
                shell.warn("warning: could not determine a browser to open docs with, tried:")?;
for method in e {
shell.warn(format!("\t{}", method))?;
}

View File

@ -1,4 +1,4 @@
use core::{PackageSet, Resolve, Workspace};
use ops;
use util::CargoResult;

View File

@ -19,21 +19,21 @@ pub struct UpdateOptions<'a> {
pub fn generate_lockfile(ws: &Workspace) -> CargoResult<()> {
let mut registry = PackageRegistry::new(ws.config())?;
    let resolve = ops::resolve_with_previous(
        &mut registry,
        ws,
        Method::Everything,
        None,
        None,
        &[],
        true,
        true,
    )?;
ops::write_pkg_lockfile(ws, &resolve)?;
Ok(())
}
pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions) -> CargoResult<()> {
if opts.aggressive && opts.precise.is_some() {
bail!("cannot specify both aggressive and precise simultaneously")
}
@ -60,8 +60,7 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions)
for name in opts.to_update.iter() {
let dep = previous_resolve.query(name)?;
if opts.aggressive {
            fill_with_deps(&previous_resolve, dep, &mut to_avoid, &mut HashSet::new());
} else {
to_avoid.insert(dep);
sources.push(match opts.precise {
@ -76,23 +75,23 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions)
};
dep.source_id().clone().with_precise(Some(precise))
}
                    None => dep.source_id().clone().with_precise(None),
});
}
}
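        // Net effect of the loop above: `--aggressive` widens the update set
        // to a package's whole dependency subtree, while `--precise` pins the
        // source so resolution can only choose that exact revision or
        // version.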
registry.add_sources(&sources)?;
}
    let resolve = ops::resolve_with_previous(
        &mut registry,
        ws,
        Method::Everything,
        Some(&previous_resolve),
        Some(&to_avoid),
        &[],
        true,
        true,
    )?;
// Summarize what is changing for the user.
let print_change = |status: &str, msg: String, color: Color| {
@ -101,8 +100,11 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions)
for (removed, added) in compare_dependency_graphs(&previous_resolve, &resolve) {
if removed.len() == 1 && added.len() == 1 {
let msg = if removed[0].source_id().is_git() {
format!("{} -> #{}", removed[0],
&added[0].source_id().precise().unwrap()[..8])
format!(
"{} -> #{}",
removed[0],
&added[0].source_id().precise().unwrap()[..8]
)
} else {
format!("{} -> v{}", removed[0], added[0].version())
};
@ -120,11 +122,14 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions)
ops::write_pkg_lockfile(ws, &resolve)?;
return Ok(());
    fn fill_with_deps<'a>(
        resolve: &'a Resolve,
        dep: &'a PackageId,
        set: &mut HashSet<&'a PackageId>,
        visited: &mut HashSet<&'a PackageId>,
    ) {
if !visited.insert(dep) {
            return;
}
set.insert(dep);
for dep in resolve.deps(dep) {
@ -132,9 +137,10 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions)
}
}
    fn compare_dependency_graphs<'a>(
        previous_resolve: &'a Resolve,
        resolve: &'a Resolve,
    ) -> Vec<(Vec<&'a PackageId>, Vec<&'a PackageId>)> {
fn key(dep: &PackageId) -> (&str, &SourceId) {
(dep.name().to_inner(), dep.source_id())
}
@ -143,41 +149,52 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions)
// more complicated because the equality for source ids does not take
// precise versions into account (e.g. git shas), but we want to take
// that into account here.
    fn vec_subtract<'a>(a: &[&'a PackageId], b: &[&'a PackageId]) -> Vec<&'a PackageId> {
        a.iter()
            .filter(|a| {
                // If this package id is not found in `b`, then it's definitely
                // in the subtracted set
                let i = match b.binary_search(a) {
                    Ok(i) => i,
                    Err(..) => return true,
                };

                // If we've found `a` in `b`, then we iterate over all instances
                // (we know `b` is sorted) and see if they all have different
                // precise versions. If so, then `a` isn't actually in `b` so
                // we'll let it through.
                //
                // Note that we only check this for non-registry sources,
                // however, as registries contain enough version information in
                // the package id to disambiguate
                if a.source_id().is_registry() {
                    return false;
                }
                b[i..]
                    .iter()
                    .take_while(|b| a == b)
                    .all(|b| a.source_id().precise() != b.source_id().precise())
            })
            .cloned()
            .collect()
}
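    // Hedged sketch of the intent (made-up package ids, for illustration
    // only): vec_subtract([foo@git#rev2], [foo@git#rev1]) keeps foo@git#rev2,
    // because plain `PackageId` equality ignores the precise git revision and
    // only the precise field tells the two apart.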
// Map (package name, package source) to (removed versions, added versions).
let mut changes = BTreeMap::new();
let empty = (Vec::new(), Vec::new());
for dep in previous_resolve.iter() {
        changes
            .entry(key(dep))
            .or_insert_with(|| empty.clone())
            .0
            .push(dep);
}
for dep in resolve.iter() {
        changes
            .entry(key(dep))
            .or_insert_with(|| empty.clone())
            .1
            .push(dep);
}
for v in changes.values_mut() {

View File

@ -10,12 +10,12 @@ use semver::{Version, VersionReq};
use tempdir::TempDir;
use toml;
use core::{Dependency, Package, PackageIdSpec, Source, SourceId};
use core::{PackageId, Workspace};
use ops::{self, CompileFilter, DefaultExecutor};
use sources::{GitSource, PathSource, SourceConfigMap};
use util::{internal, Config};
use util::{FileLock, Filesystem};
use util::errors::{CargoResult, CargoResultExt};
use util::paths;
@ -53,18 +53,28 @@ impl Drop for Transaction {
}
}
pub fn install(
    root: Option<&str>,
    krates: Vec<&str>,
    source_id: &SourceId,
    vers: Option<&str>,
    opts: &ops::CompileOptions,
    force: bool,
) -> CargoResult<()> {
let root = resolve_root(root, opts.config)?;
let map = SourceConfigMap::new(opts.config)?;
let (installed_anything, scheduled_error) = if krates.len() <= 1 {
        install_one(
            &root,
            &map,
            krates.into_iter().next(),
            source_id,
            vers,
            opts,
            force,
            true,
        )?;
(true, false)
} else {
let mut succeeded = vec![];
@ -73,8 +83,16 @@ pub fn install(root: Option<&str>,
for krate in krates {
let root = root.clone();
let map = map.clone();
            match install_one(
                &root,
                &map,
                Some(krate),
                source_id,
                vers,
                opts,
                force,
                first,
            ) {
Ok(()) => succeeded.push(krate),
Err(e) => {
::handle_error(e, &mut opts.config.shell());
@ -89,7 +107,10 @@ pub fn install(root: Option<&str>,
summary.push(format!("Successfully installed {}!", succeeded.join(", ")));
}
if !failed.is_empty() {
summary.push(format!("Failed to install {} (see error(s) above).", failed.join(", ")));
summary.push(format!(
"Failed to install {} (see error(s) above).",
failed.join(", ")
));
}
if !succeeded.is_empty() || !failed.is_empty() {
opts.config.shell().status("Summary", summary.join(" "))?;
@ -105,13 +126,15 @@ pub fn install(root: Option<&str>,
let path = env::var_os("PATH").unwrap_or_default();
for path in env::split_paths(&path) {
if path == dst {
                return Ok(());
}
}
opts.config.shell().warn(&format!("be sure to add `{}` to your PATH to be \
able to run the installed binaries",
dst.display()))?;
opts.config.shell().warn(&format!(
"be sure to add `{}` to your PATH to be \
able to run the installed binaries",
dst.display()
))?;
}
if scheduled_error {
@ -121,42 +144,64 @@ pub fn install(root: Option<&str>,
Ok(())
}
fn install_one(
    root: &Filesystem,
    map: &SourceConfigMap,
    krate: Option<&str>,
    source_id: &SourceId,
    vers: Option<&str>,
    opts: &ops::CompileOptions,
    force: bool,
    is_first_install: bool,
) -> CargoResult<()> {
let config = opts.config;
let (pkg, source) = if source_id.is_git() {
        select_pkg(
            GitSource::new(source_id, config)?,
            krate,
            vers,
            config,
            is_first_install,
            &mut |git| git.read_packages(),
        )?
} else if source_id.is_path() {
        let path = source_id
            .url()
            .to_file_path()
            .map_err(|()| format_err!("path sources must have a valid path"))?;
let mut src = PathSource::new(&path, source_id, config);
src.update().chain_err(|| {
format_err!("`{}` is not a crate root; specify a crate to \
install from crates.io, or use --path or --git to \
specify an alternate source", path.display())
format_err!(
"`{}` is not a crate root; specify a crate to \
install from crates.io, or use --path or --git to \
specify an alternate source",
path.display()
)
})?;
        select_pkg(
            PathSource::new(&path, source_id, config),
            krate,
            vers,
            config,
            is_first_install,
            &mut |path| path.read_packages(),
        )?
} else {
        select_pkg(
            map.load(source_id)?,
            krate,
            vers,
            config,
            is_first_install,
            &mut |_| {
                bail!(
                    "must specify a crate to install from \
                     crates.io, or use --path or --git to \
                     specify alternate source"
                )
            },
        )?
};
let mut td_opt = None;
@ -196,36 +241,43 @@ fn install_one(root: &Filesystem,
check_overwrites(&dst, pkg, &opts.filter, &list, force)?;
}
    let compile =
        ops::compile_ws(&ws, Some(source), opts, Arc::new(DefaultExecutor)).chain_err(|| {
            if let Some(td) = td_opt.take() {
                // preserve the temporary directory, so the user can inspect it
                td.into_path();
            }
            format_err!(
                "failed to compile `{}`, intermediate artifacts can be \
                 found at `{}`",
                pkg,
                ws.target_dir().display()
            )
        })?;
    let binaries: Vec<(&str, &Path)> = compile
        .binaries
        .iter()
        .map(|bin| {
            let name = bin.file_name().unwrap();
            if let Some(s) = name.to_str() {
                Ok((s, bin.as_ref()))
            } else {
                bail!("Binary `{:?}` name can't be serialized into string", name)
            }
        })
        .collect::<CargoResult<_>>()?;
if binaries.is_empty() {
bail!("no binaries are available for install using the selected \
features");
bail!(
"no binaries are available for install using the selected \
features"
);
}
let metadata = metadata(config, root)?;
let mut list = read_crate_list(&metadata)?;
let dst = metadata.parent().join("bin");
    let duplicates = check_overwrites(&dst, pkg, &opts.filter, &list, force)?;
fs::create_dir_all(&dst)?;
@ -237,17 +289,17 @@ fn install_one(root: &Filesystem,
let dst = staging_dir.path().join(bin);
// Try to move if `target_dir` is transient.
if !source_id.is_path() && fs::rename(src, &dst).is_ok() {
            continue;
}
fs::copy(src, &dst).chain_err(|| {
format_err!("failed to copy `{}` to `{}`", src.display(),
dst.display())
format_err!("failed to copy `{}` to `{}`", src.display(), dst.display())
})?;
}
    let (to_replace, to_install): (Vec<&str>, Vec<&str>) = binaries
        .iter()
        .map(|&(bin, _)| bin)
        .partition(|&bin| duplicates.contains_key(bin));
let mut installed = Transaction { bins: Vec::new() };
@ -257,8 +309,7 @@ fn install_one(root: &Filesystem,
let dst = dst.join(bin);
config.shell().status("Installing", dst.display())?;
fs::rename(&src, &dst).chain_err(|| {
format_err!("failed to move `{}` to `{}`", src.display(),
dst.display())
format_err!("failed to move `{}` to `{}`", src.display(), dst.display())
})?;
installed.bins.push(dst);
}
@ -273,8 +324,7 @@ fn install_one(root: &Filesystem,
let dst = dst.join(bin);
config.shell().status("Replacing", dst.display())?;
fs::rename(&src, &dst).chain_err(|| {
format_err!("failed to move `{}` to `{}`", src.display(),
dst.display())
format_err!("failed to move `{}` to `{}`", src.display(), dst.display())
})?;
replaced_names.push(bin);
}
@ -290,24 +340,33 @@ fn install_one(root: &Filesystem,
set.remove(bin);
}
}
        list.v1
            .entry(pkg.package_id().clone())
            .or_insert_with(BTreeSet::new)
            .insert(bin.to_string());
}
// Remove empty metadata lines.
    let pkgs = list.v1
        .iter()
        .filter_map(|(p, set)| {
            if set.is_empty() {
                Some(p.clone())
            } else {
                None
            }
        })
        .collect::<Vec<_>>();
for p in pkgs.iter() {
list.v1.remove(p);
}
// If installation was successful record newly installed binaries.
if result.is_ok() {
        list.v1
            .entry(pkg.package_id().clone())
            .or_insert_with(BTreeSet::new)
            .extend(to_install.iter().map(|s| s.to_string()));
}
let write_result = write_crate_list(&metadata, list);
@ -330,14 +389,16 @@ fn install_one(root: &Filesystem,
Ok(())
}
fn select_pkg<'a, T>(
    mut source: T,
    name: Option<&str>,
    vers: Option<&str>,
    config: &Config,
    needs_update: bool,
    list_all: &mut FnMut(&mut T) -> CargoResult<Vec<Package>>,
) -> CargoResult<(Package, Box<Source + 'a>)>
where
    T: Source + 'a,
{
if needs_update {
source.update()?;
@ -347,47 +408,52 @@ fn select_pkg<'a, T>(mut source: T,
Some(name) => {
let vers = match vers {
Some(v) => {
// If the version begins with character <, >, =, ^, ~ parse it as a
// version range, otherwise parse it as a specific version
let first = v.chars()
.nth(0)
                        .ok_or_else(|| format_err!("no version provided for the `--vers` flag"))?;
match first {
'<' | '>' | '=' | '^' | '~' => match v.parse::<VersionReq>() {
Ok(v) => Some(v.to_string()),
                            Err(_) => bail!(
                                "the `--vers` provided, `{}`, is \
                                 not a valid semver version requirement\n\n
                                 Please have a look at \
                                 http://doc.crates.io/specifying-dependencies.html \
                                 for the correct format",
                                v
                            ),
                        },
_ => match v.parse::<Version>() {
Ok(v) => Some(format!("={}", v)),
Err(_) => {
                                let mut msg = format!(
                                    "\
                                     the `--vers` provided, `{}`, is \
                                     not a valid semver version\n\n\
                                     historically Cargo treated this \
                                     as a semver version requirement \
                                     accidentally\nand will continue \
                                     to do so, but this behavior \
                                     will be removed eventually",
                                    v
                                );
// If it is not a valid version but it is a valid version
// requirement, add a note to the warning
if v.parse::<VersionReq>().is_ok() {
                                    msg.push_str(&format!(
                                        "\nif you want to specify semver range, \
                                         add an explicit qualifier, like ^{}",
                                        v
                                    ));
}
config.shell().warn(&msg)?;
Some(v.to_string())
                            }
                        },
                    }
                }
None => None,
@ -402,45 +468,55 @@ fn select_pkg<'a, T>(mut source: T,
}
None => {
let vers_info = vers.map(|v| format!(" with version `{}`", v))
                        .unwrap_or_default();
                    Err(format_err!(
                        "could not find `{}` in {}{}",
                        name,
                        source.source_id(),
                        vers_info
                    ))
}
}
}
None => {
let candidates = list_all(&mut source)?;
            let binaries = candidates
                .iter()
                .filter(|cand| cand.targets().iter().filter(|t| t.is_bin()).count() > 0);
            let examples = candidates
                .iter()
                .filter(|cand| cand.targets().iter().filter(|t| t.is_example()).count() > 0);
let pkg = match one(binaries, |v| multi_err("binaries", v))? {
Some(p) => p,
                None => match one(examples, |v| multi_err("examples", v))? {
                    Some(p) => p,
                    None => bail!(
                        "no packages found with binaries or \
                         examples"
                    ),
                },
};
return Ok((pkg.clone(), Box::new(source)));
fn multi_err(kind: &str, mut pkgs: Vec<&Package>) -> String {
pkgs.sort_by(|a, b| a.name().cmp(&b.name()));
format!("multiple packages with {} found: {}", kind,
pkgs.iter().map(|p| p.name().to_inner()).collect::<Vec<_>>()
.join(", "))
format!(
"multiple packages with {} found: {}",
kind,
pkgs.iter()
.map(|p| p.name().to_inner())
.collect::<Vec<_>>()
.join(", ")
)
}
}
}
}
fn one<I, F>(mut i: I, f: F) -> CargoResult<Option<I::Item>>
where
    I: Iterator,
    F: FnOnce(Vec<I::Item>) -> String,
{
match (i.next(), i.next()) {
(Some(i1), Some(i2)) => {
@ -449,15 +525,17 @@ fn one<I, F>(mut i: I, f: F) -> CargoResult<Option<I::Item>>
Err(format_err!("{}", f(v)))
}
(Some(i), None) => Ok(Some(i)),
        (None, _) => Ok(None),
}
}
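// Rough usage sketch (hypothetical values, for illustration only): with an
// iterator yielding one candidate, `one` returns Ok(Some(candidate)); with
// two or more it drains the leftovers, hands them to `f` to build the error
// message, and returns Err; with none it returns Ok(None).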
fn check_overwrites(
    dst: &Path,
    pkg: &Package,
    filter: &ops::CompileFilter,
    prev: &CrateListingV1,
    force: bool,
) -> CargoResult<BTreeMap<String, Option<PackageId>>> {
// If explicit --bin or --example flags were passed then those'll
// get checked during cargo_compile, we only care about the "build
// everything" case here
@ -466,7 +544,7 @@ fn check_overwrites(dst: &Path,
}
let duplicates = find_duplicates(dst, pkg, filter, prev);
if force || duplicates.is_empty() {
        return Ok(duplicates);
}
// Format the error message.
let mut msg = String::new();
@ -482,10 +560,12 @@ fn check_overwrites(dst: &Path,
Err(format_err!("{}", msg))
}
fn find_duplicates(
    dst: &Path,
    pkg: &Package,
    filter: &ops::CompileFilter,
    prev: &CrateListingV1,
) -> BTreeMap<String, Option<PackageId>> {
let check = |name: String| {
// Need to provide type, works around Rust Issue #93349
let name = format!("{}{}", name, env::consts::EXE_SUFFIX);
@ -498,27 +578,36 @@ fn find_duplicates(dst: &Path,
}
};
match *filter {
        CompileFilter::Default { .. } => pkg.targets()
            .iter()
            .filter(|t| t.is_bin())
            .filter_map(|t| check(t.name().to_string()))
            .collect(),
        CompileFilter::Only {
            ref bins,
            ref examples,
            ..
        } => {
let all_bins: Vec<String> = bins.try_collect().unwrap_or_else(|| {
                pkg.targets()
                    .iter()
                    .filter(|t| t.is_bin())
                    .map(|t| t.name().to_string())
                    .collect()
});
let all_examples: Vec<String> = examples.try_collect().unwrap_or_else(|| {
                pkg.targets()
                    .iter()
                    .filter(|t| t.is_bin_example())
                    .map(|t| t.name().to_string())
                    .collect()
});
            all_bins
                .iter()
                .chain(all_examples.iter())
                .filter_map(|t| check(t.clone()))
                .collect::<BTreeMap<String, Option<PackageId>>>()
}
}
}
@ -527,18 +616,20 @@ fn read_crate_list(file: &FileLock) -> CargoResult<CrateListingV1> {
let listing = (|| -> CargoResult<_> {
let mut contents = String::new();
file.file().read_to_string(&mut contents)?;
        let listing =
            toml::from_str(&contents).chain_err(|| internal("invalid TOML found for metadata"))?;
match listing {
CrateListing::V1(v1) => Ok(v1),
            CrateListing::Empty(_) => Ok(CrateListingV1 {
                v1: BTreeMap::new(),
            }),
}
    })()
        .chain_err(|| {
            format_err!(
                "failed to parse crate metadata at `{}`",
                file.path().to_string_lossy()
            )
        })?;
Ok(listing)
}
@ -551,9 +642,12 @@ fn write_crate_list(file: &FileLock, listing: CrateListingV1) -> CargoResult<()>
let data = toml::to_string(&CrateListing::V1(listing))?;
file.write_all(data.as_bytes())?;
Ok(())
    })()
        .chain_err(|| {
            format_err!(
                "failed to write crate metadata at `{}`",
                file.path().to_string_lossy()
            )
        })?;
Ok(())
}
@ -571,10 +665,12 @@ pub fn install_list(dst: Option<&str>, config: &Config) -> CargoResult<()> {
Ok(())
}
pub fn uninstall(
    root: Option<&str>,
    specs: Vec<&str>,
    bins: &[String],
    config: &Config,
) -> CargoResult<()> {
if specs.len() > 1 && !bins.is_empty() {
bail!("A binary can only be associated with a single installed package, specifying multiple specs with --bin is redundant.");
}
@ -599,10 +695,16 @@ pub fn uninstall(root: Option<&str>,
let mut summary = vec![];
if !succeeded.is_empty() {
summary.push(format!("Successfully uninstalled {}!", succeeded.join(", ")));
summary.push(format!(
"Successfully uninstalled {}!",
succeeded.join(", ")
));
}
if !failed.is_empty() {
summary.push(format!("Failed to uninstall {} (see error(s) above).", failed.join(", ")));
summary.push(format!(
"Failed to uninstall {} (see error(s) above).",
failed.join(", ")
));
}
if !succeeded.is_empty() || !failed.is_empty() {
@ -619,16 +721,17 @@ pub fn uninstall(root: Option<&str>,
Ok(())
}
pub fn uninstall_one(
    root: &Filesystem,
    spec: &str,
    bins: &[String],
    config: &Config,
) -> CargoResult<()> {
let crate_metadata = metadata(config, root)?;
let mut metadata = read_crate_list(&crate_metadata)?;
let mut to_remove = Vec::new();
{
        let result = PackageIdSpec::query_str(spec, metadata.v1.keys())?.clone();
let mut installed = match metadata.v1.entry(result.clone()) {
Entry::Occupied(e) => e,
Entry::Vacant(..) => panic!("entry not found: {}", result),
@ -637,18 +740,22 @@ pub fn uninstall_one(root: &Filesystem,
for bin in installed.get() {
let bin = dst.join(bin);
if fs::metadata(&bin).is_err() {
bail!("corrupt metadata, `{}` does not exist when it should",
bin.display())
bail!(
"corrupt metadata, `{}` does not exist when it should",
bin.display()
)
}
}
        let bins = bins.iter()
            .map(|s| {
                if s.ends_with(env::consts::EXE_SUFFIX) {
                    s.to_string()
                } else {
                    format!("{}{}", s, env::consts::EXE_SUFFIX)
                }
            })
            .collect::<Vec<_>>();
for bin in bins.iter() {
if !installed.get().contains(bin) {
@ -682,14 +789,11 @@ fn metadata(config: &Config, root: &Filesystem) -> CargoResult<FileLock> {
root.open_rw(Path::new(".crates.toml"), config, "crate metadata")
}
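// Precedence sketch for the lookup below (a reading of the chain, not new
// behavior): an explicit --root flag wins, then the CARGO_INSTALL_ROOT
// environment variable, then the `install.root` config value, and finally
// Cargo's home directory.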
fn resolve_root(flag: Option<&str>, config: &Config) -> CargoResult<Filesystem> {
    let config_root = config.get_path("install.root")?;
    Ok(flag.map(PathBuf::from)
        .or_else(|| env::var_os("CARGO_INSTALL_ROOT").map(PathBuf::from))
        .or_else(move || config_root.map(|v| v.val))
        .map(Filesystem::new)
        .unwrap_or_else(|| config.home().clone()))
}

View File

@ -9,14 +9,20 @@ use git2::Repository as GitRepository;
use core::Workspace;
use ops::is_bad_artifact_name;
use util::{internal, FossilRepo, GitRepo, HgRepo, PijulRepo};
use util::{paths, Config};
use util::errors::{CargoResult, CargoResultExt};
use toml;
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum VersionControl {
    Git,
    Hg,
    Pijul,
    Fossil,
    NoVcs,
}
#[derive(Debug)]
pub struct NewOptions {
@ -62,12 +68,13 @@ struct MkOptions<'a> {
}
impl NewOptions {
    pub fn new(
        version_control: Option<VersionControl>,
        bin: bool,
        lib: bool,
        path: String,
        name: Option<String>,
    ) -> CargoResult<NewOptions> {
let kind = match (bin, lib) {
(true, true) => bail!("can't specify both lib and binary outputs"),
(false, true) => NewProjectKind::Lib,
@ -75,7 +82,12 @@ impl NewOptions {
(_, false) => NewProjectKind::Bin,
};
        let opts = NewOptions {
            version_control,
            kind,
            path,
            name,
        };
Ok(opts)
}
}
@ -92,16 +104,21 @@ fn get_name<'a>(path: &'a Path, opts: &'a NewOptions) -> CargoResult<&'a str> {
}
let file_name = path.file_name().ok_or_else(|| {
format_err!("cannot auto-detect project name from path {:?} ; use --name to override", path.as_os_str())
format_err!(
"cannot auto-detect project name from path {:?} ; use --name to override",
path.as_os_str()
)
})?;
file_name.to_str().ok_or_else(|| {
format_err!("cannot create project with a non-unicode name: {:?}", file_name)
format_err!(
"cannot create project with a non-unicode name: {:?}",
file_name
)
})
}
fn check_name(name: &str, opts: &NewOptions) -> CargoResult<()> {
// If --name is already used to override, no point in suggesting it
// again as a fix.
let name_help = match opts.name {
@ -111,45 +128,52 @@ fn check_name(name: &str, opts: &NewOptions) -> CargoResult<()> {
// Ban keywords + test list found at
// https://doc.rust-lang.org/grammar.html#keywords
let blacklist = ["abstract", "alignof", "as", "become", "box",
"break", "const", "continue", "crate", "do",
"else", "enum", "extern", "false", "final",
"fn", "for", "if", "impl", "in",
"let", "loop", "macro", "match", "mod",
"move", "mut", "offsetof", "override", "priv",
"proc", "pub", "pure", "ref", "return",
"self", "sizeof", "static", "struct",
"super", "test", "trait", "true", "type", "typeof",
"unsafe", "unsized", "use", "virtual", "where",
"while", "yield"];
let blacklist = [
"abstract", "alignof", "as", "become", "box", "break", "const", "continue", "crate", "do",
"else", "enum", "extern", "false", "final", "fn", "for", "if", "impl", "in", "let", "loop",
"macro", "match", "mod", "move", "mut", "offsetof", "override", "priv", "proc", "pub",
"pure", "ref", "return", "self", "sizeof", "static", "struct", "super", "test", "trait",
"true", "type", "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", "yield",
];
if blacklist.contains(&name) || (opts.kind.is_bin() && is_bad_artifact_name(name)) {
bail!("The name `{}` cannot be used as a crate name{}",
bail!(
"The name `{}` cannot be used as a crate name{}",
name,
name_help)
name_help
)
}
if let Some(ref c) = name.chars().nth(0) {
if c.is_digit(10) {
bail!("Package names starting with a digit cannot be used as a crate name{}",
name_help)
bail!(
"Package names starting with a digit cannot be used as a crate name{}",
name_help
)
}
}
for c in name.chars() {
        if c.is_alphanumeric() {
            continue;
        }
        if c == '_' || c == '-' {
            continue;
        }
        bail!(
            "Invalid character `{}` in crate name: `{}`{}",
            c,
            name,
            name_help
        )
}
Ok(())
}
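// For illustration (hypothetical inputs): `my-crate` and `my_crate2` pass the
// checks above, while `fn` (a keyword), `2fast` (leading digit), and
// `my.crate` (invalid character) are all rejected.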
fn detect_source_paths_and_types(
    project_path: &Path,
    project_name: &str,
    detected_files: &mut Vec<SourceFileInformation>,
) -> CargoResult<()> {
let path = project_path;
let name = project_name;
@ -165,44 +189,61 @@ fn detect_source_paths_and_types(project_path : &Path,
}
    let tests = vec![
        Test {
            proposed_path: format!("src/main.rs"),
            handling: H::Bin,
        },
        Test {
            proposed_path: format!("main.rs"),
            handling: H::Bin,
        },
        Test {
            proposed_path: format!("src/{}.rs", name),
            handling: H::Detect,
        },
        Test {
            proposed_path: format!("{}.rs", name),
            handling: H::Detect,
        },
        Test {
            proposed_path: format!("src/lib.rs"),
            handling: H::Lib,
        },
        Test {
            proposed_path: format!("lib.rs"),
            handling: H::Lib,
        },
    ];
for i in tests {
let pp = i.proposed_path;
// path/pp does not exist or is not a file
        if !fs::metadata(&path.join(&pp))
            .map(|x| x.is_file())
            .unwrap_or(false)
        {
continue;
}
let sfi = match i.handling {
            H::Bin => SourceFileInformation {
                relative_path: pp,
                target_name: project_name.to_string(),
                bin: true,
            },
            H::Lib => SourceFileInformation {
                relative_path: pp,
                target_name: project_name.to_string(),
                bin: false,
            },
            H::Detect => {
                let content = paths::read(&path.join(pp.clone()))?;
                let isbin = content.contains("fn main");
                SourceFileInformation {
                    relative_path: pp,
                    target_name: project_name.to_string(),
                    bin: isbin,
}
}
};
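        // Note: the H::Detect case above is a deliberately crude heuristic;
        // a candidate file is treated as a binary exactly when its text
        // contains "fn main".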
@ -211,26 +252,32 @@ fn detect_source_paths_and_types(project_path : &Path,
// Check for duplicate lib attempt
    let mut previous_lib_relpath: Option<&str> = None;
    let mut duplicates_checker: BTreeMap<&str, &SourceFileInformation> = BTreeMap::new();
for i in detected_files {
if i.bin {
if let Some(x) = BTreeMap::get::<str>(&duplicates_checker, i.target_name.as_ref()) {
bail!("\
bail!(
"\
multiple possible binary sources found:
{}
{}
cannot automatically generate Cargo.toml as the main target would be ambiguous",
&x.relative_path, &i.relative_path);
&x.relative_path,
&i.relative_path
);
}
duplicates_checker.insert(i.target_name.as_ref(), i);
} else {
if let Some(plp) = previous_lib_relpath {
bail!("cannot have a project with \
multiple libraries, \
found both `{}` and `{}`",
plp, i.relative_path)
bail!(
"cannot have a project with \
multiple libraries, \
found both `{}` and `{}`",
plp,
i.relative_path
)
}
previous_lib_relpath = Some(&i.relative_path);
}
@ -242,15 +289,15 @@ cannot automatically generate Cargo.toml as the main target would be ambiguous",
fn plan_new_source_file(bin: bool, project_name: String) -> SourceFileInformation {
if bin {
SourceFileInformation {
relative_path: "src/main.rs".to_string(),
target_name: project_name,
bin: true,
relative_path: "src/main.rs".to_string(),
target_name: project_name,
bin: true,
}
} else {
SourceFileInformation {
relative_path: "src/lib.rs".to_string(),
target_name: project_name,
bin: false,
relative_path: "src/lib.rs".to_string(),
target_name: project_name,
bin: false,
}
}
}
@ -258,9 +305,11 @@ fn plan_new_source_file(bin: bool, project_name: String) -> SourceFileInformatio
pub fn new(opts: &NewOptions, config: &Config) -> CargoResult<()> {
let path = config.cwd().join(&opts.path);
if fs::metadata(&path).is_ok() {
bail!("destination `{}` already exists\n\n\
Use `cargo init` to initialize the directory\
", path.display()
bail!(
"destination `{}` already exists\n\n\
Use `cargo init` to initialize the directory\
",
path.display()
)
}
@ -276,8 +325,11 @@ pub fn new(opts: &NewOptions, config: &Config) -> CargoResult<()> {
};
mk(config, &mkopts).chain_err(|| {
format_err!("Failed to create project `{}` at `{}`",
name, path.display())
format_err!(
"Failed to create project `{}` at `{}`",
name,
path.display()
)
})?;
Ok(())
}
@ -333,9 +385,11 @@ pub fn init(opts: &NewOptions, config: &Config) -> CargoResult<()> {
// if none exists, maybe create git, like in `cargo new`
if num_detected_vsces > 1 {
bail!("more than one of .hg, .git, .pijul, .fossil configurations \
found and the ignore file can't be filled in as \
a result. specify --vcs to override detection");
bail!(
"more than one of .hg, .git, .pijul, .fossil configurations \
found and the ignore file can't be filled in as \
a result. specify --vcs to override detection"
);
}
}
@ -343,13 +397,16 @@ pub fn init(opts: &NewOptions, config: &Config) -> CargoResult<()> {
version_control,
path: &path,
name,
        bin: src_paths_types.iter().any(|x| x.bin),
source_files: src_paths_types,
};
mk(config, &mkopts).chain_err(|| {
format_err!("Failed to create project `{}` at `{}`",
name, path.display())
format_err!(
"Failed to create project `{}` at `{}`",
name,
path.display()
)
})?;
Ok(())
}
@ -363,26 +420,30 @@ fn mk(config: &Config, opts: &MkOptions) -> CargoResult<()> {
let name = opts.name;
let cfg = global_config(config)?;
// Please ensure that ignore and hgignore are in sync.
let ignore = ["\n", "/target\n", "**/*.rs.bk\n",
if !opts.bin { "Cargo.lock\n" } else { "" }]
.concat();
let ignore = [
"\n",
"/target\n",
"**/*.rs.bk\n",
if !opts.bin { "Cargo.lock\n" } else { "" },
].concat();
// Mercurial glob ignores can't be rooted, so just sticking a 'syntax: glob' at the top of the
// file will exclude too much. Instead, use regexp-based ignores. See 'hg help ignore' for
// more.
let hgignore = ["\n", "^target/\n", "glob:*.rs.bk\n",
if !opts.bin { "glob:Cargo.lock\n" } else { "" }]
.concat();
let hgignore = [
"\n",
"^target/\n",
"glob:*.rs.bk\n",
if !opts.bin { "glob:Cargo.lock\n" } else { "" },
].concat();
    let vcs = opts.version_control.unwrap_or_else(|| {
        let in_existing_vcs = existing_vcs_repo(path.parent().unwrap_or(path), config.cwd());
        match (cfg.version_control, in_existing_vcs) {
            (None, false) => VersionControl::Git,
            (Some(opt), false) => opt,
            (_, true) => VersionControl::NoVcs,
        }
    });
match vcs {
VersionControl::Git => {
@ -390,38 +451,37 @@ fn mk(config: &Config, opts: &MkOptions) -> CargoResult<()> {
GitRepo::init(path, config.cwd())?;
}
paths::append(&path.join(".gitignore"), ignore.as_bytes())?;
        }
VersionControl::Hg => {
if !fs::metadata(&path.join(".hg")).is_ok() {
HgRepo::init(path, config.cwd())?;
}
paths::append(&path.join(".hgignore"), hgignore.as_bytes())?;
        }
VersionControl::Pijul => {
if !fs::metadata(&path.join(".pijul")).is_ok() {
PijulRepo::init(path, config.cwd())?;
}
paths::append(&path.join(".ignore"), ignore.as_bytes())?;
        }
VersionControl::Fossil => {
if !fs::metadata(&path.join(".fossil")).is_ok() {
FossilRepo::init(path, config.cwd())?;
}
        }
VersionControl::NoVcs => {
fs::create_dir_all(path)?;
        }
};
let (author_name, email) = discover_author()?;
// Hoo boy, sure glad we've got exhaustiveness checking behind us.
let author = match (cfg.name, cfg.email, author_name, email) {
        (Some(name), Some(email), _, _)
        | (Some(name), None, _, Some(email))
        | (None, Some(email), name, _)
        | (None, None, name, Some(email)) => format!("{} <{}>", name, email),
        (Some(name), None, _, None) | (None, None, name, None) => name,
};
let mut cargotoml_path_specifier = String::new();
@ -431,32 +491,46 @@ fn mk(config: &Config, opts: &MkOptions) -> CargoResult<()> {
for i in &opts.source_files {
if i.bin {
if i.relative_path != "src/main.rs" {
cargotoml_path_specifier.push_str(&format!(r#"
cargotoml_path_specifier.push_str(&format!(
r#"
[[bin]]
name = "{}"
path = {}
"#, i.target_name, toml::Value::String(i.relative_path.clone())));
"#,
i.target_name,
toml::Value::String(i.relative_path.clone())
));
}
} else if i.relative_path != "src/lib.rs" {
cargotoml_path_specifier.push_str(&format!(r#"
cargotoml_path_specifier.push_str(&format!(
r#"
[lib]
name = "{}"
path = {}
"#, i.target_name, toml::Value::String(i.relative_path.clone())));
"#,
i.target_name,
toml::Value::String(i.relative_path.clone())
));
}
}
// Create Cargo.toml file with necessary [lib] and [[bin]] sections, if needed
paths::write(&path.join("Cargo.toml"), format!(
r#"[package]
paths::write(
&path.join("Cargo.toml"),
format!(
r#"[package]
name = "{}"
version = "0.1.0"
authors = [{}]
[dependencies]
{}"#, name, toml::Value::String(author), cargotoml_path_specifier).as_bytes())?;
{}"#,
name,
toml::Value::String(author),
cargotoml_path_specifier
).as_bytes(),
)?;
// Create all specified source files
// (with respective parent directories)
@ -469,7 +543,7 @@ authors = [{}]
fs::create_dir_all(src_dir)?;
}
        let default_file_content: &[u8] = if i.bin {
b"\
fn main() {
println!(\"Hello, world!\");
@ -487,53 +561,71 @@ mod tests {
"
};
        if !fs::metadata(&path_of_source_file)
            .map(|x| x.is_file())
            .unwrap_or(false)
        {
paths::write(&path_of_source_file, default_file_content)?;
}
}
if let Err(e) = Workspace::new(&path.join("Cargo.toml"), config) {
let msg = format!("compiling this new crate may not work due to invalid \
workspace configuration\n\n{}", e);
let msg = format!(
"compiling this new crate may not work due to invalid \
workspace configuration\n\n{}",
e
);
config.shell().warn(msg)?;
}
Ok(())
}
fn get_environment_variable(variables: &[&str]) -> Option<String> {
    variables.iter().filter_map(|var| env::var(var).ok()).next()
}
fn discover_author() -> CargoResult<(String, Option<String>)> {
let cwd = env::current_dir()?;
let git_config = if let Ok(repo) = GitRepository::discover(&cwd) {
        repo.config()
            .ok()
            .or_else(|| GitConfig::open_default().ok())
} else {
GitConfig::open_default().ok()
};
let git_config = git_config.as_ref();
let name_variables = ["CARGO_NAME", "GIT_AUTHOR_NAME", "GIT_COMMITTER_NAME",
"USER", "USERNAME", "NAME"];
let name_variables = [
"CARGO_NAME",
"GIT_AUTHOR_NAME",
"GIT_COMMITTER_NAME",
"USER",
"USERNAME",
"NAME",
];
let name = get_environment_variable(&name_variables[0..3])
.or_else(|| git_config.and_then(|g| g.get_string("user.name").ok()))
.or_else(|| get_environment_variable(&name_variables[3..]));
.or_else(|| git_config.and_then(|g| g.get_string("user.name").ok()))
.or_else(|| get_environment_variable(&name_variables[3..]));
let name = match name {
Some(name) => name,
None => {
let username_var = if cfg!(windows) {"USERNAME"} else {"USER"};
bail!("could not determine the current user, please set ${}",
username_var)
let username_var = if cfg!(windows) { "USERNAME" } else { "USER" };
bail!(
"could not determine the current user, please set ${}",
username_var
)
}
};
let email_variables = ["CARGO_EMAIL", "GIT_AUTHOR_EMAIL", "GIT_COMMITTER_EMAIL",
"EMAIL"];
let email_variables = [
"CARGO_EMAIL",
"GIT_AUTHOR_EMAIL",
"GIT_COMMITTER_EMAIL",
"EMAIL",
];
let email = get_environment_variable(&email_variables[0..3])
.or_else(|| git_config.and_then(|g| g.get_string("user.email").ok()))
.or_else(|| get_environment_variable(&email_variables[3..]));
.or_else(|| git_config.and_then(|g| g.get_string("user.email").ok()))
.or_else(|| get_environment_variable(&email_variables[3..]));
let name = name.trim().to_string();
let email = email.map(|s| s.trim().to_string());
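    // Resolution order, as encoded above: the CARGO_/GIT_ variables first,
    // then git's user.name/user.email config, then USER/USERNAME/NAME (or
    // EMAIL) as a last resort.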
@ -552,11 +644,14 @@ fn global_config(config: &Config) -> CargoResult<CargoNewConfig> {
Some(("pijul", _)) => Some(VersionControl::Pijul),
Some(("none", _)) => Some(VersionControl::NoVcs),
Some((s, p)) => {
return Err(internal(format!("invalid configuration for key \
`cargo-new.vcs`, unknown vcs `{}` \
(found in {})", s, p)))
return Err(internal(format!(
"invalid configuration for key \
`cargo-new.vcs`, unknown vcs `{}` \
(found in {})",
s, p
)))
}
None => None
None => None,
};
Ok(CargoNewConfig {
name,

View File

@ -18,11 +18,13 @@ pub struct OutputMetadataOptions {
/// Loads the manifest, resolves the dependencies of the project to the concrete
/// used versions - considering overrides - and writes all dependencies in a JSON
/// format to stdout.
pub fn output_metadata(ws: &Workspace, opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
if opt.version != VERSION {
bail!("metadata version {} not supported, only {} is currently supported",
opt.version, VERSION);
bail!(
"metadata version {} not supported, only {} is currently supported",
opt.version,
VERSION
);
}
if opt.no_deps {
metadata_no_deps(ws, opt)
@ -31,8 +33,7 @@ pub fn output_metadata(ws: &Workspace,
}
}
fn metadata_no_deps(ws: &Workspace, _opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
Ok(ExportInfo {
packages: ws.members().cloned().collect(),
workspace_members: ws.members().map(|pkg| pkg.package_id().clone()).collect(),
@ -43,25 +44,27 @@ fn metadata_no_deps(ws: &Workspace,
})
}
fn metadata_full(ws: &Workspace, opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
let specs = Packages::All.into_package_id_specs(ws)?;
    let deps = ops::resolve_ws_precisely(
        ws,
        None,
        &opt.features,
        opt.all_features,
        opt.no_default_features,
        &specs,
    )?;
let (packages, resolve) = deps;
    let packages = packages
        .package_ids()
        .map(|i| packages.get(i).map(|p| p.clone()))
        .collect::<CargoResult<Vec<_>>>()?;
Ok(ExportInfo {
packages,
workspace_members: ws.members().map(|pkg| pkg.package_id().clone()).collect(),
resolve: Some(MetadataResolve{
resolve: Some(MetadataResolve {
resolve,
root: ws.current_opt().map(|pkg| pkg.package_id().clone()),
}),
@ -86,13 +89,13 @@ pub struct ExportInfo {
/// format for `PackageId`s
#[derive(Serialize)]
struct MetadataResolve {
#[serde(rename = "nodes", serialize_with = "serialize_resolve")]
resolve: Resolve,
#[serde(rename = "nodes", serialize_with = "serialize_resolve")] resolve: Resolve,
root: Option<PackageId>,
}
fn serialize_resolve<S>(resolve: &Resolve, s: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer,
where
S: ser::Serializer,
{
#[derive(Serialize)]
struct Node<'a> {
@ -101,11 +104,13 @@ fn serialize_resolve<S>(resolve: &Resolve, s: S) -> Result<S::Ok, S::Error>
features: Vec<&'a str>,
}
resolve.iter().map(|id| {
Node {
resolve
.iter()
.map(|id| Node {
id,
dependencies: resolve.deps(id).collect(),
features: resolve.features_sorted(id),
}
}).collect::<Vec<_>>().serialize(s)
})
.collect::<Vec<_>>()
.serialize(s)
}
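// For reference, `serialize_with` hands serde a free function shaped like
// `fn(&FieldType, S) -> Result<S::Ok, S::Error>`; a minimal, hypothetical
// example of the same pattern:

#[derive(Serialize)]
struct Timestamped {
    #[serde(serialize_with = "as_secs")]
    elapsed: std::time::Duration,
}

fn as_secs<S>(d: &std::time::Duration, s: S) -> Result<S::Ok, S::Error>
where
    S: ser::Serializer,
{
    s.serialize_u64(d.as_secs())
}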

View File

@ -5,11 +5,11 @@ use std::path::{self, Path};
use std::sync::Arc;
use flate2::read::GzDecoder;
use flate2::{GzBuilder, Compression};
use flate2::{Compression, GzBuilder};
use git2;
use tar::{Archive, Builder, Header, EntryType};
use tar::{Archive, Builder, EntryType, Header};
use core::{Package, Workspace, Source, SourceId};
use core::{Package, Source, SourceId, Workspace};
use sources::PathSource;
use util::{self, internal, Config, FileLock};
use util::paths;
@ -27,15 +27,12 @@ pub struct PackageOpts<'cfg> {
pub registry: Option<String>,
}
pub fn package(ws: &Workspace,
opts: &PackageOpts) -> CargoResult<Option<FileLock>> {
pub fn package(ws: &Workspace, opts: &PackageOpts) -> CargoResult<Option<FileLock>> {
ops::resolve_ws(ws)?;
let pkg = ws.current()?;
let config = ws.config();
let mut src = PathSource::new(pkg.root(),
pkg.package_id().source_id(),
config);
let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), config);
src.update()?;
if opts.check_metadata {
@ -46,9 +43,10 @@ pub fn package(ws: &Workspace,
if opts.list {
let root = pkg.root();
let mut list: Vec<_> = src.list_files(pkg)?.iter().map(|file| {
util::without_prefix(file, root).unwrap().to_path_buf()
}).collect();
let mut list: Vec<_> = src.list_files(pkg)?
.iter()
.map(|file| util::without_prefix(file, root).unwrap().to_path_buf())
.collect();
if include_lockfile(&pkg) {
list.push("Cargo.lock".into());
}
@ -56,7 +54,7 @@ pub fn package(ws: &Workspace,
for file in list.iter() {
println!("{}", file.display());
}
return Ok(None)
return Ok(None);
}
if !opts.allow_dirty {
@ -74,31 +72,28 @@ pub fn package(ws: &Workspace,
// location if it actually passes all our tests. Any previously existing
// tarball can be assumed as corrupt or invalid, so we just blow it away if
// it exists.
config.shell().status("Packaging", pkg.package_id().to_string())?;
config
.shell()
.status("Packaging", pkg.package_id().to_string())?;
dst.file().set_len(0)?;
tar(ws, &src, dst.file(), &filename).chain_err(|| {
format_err!("failed to prepare local package for uploading")
})?;
tar(ws, &src, dst.file(), &filename)
.chain_err(|| format_err!("failed to prepare local package for uploading"))?;
if opts.verify {
dst.seek(SeekFrom::Start(0))?;
run_verify(ws, &dst, opts).chain_err(|| {
"failed to verify package tarball"
})?
run_verify(ws, &dst, opts).chain_err(|| "failed to verify package tarball")?
}
dst.seek(SeekFrom::Start(0))?;
{
let src_path = dst.path();
let dst_path = dst.parent().join(&filename);
fs::rename(&src_path, &dst_path).chain_err(|| {
"failed to move temporary tarball into final location"
})?;
fs::rename(&src_path, &dst_path)
.chain_err(|| "failed to move temporary tarball into final location")?;
}
Ok(Some(dst))
}
fn include_lockfile(pkg: &Package) -> bool {
pkg.manifest().publish_lockfile() &&
pkg.targets().iter().any(|t| t.is_example() || t.is_bin())
pkg.manifest().publish_lockfile() && pkg.targets().iter().any(|t| t.is_example() || t.is_bin())
}
// check that the package has some piece of metadata that a human can
@ -117,7 +112,11 @@ fn check_metadata(pkg: &Package, config: &Config) -> CargoResult<()> {
)*
}}
}
lacking!(description, license || license_file, documentation || homepage || repository);
lacking!(
description,
license || license_file,
documentation || homepage || repository
);
if !missing.is_empty() {
let mut things = missing[..missing.len() - 1].join(", ");
@ -128,10 +127,11 @@ fn check_metadata(pkg: &Package, config: &Config) -> CargoResult<()> {
}
things.push_str(missing.last().unwrap());
config.shell().warn(
&format!("manifest has no {things}.\n\
See http://doc.crates.io/manifest.html#package-metadata for more info.",
things = things))?
config.shell().warn(&format!(
"manifest has no {things}.\n\
See http://doc.crates.io/manifest.html#package-metadata for more info.",
things = things
))?
}
Ok(())
}
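// The `lacking!` macro whose tail is shown above pushes the stringified name
// of each unset field into `missing`; a simplified, free-standing sketch of
// the idea (the real macro also accepts `a || b` groups of alternatives):
macro_rules! lacking_simple {
    ($missing:ident, $metadata:ident: $($field:ident),*) => {{
        $(
            if $metadata.$field.is_none() {
                $missing.push(stringify!($field));
            }
        )*
    }}
}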
@ -140,9 +140,12 @@ fn check_metadata(pkg: &Package, config: &Config) -> CargoResult<()> {
fn verify_dependencies(pkg: &Package) -> CargoResult<()> {
for dep in pkg.dependencies() {
if dep.source_id().is_path() && !dep.specified_req() {
bail!("all path dependencies must have a version specified \
when packaging.\ndependency `{}` does not specify \
a version.", dep.name())
bail!(
"all path dependencies must have a version specified \
when packaging.\ndependency `{}` does not specify \
a version.",
dep.name()
)
}
}
Ok(())
@ -151,14 +154,16 @@ fn verify_dependencies(pkg: &Package) -> CargoResult<()> {
fn check_not_dirty(p: &Package, src: &PathSource) -> CargoResult<()> {
if let Ok(repo) = git2::Repository::discover(p.root()) {
if let Some(workdir) = repo.workdir() {
debug!("found a git repo at {:?}, checking if index present",
workdir);
debug!(
"found a git repo at {:?}, checking if index present",
workdir
);
let path = p.manifest_path();
let path = path.strip_prefix(workdir).unwrap_or(path);
if let Ok(status) = repo.status_file(path) {
if (status & git2::Status::IGNORED).is_empty() {
debug!("Cargo.toml found in repo, checking if dirty");
return git(p, src, &repo)
return git(p, src, &repo);
}
}
}
@ -168,39 +173,45 @@ fn check_not_dirty(p: &Package, src: &PathSource) -> CargoResult<()> {
// have to assume that it's clean.
return Ok(());
fn git(p: &Package,
src: &PathSource,
repo: &git2::Repository) -> CargoResult<()> {
fn git(p: &Package, src: &PathSource, repo: &git2::Repository) -> CargoResult<()> {
let workdir = repo.workdir().unwrap();
let dirty = src.list_files(p)?.iter().filter(|file| {
let relative = file.strip_prefix(workdir).unwrap();
if let Ok(status) = repo.status_file(relative) {
status != git2::Status::CURRENT
} else {
false
}
}).map(|path| {
path.strip_prefix(p.root()).unwrap_or(path).display().to_string()
}).collect::<Vec<_>>();
let dirty = src.list_files(p)?
.iter()
.filter(|file| {
let relative = file.strip_prefix(workdir).unwrap();
if let Ok(status) = repo.status_file(relative) {
status != git2::Status::CURRENT
} else {
false
}
})
.map(|path| {
path.strip_prefix(p.root())
.unwrap_or(path)
.display()
.to_string()
})
.collect::<Vec<_>>();
if dirty.is_empty() {
Ok(())
} else {
bail!("{} files in the working directory contain changes that were \
not yet committed into git:\n\n{}\n\n\
to proceed despite this, pass the `--allow-dirty` flag",
dirty.len(), dirty.join("\n"))
bail!(
"{} files in the working directory contain changes that were \
not yet committed into git:\n\n{}\n\n\
to proceed despite this, pass the `--allow-dirty` flag",
dirty.len(),
dirty.join("\n")
)
}
}
}
fn tar(ws: &Workspace,
src: &PathSource,
dst: &File,
filename: &str) -> CargoResult<()> {
fn tar(ws: &Workspace, src: &PathSource, dst: &File, filename: &str) -> CargoResult<()> {
// Prepare the encoder and its header
let filename = Path::new(filename);
let encoder = GzBuilder::new().filename(util::path2bytes(filename)?)
.write(dst, Compression::best());
let encoder = GzBuilder::new()
.filename(util::path2bytes(filename)?)
.write(dst, Compression::best());
// Put all package files into a compressed archive
let mut ar = Builder::new(encoder);
@ -211,14 +222,18 @@ fn tar(ws: &Workspace,
let relative = util::without_prefix(file, root).unwrap();
check_filename(relative)?;
let relative = relative.to_str().ok_or_else(|| {
format_err!("non-utf8 path in source directory: {}",
relative.display())
format_err!("non-utf8 path in source directory: {}", relative.display())
})?;
config.shell().verbose(|shell| {
shell.status("Archiving", &relative)
})?;
let path = format!("{}-{}{}{}", pkg.name(), pkg.version(),
path::MAIN_SEPARATOR, relative);
config
.shell()
.verbose(|shell| shell.status("Archiving", &relative))?;
let path = format!(
"{}-{}{}{}",
pkg.name(),
pkg.version(),
path::MAIN_SEPARATOR,
relative
);
// The tar::Builder type by default will build GNU archives, but
// unfortunately we force it here to use UStar archives instead. The
@ -239,24 +254,21 @@ fn tar(ws: &Workspace,
// unpack the selectors 0.4.0 crate on crates.io. Either that or take a
// look at rust-lang/cargo#2326
let mut header = Header::new_ustar();
header.set_path(&path).chain_err(|| {
format!("failed to add to archive: `{}`", relative)
})?;
let mut file = File::open(file).chain_err(|| {
format!("failed to open for archiving: `{}`", file.display())
})?;
let metadata = file.metadata().chain_err(|| {
format!("could not learn metadata for: `{}`", relative)
})?;
header
.set_path(&path)
.chain_err(|| format!("failed to add to archive: `{}`", relative))?;
let mut file = File::open(file)
.chain_err(|| format!("failed to open for archiving: `{}`", file.display()))?;
let metadata = file.metadata()
.chain_err(|| format!("could not learn metadata for: `{}`", relative))?;
header.set_metadata(&metadata);
if relative == "Cargo.toml" {
let orig = Path::new(&path).with_file_name("Cargo.toml.orig");
header.set_path(&orig)?;
header.set_cksum();
ar.append(&header, &mut file).chain_err(|| {
internal(format!("could not archive source file `{}`", relative))
})?;
ar.append(&header, &mut file)
.chain_err(|| internal(format!("could not archive source file `{}`", relative)))?;
let mut header = Header::new_ustar();
let toml = pkg.to_registry_toml(ws.config())?;
@ -265,30 +277,31 @@ fn tar(ws: &Workspace,
header.set_mode(0o644);
header.set_size(toml.len() as u64);
header.set_cksum();
ar.append(&header, toml.as_bytes()).chain_err(|| {
internal(format!("could not archive source file `{}`", relative))
})?;
ar.append(&header, toml.as_bytes())
.chain_err(|| internal(format!("could not archive source file `{}`", relative)))?;
} else {
header.set_cksum();
ar.append(&header, &mut file).chain_err(|| {
internal(format!("could not archive source file `{}`", relative))
})?;
ar.append(&header, &mut file)
.chain_err(|| internal(format!("could not archive source file `{}`", relative)))?;
}
}
if include_lockfile(pkg) {
let toml = paths::read(&ws.root().join("Cargo.lock"))?;
let path = format!("{}-{}{}Cargo.lock", pkg.name(), pkg.version(),
path::MAIN_SEPARATOR);
let path = format!(
"{}-{}{}Cargo.lock",
pkg.name(),
pkg.version(),
path::MAIN_SEPARATOR
);
let mut header = Header::new_ustar();
header.set_path(&path)?;
header.set_entry_type(EntryType::file());
header.set_mode(0o644);
header.set_size(toml.len() as u64);
header.set_cksum();
ar.append(&header, toml.as_bytes()).chain_err(|| {
internal("could not archive source file `Cargo.lock`")
})?;
ar.append(&header, toml.as_bytes())
.chain_err(|| internal("could not archive source file `Cargo.lock`"))?;
}
let encoder = ar.into_inner()?;
@ -303,7 +316,8 @@ fn run_verify(ws: &Workspace, tar: &FileLock, opts: &PackageOpts) -> CargoResult
config.shell().status("Verifying", pkg)?;
let f = GzDecoder::new(tar.file());
let dst = tar.parent().join(&format!("{}-{}", pkg.name(), pkg.version()));
let dst = tar.parent()
.join(&format!("{}-{}", pkg.name(), pkg.version()));
if dst.exists() {
paths::remove_dir_all(&dst)?;
}
@ -317,21 +331,28 @@ fn run_verify(ws: &Workspace, tar: &FileLock, opts: &PackageOpts) -> CargoResult
let new_pkg = src.root_package()?;
let ws = Workspace::ephemeral(new_pkg, config, None, true)?;
ops::compile_ws(&ws, None, &ops::CompileOptions {
config,
jobs: opts.jobs,
target: opts.target.clone(),
features: Vec::new(),
no_default_features: false,
all_features: false,
spec: ops::Packages::Packages(Vec::new()),
filter: ops::CompileFilter::Default { required_features_filterable: true },
release: false,
message_format: ops::MessageFormat::Human,
mode: ops::CompileMode::Build,
target_rustdoc_args: None,
target_rustc_args: None,
}, Arc::new(DefaultExecutor))?;
ops::compile_ws(
&ws,
None,
&ops::CompileOptions {
config,
jobs: opts.jobs,
target: opts.target.clone(),
features: Vec::new(),
no_default_features: false,
all_features: false,
spec: ops::Packages::Packages(Vec::new()),
filter: ops::CompileFilter::Default {
required_features_filterable: true,
},
release: false,
message_format: ops::MessageFormat::Human,
mode: ops::CompileMode::Build,
target_rustdoc_args: None,
target_rustc_args: None,
},
Arc::new(DefaultExecutor),
)?;
Ok(())
}
@ -349,15 +370,19 @@ fn check_filename(file: &Path) -> CargoResult<()> {
};
let name = match name.to_str() {
Some(name) => name,
None => {
bail!("path does not have a unicode filename which may not unpack \
on all platforms: {}", file.display())
}
None => bail!(
"path does not have a unicode filename which may not unpack \
on all platforms: {}",
file.display()
),
};
let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*'];
if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) {
bail!("cannot package a filename with a special character `{}`: {}",
c, file.display())
bail!(
"cannot package a filename with a special character `{}`: {}",
c,
file.display()
)
}
Ok(())
}
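// A minimal sketch of the UStar-entry pattern `tar()` relies on above, using
// the same `tar` crate calls the function itself makes:
use std::io;
use tar::{Builder, Header};

fn append_text(ar: &mut Builder<Vec<u8>>, path: &str, body: &str) -> io::Result<()> {
    let mut header = Header::new_ustar();
    header.set_path(path)?;
    header.set_mode(0o644);
    header.set_size(body.len() as u64);
    // The checksum covers the header bytes, so set it after all other fields.
    header.set_cksum();
    ar.append(&header, body.as_bytes())
}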


@ -3,34 +3,49 @@ use std::fs;
use std::io;
use std::path::{Path, PathBuf};
use core::{Package, SourceId, PackageId, EitherManifest};
use core::{EitherManifest, Package, PackageId, SourceId};
use util::{self, Config};
use util::errors::{CargoResult, CargoError};
use util::errors::{CargoError, CargoResult};
use util::important_paths::find_project_manifest_exact;
use util::toml::read_manifest;
pub fn read_package(path: &Path, source_id: &SourceId, config: &Config)
-> CargoResult<(Package, Vec<PathBuf>)> {
trace!("read_package; path={}; source-id={}", path.display(), source_id);
pub fn read_package(
path: &Path,
source_id: &SourceId,
config: &Config,
) -> CargoResult<(Package, Vec<PathBuf>)> {
trace!(
"read_package; path={}; source-id={}",
path.display(),
source_id
);
let (manifest, nested) = read_manifest(path, source_id, config)?;
let manifest = match manifest {
EitherManifest::Real(manifest) => manifest,
EitherManifest::Virtual(..) => {
bail!("found a virtual manifest at `{}` instead of a package \
manifest", path.display())
}
EitherManifest::Virtual(..) => bail!(
"found a virtual manifest at `{}` instead of a package \
manifest",
path.display()
),
};
Ok((Package::new(manifest, path), nested))
}
pub fn read_packages(path: &Path, source_id: &SourceId, config: &Config)
-> CargoResult<Vec<Package>> {
pub fn read_packages(
path: &Path,
source_id: &SourceId,
config: &Config,
) -> CargoResult<Vec<Package>> {
let mut all_packages = HashMap::new();
let mut visited = HashSet::<PathBuf>::new();
let mut errors = Vec::<CargoError>::new();
trace!("looking for root package: {}, source_id={}", path.display(), source_id);
trace!(
"looking for root package: {}, source_id={}",
path.display(),
source_id
);
walk(path, &mut |dir| {
trace!("looking for child package: {}", dir.display());
@ -39,24 +54,31 @@ pub fn read_packages(path: &Path, source_id: &SourceId, config: &Config)
if dir != path {
let name = dir.file_name().and_then(|s| s.to_str());
if name.map(|s| s.starts_with('.')) == Some(true) {
return Ok(false)
return Ok(false);
}
// Don't automatically discover packages across git submodules
if fs::metadata(&dir.join(".git")).is_ok() {
return Ok(false)
return Ok(false);
}
}
// Don't ever look at target directories
if dir.file_name().and_then(|s| s.to_str()) == Some("target") &&
has_manifest(dir.parent().unwrap()) {
return Ok(false)
if dir.file_name().and_then(|s| s.to_str()) == Some("target")
&& has_manifest(dir.parent().unwrap())
{
return Ok(false);
}
if has_manifest(dir) {
read_nested_packages(dir, &mut all_packages, source_id, config,
&mut visited, &mut errors)?;
read_nested_packages(
dir,
&mut all_packages,
source_id,
config,
&mut visited,
&mut errors,
)?;
}
Ok(true)
})?;
@ -64,31 +86,31 @@ pub fn read_packages(path: &Path, source_id: &SourceId, config: &Config)
if all_packages.is_empty() {
match errors.pop() {
Some(err) => Err(err),
None => Err(format_err!("Could not find Cargo.toml in `{}`", path.display())),
None => Err(format_err!(
"Could not find Cargo.toml in `{}`",
path.display()
)),
}
} else {
Ok(all_packages.into_iter().map(|(_, v)| v).collect())
}
}
fn walk(path: &Path, callback: &mut FnMut(&Path) -> CargoResult<bool>)
-> CargoResult<()> {
fn walk(path: &Path, callback: &mut FnMut(&Path) -> CargoResult<bool>) -> CargoResult<()> {
if !callback(path)? {
trace!("not processing {}", path.display());
return Ok(())
return Ok(());
}
// Ignore any permission denied errors because temporary directories
// can often have some weird permissions on them.
let dirs = match fs::read_dir(path) {
Ok(dirs) => dirs,
Err(ref e) if e.kind() == io::ErrorKind::PermissionDenied => {
return Ok(())
}
Err(ref e) if e.kind() == io::ErrorKind::PermissionDenied => return Ok(()),
Err(e) => {
let cx = format!("failed to read directory `{}`", path.display());
let e = CargoError::from(e);
return Err(e.context(cx).into())
return Err(e.context(cx).into());
}
};
for dir in dirs {
@ -104,13 +126,17 @@ fn has_manifest(path: &Path) -> bool {
find_project_manifest_exact(path, "Cargo.toml").is_ok()
}
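// `walk` above is a pre-order traversal in which the callback can prune an
// entire subtree by returning Ok(false); stripped of the cargo error types,
// the shape is roughly:
use std::fs;
use std::io;
use std::path::Path;

fn walk_dirs(path: &Path, callback: &mut FnMut(&Path) -> io::Result<bool>) -> io::Result<()> {
    if !callback(path)? {
        // The callback declined this directory, so skip it and its children.
        return Ok(());
    }
    for entry in fs::read_dir(path)? {
        let entry = entry?;
        if entry.file_type()?.is_dir() {
            walk_dirs(&entry.path(), callback)?;
        }
    }
    Ok(())
}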
fn read_nested_packages(path: &Path,
all_packages: &mut HashMap<PackageId, Package>,
source_id: &SourceId,
config: &Config,
visited: &mut HashSet<PathBuf>,
errors: &mut Vec<CargoError>) -> CargoResult<()> {
if !visited.insert(path.to_path_buf()) { return Ok(()) }
fn read_nested_packages(
path: &Path,
all_packages: &mut HashMap<PackageId, Package>,
source_id: &SourceId,
config: &Config,
visited: &mut HashSet<PathBuf>,
errors: &mut Vec<CargoError>,
) -> CargoResult<()> {
if !visited.insert(path.to_path_buf()) {
return Ok(());
}
let manifest_path = find_project_manifest_exact(path, "Cargo.toml")?;
@ -123,12 +149,14 @@ fn read_nested_packages(path: &Path,
// it's safer to ignore malformed manifests to avoid
//
// TODO: Add a way to exclude folders?
info!("skipping malformed package found at `{}`",
path.to_string_lossy());
info!(
"skipping malformed package found at `{}`",
path.to_string_lossy()
);
errors.push(err);
return Ok(());
}
Ok(tuple) => tuple
Ok(tuple) => tuple,
};
let manifest = match manifest {
@ -138,12 +166,17 @@ fn read_nested_packages(path: &Path,
let pkg = Package::new(manifest, &manifest_path);
let pkg_id = pkg.package_id().clone();
use ::std::collections::hash_map::Entry;
use std::collections::hash_map::Entry;
match all_packages.entry(pkg_id) {
Entry::Vacant(v) => { v.insert(pkg); },
Entry::Vacant(v) => {
v.insert(pkg);
}
Entry::Occupied(_) => {
info!("skipping nested package `{}` found at `{}`",
pkg.name(), path.to_string_lossy());
info!(
"skipping nested package `{}` found at `{}`",
pkg.name(),
path.to_string_lossy()
);
}
}
@ -158,8 +191,7 @@ fn read_nested_packages(path: &Path,
if !source_id.is_registry() {
for p in nested.iter() {
let path = util::normalize_path(&path.join(p));
read_nested_packages(&path, all_packages, source_id,
config, visited, errors)?;
read_nested_packages(&path, all_packages, source_id, config, visited, errors)?;
}
}


@ -4,35 +4,40 @@ use ops::{self, Packages};
use util::{self, CargoResult, ProcessError};
use core::Workspace;
pub fn run(ws: &Workspace,
options: &ops::CompileOptions,
args: &[String]) -> CargoResult<Option<ProcessError>> {
pub fn run(
ws: &Workspace,
options: &ops::CompileOptions,
args: &[String],
) -> CargoResult<Option<ProcessError>> {
let config = ws.config();
let pkg = match options.spec {
Packages::All |
Packages::Default |
Packages::OptOut(_) => unreachable!("cargo run supports single package only"),
Packages::All | Packages::Default | Packages::OptOut(_) => {
unreachable!("cargo run supports single package only")
}
Packages::Packages(ref xs) => match xs.len() {
0 => ws.current()?,
1 => ws.members()
.find(|pkg| &*pkg.name() == xs[0])
.ok_or_else(||
.ok_or_else(|| {
format_err!("package `{}` is not a member of the workspace", xs[0])
)?,
})?,
_ => unreachable!("cargo run supports single package only"),
}
},
};
let bins: Vec<_> = pkg.manifest().targets().iter().filter(|a| {
!a.is_lib() && !a.is_custom_build() && if !options.filter.is_specific() {
a.is_bin()
} else {
options.filter.matches(a)
}
})
.map(|bin| bin.name())
.collect();
let bins: Vec<_> = pkg.manifest()
.targets()
.iter()
.filter(|a| {
!a.is_lib() && !a.is_custom_build() && if !options.filter.is_specific() {
a.is_bin()
} else {
options.filter.matches(a)
}
})
.map(|bin| bin.name())
.collect();
if bins.is_empty() {
if !options.filter.is_specific() {
@ -43,12 +48,17 @@ pub fn run(ws: &Workspace,
}
if bins.len() > 1 {
if !options.filter.is_specific() {
bail!("`cargo run` requires that a project only have one \
executable; use the `--bin` option to specify which one \
to run\navailable binaries: {}", bins.join(", "))
bail!(
"`cargo run` requires that a project only have one \
executable; use the `--bin` option to specify which one \
to run\navailable binaries: {}",
bins.join(", ")
)
} else {
bail!("`cargo run` can run at most one executable, but \
multiple were specified")
bail!(
"`cargo run` can run at most one executable, but \
multiple were specified"
)
}
}
@ -56,8 +66,9 @@ pub fn run(ws: &Workspace,
assert_eq!(compile.binaries.len(), 1);
let exe = &compile.binaries[0];
let exe = match util::without_prefix(exe, config.cwd()) {
Some(path) if path.file_name() == Some(path.as_os_str())
=> Path::new(".").join(path).to_path_buf(),
Some(path) if path.file_name() == Some(path.as_os_str()) => {
Path::new(".").join(path).to_path_buf()
}
Some(path) => path.to_path_buf(),
None => exe.to_path_buf(),
};


@ -1,12 +1,12 @@
use std::collections::{HashMap, HashSet, BTreeSet};
use std::collections::{BTreeSet, HashMap, HashSet};
use std::ffi::OsStr;
use std::path::PathBuf;
use semver::Version;
use lazycell::LazyCell;
use core::{PackageId, Package, Target, TargetKind};
use util::{self, CargoResult, Config, ProcessBuilder, process, join_paths};
use core::{Package, PackageId, Target, TargetKind};
use util::{self, join_paths, process, CargoResult, Config, ProcessBuilder};
/// A structure returning the result of a compilation.
pub struct Compilation<'cfg> {
@ -68,7 +68,7 @@ impl<'cfg> Compilation<'cfg> {
pub fn new(config: &'cfg Config) -> Compilation<'cfg> {
Compilation {
libraries: HashMap::new(),
native_dirs: BTreeSet::new(), // TODO: deprecated, remove
native_dirs: BTreeSet::new(), // TODO: deprecated, remove
root_output: PathBuf::from("/"),
deps_output: PathBuf::from("/"),
host_deps_output: PathBuf::from("/"),
@ -97,8 +97,11 @@ impl<'cfg> Compilation<'cfg> {
}
/// See `process`.
pub fn host_process<T: AsRef<OsStr>>(&self, cmd: T, pkg: &Package)
-> CargoResult<ProcessBuilder> {
pub fn host_process<T: AsRef<OsStr>>(
&self,
cmd: T,
pkg: &Package,
) -> CargoResult<ProcessBuilder> {
self.fill_env(process(cmd), pkg, true)
}
@ -110,8 +113,11 @@ impl<'cfg> Compilation<'cfg> {
}
/// See `process`.
pub fn target_process<T: AsRef<OsStr>>(&self, cmd: T, pkg: &Package)
-> CargoResult<ProcessBuilder> {
pub fn target_process<T: AsRef<OsStr>>(
&self,
cmd: T,
pkg: &Package,
) -> CargoResult<ProcessBuilder> {
let builder = if let Some((ref runner, ref args)) = *self.target_runner()? {
let mut builder = process(runner);
builder.args(args);
@ -128,17 +134,19 @@ impl<'cfg> Compilation<'cfg> {
///
/// The package argument is also used to configure environment variables as
/// well as the working directory of the child process.
fn fill_env(&self, mut cmd: ProcessBuilder, pkg: &Package, is_host: bool)
-> CargoResult<ProcessBuilder> {
fn fill_env(
&self,
mut cmd: ProcessBuilder,
pkg: &Package,
is_host: bool,
) -> CargoResult<ProcessBuilder> {
let mut search_path = if is_host {
let mut search_path = vec![self.host_deps_output.clone()];
search_path.extend(self.host_dylib_path.clone());
search_path
} else {
let mut search_path =
super::filter_dynamic_search_path(self.native_dirs.iter(),
&self.root_output);
super::filter_dynamic_search_path(self.native_dirs.iter(), &self.root_output);
search_path.push(self.root_output.clone());
search_path.push(self.deps_output.clone());
search_path.extend(self.target_dylib_path.clone());
@ -165,16 +173,25 @@ impl<'cfg> Compilation<'cfg> {
// consider adding the corresponding properties to the hash
// in Context::target_metadata()
cmd.env("CARGO_MANIFEST_DIR", pkg.root())
.env("CARGO_PKG_VERSION_MAJOR", &pkg.version().major.to_string())
.env("CARGO_PKG_VERSION_MINOR", &pkg.version().minor.to_string())
.env("CARGO_PKG_VERSION_PATCH", &pkg.version().patch.to_string())
.env("CARGO_PKG_VERSION_PRE", &pre_version_component(pkg.version()))
.env("CARGO_PKG_VERSION", &pkg.version().to_string())
.env("CARGO_PKG_NAME", &*pkg.name())
.env("CARGO_PKG_DESCRIPTION", metadata.description.as_ref().unwrap_or(&String::new()))
.env("CARGO_PKG_HOMEPAGE", metadata.homepage.as_ref().unwrap_or(&String::new()))
.env("CARGO_PKG_AUTHORS", &pkg.authors().join(":"))
.cwd(pkg.root());
.env("CARGO_PKG_VERSION_MAJOR", &pkg.version().major.to_string())
.env("CARGO_PKG_VERSION_MINOR", &pkg.version().minor.to_string())
.env("CARGO_PKG_VERSION_PATCH", &pkg.version().patch.to_string())
.env(
"CARGO_PKG_VERSION_PRE",
&pre_version_component(pkg.version()),
)
.env("CARGO_PKG_VERSION", &pkg.version().to_string())
.env("CARGO_PKG_NAME", &*pkg.name())
.env(
"CARGO_PKG_DESCRIPTION",
metadata.description.as_ref().unwrap_or(&String::new()),
)
.env(
"CARGO_PKG_HOMEPAGE",
metadata.homepage.as_ref().unwrap_or(&String::new()),
)
.env("CARGO_PKG_AUTHORS", &pkg.authors().join(":"))
.cwd(pkg.root());
Ok(cmd)
}
}
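// On the receiving end these are plain process environment variables; any
// build script or binary spawned through `fill_env` can read them with e.g.
//
//     let dir = std::env::var("CARGO_MANIFEST_DIR").expect("set by cargo");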
@ -187,7 +204,9 @@ fn pre_version_component(v: &Version) -> String {
let mut ret = String::new();
for (i, x) in v.pre.iter().enumerate() {
if i != 0 { ret.push('.') };
if i != 0 {
ret.push('.')
};
ret.push_str(&x.to_string());
}
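// Worked example: for `1.0.0-alpha.1` this joins the pre-release identifiers
// back into "alpha.1"; a plain `1.0.0` yields the empty string.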


@ -1,10 +1,10 @@
#![allow(deprecated)]
use std::collections::{HashSet, HashMap, BTreeSet};
use std::collections::{BTreeSet, HashMap, HashSet};
use std::collections::hash_map::Entry;
use std::env;
use std::fmt;
use std::hash::{Hasher, Hash, SipHasher};
use std::hash::{Hash, Hasher, SipHasher};
use std::path::{Path, PathBuf};
use std::str::{self, FromStr};
use std::sync::Arc;
@ -12,18 +12,18 @@ use std::cell::RefCell;
use jobserver::Client;
use core::{Package, PackageId, PackageSet, Resolve, Target, Profile};
use core::{TargetKind, Profiles, Dependency, Workspace};
use core::{Package, PackageId, PackageSet, Profile, Resolve, Target};
use core::{Dependency, Profiles, TargetKind, Workspace};
use core::dependency::Kind as DepKind;
use util::{self, ProcessBuilder, internal, Config, profile, Cfg, CfgExpr};
use util::{self, internal, profile, Cfg, CfgExpr, Config, ProcessBuilder};
use util::errors::{CargoResult, CargoResultExt};
use super::TargetConfig;
use super::custom_build::{BuildState, BuildScripts, BuildDeps};
use super::custom_build::{BuildDeps, BuildScripts, BuildState};
use super::fingerprint::Fingerprint;
use super::layout::Layout;
use super::links::Links;
use super::{Kind, Compilation, BuildConfig};
use super::{BuildConfig, Compilation, Kind};
/// All information needed to define a Unit.
///
@ -126,8 +126,11 @@ impl TargetInfo {
process.arg("--crate-type").arg(crate_type);
let output = process.exec_with_output().chain_err(|| {
format!("failed to run `rustc` to learn about \
crate-type {} information", crate_type)
format!(
"failed to run `rustc` to learn about \
crate-type {} information",
crate_type
)
})?;
let error = str::from_utf8(&output.stderr).unwrap();
@ -140,14 +143,19 @@ impl TargetInfo {
pub struct Metadata(u64);
impl<'a, 'cfg> Context<'a, 'cfg> {
pub fn new(ws: &'a Workspace<'cfg>,
resolve: &'a Resolve,
packages: &'a PackageSet<'cfg>,
config: &'cfg Config,
build_config: BuildConfig,
profiles: &'a Profiles) -> CargoResult<Context<'a, 'cfg>> {
let dest = if build_config.release { "release" } else { "debug" };
pub fn new(
ws: &'a Workspace<'cfg>,
resolve: &'a Resolve,
packages: &'a PackageSet<'cfg>,
config: &'cfg Config,
build_config: BuildConfig,
profiles: &'a Profiles,
) -> CargoResult<Context<'a, 'cfg>> {
let dest = if build_config.release {
"release"
} else {
"debug"
};
let host_layout = Layout::new(ws, None, dest)?;
let target_layout = match build_config.requested_target.as_ref() {
Some(target) => Some(Layout::new(ws, Some(target), dest)?),
@ -169,9 +177,8 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
// is ourself, a running process.
let jobserver = match config.jobserver_from_env() {
Some(c) => c.clone(),
None => Client::new(build_config.jobs as usize - 1).chain_err(|| {
"failed to create jobserver"
})?,
None => Client::new(build_config.jobs as usize - 1)
.chain_err(|| "failed to create jobserver")?,
};
Ok(Context {
@ -208,13 +215,13 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
pub fn prepare(&mut self) -> CargoResult<()> {
let _p = profile::start("preparing layout");
self.host.prepare().chain_err(|| {
internal("couldn't prepare build directories")
})?;
self.host
.prepare()
.chain_err(|| internal("couldn't prepare build directories"))?;
if let Some(ref mut target) = self.target {
target.prepare().chain_err(|| {
internal("couldn't prepare build directories")
})?;
target
.prepare()
.chain_err(|| internal("couldn't prepare build directories"))?;
}
self.compilation.host_deps_output = self.host.deps().to_path_buf();
@ -252,11 +259,12 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
/// for this unit and its dependencies.
///
/// Tracks visited units to avoid unnecessary work.
fn visit_crate_type(&self,
unit: &Unit<'a>,
crate_types: &mut BTreeSet<String>,
visited_units: &mut HashSet<Unit<'a>>)
-> CargoResult<()> {
fn visit_crate_type(
&self,
unit: &Unit<'a>,
crate_types: &mut BTreeSet<String>,
visited_units: &mut HashSet<Unit<'a>>,
) -> CargoResult<()> {
if !visited_units.insert(*unit) {
return Ok(());
}
@ -275,21 +283,26 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
Ok(())
}
fn probe_target_info_kind(&mut self,
crate_types: &BTreeSet<String>,
kind: Kind)
-> CargoResult<()> {
let rustflags = env_args(self.config,
&self.build_config,
self.info(&kind),
kind,
"RUSTFLAGS")?;
fn probe_target_info_kind(
&mut self,
crate_types: &BTreeSet<String>,
kind: Kind,
) -> CargoResult<()> {
let rustflags = env_args(
self.config,
&self.build_config,
self.info(&kind),
kind,
"RUSTFLAGS",
)?;
let mut process = self.config.rustc()?.process();
process.arg("-")
.arg("--crate-name").arg("___")
.arg("--print=file-names")
.args(&rustflags)
.env_remove("RUST_LOG");
process
.arg("-")
.arg("--crate-name")
.arg("___")
.arg("--print=file-names")
.args(&rustflags)
.env_remove("RUST_LOG");
if kind == Kind::Target {
process.arg("--target").arg(&self.target_triple());
@ -306,12 +319,13 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
with_cfg.arg("--print=cfg");
let mut has_cfg_and_sysroot = true;
let output = with_cfg.exec_with_output().or_else(|_| {
has_cfg_and_sysroot = false;
process.exec_with_output()
}).chain_err(|| {
"failed to run `rustc` to learn about target-specific information"
})?;
let output = with_cfg
.exec_with_output()
.or_else(|_| {
has_cfg_and_sysroot = false;
process.exec_with_output()
})
.chain_err(|| "failed to run `rustc` to learn about target-specific information")?;
let error = str::from_utf8(&output.stderr).unwrap();
let output = str::from_utf8(&output.stdout).unwrap();
@ -325,8 +339,10 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
if has_cfg_and_sysroot {
let line = match lines.next() {
Some(line) => line,
None => bail!("output of --print=sysroot missing when learning about \
target-specific information from rustc"),
None => bail!(
"output of --print=sysroot missing when learning about \
target-specific information from rustc"
),
};
let mut rustlib = PathBuf::from(line);
if kind == Kind::Host {
@ -366,32 +382,28 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
///
/// This will recursively walk `units` and all of their dependencies to
/// determine which crate are going to be used in plugins or not.
pub fn build_used_in_plugin_map(&mut self, units: &[Unit<'a>])
-> CargoResult<()> {
pub fn build_used_in_plugin_map(&mut self, units: &[Unit<'a>]) -> CargoResult<()> {
let mut visited = HashSet::new();
for unit in units {
self.walk_used_in_plugin_map(unit,
unit.target.for_host(),
&mut visited)?;
self.walk_used_in_plugin_map(unit, unit.target.for_host(), &mut visited)?;
}
Ok(())
}
fn walk_used_in_plugin_map(&mut self,
unit: &Unit<'a>,
is_plugin: bool,
visited: &mut HashSet<(Unit<'a>, bool)>)
-> CargoResult<()> {
fn walk_used_in_plugin_map(
&mut self,
unit: &Unit<'a>,
is_plugin: bool,
visited: &mut HashSet<(Unit<'a>, bool)>,
) -> CargoResult<()> {
if !visited.insert((*unit, is_plugin)) {
return Ok(())
return Ok(());
}
if is_plugin {
self.used_in_plugin.insert(*unit);
}
for unit in self.dep_targets(unit)? {
self.walk_used_in_plugin_map(&unit,
is_plugin || unit.target.for_host(),
visited)?;
self.walk_used_in_plugin_map(&unit, is_plugin || unit.target.for_host(), visited)?;
}
Ok(())
}
@ -400,7 +412,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
fn layout(&self, kind: Kind) -> &Layout {
match kind {
Kind::Host => &self.host,
Kind::Target => self.target.as_ref().unwrap_or(&self.host)
Kind::Target => self.target.as_ref().unwrap_or(&self.host),
}
}
@ -471,7 +483,8 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
/// Return the target triple which this context is targeting.
pub fn target_triple(&self) -> &str {
self.requested_target().unwrap_or_else(|| self.host_triple())
self.requested_target()
.unwrap_or_else(|| self.host_triple())
}
/// Requested (not actual) target for the build
@ -492,7 +505,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
/// like `target/debug/libfoo.{a,so,rlib}` and such.
pub fn target_metadata(&mut self, unit: &Unit<'a>) -> Option<Metadata> {
if let Some(cache) = self.target_metadatas.get(unit) {
return cache.clone()
return cache.clone();
}
let metadata = self.calc_target_metadata(unit);
@ -522,11 +535,11 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
// just here for rustbuild. We need a more principled method
// doing this eventually.
let __cargo_default_lib_metadata = env::var("__CARGO_DEFAULT_LIB_METADATA");
if !(unit.profile.test || unit.profile.check) &&
(unit.target.is_dylib() || unit.target.is_cdylib() ||
(unit.target.is_bin() && self.target_triple().starts_with("wasm32-"))) &&
unit.pkg.package_id().source_id().is_path() &&
!__cargo_default_lib_metadata.is_ok()
if !(unit.profile.test || unit.profile.check)
&& (unit.target.is_dylib() || unit.target.is_cdylib()
|| (unit.target.is_bin() && self.target_triple().starts_with("wasm32-")))
&& unit.pkg.package_id().source_id().is_path()
&& !__cargo_default_lib_metadata.is_ok()
{
return None;
}
@ -535,7 +548,10 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
// Unique metadata per (name, source, version) triple. This'll allow us
// to pull crates from anywhere w/o worrying about conflicts
unit.pkg.package_id().stable_hash(self.ws.root()).hash(&mut hasher);
unit.pkg
.package_id()
.stable_hash(self.ws.root())
.hash(&mut hasher);
// Add package properties which map to environment variables
// exposed by Cargo
@ -546,13 +562,15 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
// Also mix in enabled features to our metadata. This'll ensure that
// when changing feature sets each lib is separately cached.
self.resolve.features_sorted(unit.pkg.package_id()).hash(&mut hasher);
self.resolve
.features_sorted(unit.pkg.package_id())
.hash(&mut hasher);
// Mix in the target-metadata of all the dependencies of this target
if let Ok(deps) = self.dep_targets(unit) {
let mut deps_metadata = deps.into_iter().map(|dep_unit| {
self.target_metadata(&dep_unit)
}).collect::<Vec<_>>();
let mut deps_metadata = deps.into_iter()
.map(|dep_unit| self.target_metadata(&dep_unit))
.collect::<Vec<_>>();
deps_metadata.sort();
deps_metadata.hash(&mut hasher);
}
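// The hash-everything-relevant code above is the usual stable-fingerprint
// idiom; a minimal sketch, assuming the `Hash`/`Hasher`/`SipHasher` imports
// this module already has (it opts into the deprecated hasher at the top):
fn fingerprint<T: Hash>(value: &T) -> u64 {
    let mut hasher = SipHasher::new_with_keys(0, 0);
    value.hash(&mut hasher);
    hasher.finish()
}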
@ -588,8 +606,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
/// Returns the file stem for a given target/profile combo (with metadata)
pub fn file_stem(&mut self, unit: &Unit<'a>) -> String {
match self.target_metadata(unit) {
Some(ref metadata) => format!("{}-{}", unit.target.crate_name(),
metadata),
Some(ref metadata) => format!("{}-{}", unit.target.crate_name(), metadata),
None => self.bin_stem(unit),
}
}
@ -621,19 +638,21 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
// we don't want to link it up.
if src_dir.ends_with("deps") {
// Don't lift up library dependencies
if self.ws.members().find(|&p| p == unit.pkg).is_none() &&
!unit.target.is_bin() {
if self.ws.members().find(|&p| p == unit.pkg).is_none() && !unit.target.is_bin() {
None
} else {
Some((
src_dir.parent().unwrap().to_owned(),
if unit.profile.test {file_stem} else {bin_stem},
if unit.profile.test {
file_stem
} else {
bin_stem
},
))
}
} else if bin_stem == file_stem {
None
} else if src_dir.ends_with("examples")
|| src_dir.parent().unwrap().ends_with("build") {
} else if src_dir.ends_with("examples") || src_dir.parent().unwrap().ends_with("build") {
Some((src_dir, bin_stem))
} else {
None
@ -646,10 +665,12 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
/// - filename: filename rustc compiles to. (Often has metadata suffix).
/// - link_dst: Optional file to link/copy the result to (without metadata suffix)
/// - linkable: Whether possible to link against file (eg it's a library)
pub fn target_filenames(&mut self, unit: &Unit<'a>)
-> CargoResult<Arc<Vec<(PathBuf, Option<PathBuf>, TargetFileType)>>> {
pub fn target_filenames(
&mut self,
unit: &Unit<'a>,
) -> CargoResult<Arc<Vec<(PathBuf, Option<PathBuf>, TargetFileType)>>> {
if let Some(cache) = self.target_filenames.get(unit) {
return Ok(Arc::clone(cache))
return Ok(Arc::clone(cache));
}
let result = self.calc_target_filenames(unit);
@ -659,8 +680,10 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
result
}
fn calc_target_filenames(&mut self, unit: &Unit<'a>)
-> CargoResult<Arc<Vec<(PathBuf, Option<PathBuf>, TargetFileType)>>> {
fn calc_target_filenames(
&mut self,
unit: &Unit<'a>,
) -> CargoResult<Arc<Vec<(PathBuf, Option<PathBuf>, TargetFileType)>>> {
let out_dir = self.out_dir(unit);
let stem = self.file_stem(unit);
let link_stem = self.link_stem(unit);
@ -675,13 +698,17 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
{
if unit.profile.check {
let filename = out_dir.join(format!("lib{}.rmeta", stem));
let link_dst = link_stem.clone().map(|(ld, ls)| {
ld.join(format!("lib{}.rmeta", ls))
});
let link_dst = link_stem
.clone()
.map(|(ld, ls)| ld.join(format!("lib{}.rmeta", ls)));
ret.push((filename, link_dst, TargetFileType::Linkable));
} else {
let mut add = |crate_type: &str, file_type: TargetFileType| -> CargoResult<()> {
let crate_type = if crate_type == "lib" {"rlib"} else {crate_type};
let crate_type = if crate_type == "lib" {
"rlib"
} else {
crate_type
};
let mut crate_types = info.crate_types.borrow_mut();
let entry = crate_types.entry(crate_type.to_string());
let crate_type_info = match entry {
@ -706,13 +733,19 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
// "-" and "_". should_replace_hyphens is a flag to indicate that
// we need to convert the stem "web-stuff" to "web_stuff", so we
// won't miss "web_stuff.wasm".
let conv = |s: String| if should_replace_hyphens {
s.replace("-", "_")
} else {
s
let conv = |s: String| {
if should_replace_hyphens {
s.replace("-", "_")
} else {
s
}
};
let filename =
out_dir.join(format!("{}{}{}", prefix, conv(stem.clone()), suffix));
let filename = out_dir.join(format!(
"{}{}{}",
prefix,
conv(stem.clone()),
suffix
));
let link_dst = link_stem.clone().map(|(ld, ls)| {
ld.join(format!("{}{}{}", prefix, conv(ls), suffix))
});
@ -729,26 +762,26 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
};
//info!("{:?}", unit);
match *unit.target.kind() {
TargetKind::Bin |
TargetKind::CustomBuild |
TargetKind::ExampleBin |
TargetKind::Bench |
TargetKind::Test => {
TargetKind::Bin
| TargetKind::CustomBuild
| TargetKind::ExampleBin
| TargetKind::Bench
| TargetKind::Test => {
add("bin", TargetFileType::Normal)?;
}
TargetKind::Lib(..) |
TargetKind::ExampleLib(..)
if unit.profile.test => {
TargetKind::Lib(..) | TargetKind::ExampleLib(..) if unit.profile.test => {
add("bin", TargetFileType::Normal)?;
}
TargetKind::ExampleLib(ref kinds) |
TargetKind::Lib(ref kinds) => {
TargetKind::ExampleLib(ref kinds) | TargetKind::Lib(ref kinds) => {
for kind in kinds {
add(kind.crate_type(), if kind.linkable() {
TargetFileType::Linkable
} else {
TargetFileType::Normal
})?;
add(
kind.crate_type(),
if kind.linkable() {
TargetFileType::Linkable
} else {
TargetFileType::Normal
},
)?;
}
}
}
@ -756,13 +789,20 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
}
if ret.is_empty() {
if !unsupported.is_empty() {
bail!("cannot produce {} for `{}` as the target `{}` \
does not support these crate types",
unsupported.join(", "), unit.pkg, self.target_triple())
bail!(
"cannot produce {} for `{}` as the target `{}` \
does not support these crate types",
unsupported.join(", "),
unit.pkg,
self.target_triple()
)
}
bail!("cannot compile `{}` as the target `{}` does not \
support any of the output crate types",
unit.pkg, self.target_triple());
bail!(
"cannot compile `{}` as the target `{}` does not \
support any of the output crate types",
unit.pkg,
self.target_triple()
);
}
info!("Target filenames: {:?}", ret);
@ -773,7 +813,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
/// for that package.
pub fn dep_targets(&self, unit: &Unit<'a>) -> CargoResult<Vec<Unit<'a>>> {
if unit.profile.run_custom_build {
return self.dep_run_custom_build(unit)
return self.dep_run_custom_build(unit);
} else if unit.profile.doc && !unit.profile.test {
return self.doc_deps(unit);
}
@ -781,61 +821,61 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
let id = unit.pkg.package_id();
let deps = self.resolve.deps(id);
let mut ret = deps.filter(|dep| {
unit.pkg.dependencies().iter().filter(|d| {
d.name() == dep.name() && d.version_req().matches(dep.version())
}).any(|d| {
// If this target is a build command, then we only want build
// dependencies, otherwise we want everything *other than* build
// dependencies.
if unit.target.is_custom_build() != d.is_build() {
return false
}
unit.pkg
.dependencies()
.iter()
.filter(|d| d.name() == dep.name() && d.version_req().matches(dep.version()))
.any(|d| {
// If this target is a build command, then we only want build
// dependencies, otherwise we want everything *other than* build
// dependencies.
if unit.target.is_custom_build() != d.is_build() {
return false;
}
// If this dependency is *not* a transitive dependency, then it
// only applies to test/example targets
if !d.is_transitive() && !unit.target.is_test() &&
!unit.target.is_example() && !unit.profile.test {
return false
}
// If this dependency is *not* a transitive dependency, then it
// only applies to test/example targets
if !d.is_transitive() && !unit.target.is_test() && !unit.target.is_example()
&& !unit.profile.test
{
return false;
}
// If this dependency is only available for certain platforms,
// make sure we're only enabling it for that platform.
if !self.dep_platform_activated(d, unit.kind) {
return false
}
// If this dependency is only available for certain platforms,
// make sure we're only enabling it for that platform.
if !self.dep_platform_activated(d, unit.kind) {
return false;
}
// If the dependency is optional, then we're only activating it
// if the corresponding feature was activated
if d.is_optional() && !self.resolve.features(id).contains(&*d.name()) {
return false;
}
// If the dependency is optional, then we're only activating it
// if the corresponding feature was activated
if d.is_optional() && !self.resolve.features(id).contains(&*d.name()) {
return false;
}
// If we've gotten past all that, then this dependency is
// actually used!
true
// If we've gotten past all that, then this dependency is
// actually used!
true
})
}).filter_map(|id| match self.get_package(id) {
Ok(pkg) => pkg.targets().iter().find(|t| t.is_lib()).map(|t| {
let unit = Unit {
pkg,
target: t,
profile: self.lib_or_check_profile(unit, t),
kind: unit.kind.for_target(t),
};
Ok(unit)
}),
Err(e) => Some(Err(e)),
})
}).filter_map(|id| {
match self.get_package(id) {
Ok(pkg) => {
pkg.targets().iter().find(|t| t.is_lib()).map(|t| {
let unit = Unit {
pkg,
target: t,
profile: self.lib_or_check_profile(unit, t),
kind: unit.kind.for_target(t),
};
Ok(unit)
})
}
Err(e) => Some(Err(e))
}
}).collect::<CargoResult<Vec<_>>>()?;
.collect::<CargoResult<Vec<_>>>()?;
// If this target is a build script, then what we've collected so far is
// all we need. If this isn't a build script, then it depends on the
// build script if there is one.
if unit.target.is_custom_build() {
return Ok(ret)
return Ok(ret);
}
ret.extend(self.dep_build_script(unit));
@ -844,29 +884,32 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
// didn't include `pkg` in the return values, so we need to special case
// it here and see if we need to push `(pkg, pkg_lib_target)`.
if unit.target.is_lib() && !unit.profile.doc {
return Ok(ret)
return Ok(ret);
}
ret.extend(self.maybe_lib(unit));
// Integration tests/benchmarks require binaries to be built
if unit.profile.test &&
(unit.target.is_test() || unit.target.is_bench()) {
ret.extend(unit.pkg.targets().iter().filter(|t| {
let no_required_features = Vec::new();
if unit.profile.test && (unit.target.is_test() || unit.target.is_bench()) {
ret.extend(
unit.pkg
.targets()
.iter()
.filter(|t| {
let no_required_features = Vec::new();
t.is_bin() &&
t.is_bin() &&
// Skip binaries with required features that have not been selected.
t.required_features().unwrap_or(&no_required_features).iter().all(|f| {
self.resolve.features(id).contains(f)
})
}).map(|t| {
Unit {
pkg: unit.pkg,
target: t,
profile: self.lib_or_check_profile(unit, t),
kind: unit.kind.for_target(t),
}
}));
})
.map(|t| Unit {
pkg: unit.pkg,
target: t,
profile: self.lib_or_check_profile(unit, t),
kind: unit.kind.for_target(t),
}),
);
}
Ok(ret)
}
@ -875,14 +918,13 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
///
/// The `unit` provided must represent an execution of a build script, and
/// the returned set of units must all be run before `unit` is run.
pub fn dep_run_custom_build(&self, unit: &Unit<'a>)
-> CargoResult<Vec<Unit<'a>>> {
pub fn dep_run_custom_build(&self, unit: &Unit<'a>) -> CargoResult<Vec<Unit<'a>>> {
// If this build script's execution has been overridden then we don't
// actually depend on anything, we've reached the end of the dependency
// chain as we've got all the info we're gonna get.
let key = (unit.pkg.package_id().clone(), unit.kind);
if self.build_script_overridden.contains(&key) {
return Ok(Vec::new())
return Ok(Vec::new());
}
// When not overridden, then the dependencies to run a build script are:
@ -890,42 +932,47 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
// 1. Compiling the build script itself
// 2. For each immediate dependency of our package which has a `links`
// key, the execution of that build script.
let not_custom_build = unit.pkg.targets().iter().find(|t| {
!t.is_custom_build()
}).unwrap();
let not_custom_build = unit.pkg
.targets()
.iter()
.find(|t| !t.is_custom_build())
.unwrap();
let tmp = Unit {
target: not_custom_build,
profile: &self.profiles.dev,
..*unit
};
let deps = self.dep_targets(&tmp)?;
Ok(deps.iter().filter_map(|unit| {
if !unit.target.linkable() || unit.pkg.manifest().links().is_none() {
return None
}
self.dep_build_script(unit)
}).chain(Some(Unit {
profile: self.build_script_profile(unit.pkg.package_id()),
kind: Kind::Host, // build scripts always compiled for the host
..*unit
})).collect())
Ok(deps.iter()
.filter_map(|unit| {
if !unit.target.linkable() || unit.pkg.manifest().links().is_none() {
return None;
}
self.dep_build_script(unit)
})
.chain(Some(Unit {
profile: self.build_script_profile(unit.pkg.package_id()),
kind: Kind::Host, // build scripts always compiled for the host
..*unit
}))
.collect())
}
/// Returns the dependencies necessary to document a package
fn doc_deps(&self, unit: &Unit<'a>) -> CargoResult<Vec<Unit<'a>>> {
let deps = self.resolve.deps(unit.pkg.package_id()).filter(|dep| {
unit.pkg.dependencies().iter().filter(|d| {
d.name() == dep.name()
}).any(|dep| {
match dep.kind() {
DepKind::Normal => self.dep_platform_activated(dep,
unit.kind),
_ => false,
}
let deps = self.resolve
.deps(unit.pkg.package_id())
.filter(|dep| {
unit.pkg
.dependencies()
.iter()
.filter(|d| d.name() == dep.name())
.any(|dep| match dep.kind() {
DepKind::Normal => self.dep_platform_activated(dep, unit.kind),
_ => false,
})
})
}).map(|dep| {
self.get_package(dep)
});
.map(|dep| self.get_package(dep));
// To document a library, we depend on dependencies actually being
// built. If we're documenting *all* libraries, then we also depend on
@ -971,25 +1018,29 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
/// of work is still returned. `None` is only returned if the package has no
/// build script.
fn dep_build_script(&self, unit: &Unit<'a>) -> Option<Unit<'a>> {
unit.pkg.targets().iter().find(|t| t.is_custom_build()).map(|t| {
Unit {
unit.pkg
.targets()
.iter()
.find(|t| t.is_custom_build())
.map(|t| Unit {
pkg: unit.pkg,
target: t,
profile: &self.profiles.custom_build,
kind: unit.kind,
}
})
})
}
fn maybe_lib(&self, unit: &Unit<'a>) -> Option<Unit<'a>> {
unit.pkg.targets().iter().find(|t| t.linkable()).map(|t| {
Unit {
unit.pkg
.targets()
.iter()
.find(|t| t.linkable())
.map(|t| Unit {
pkg: unit.pkg,
target: t,
profile: self.lib_or_check_profile(unit, t),
kind: unit.kind.for_target(t),
}
})
})
}
fn dep_platform_activated(&self, dep: &Dependency, kind: Kind) -> bool {
@ -1039,7 +1090,9 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
}
/// Number of jobs specified for this build
pub fn jobs(&self) -> u32 { self.build_config.jobs }
pub fn jobs(&self) -> u32 {
self.build_config.jobs
}
pub fn lib_profile(&self) -> &'a Profile {
let (normal, test) = if self.build_config.release {
@ -1056,8 +1109,9 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
pub fn lib_or_check_profile(&self, unit: &Unit, target: &Target) -> &'a Profile {
if !target.is_custom_build() && !target.for_host()
&& (unit.profile.check || (unit.profile.doc && !unit.profile.test)) {
return &self.profiles.check
&& (unit.profile.check || (unit.profile.doc && !unit.profile.test))
{
return &self.profiles.check;
}
self.lib_profile()
}
@ -1098,7 +1152,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
};
if !incremental {
return Ok(Vec::new())
return Ok(Vec::new());
}
// Only enable incremental compilation for sources the user can
@ -1108,22 +1162,31 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
//
// (see also https://github.com/rust-lang/cargo/issues/3972)
if !unit.pkg.package_id().source_id().is_path() {
return Ok(Vec::new())
return Ok(Vec::new());
}
let dir = self.layout(unit.kind).incremental().display();
Ok(vec![
"-C".to_string(),
format!("incremental={}", dir),
])
Ok(vec!["-C".to_string(), format!("incremental={}", dir)])
}
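// i.e. for a local path dependency this returns literally
// ["-C", "incremental=<target-dir>/incremental"], and an empty Vec otherwise.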
pub fn rustflags_args(&self, unit: &Unit) -> CargoResult<Vec<String>> {
env_args(self.config, &self.build_config, self.info(&unit.kind), unit.kind, "RUSTFLAGS")
env_args(
self.config,
&self.build_config,
self.info(&unit.kind),
unit.kind,
"RUSTFLAGS",
)
}
pub fn rustdocflags_args(&self, unit: &Unit) -> CargoResult<Vec<String>> {
env_args(self.config, &self.build_config, self.info(&unit.kind), unit.kind, "RUSTDOCFLAGS")
env_args(
self.config,
&self.build_config,
self.info(&unit.kind),
unit.kind,
"RUSTDOCFLAGS",
)
}
pub fn show_warnings(&self, pkg: &PackageId) -> bool {
@ -1155,11 +1218,13 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
///
/// Note that if a `target` is specified, no args will be passed to host code (plugins, build
/// scripts, ...), even if it is the same as the target.
fn env_args(config: &Config,
build_config: &BuildConfig,
target_info: &TargetInfo,
kind: Kind,
name: &str) -> CargoResult<Vec<String>> {
fn env_args(
config: &Config,
build_config: &BuildConfig,
target_info: &TargetInfo,
kind: Kind,
name: &str,
) -> CargoResult<Vec<String>> {
// We *want* to apply RUSTFLAGS only to builds for the
// requested target architecture, and not to things like build
// scripts and plugins, which may be for an entirely different
@ -1200,9 +1265,14 @@ fn env_args(config: &Config,
let mut rustflags = Vec::new();
let name = name.chars().flat_map(|c| c.to_lowercase()).collect::<String>();
let name = name.chars()
.flat_map(|c| c.to_lowercase())
.collect::<String>();
// Then the target.*.rustflags value...
let target = build_config.requested_target.as_ref().unwrap_or(&build_config.host_triple);
let target = build_config
.requested_target
.as_ref()
.unwrap_or(&build_config.host_triple);
let key = format!("target.{}.{}", target, name);
if let Some(args) = config.get_list_or_split_string(&key)? {
let args = args.val.into_iter();
@ -1214,9 +1284,13 @@ fn env_args(config: &Config,
let cfgs = table.val.keys().filter_map(|t| {
if t.starts_with("cfg(") && t.ends_with(')') {
let cfg = &t[4..t.len() - 1];
CfgExpr::from_str(cfg)
.ok()
.and_then(|c| if c.matches(target_cfg) { Some(t) } else { None })
CfgExpr::from_str(cfg).ok().and_then(|c| {
if c.matches(target_cfg) {
Some(t)
} else {
None
}
})
} else {
None
}
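// Taken together, `env_args` resolves flags in a fixed order: a non-empty
// RUSTFLAGS / RUSTDOCFLAGS environment variable wins outright; otherwise
// `target.<triple>.rustflags`, then every `target.'cfg(...)'.rustflags`
// entry whose expression matches the --print=cfg output (as above), and
// finally the global `build.rustflags` key are consulted.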
@ -1275,24 +1349,28 @@ fn parse_crate_type(
lines: &mut str::Lines,
) -> CargoResult<Option<(String, String)>> {
let not_supported = error.lines().any(|line| {
(line.contains("unsupported crate type") ||
line.contains("unknown crate type")) &&
line.contains(crate_type)
(line.contains("unsupported crate type") || line.contains("unknown crate type"))
&& line.contains(crate_type)
});
if not_supported {
return Ok(None);
}
let line = match lines.next() {
Some(line) => line,
None => bail!("malformed output when learning about \
crate-type {} information", crate_type),
None => bail!(
"malformed output when learning about \
crate-type {} information",
crate_type
),
};
let mut parts = line.trim().split("___");
let prefix = parts.next().unwrap();
let suffix = match parts.next() {
Some(part) => part,
None => bail!("output of --print=file-names has changed in \
the compiler, cannot parse"),
None => bail!(
"output of --print=file-names has changed in \
the compiler, cannot parse"
),
};
Ok(Some((prefix.to_string(), suffix.to_string())))
@ -1313,16 +1391,14 @@ fn add_target_specific_suffixes(
let mut ret = vec![(suffix.to_string(), file_type, false)];
// rust-lang/cargo#4500
if target_triple.ends_with("pc-windows-msvc") && crate_type.ends_with("dylib") &&
suffix == ".dll"
if target_triple.ends_with("pc-windows-msvc") && crate_type.ends_with("dylib")
&& suffix == ".dll"
{
ret.push((".dll.lib".to_string(), TargetFileType::Normal, false));
}
// rust-lang/cargo#4535
if target_triple.starts_with("wasm32-") && crate_type == "bin" &&
suffix == ".js"
{
if target_triple.starts_with("wasm32-") && crate_type == "bin" && suffix == ".js" {
ret.push((".wasm".to_string(), TargetFileType::Normal, true));
}


@ -1,17 +1,17 @@
use std::collections::{HashMap, BTreeSet, HashSet};
use std::collections::{BTreeSet, HashMap, HashSet};
use std::fs;
use std::path::{PathBuf, Path};
use std::path::{Path, PathBuf};
use std::str;
use std::sync::{Mutex, Arc};
use std::sync::{Arc, Mutex};
use core::PackageId;
use util::{Freshness, Cfg};
use util::{Cfg, Freshness};
use util::errors::{CargoResult, CargoResultExt};
use util::{self, internal, profile, paths};
use util::{self, internal, paths, profile};
use util::machine_message;
use super::job::Work;
use super::{fingerprint, Kind, Context, Unit};
use super::{fingerprint, Context, Kind, Unit};
/// Contains the parsed output of a custom build script.
#[derive(Clone, Debug, Hash)]
@ -75,10 +75,15 @@ pub struct BuildDeps {
/// prepare work for. If the requirement is specified as both the target and the
/// host platforms it is assumed that the two are equal and the build script is
/// only run once (not twice).
pub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
-> CargoResult<(Work, Work, Freshness)> {
let _p = profile::start(format!("build script prepare: {}/{}",
unit.pkg, unit.target.name()));
pub fn prepare<'a, 'cfg>(
cx: &mut Context<'a, 'cfg>,
unit: &Unit<'a>,
) -> CargoResult<(Work, Work, Freshness)> {
let _p = profile::start(format!(
"build script prepare: {}/{}",
unit.pkg,
unit.target.name()
));
let key = (unit.pkg.package_id().clone(), unit.kind);
let overridden = cx.build_script_overridden.contains(&key);
@ -90,18 +95,17 @@ pub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
// Now that we've prep'd our work, build the work needed to manage the
// fingerprint and then start returning that upwards.
let (freshness, dirty, fresh) =
fingerprint::prepare_build_cmd(cx, unit)?;
let (freshness, dirty, fresh) = fingerprint::prepare_build_cmd(cx, unit)?;
Ok((work_dirty.then(dirty), work_fresh.then(fresh), freshness))
}
fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
-> CargoResult<(Work, Work)> {
fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<(Work, Work)> {
let dependencies = cx.dep_run_custom_build(unit)?;
let build_script_unit = dependencies.iter().find(|d| {
!d.profile.run_custom_build && d.target.is_custom_build()
}).expect("running a script not depending on an actual script");
let build_script_unit = dependencies
.iter()
.find(|d| !d.profile.run_custom_build && d.target.is_custom_build())
.expect("running a script not depending on an actual script");
let script_output = cx.build_script_dir(build_script_unit);
let build_output = cx.build_script_out_dir(unit);
@ -116,19 +120,29 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
let to_exec = to_exec.into_os_string();
let mut cmd = cx.compilation.host_process(to_exec, unit.pkg)?;
cmd.env("OUT_DIR", &build_output)
.env("CARGO_MANIFEST_DIR", unit.pkg.root())
.env("NUM_JOBS", &cx.jobs().to_string())
.env("TARGET", &match unit.kind {
Kind::Host => cx.host_triple(),
Kind::Target => cx.target_triple(),
})
.env("DEBUG", &profile.debuginfo.is_some().to_string())
.env("OPT_LEVEL", &profile.opt_level)
.env("PROFILE", if cx.build_config.release { "release" } else { "debug" })
.env("HOST", cx.host_triple())
.env("RUSTC", &cx.config.rustc()?.path)
.env("RUSTDOC", &*cx.config.rustdoc()?)
.inherit_jobserver(&cx.jobserver);
.env("CARGO_MANIFEST_DIR", unit.pkg.root())
.env("NUM_JOBS", &cx.jobs().to_string())
.env(
"TARGET",
&match unit.kind {
Kind::Host => cx.host_triple(),
Kind::Target => cx.target_triple(),
},
)
.env("DEBUG", &profile.debuginfo.is_some().to_string())
.env("OPT_LEVEL", &profile.opt_level)
.env(
"PROFILE",
if cx.build_config.release {
"release"
} else {
"debug"
},
)
.env("HOST", cx.host_triple())
.env("RUSTC", &cx.config.rustc()?.path)
.env("RUSTDOC", &*cx.config.rustdoc()?)
.inherit_jobserver(&cx.jobserver);
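// Sketch, not part of this commit: the receiving end of the environment
// prepared above, as a build script would read it. The variable names are
// the ones set by the calls above; the function itself is illustrative.
fn _build_script_view() {
    let _out_dir = std::env::var("OUT_DIR").unwrap();
    let _target = std::env::var("TARGET").unwrap();
    let _profile = std::env::var("PROFILE").unwrap(); // "release" or "debug"
    let _num_jobs = std::env::var("NUM_JOBS").unwrap();
}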
if let Some(links) = unit.pkg.manifest().links() {
cmd.env("CARGO_MANIFEST_LINKS", links);
@ -143,9 +157,13 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
let mut cfg_map = HashMap::new();
for cfg in cx.cfg(unit.kind) {
match *cfg {
Cfg::Name(ref n) => { cfg_map.insert(n.clone(), None); }
Cfg::Name(ref n) => {
cfg_map.insert(n.clone(), None);
}
Cfg::KeyPair(ref k, ref v) => {
if let Some(ref mut values) = *cfg_map.entry(k.clone()).or_insert_with(||Some(Vec::new())) {
if let Some(ref mut values) =
*cfg_map.entry(k.clone()).or_insert_with(|| Some(Vec::new()))
{
values.push(v.clone())
}
}
@ -154,8 +172,12 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
for (k, v) in cfg_map {
let k = format!("CARGO_CFG_{}", super::envify(&k));
match v {
Some(list) => { cmd.env(&k, list.join(",")); }
None => { cmd.env(&k, ""); }
Some(list) => {
cmd.env(&k, list.join(","));
}
None => {
cmd.env(&k, "");
}
}
}
@ -165,14 +187,19 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
// This information will be used at build-time later on to figure out which
// sorts of variables need to be discovered at that time.
let lib_deps = {
dependencies.iter().filter_map(|unit| {
if unit.profile.run_custom_build {
Some((unit.pkg.manifest().links().unwrap().to_string(),
unit.pkg.package_id().clone()))
} else {
None
}
}).collect::<Vec<_>>()
dependencies
.iter()
.filter_map(|unit| {
if unit.profile.run_custom_build {
Some((
unit.pkg.manifest().links().unwrap().to_string(),
unit.pkg.package_id().clone(),
))
} else {
None
}
})
.collect::<Vec<_>>()
};
let pkg_name = unit.pkg.to_string();
let build_state = Arc::clone(&cx.build_state);
@ -185,8 +212,13 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
(output_file, err_file, root_output_file)
};
let root_output = cx.target_root().to_path_buf();
let all = (id.clone(), pkg_name.clone(), Arc::clone(&build_state),
output_file.clone(), root_output.clone());
let all = (
id.clone(),
pkg_name.clone(),
Arc::clone(&build_state),
output_file.clone(),
root_output.clone(),
);
let build_scripts = super::load_build_deps(cx, unit);
let kind = unit.kind;
let json_messages = cx.build_config.json_messages;
@ -196,12 +228,8 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
let prev_root_output = paths::read_bytes(&root_output_file)
.and_then(|bytes| util::bytes2path(&bytes))
.unwrap_or_else(|_| cmd.get_cwd().unwrap().to_path_buf());
let prev_output = BuildOutput::parse_file(
&output_file,
&pkg_name,
&prev_root_output,
&root_output,
).ok();
let prev_output =
BuildOutput::parse_file(&output_file, &pkg_name, &prev_root_output, &root_output).ok();
let deps = BuildDeps::new(&output_file, prev_output.as_ref());
cx.build_explicit_deps.insert(*unit, deps);
@ -220,8 +248,10 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
// otherwise create it!
if fs::metadata(&build_output).is_err() {
fs::create_dir(&build_output).chain_err(|| {
internal("failed to create script output directory for \
build command")
internal(
"failed to create script output directory for \
build command",
)
})?;
}
@ -234,35 +264,45 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
for (name, id) in lib_deps {
let key = (id.clone(), kind);
let state = build_state.get(&key).ok_or_else(|| {
internal(format!("failed to locate build state for env \
vars: {}/{:?}", id, kind))
internal(format!(
"failed to locate build state for env \
vars: {}/{:?}",
id, kind
))
})?;
let data = &state.metadata;
for &(ref key, ref value) in data.iter() {
cmd.env(&format!("DEP_{}_{}", super::envify(&name),
super::envify(key)), value);
cmd.env(
&format!("DEP_{}_{}", super::envify(&name), super::envify(key)),
value,
);
}
}
if let Some(build_scripts) = build_scripts {
super::add_plugin_deps(&mut cmd, &build_state,
&build_scripts,
&root_output)?;
super::add_plugin_deps(&mut cmd, &build_state, &build_scripts, &root_output)?;
}
}
// And now finally, run the build command itself!
state.running(&cmd);
let output = cmd.exec_with_streaming(
&mut |out_line| { state.stdout(out_line); Ok(()) },
&mut |err_line| { state.stderr(err_line); Ok(()) },
&mut |out_line| {
state.stdout(out_line);
Ok(())
},
&mut |err_line| {
state.stderr(err_line);
Ok(())
},
true,
).map_err(|e| {
format_err!("failed to run custom build command for `{}`\n{}",
pkg_name, e)
format_err!(
"failed to run custom build command for `{}`\n{}",
pkg_name,
e
)
})?;
// After the build command has finished running, we need to be sure to
// remember all of its output so we can later discover precisely what it
// was, even if we don't run the build command again (due to freshness).
@ -273,17 +313,15 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
paths::write(&output_file, &output.stdout)?;
paths::write(&err_file, &output.stderr)?;
paths::write(&root_output_file, util::path2bytes(&root_output)?)?;
let parsed_output = BuildOutput::parse(
&output.stdout,
&pkg_name,
&root_output,
&root_output,
)?;
let parsed_output =
BuildOutput::parse(&output.stdout, &pkg_name, &root_output, &root_output)?;
if json_messages {
let library_paths = parsed_output.library_paths.iter().map(|l| {
l.display().to_string()
}).collect::<Vec<_>>();
let library_paths = parsed_output
.library_paths
.iter()
.map(|l| l.display().to_string())
.collect::<Vec<_>>();
machine_message::emit(&machine_message::BuildScript {
package_id: &id,
linked_libs: &parsed_output.library_links,
@ -305,12 +343,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
let output = match prev_output {
Some(output) => output,
None => {
BuildOutput::parse_file(
&output_file,
&pkg_name,
&prev_root_output,
&root_output,
)?
BuildOutput::parse_file(&output_file, &pkg_name, &prev_root_output, &root_output)?
}
};
build_state.insert(id, kind, output);
@ -340,20 +373,24 @@ impl BuildState {
}
impl BuildOutput {
pub fn parse_file(path: &Path,
pkg_name: &str,
root_output_when_generated: &Path,
root_output: &Path) -> CargoResult<BuildOutput> {
pub fn parse_file(
path: &Path,
pkg_name: &str,
root_output_when_generated: &Path,
root_output: &Path,
) -> CargoResult<BuildOutput> {
let contents = paths::read_bytes(path)?;
BuildOutput::parse(&contents, pkg_name, root_output_when_generated, root_output)
}
// Parses the output of a script.
// The `pkg_name` is used for error messages.
pub fn parse(input: &[u8],
pkg_name: &str,
root_output_when_generated: &Path,
root_output: &Path) -> CargoResult<BuildOutput> {
pub fn parse(
input: &[u8],
pkg_name: &str,
root_output_when_generated: &Path,
root_output: &Path,
) -> CargoResult<BuildOutput> {
let mut library_paths = Vec::new();
let mut library_links = Vec::new();
let mut cfgs = Vec::new();
@ -376,7 +413,7 @@ impl BuildOutput {
}
let data = match iter.next() {
Some(val) => val,
None => continue
None => continue,
};
// getting the `key=value` part of the line
@ -389,17 +426,14 @@ impl BuildOutput {
_ => bail!("Wrong output in {}: `{}`", whence, line),
};
let path = |val: &str| {
match Path::new(val).strip_prefix(root_output_when_generated) {
Ok(path) => root_output.join(path),
Err(_) => PathBuf::from(val),
}
let path = |val: &str| match Path::new(val).strip_prefix(root_output_when_generated) {
Ok(path) => root_output.join(path),
Err(_) => PathBuf::from(val),
};
match key {
"rustc-flags" => {
let (paths, links) =
BuildOutput::parse_rustc_flags(value, &whence)?;
let (paths, links) = BuildOutput::parse_rustc_flags(value, &whence)?;
library_links.extend(links.into_iter());
library_paths.extend(paths.into_iter());
}
@ -426,35 +460,43 @@ impl BuildOutput {
})
}
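// Sketch, not part of this commit: the stdout a build script feeds into this
// parser, one `cargo:key=value` directive per line, for example:
//
//     cargo:rustc-link-search=native=/opt/foo/lib
//     cargo:rustc-link-lib=static=foo
//     cargo:rustc-cfg=has_foo
//     cargo:rustc-env=FOO_VERSION=1.2.3
//     cargo:rerun-if-changed=build.rs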
pub fn parse_rustc_flags(value: &str, whence: &str)
-> CargoResult<(Vec<PathBuf>, Vec<String>)> {
pub fn parse_rustc_flags(
value: &str,
whence: &str,
) -> CargoResult<(Vec<PathBuf>, Vec<String>)> {
let value = value.trim();
let mut flags_iter = value.split(|c: char| c.is_whitespace())
.filter(|w| w.chars().any(|c| !c.is_whitespace()));
let mut flags_iter = value
.split(|c: char| c.is_whitespace())
.filter(|w| w.chars().any(|c| !c.is_whitespace()));
let (mut library_paths, mut library_links) = (Vec::new(), Vec::new());
while let Some(flag) = flags_iter.next() {
if flag != "-l" && flag != "-L" {
bail!("Only `-l` and `-L` flags are allowed in {}: `{}`",
whence, value)
bail!(
"Only `-l` and `-L` flags are allowed in {}: `{}`",
whence,
value
)
}
let value = match flags_iter.next() {
Some(v) => v,
None => bail!("Flag in rustc-flags has no value in {}: `{}`",
whence, value)
None => bail!(
"Flag in rustc-flags has no value in {}: `{}`",
whence,
value
),
};
match flag {
"-l" => library_links.push(value.to_string()),
"-L" => library_paths.push(PathBuf::from(value)),
// was already checked above
_ => bail!("only -l and -L flags are allowed")
_ => bail!("only -l and -L flags are allowed"),
};
}
Ok((library_paths, library_links))
}
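// Usage sketch, not part of this commit, with values as a build script's
// `cargo:rustc-flags=-L /opt/foo/lib -l foo` directive would supply them:
//
//     let (paths, links) =
//         BuildOutput::parse_rustc_flags("-L /opt/foo/lib -l foo", "a build script")?;
//     // paths == [PathBuf::from("/opt/foo/lib")], links == ["foo"]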
pub fn parse_rustc_env(value: &str, whence: &str)
-> CargoResult<(String, String)> {
pub fn parse_rustc_env(value: &str, whence: &str) -> CargoResult<(String, String)> {
let mut iter = value.splitn(2, '=');
let name = iter.next();
let val = iter.next();
@ -469,12 +511,14 @@ impl BuildDeps {
pub fn new(output_file: &Path, output: Option<&BuildOutput>) -> BuildDeps {
BuildDeps {
build_script_output: output_file.to_path_buf(),
rerun_if_changed: output.map(|p| &p.rerun_if_changed)
.cloned()
.unwrap_or_default(),
rerun_if_env_changed: output.map(|p| &p.rerun_if_env_changed)
.cloned()
.unwrap_or_default(),
rerun_if_changed: output
.map(|p| &p.rerun_if_changed)
.cloned()
.unwrap_or_default(),
rerun_if_env_changed: output
.map(|p| &p.rerun_if_env_changed)
.cloned()
.unwrap_or_default(),
}
}
}
@ -488,32 +532,33 @@ impl BuildDeps {
///
/// The set of targets given to this function is the initial set of
/// targets/profiles which are being built.
pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,
units: &[Unit<'b>])
-> CargoResult<()> {
pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> CargoResult<()> {
let mut ret = HashMap::new();
for unit in units {
build(&mut ret, cx, unit)?;
}
cx.build_scripts.extend(ret.into_iter().map(|(k, v)| {
(k, Arc::new(v))
}));
cx.build_scripts
.extend(ret.into_iter().map(|(k, v)| (k, Arc::new(v))));
return Ok(());
// Recursive function to build up the map we're constructing. This function
// memoizes all of its return values as it goes along.
fn build<'a, 'b, 'cfg>(out: &'a mut HashMap<Unit<'b>, BuildScripts>,
cx: &mut Context<'b, 'cfg>,
unit: &Unit<'b>)
-> CargoResult<&'a BuildScripts> {
fn build<'a, 'b, 'cfg>(
out: &'a mut HashMap<Unit<'b>, BuildScripts>,
cx: &mut Context<'b, 'cfg>,
unit: &Unit<'b>,
) -> CargoResult<&'a BuildScripts> {
// Do a quick pre-flight check to see if we've already calculated the
// set of dependencies.
if out.contains_key(unit) {
return Ok(&out[unit])
return Ok(&out[unit]);
}
{
let key = unit.pkg.manifest().links().map(|l| (l.to_string(), unit.kind));
let key = unit.pkg
.manifest()
.links()
.map(|l| (l.to_string(), unit.kind));
let build_state = &cx.build_state;
if let Some(output) = key.and_then(|k| build_state.overrides.get(&k)) {
let key = (unit.pkg.package_id().clone(), unit.kind);
@ -543,8 +588,8 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,
let dep_scripts = build(out, cx, unit)?;
if unit.target.for_host() {
ret.plugins.extend(dep_scripts.to_link.iter()
.map(|p| &p.0).cloned());
ret.plugins
.extend(dep_scripts.to_link.iter().map(|p| &p.0).cloned());
} else if unit.target.linkable() {
for &(ref pkg, kind) in dep_scripts.to_link.iter() {
add_to_link(&mut ret, pkg, kind);
@ -11,12 +11,12 @@ use serde_json;
use core::{Epoch, Package, TargetKind};
use util;
use util::{Fresh, Dirty, Freshness, internal, profile};
use util::{internal, profile, Dirty, Fresh, Freshness};
use util::errors::{CargoResult, CargoResultExt};
use util::paths;
use super::job::Work;
use super::context::{Context, Unit, TargetFileType};
use super::context::{Context, TargetFileType, Unit};
use super::custom_build::BuildDeps;
/// A tuple result of the `prepare_foo` functions in this module.
@ -47,10 +47,15 @@ pub type Preparation = (Freshness, Work, Work);
/// This function will calculate the fingerprint for a target and prepare the
/// work necessary to either write the fingerprint or copy over all fresh files
/// from the old directories to their new locations.
pub fn prepare_target<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
unit: &Unit<'a>) -> CargoResult<Preparation> {
let _p = profile::start(format!("fingerprint: {} / {}",
unit.pkg.package_id(), unit.target.name()));
pub fn prepare_target<'a, 'cfg>(
cx: &mut Context<'a, 'cfg>,
unit: &Unit<'a>,
) -> CargoResult<Preparation> {
let _p = profile::start(format!(
"fingerprint: {} / {}",
unit.pkg.package_id(),
unit.target.name()
));
let new = cx.fingerprint_dir(unit);
let loc = new.join(&filename(cx, unit));
@ -73,9 +78,9 @@ pub fn prepare_target<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
if compare.is_err() {
let source_id = unit.pkg.package_id().source_id();
let sources = cx.packages.sources();
let source = sources.get(source_id).ok_or_else(|| {
internal("missing package source")
})?;
let source = sources
.get(source_id)
.ok_or_else(|| internal("missing package source"))?;
source.verify(unit.pkg.package_id())?;
}
@ -83,7 +88,8 @@ pub fn prepare_target<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
let mut missing_outputs = false;
if unit.profile.doc {
missing_outputs = !root.join(unit.target.crate_name())
.join("index.html").exists();
.join("index.html")
.exists();
} else {
for &(ref src, ref link_dst, file_type) in cx.target_filenames(unit)?.iter() {
if file_type == TargetFileType::DebugInfo {
@ -102,13 +108,17 @@ pub fn prepare_target<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
match fingerprint.update_local(&target_root) {
Ok(()) => {}
Err(..) if allow_failure => return Ok(()),
Err(e) => return Err(e)
Err(e) => return Err(e),
}
write_fingerprint(&loc, &*fingerprint)
});
let fresh = compare.is_ok() && !missing_outputs;
Ok((if fresh {Fresh} else {Dirty}, write_fingerprint, Work::noop()))
Ok((
if fresh { Fresh } else { Dirty },
write_fingerprint,
Work::noop(),
))
}
/// A fingerprint can be considered to be a "short string" representing the
@ -142,39 +152,46 @@ pub struct Fingerprint {
#[serde(serialize_with = "serialize_deps", deserialize_with = "deserialize_deps")]
deps: Vec<(String, Arc<Fingerprint>)>,
local: Vec<LocalFingerprint>,
#[serde(skip_serializing, skip_deserializing)]
memoized_hash: Mutex<Option<u64>>,
#[serde(skip_serializing, skip_deserializing)] memoized_hash: Mutex<Option<u64>>,
rustflags: Vec<String>,
epoch: Epoch,
}
fn serialize_deps<S>(deps: &[(String, Arc<Fingerprint>)], ser: S)
-> Result<S::Ok, S::Error>
where S: ser::Serializer,
fn serialize_deps<S>(deps: &[(String, Arc<Fingerprint>)], ser: S) -> Result<S::Ok, S::Error>
where
S: ser::Serializer,
{
deps.iter().map(|&(ref a, ref b)| {
(a, b.hash())
}).collect::<Vec<_>>().serialize(ser)
deps.iter()
.map(|&(ref a, ref b)| (a, b.hash()))
.collect::<Vec<_>>()
.serialize(ser)
}
fn deserialize_deps<'de, D>(d: D) -> Result<Vec<(String, Arc<Fingerprint>)>, D::Error>
where D: de::Deserializer<'de>,
where
D: de::Deserializer<'de>,
{
let decoded = <Vec<(String, u64)>>::deserialize(d)?;
Ok(decoded.into_iter().map(|(name, hash)| {
(name, Arc::new(Fingerprint {
rustc: 0,
target: 0,
profile: 0,
path: 0,
local: vec![LocalFingerprint::Precalculated(String::new())],
features: String::new(),
deps: Vec::new(),
memoized_hash: Mutex::new(Some(hash)),
epoch: Epoch::Epoch2015,
rustflags: Vec::new(),
}))
}).collect())
Ok(decoded
.into_iter()
.map(|(name, hash)| {
(
name,
Arc::new(Fingerprint {
rustc: 0,
target: 0,
profile: 0,
path: 0,
local: vec![LocalFingerprint::Precalculated(String::new())],
features: String::new(),
deps: Vec::new(),
memoized_hash: Mutex::new(Some(hash)),
epoch: Epoch::Epoch2015,
rustflags: Vec::new(),
}),
)
})
.collect())
}
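// Sketch, not part of this commit: a dependency round-trips as its
// `(name, hash)` pair alone, so the stub rebuilt above carries nothing but a
// pre-seeded memoized hash. The lookup pattern it relies on, in isolation:
fn _memoized_or_computed<F: Fn() -> u64>(
    slot: &std::sync::Mutex<Option<u64>>,
    compute: F,
) -> u64 {
    if let Some(hash) = *slot.lock().unwrap() {
        return hash; // stub fingerprints always take this path
    }
    let hash = compute();
    *slot.lock().unwrap() = Some(hash);
    hash
}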
#[derive(Serialize, Deserialize, Hash)]
@ -185,9 +202,7 @@ enum LocalFingerprint {
}
impl LocalFingerprint {
fn mtime(root: &Path, mtime: Option<FileTime>, path: &Path)
-> LocalFingerprint
{
fn mtime(root: &Path, mtime: Option<FileTime>, path: &Path) -> LocalFingerprint {
let mtime = MtimeSlot(Mutex::new(mtime));
assert!(path.is_absolute());
let path = path.strip_prefix(root).unwrap_or(path);
@ -205,14 +220,11 @@ impl Fingerprint {
LocalFingerprint::MtimeBased(ref slot, ref path) => {
let path = root.join(path);
let meta = fs::metadata(&path)
.chain_err(|| {
internal(format!("failed to stat `{}`", path.display()))
})?;
.chain_err(|| internal(format!("failed to stat `{}`", path.display())))?;
let mtime = FileTime::from_last_modification_time(&meta);
*slot.0.lock().unwrap() = Some(mtime);
}
LocalFingerprint::EnvBased(..) |
LocalFingerprint::Precalculated(..) => continue,
LocalFingerprint::EnvBased(..) | LocalFingerprint::Precalculated(..) => continue,
}
hash_busted = true;
}
@ -225,7 +237,7 @@ impl Fingerprint {
fn hash(&self) -> u64 {
if let Some(s) = *self.memoized_hash.lock().unwrap() {
return s
return s;
}
let ret = util::hash_u64(self);
*self.memoized_hash.lock().unwrap() = Some(ret);
@ -237,7 +249,11 @@ impl Fingerprint {
bail!("rust compiler has changed")
}
if self.features != old.features {
bail!("features have changed: {} != {}", self.features, old.features)
bail!(
"features have changed: {} != {}",
self.features,
old.features
)
}
if self.target != old.target {
bail!("target configuration has changed")
@ -259,15 +275,18 @@ impl Fingerprint {
}
for (new, old) in self.local.iter().zip(&old.local) {
match (new, old) {
(&LocalFingerprint::Precalculated(ref a),
&LocalFingerprint::Precalculated(ref b)) => {
(
&LocalFingerprint::Precalculated(ref a),
&LocalFingerprint::Precalculated(ref b),
) => {
if a != b {
bail!("precalculated components have changed: {} != {}",
a, b)
bail!("precalculated components have changed: {} != {}", a, b)
}
}
(&LocalFingerprint::MtimeBased(ref on_disk_mtime, ref ap),
&LocalFingerprint::MtimeBased(ref previously_built_mtime, ref bp)) => {
(
&LocalFingerprint::MtimeBased(ref on_disk_mtime, ref ap),
&LocalFingerprint::MtimeBased(ref previously_built_mtime, ref bp),
) => {
let on_disk_mtime = on_disk_mtime.0.lock().unwrap();
let previously_built_mtime = previously_built_mtime.0.lock().unwrap();
@ -278,19 +297,30 @@ impl Fingerprint {
};
if should_rebuild {
bail!("mtime based components have changed: previously {:?} now {:?}, \
paths are {:?} and {:?}",
*previously_built_mtime, *on_disk_mtime, ap, bp)
bail!(
"mtime based components have changed: previously {:?} now {:?}, \
paths are {:?} and {:?}",
*previously_built_mtime,
*on_disk_mtime,
ap,
bp
)
}
}
(&LocalFingerprint::EnvBased(ref akey, ref avalue),
&LocalFingerprint::EnvBased(ref bkey, ref bvalue)) => {
(
&LocalFingerprint::EnvBased(ref akey, ref avalue),
&LocalFingerprint::EnvBased(ref bkey, ref bvalue),
) => {
if *akey != *bkey {
bail!("env vars changed: {} != {}", akey, bkey);
}
if *avalue != *bvalue {
bail!("env var `{}` changed: previously {:?} now {:?}",
akey, bvalue, avalue)
bail!(
"env var `{}` changed: previously {:?} now {:?}",
akey,
bvalue,
avalue
)
}
}
_ => bail!("local fingerprint type has changed"),
@ -323,7 +353,16 @@ impl hash::Hash for Fingerprint {
ref rustflags,
..
} = *self;
(rustc, features, target, path, profile, local, epoch, rustflags).hash(h);
(
rustc,
features,
target,
path,
profile,
local,
epoch,
rustflags,
).hash(h);
h.write_usize(deps.len());
for &(ref name, ref fingerprint) in deps {
@ -342,17 +381,21 @@ impl hash::Hash for MtimeSlot {
impl ser::Serialize for MtimeSlot {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer,
where
S: ser::Serializer,
{
self.0.lock().unwrap().map(|ft| {
(ft.seconds_relative_to_1970(), ft.nanoseconds())
}).serialize(s)
self.0
.lock()
.unwrap()
.map(|ft| (ft.seconds_relative_to_1970(), ft.nanoseconds()))
.serialize(s)
}
}
impl<'de> de::Deserialize<'de> for MtimeSlot {
fn deserialize<D>(d: D) -> Result<MtimeSlot, D::Error>
where D: de::Deserializer<'de>,
where
D: de::Deserializer<'de>,
{
let kind: Option<(u64, u32)> = de::Deserialize::deserialize(d)?;
Ok(MtimeSlot(Mutex::new(kind.map(|(s, n)| {
@ -373,10 +416,12 @@ impl<'de> de::Deserialize<'de> for MtimeSlot {
///
/// Information like file modification time is only calculated for path
/// dependencies and is calculated in `calculate_target_fresh`.
fn calculate<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
-> CargoResult<Arc<Fingerprint>> {
fn calculate<'a, 'cfg>(
cx: &mut Context<'a, 'cfg>,
unit: &Unit<'a>,
) -> CargoResult<Arc<Fingerprint>> {
if let Some(s) = cx.fingerprints.get(unit) {
return Ok(Arc::clone(s))
return Ok(Arc::clone(s));
}
// Next, recursively calculate the fingerprint for all of our dependencies.
@ -387,13 +432,12 @@ fn calculate<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
// induce a recompile, they're just dependencies in the sense that they need
// to be built.
let deps = cx.dep_targets(unit)?;
let deps = deps.iter().filter(|u| {
!u.target.is_custom_build() && !u.target.is_bin()
}).map(|unit| {
calculate(cx, unit).map(|fingerprint| {
(unit.pkg.package_id().to_string(), fingerprint)
let deps = deps.iter()
.filter(|u| !u.target.is_custom_build() && !u.target.is_bin())
.map(|unit| {
calculate(cx, unit).map(|fingerprint| (unit.pkg.package_id().to_string(), fingerprint))
})
}).collect::<CargoResult<Vec<_>>>()?;
.collect::<CargoResult<Vec<_>>>()?;
// And finally, calculate what our own local fingerprint is
let local = if use_dep_info(unit) {
@ -429,7 +473,6 @@ fn calculate<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
Ok(fingerprint)
}
// We want to use the mtime for files if we're a path source, but if we're a
// git/registry source, then the mtime of files may fluctuate, but they won't
// change so long as the source itself remains constant (which is the
@ -456,10 +499,11 @@ fn use_dep_info(unit: &Unit) -> bool {
///
/// The currently implemented solution is option (1), although it is planned to
/// migrate to option (2) in the near future.
pub fn prepare_build_cmd<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
-> CargoResult<Preparation> {
let _p = profile::start(format!("fingerprint build cmd: {}",
unit.pkg.package_id()));
pub fn prepare_build_cmd<'a, 'cfg>(
cx: &mut Context<'a, 'cfg>,
unit: &Unit<'a>,
) -> CargoResult<Preparation> {
let _p = profile::start(format!("fingerprint build cmd: {}", unit.pkg.package_id()));
let new = cx.fingerprint_dir(unit);
let loc = new.join("build");
@ -499,8 +543,7 @@ pub fn prepare_build_cmd<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
if let Some(output_path) = output_path {
let outputs = state.outputs.lock().unwrap();
let outputs = &outputs[&key];
if !outputs.rerun_if_changed.is_empty() ||
!outputs.rerun_if_env_changed.is_empty() {
if !outputs.rerun_if_changed.is_empty() || !outputs.rerun_if_env_changed.is_empty() {
let deps = BuildDeps::new(&output_path, Some(outputs));
fingerprint.local = local_fingerprints_deps(&deps, &target_root, &pkg_root);
fingerprint.update_local(&target_root)?;
@ -509,13 +552,17 @@ pub fn prepare_build_cmd<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
write_fingerprint(&loc, &fingerprint)
});
Ok((if compare.is_ok() {Fresh} else {Dirty}, write_fingerprint, Work::noop()))
Ok((
if compare.is_ok() { Fresh } else { Dirty },
write_fingerprint,
Work::noop(),
))
}
fn build_script_local_fingerprints<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
unit: &Unit<'a>)
-> CargoResult<(Vec<LocalFingerprint>, Option<PathBuf>)>
{
fn build_script_local_fingerprints<'a, 'cfg>(
cx: &mut Context<'a, 'cfg>,
unit: &Unit<'a>,
) -> CargoResult<(Vec<LocalFingerprint>, Option<PathBuf>)> {
let state = cx.build_state.outputs.lock().unwrap();
// First up, if this build script is entirely overridden, then we just
// return the hash of what we overrode it with.
@ -524,9 +571,11 @@ fn build_script_local_fingerprints<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
// fingerprint afterwards because this is all just overridden.
if let Some(output) = state.get(&(unit.pkg.package_id().clone(), unit.kind)) {
debug!("override local fingerprints deps");
let s = format!("overridden build state with hash: {}",
util::hash_u64(output));
return Ok((vec![LocalFingerprint::Precalculated(s)], None))
let s = format!(
"overridden build state with hash: {}",
util::hash_u64(output)
);
return Ok((vec![LocalFingerprint::Precalculated(s)], None));
}
// Next up we look at the previously listed dependencies for the build
@ -540,18 +589,23 @@ fn build_script_local_fingerprints<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
if deps.rerun_if_changed.is_empty() && deps.rerun_if_env_changed.is_empty() {
debug!("old local fingerprints deps");
let s = pkg_fingerprint(cx, unit.pkg)?;
return Ok((vec![LocalFingerprint::Precalculated(s)], Some(output)))
return Ok((vec![LocalFingerprint::Precalculated(s)], Some(output)));
}
// Ok so now we're in "new mode" where we can have files listed as
// dependencies as well as env vars listed as dependencies. Process them all
// here.
Ok((local_fingerprints_deps(deps, cx.target_root(), unit.pkg.root()), Some(output)))
Ok((
local_fingerprints_deps(deps, cx.target_root(), unit.pkg.root()),
Some(output),
))
}
fn local_fingerprints_deps(deps: &BuildDeps, target_root: &Path, pkg_root: &Path)
-> Vec<LocalFingerprint>
{
fn local_fingerprints_deps(
deps: &BuildDeps,
target_root: &Path,
pkg_root: &Path,
) -> Vec<LocalFingerprint> {
debug!("new local fingerprints deps");
let mut local = Vec::new();
if !deps.rerun_if_changed.is_empty() {
@ -573,8 +627,10 @@ fn write_fingerprint(loc: &Path, fingerprint: &Fingerprint) -> CargoResult<()> {
let hash = fingerprint.hash();
debug!("write fingerprint: {}", loc.display());
paths::write(loc, util::to_hex(hash).as_bytes())?;
paths::write(&loc.with_extension("json"),
&serde_json::to_vec(&fingerprint).unwrap())?;
paths::write(
&loc.with_extension("json"),
&serde_json::to_vec(&fingerprint).unwrap(),
)?;
Ok(())
}
@ -590,16 +646,16 @@ pub fn prepare_init<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> Ca
}
pub fn dep_info_loc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> PathBuf {
cx.fingerprint_dir(unit).join(&format!("dep-{}", filename(cx, unit)))
cx.fingerprint_dir(unit)
.join(&format!("dep-{}", filename(cx, unit)))
}
fn compare_old_fingerprint(loc: &Path, new_fingerprint: &Fingerprint)
-> CargoResult<()> {
fn compare_old_fingerprint(loc: &Path, new_fingerprint: &Fingerprint) -> CargoResult<()> {
let old_fingerprint_short = paths::read(loc)?;
let new_hash = new_fingerprint.hash();
if util::to_hex(new_hash) == old_fingerprint_short {
return Ok(())
return Ok(());
}
let old_fingerprint_json = paths::read(&loc.with_extension("json"))?;
@ -621,9 +677,7 @@ fn log_compare(unit: &Unit, compare: &CargoResult<()>) {
}
// Parse the dep-info into a list of paths
pub fn parse_dep_info(pkg: &Package, dep_info: &Path)
-> CargoResult<Option<Vec<PathBuf>>>
{
pub fn parse_dep_info(pkg: &Package, dep_info: &Path) -> CargoResult<Option<Vec<PathBuf>>> {
let data = match paths::read_bytes(dep_info) {
Ok(data) => data,
Err(_) => return Ok(None),
@ -639,9 +693,7 @@ pub fn parse_dep_info(pkg: &Package, dep_info: &Path)
}
}
fn dep_info_mtime_if_fresh(pkg: &Package, dep_info: &Path)
-> CargoResult<Option<FileTime>>
{
fn dep_info_mtime_if_fresh(pkg: &Package, dep_info: &Path) -> CargoResult<Option<FileTime>> {
if let Some(paths) = parse_dep_info(pkg, dep_info)? {
Ok(mtime_if_fresh(dep_info, paths.iter()))
} else {
@ -653,15 +705,16 @@ fn pkg_fingerprint(cx: &Context, pkg: &Package) -> CargoResult<String> {
let source_id = pkg.package_id().source_id();
let sources = cx.packages.sources();
let source = sources.get(source_id).ok_or_else(|| {
internal("missing package source")
})?;
let source = sources
.get(source_id)
.ok_or_else(|| internal("missing package source"))?;
source.fingerprint(pkg)
}
fn mtime_if_fresh<I>(output: &Path, paths: I) -> Option<FileTime>
where I: IntoIterator,
I::Item: AsRef<Path>,
where
I: IntoIterator,
I::Item: AsRef<Path>,
{
let meta = match fs::metadata(output) {
Ok(meta) => meta,
@ -675,7 +728,7 @@ fn mtime_if_fresh<I>(output: &Path, paths: I) -> Option<FileTime>
Ok(meta) => meta,
Err(..) => {
info!("stale: {} -- missing", path.display());
return true
return true;
}
};
let mtime2 = FileTime::from_last_modification_time(&meta);
@ -703,8 +756,7 @@ fn filename<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> String {
TargetKind::Lib(..) => "lib",
TargetKind::Bin => "bin",
TargetKind::Test => "integration-test",
TargetKind::ExampleBin |
TargetKind::ExampleLib(..) => "example",
TargetKind::ExampleBin | TargetKind::ExampleLib(..) => "example",
TargetKind::Bench => "bench",
TargetKind::CustomBuild => "build-script",
};
@ -734,14 +786,17 @@ fn filename<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> String {
///
/// The serialized Cargo format will contain a list of files, all of which are
/// relative if they're under `root`, or absolute if they're elsewhere.
pub fn translate_dep_info(rustc_dep_info: &Path,
cargo_dep_info: &Path,
pkg_root: &Path,
rustc_cwd: &Path) -> CargoResult<()> {
pub fn translate_dep_info(
rustc_dep_info: &Path,
cargo_dep_info: &Path,
pkg_root: &Path,
rustc_cwd: &Path,
) -> CargoResult<()> {
let target = parse_rustc_dep_info(rustc_dep_info)?;
let deps = &target.get(0).ok_or_else(|| {
internal("malformed dep-info format, no targets".to_string())
})?.1;
let deps = &target
.get(0)
.ok_or_else(|| internal("malformed dep-info format, no targets".to_string()))?
.1;
let mut new_contents = Vec::new();
for file in deps {
@ -754,11 +809,10 @@ pub fn translate_dep_info(rustc_dep_info: &Path,
Ok(())
}
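// Sketch, not part of this commit: the Makefile-style dep-info rustc emits,
// which the translation above rewrites and `parse_rustc_dep_info` below
// splits on `": "` into (target, dependency-list) pairs.
const _SAMPLE_RUSTC_DEP_INFO: &str =
    "target/debug/deps/demo-1a2b3c.rmeta: src/lib.rs src/util.rs\n";
// => [("target/debug/deps/demo-1a2b3c.rmeta", ["src/lib.rs", "src/util.rs"])]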
pub fn parse_rustc_dep_info(rustc_dep_info: &Path)
-> CargoResult<Vec<(String, Vec<String>)>>
{
pub fn parse_rustc_dep_info(rustc_dep_info: &Path) -> CargoResult<Vec<(String, Vec<String>)>> {
let contents = paths::read(rustc_dep_info)?;
contents.lines()
contents
.lines()
.filter_map(|l| l.find(": ").map(|i| (l, i)))
.map(|(line, pos)| {
let target = &line[..pos];
@ -1,14 +1,17 @@
use std::fmt;
use util::{CargoResult, Fresh, Dirty, Freshness};
use util::{CargoResult, Dirty, Fresh, Freshness};
use super::job_queue::JobState;
pub struct Job { dirty: Work, fresh: Work }
pub struct Job {
dirty: Work,
fresh: Work,
}
/// Each proc should send its description before starting.
/// It should either send once or close immediately.
pub struct Work {
inner: Box<for <'a, 'b> FnBox<&'a JobState<'b>, CargoResult<()>> + Send>,
inner: Box<for<'a, 'b> FnBox<&'a JobState<'b>, CargoResult<()>> + Send>,
}
trait FnBox<A, R> {
@ -23,7 +26,8 @@ impl<A, R, F: FnOnce(A) -> R> FnBox<A, R> for F {
impl Work {
pub fn new<F>(f: F) -> Work
where F: FnOnce(&JobState) -> CargoResult<()> + Send + 'static
where
F: FnOnce(&JobState) -> CargoResult<()> + Send + 'static,
{
Work { inner: Box::new(f) }
}
@ -3,15 +3,15 @@ use std::collections::hash_map::HashMap;
use std::fmt;
use std::io;
use std::mem;
use std::sync::mpsc::{channel, Sender, Receiver};
use std::sync::mpsc::{channel, Receiver, Sender};
use crossbeam::{self, Scope};
use jobserver::{Acquired, HelperThread};
use core::{PackageId, Target, Profile};
use util::{Config, DependencyQueue, Fresh, Dirty, Freshness};
use util::{CargoResult, ProcessBuilder, profile, internal, CargoResultExt};
use {handle_error};
use core::{PackageId, Profile, Target};
use util::{Config, DependencyQueue, Dirty, Fresh, Freshness};
use util::{internal, profile, CargoResult, CargoResultExt, ProcessBuilder};
use handle_error;
use super::{Context, Kind, Unit};
use super::job::Job;
@ -92,14 +92,18 @@ impl<'a> JobQueue<'a> {
}
}
pub fn enqueue<'cfg>(&mut self,
cx: &Context<'a, 'cfg>,
unit: &Unit<'a>,
job: Job,
fresh: Freshness) -> CargoResult<()> {
pub fn enqueue<'cfg>(
&mut self,
cx: &Context<'a, 'cfg>,
unit: &Unit<'a>,
job: Job,
fresh: Freshness,
) -> CargoResult<()> {
let key = Key::new(unit);
let deps = key.dependencies(cx)?;
self.queue.queue(Fresh, key, Vec::new(), &deps).push((job, fresh));
self.queue
.queue(Fresh, key, Vec::new(), &deps)
.push((job, fresh));
*self.counts.entry(key.pkg).or_insert(0) += 1;
Ok(())
}
@ -127,25 +131,23 @@ impl<'a> JobQueue<'a> {
// As a result, this `transmute` to a longer lifetime should be safe in
// practice.
let tx = self.tx.clone();
let tx = unsafe {
mem::transmute::<Sender<Message<'a>>, Sender<Message<'static>>>(tx)
};
let helper = cx.jobserver.clone().into_helper_thread(move |token| {
drop(tx.send(Message::Token(token)));
}).chain_err(|| {
"failed to create helper thread for jobserver management"
})?;
let tx = unsafe { mem::transmute::<Sender<Message<'a>>, Sender<Message<'static>>>(tx) };
let helper = cx.jobserver
.clone()
.into_helper_thread(move |token| {
drop(tx.send(Message::Token(token)));
})
.chain_err(|| "failed to create helper thread for jobserver management")?;
crossbeam::scope(|scope| {
self.drain_the_queue(cx, scope, &helper)
})
crossbeam::scope(|scope| self.drain_the_queue(cx, scope, &helper))
}
fn drain_the_queue(&mut self,
cx: &mut Context,
scope: &Scope<'a>,
jobserver_helper: &HelperThread)
-> CargoResult<()> {
fn drain_the_queue(
&mut self,
cx: &mut Context,
scope: &Scope<'a>,
jobserver_helper: &HelperThread,
) -> CargoResult<()> {
use std::time::Instant;
let mut tokens = Vec::new();
@ -170,13 +172,14 @@ impl<'a> JobQueue<'a> {
// start requesting job tokens. Each job after the first needs to
// request a token.
while let Some((fresh, key, jobs)) = self.queue.dequeue() {
let total_fresh = jobs.iter().fold(fresh, |fresh, &(_, f)| {
f.combine(fresh)
});
self.pending.insert(key, PendingBuild {
amt: jobs.len(),
fresh: total_fresh,
});
let total_fresh = jobs.iter().fold(fresh, |fresh, &(_, f)| f.combine(fresh));
self.pending.insert(
key,
PendingBuild {
amt: jobs.len(),
fresh: total_fresh,
},
);
for (job, f) in jobs {
queue.push((key, job, f.combine(fresh)));
if self.active + queue.len() > 0 {
@ -196,7 +199,7 @@ impl<'a> JobQueue<'a> {
// If after all that we're not actually running anything then we're
// done!
if self.active == 0 {
break
break;
}
// And finally, before we block waiting for the next event, drop any
@ -237,8 +240,9 @@ impl<'a> JobQueue<'a> {
error = Some(format_err!("build failed"));
handle_error(e, &mut *cx.config.shell());
cx.config.shell().warn(
"build failed, waiting for other \
jobs to finish...")?;
"build failed, waiting for other \
jobs to finish...",
)?;
} else {
error = Some(e);
}
@ -246,29 +250,32 @@ impl<'a> JobQueue<'a> {
}
}
Message::Token(acquired_token) => {
tokens.push(acquired_token.chain_err(|| {
"failed to acquire jobserver token"
})?);
tokens.push(acquired_token.chain_err(|| "failed to acquire jobserver token")?);
}
}
}
let build_type = if self.is_release { "release" } else { "dev" };
let profile = cx.lib_profile();
let mut opt_type = String::from(if profile.opt_level == "0" { "unoptimized" }
else { "optimized" });
let mut opt_type = String::from(if profile.opt_level == "0" {
"unoptimized"
} else {
"optimized"
});
if profile.debuginfo.is_some() {
opt_type += " + debuginfo";
}
let duration = start_time.elapsed();
let time_elapsed = format!("{}.{1:.2} secs",
duration.as_secs(),
duration.subsec_nanos() / 10_000_000);
let time_elapsed = format!(
"{}.{1:.2} secs",
duration.as_secs(),
duration.subsec_nanos() / 10_000_000
);
if self.queue.is_empty() {
let message = format!("{} [{}] target(s) in {}",
build_type,
opt_type,
time_elapsed);
let message = format!(
"{} [{}] target(s) in {}",
build_type, opt_type, time_elapsed
);
cx.config.shell().status("Finished", message)?;
Ok(())
} else if let Some(e) = error {
@ -281,12 +288,14 @@ impl<'a> JobQueue<'a> {
/// Executes a job in the `scope` given, pushing the spawned thread's
/// handle onto `threads`.
fn run(&mut self,
key: Key<'a>,
fresh: Freshness,
job: Job,
config: &Config,
scope: &Scope<'a>) -> CargoResult<()> {
fn run(
&mut self,
key: Key<'a>,
fresh: Freshness,
job: Job,
config: &Config,
scope: &Scope<'a>,
) -> CargoResult<()> {
info!("start: {:?}", key);
self.active += 1;
@ -294,14 +303,14 @@ impl<'a> JobQueue<'a> {
let my_tx = self.tx.clone();
let doit = move || {
let res = job.run(fresh, &JobState {
tx: my_tx.clone(),
});
let res = job.run(fresh, &JobState { tx: my_tx.clone() });
my_tx.send(Message::Finish(key, res)).unwrap();
};
match fresh {
Freshness::Fresh => doit(),
Freshness::Dirty => { scope.spawn(doit); }
Freshness::Dirty => {
scope.spawn(doit);
}
}
// Print out some nice progress information
@ -354,13 +363,16 @@ impl<'a> JobQueue<'a> {
// In general, we try to print "Compiling" for the first nontrivial task
// run for a package, regardless of when that is. We then don't print
// out any more information for a package after we've printed it once.
fn note_working_on(&mut self,
config: &Config,
key: &Key<'a>,
fresh: Freshness) -> CargoResult<()> {
if (self.compiled.contains(key.pkg) && !key.profile.doc) ||
(self.documented.contains(key.pkg) && key.profile.doc) {
return Ok(())
fn note_working_on(
&mut self,
config: &Config,
key: &Key<'a>,
fresh: Freshness,
) -> CargoResult<()> {
if (self.compiled.contains(key.pkg) && !key.profile.doc)
|| (self.documented.contains(key.pkg) && key.profile.doc)
{
return Ok(());
}
match fresh {
@ -397,8 +409,7 @@ impl<'a> Key<'a> {
}
}
fn dependencies<'cfg>(&self, cx: &Context<'a, 'cfg>)
-> CargoResult<Vec<Key<'a>>> {
fn dependencies<'cfg>(&self, cx: &Context<'a, 'cfg>) -> CargoResult<Vec<Key<'a>>> {
let unit = Unit {
pkg: cx.get_package(self.pkg)?,
target: self.target,
@ -406,21 +417,27 @@ impl<'a> Key<'a> {
kind: self.kind,
};
let targets = cx.dep_targets(&unit)?;
Ok(targets.iter().filter_map(|unit| {
// Binaries aren't actually needed to *compile* tests, just to run
// them, so we don't include this dependency edge in the job graph.
if self.target.is_test() && unit.target.is_bin() {
None
} else {
Some(Key::new(unit))
}
}).collect())
Ok(targets
.iter()
.filter_map(|unit| {
// Binaries aren't actually needed to *compile* tests, just to run
// them, so we don't include this dependency edge in the job graph.
if self.target.is_test() && unit.target.is_bin() {
None
} else {
Some(Key::new(unit))
}
})
.collect())
}
}
impl<'a> fmt::Debug for Key<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} => {}/{} => {:?}", self.pkg, self.target, self.profile,
self.kind)
write!(
f,
"{} => {}/{} => {:?}",
self.pkg, self.target, self.profile, self.kind
)
}
}
@ -51,10 +51,10 @@
use std::fs;
use std::io;
use std::path::{PathBuf, Path};
use std::path::{Path, PathBuf};
use core::Workspace;
use util::{Config, FileLock, CargoResult, Filesystem};
use util::{CargoResult, Config, FileLock, Filesystem};
/// Contains the paths of all target output locations.
///
@ -84,17 +84,15 @@ impl Layout {
///
/// Differs from `at` in that this calculates the root path from the workspace target directory,
/// adding the target triple and the profile (debug, release, ...).
pub fn new(ws: &Workspace,
triple: Option<&str>,
dest: &str) -> CargoResult<Layout> {
pub fn new(ws: &Workspace, triple: Option<&str>, dest: &str) -> CargoResult<Layout> {
let mut path = ws.target_dir();
// Flexible target specifications often point at filenames, so interpret
// the target triple as a Path and then just use the file stem as the
// component for the directory name.
if let Some(triple) = triple {
path.push(Path::new(triple).file_stem().ok_or_else(|| {
format_err!("target was empty")
})?);
path.push(Path::new(triple)
.file_stem()
.ok_or_else(|| format_err!("target was empty"))?);
}
path.push(dest);
Layout::at(ws.config(), path)
@ -131,7 +129,7 @@ impl Layout {
/// This is recommended to prevent derived/temporary files from bloating backups.
fn exclude_from_backups(&self, path: &Path) {
use std::ptr;
use core_foundation::{url, number, string};
use core_foundation::{number, string, url};
use core_foundation::base::TCFType;
// For compatibility with 10.7 a string is used instead of global kCFURLIsExcludedFromBackupKey
@ -177,17 +175,31 @@ impl Layout {
}
/// Fetch the root path.
pub fn dest(&self) -> &Path { &self.root }
pub fn dest(&self) -> &Path {
&self.root
}
/// Fetch the deps path.
pub fn deps(&self) -> &Path { &self.deps }
pub fn deps(&self) -> &Path {
&self.deps
}
/// Fetch the examples path.
pub fn examples(&self) -> &Path { &self.examples }
pub fn examples(&self) -> &Path {
&self.examples
}
/// Fetch the root path.
pub fn root(&self) -> &Path { &self.root }
pub fn root(&self) -> &Path {
&self.root
}
/// Fetch the incremental path.
pub fn incremental(&self) -> &Path { &self.incremental }
pub fn incremental(&self) -> &Path {
&self.incremental
}
/// Fetch the fingerprint path.
pub fn fingerprint(&self) -> &Path { &self.fingerprint }
pub fn fingerprint(&self) -> &Path {
&self.fingerprint
}
/// Fetch the build path.
pub fn build(&self) -> &Path { &self.build }
pub fn build(&self) -> &Path {
&self.build
}
}
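// Sketch, not part of this commit: the conventional directory shape these
// accessors expose for a host debug build:
//
//     target/debug/               dest() / root()
//     target/debug/deps/          deps()
//     target/debug/examples/      examples()
//     target/debug/incremental/   incremental()
//     target/debug/.fingerprint/  fingerprint()
//     target/debug/build/         build()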
@ -1,7 +1,7 @@
use std::collections::{HashMap, HashSet};
use std::fmt::Write;
use core::{Resolve, PackageId};
use core::{PackageId, Resolve};
use util::CargoResult;
use super::Unit;
@ -21,7 +21,7 @@ impl<'a> Links<'a> {
pub fn validate(&mut self, resolve: &Resolve, unit: &Unit<'a>) -> CargoResult<()> {
if !self.validated.insert(unit.pkg.package_id()) {
return Ok(())
return Ok(());
}
let lib = match unit.pkg.manifest().links() {
Some(lib) => lib,
@ -34,26 +34,37 @@ impl<'a> Links<'a> {
let dep_path = resolve.path_to_top(pkgid);
let mut dep_path_desc = format!("package `{}`", dep_path[0]);
for dep in dep_path.iter().skip(1) {
write!(dep_path_desc,
"\n ... which is depended on by `{}`",
dep).unwrap();
write!(dep_path_desc, "\n ... which is depended on by `{}`", dep).unwrap();
}
dep_path_desc
};
bail!("multiple packages link to native library `{}`, \
but a native library can be linked only once\n\
\n\
{}\nlinks to native library `{}`\n\
\n\
{}\nalso links to native library `{}`",
lib,
describe_path(prev), lib,
describe_path(pkg), lib)
bail!(
"multiple packages link to native library `{}`, \
but a native library can be linked only once\n\
\n\
{}\nlinks to native library `{}`\n\
\n\
{}\nalso links to native library `{}`",
lib,
describe_path(prev),
lib,
describe_path(pkg),
lib
)
}
if !unit.pkg.manifest().targets().iter().any(|t| t.is_custom_build()) {
bail!("package `{}` specifies that it links to `{}` but does not \
have a custom build script", unit.pkg.package_id(), lib)
if !unit.pkg
.manifest()
.targets()
.iter()
.any(|t| t.is_custom_build())
{
bail!(
"package `{}` specifies that it links to `{}` but does not \
have a custom build script",
unit.pkg.package_id(),
lib
)
}
self.links.insert(lib.to_string(), unit.pkg.package_id());
Ok(())
@ -9,12 +9,12 @@ use std::sync::Arc;
use same_file::is_same_file;
use serde_json;
use core::{Feature, Package, PackageId, PackageSet, Target, Resolve};
use core::{Feature, Package, PackageId, PackageSet, Resolve, Target};
use core::{Profile, Profiles, Workspace};
use core::manifest::Lto;
use core::shell::ColorChoice;
use util::{self, ProcessBuilder, machine_message};
use util::{Config, internal, profile, join_paths};
use util::{self, machine_message, ProcessBuilder};
use util::{internal, join_paths, profile, Config};
use util::paths;
use util::errors::{CargoResult, CargoResultExt, Internal};
use util::Freshness;
@ -25,8 +25,8 @@ use self::job_queue::JobQueue;
use self::output_depinfo::output_depinfo;
pub use self::compilation::Compilation;
pub use self::context::{Context, Unit, TargetFileType};
pub use self::custom_build::{BuildOutput, BuildMap, BuildScripts};
pub use self::context::{Context, TargetFileType, Unit};
pub use self::custom_build::{BuildMap, BuildOutput, BuildScripts};
pub use self::layout::is_bad_artifact_name;
mod compilation;
@ -43,7 +43,10 @@ mod output_depinfo;
///
/// These will be the same unless cross-compiling.
#[derive(PartialEq, Eq, Hash, Debug, Clone, Copy, PartialOrd, Ord)]
pub enum Kind { Host, Target }
pub enum Kind {
Host,
Target,
}
/// Configuration information for a rustc build.
#[derive(Default, Clone)]
@ -97,22 +100,19 @@ pub trait Executor: Send + Sync + 'static {
/// In case of an `Err`, Cargo will not continue with the build process for
/// this package.
fn exec(&self,
cmd: ProcessBuilder,
_id: &PackageId,
_target: &Target)
-> CargoResult<()> {
fn exec(&self, cmd: ProcessBuilder, _id: &PackageId, _target: &Target) -> CargoResult<()> {
cmd.exec()?;
Ok(())
}
fn exec_json(&self,
cmd: ProcessBuilder,
_id: &PackageId,
_target: &Target,
handle_stdout: &mut FnMut(&str) -> CargoResult<()>,
handle_stderr: &mut FnMut(&str) -> CargoResult<()>)
-> CargoResult<()> {
fn exec_json(
&self,
cmd: ProcessBuilder,
_id: &PackageId,
_target: &Target,
handle_stdout: &mut FnMut(&str) -> CargoResult<()>,
handle_stderr: &mut FnMut(&str) -> CargoResult<()>,
) -> CargoResult<()> {
cmd.exec_with_streaming(handle_stdout, handle_stderr, false)?;
Ok(())
}
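// Sketch, not part of this commit: a minimal custom executor leaning on the
// default methods above (the type and its logging are hypothetical):
//
//     struct LoggingExecutor;
//     impl Executor for LoggingExecutor {
//         fn exec(&self, cmd: ProcessBuilder, id: &PackageId, _t: &Target) -> CargoResult<()> {
//             println!("running: {}", cmd); // ProcessBuilder implements Display
//             cmd.exec()?;
//             Ok(())
//         }
//     }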
@ -133,33 +133,38 @@ impl Executor for DefaultExecutor {}
// Returns a mapping of the root package plus its immediate dependencies to
// where the compiled libraries are all located.
pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>,
pkg_targets: &'a PackagesToBuild<'a>,
packages: &'a PackageSet<'cfg>,
resolve: &'a Resolve,
config: &'cfg Config,
build_config: BuildConfig,
profiles: &'a Profiles,
exec: &Arc<Executor>)
-> CargoResult<Compilation<'cfg>> {
let units = pkg_targets.iter().flat_map(|&(pkg, ref targets)| {
let default_kind = if build_config.requested_target.is_some() {
Kind::Target
} else {
Kind::Host
};
targets.iter().map(move |&(target, profile)| {
Unit {
pub fn compile_targets<'a, 'cfg: 'a>(
ws: &Workspace<'cfg>,
pkg_targets: &'a PackagesToBuild<'a>,
packages: &'a PackageSet<'cfg>,
resolve: &'a Resolve,
config: &'cfg Config,
build_config: BuildConfig,
profiles: &'a Profiles,
exec: &Arc<Executor>,
) -> CargoResult<Compilation<'cfg>> {
let units = pkg_targets
.iter()
.flat_map(|&(pkg, ref targets)| {
let default_kind = if build_config.requested_target.is_some() {
Kind::Target
} else {
Kind::Host
};
targets.iter().map(move |&(target, profile)| Unit {
pkg,
target,
profile,
kind: if target.for_host() {Kind::Host} else {default_kind},
}
kind: if target.for_host() {
Kind::Host
} else {
default_kind
},
})
})
}).collect::<Vec<_>>();
.collect::<Vec<_>>();
let mut cx = Context::new(ws, resolve, packages, config,
build_config, profiles)?;
let mut cx = Context::new(ws, resolve, packages, config, build_config, profiles)?;
let mut queue = JobQueue::new(&cx);
@ -192,50 +197,73 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>,
};
if unit.profile.test {
cx.compilation.tests.push((unit.pkg.clone(),
unit.target.kind().clone(),
unit.target.name().to_string(),
dst.clone()));
cx.compilation.tests.push((
unit.pkg.clone(),
unit.target.kind().clone(),
unit.target.name().to_string(),
dst.clone(),
));
} else if unit.target.is_bin() || unit.target.is_example() {
cx.compilation.binaries.push(bindst.clone());
} else if unit.target.is_lib() {
let pkgid = unit.pkg.package_id().clone();
cx.compilation.libraries.entry(pkgid).or_insert_with(HashSet::new)
.insert((unit.target.clone(), dst.clone()));
cx.compilation
.libraries
.entry(pkgid)
.or_insert_with(HashSet::new)
.insert((unit.target.clone(), dst.clone()));
}
}
for dep in cx.dep_targets(unit)?.iter() {
if !unit.target.is_lib() { continue }
if !unit.target.is_lib() {
continue;
}
if dep.profile.run_custom_build {
let out_dir = cx.build_script_out_dir(dep).display().to_string();
cx.compilation.extra_env.entry(dep.pkg.package_id().clone())
.or_insert_with(Vec::new)
.push(("OUT_DIR".to_string(), out_dir));
cx.compilation
.extra_env
.entry(dep.pkg.package_id().clone())
.or_insert_with(Vec::new)
.push(("OUT_DIR".to_string(), out_dir));
}
if !dep.target.is_lib() { continue }
if dep.profile.doc { continue }
if !dep.target.is_lib() {
continue;
}
if dep.profile.doc {
continue;
}
let v = cx.target_filenames(dep)?;
cx.compilation.libraries
cx.compilation
.libraries
.entry(unit.pkg.package_id().clone())
.or_insert_with(HashSet::new)
.extend(v.iter().map(|&(ref f, _, _)| {
(dep.target.clone(), f.clone())
}));
.extend(
v.iter()
.map(|&(ref f, _, _)| (dep.target.clone(), f.clone())),
);
}
let feats = cx.resolve.features(unit.pkg.package_id());
if !feats.is_empty() {
cx.compilation.cfgs.entry(unit.pkg.package_id().clone()).or_insert_with(|| {
feats.iter().map(|feat| format!("feature=\"{}\"", feat)).collect()
});
cx.compilation
.cfgs
.entry(unit.pkg.package_id().clone())
.or_insert_with(|| {
feats
.iter()
.map(|feat| format!("feature=\"{}\"", feat))
.collect()
});
}
let rustdocflags = cx.rustdocflags_args(unit)?;
if !rustdocflags.is_empty() {
cx.compilation.rustdocflags.entry(unit.pkg.package_id().clone())
cx.compilation
.rustdocflags
.entry(unit.pkg.package_id().clone())
.or_insert(rustdocflags);
}
@ -243,11 +271,15 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>,
}
for (&(ref pkg, _), output) in cx.build_state.outputs.lock().unwrap().iter() {
cx.compilation.cfgs.entry(pkg.clone())
cx.compilation
.cfgs
.entry(pkg.clone())
.or_insert_with(HashSet::new)
.extend(output.cfgs.iter().cloned());
cx.compilation.extra_env.entry(pkg.clone())
cx.compilation
.extra_env
.entry(pkg.clone())
.or_insert_with(Vec::new)
.extend(output.env.iter().cloned());
@ -259,18 +291,19 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>,
Ok(cx.compilation)
}
fn compile<'a, 'cfg: 'a>(cx: &mut Context<'a, 'cfg>,
jobs: &mut JobQueue<'a>,
unit: &Unit<'a>,
exec: &Arc<Executor>) -> CargoResult<()> {
fn compile<'a, 'cfg: 'a>(
cx: &mut Context<'a, 'cfg>,
jobs: &mut JobQueue<'a>,
unit: &Unit<'a>,
exec: &Arc<Executor>,
) -> CargoResult<()> {
if !cx.compiled.insert(*unit) {
return Ok(())
return Ok(());
}
// Build up the work to be done to compile this unit, enqueuing it once
// we've got everything constructed.
let p = profile::start(format!("preparing: {}/{}", unit.pkg,
unit.target.name()));
let p = profile::start(format!("preparing: {}/{}", unit.pkg, unit.target.name()));
fingerprint::prepare_init(cx, unit)?;
cx.links.validate(cx.resolve, unit)?;
@ -307,9 +340,11 @@ fn compile<'a, 'cfg: 'a>(cx: &mut Context<'a, 'cfg>,
Ok(())
}
fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
unit: &Unit<'a>,
exec: &Arc<Executor>) -> CargoResult<Work> {
fn rustc<'a, 'cfg>(
cx: &mut Context<'a, 'cfg>,
unit: &Unit<'a>,
exec: &Arc<Executor>,
) -> CargoResult<Work> {
let mut rustc = prepare_rustc(cx, &unit.target.rustc_crate_types(), unit)?;
let name = unit.pkg.name().to_string();
@ -336,8 +371,7 @@ fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
// If we are a binary and the package also contains a library, then we
// don't pass the `-l` flags.
let pass_l_flag = unit.target.is_lib() ||
!unit.pkg.targets().iter().any(|t| t.is_lib());
let pass_l_flag = unit.target.is_lib() || !unit.pkg.targets().iter().any(|t| t.is_lib());
let do_rename = unit.target.allows_underscores() && !unit.profile.test;
let real_name = unit.target.name().to_string();
let crate_name = unit.target.crate_name();
@ -360,7 +394,10 @@ fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
let root_output = cx.target_root().to_path_buf();
let pkg_root = unit.pkg.root().to_path_buf();
let cwd = rustc.get_cwd().unwrap_or_else(|| cx.config.cwd()).to_path_buf();
let cwd = rustc
.get_cwd()
.unwrap_or_else(|| cx.config.cwd())
.to_path_buf();
return Ok(Work::new(move |state| {
// Only at runtime have we discovered what the extra -L and -l
@ -372,10 +409,14 @@ fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
// previous build scripts, we include them in the rustc invocation.
if let Some(build_deps) = build_deps {
let build_state = build_state.outputs.lock().unwrap();
add_native_deps(&mut rustc, &build_state, &build_deps,
pass_l_flag, &current_id)?;
add_plugin_deps(&mut rustc, &build_state, &build_deps,
&root_output)?;
add_native_deps(
&mut rustc,
&build_state,
&build_deps,
pass_l_flag,
&current_id,
)?;
add_plugin_deps(&mut rustc, &build_state, &build_deps, &root_output)?;
add_custom_env(&mut rustc, &build_state, &current_id, kind)?;
}
@ -393,11 +434,19 @@ fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
state.running(&rustc);
if json_messages {
exec.exec_json(rustc, &package_id, &target,
&mut |line| if !line.is_empty() {
Err(internal(&format!("compiler stdout is not empty: `{}`", line)))
} else {
Ok(())
exec.exec_json(
rustc,
&package_id,
&target,
&mut |line| {
if !line.is_empty() {
Err(internal(&format!(
"compiler stdout is not empty: `{}`",
line
)))
} else {
Ok(())
}
},
&mut |line| {
// stderr from rustc can have a mix of JSON and non-JSON output
@ -417,38 +466,36 @@ fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
writeln!(io::stderr(), "{}", line)?;
}
Ok(())
}
).chain_err(|| {
format!("Could not compile `{}`.", name)
})?;
},
).chain_err(|| format!("Could not compile `{}`.", name))?;
} else {
exec.exec(rustc, &package_id, &target)
.map_err(Internal::new)
.chain_err(|| {
format!("Could not compile `{}`.", name)
})?;
.chain_err(|| format!("Could not compile `{}`.", name))?;
}
if do_rename && real_name != crate_name {
let dst = &filenames[0].0;
let src = dst.with_file_name(dst.file_name().unwrap()
.to_str().unwrap()
.replace(&real_name, &crate_name));
let src = dst.with_file_name(
dst.file_name()
.unwrap()
.to_str()
.unwrap()
.replace(&real_name, &crate_name),
);
if src.exists() && src.file_name() != dst.file_name() {
fs::rename(&src, &dst).chain_err(|| {
internal(format!("could not rename crate {:?}", src))
})?;
fs::rename(&src, &dst)
.chain_err(|| internal(format!("could not rename crate {:?}", src)))?;
}
}
if rustc_dep_info_loc.exists() {
fingerprint::translate_dep_info(&rustc_dep_info_loc,
&dep_info_loc,
&pkg_root,
&cwd)
fingerprint::translate_dep_info(&rustc_dep_info_loc, &dep_info_loc, &pkg_root, &cwd)
.chain_err(|| {
internal(format!("could not parse/generate dep info at: {}",
rustc_dep_info_loc.display()))
internal(format!(
"could not parse/generate dep info at: {}",
rustc_dep_info_loc.display()
))
})?;
}
@ -457,15 +504,19 @@ fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
// Add all relevant -L and -l flags from dependencies (now calculated and
// present in `state`) to the command provided
fn add_native_deps(rustc: &mut ProcessBuilder,
build_state: &BuildMap,
build_scripts: &BuildScripts,
pass_l_flag: bool,
current_id: &PackageId) -> CargoResult<()> {
fn add_native_deps(
rustc: &mut ProcessBuilder,
build_state: &BuildMap,
build_scripts: &BuildScripts,
pass_l_flag: bool,
current_id: &PackageId,
) -> CargoResult<()> {
for key in build_scripts.to_link.iter() {
let output = build_state.get(key).ok_or_else(|| {
internal(format!("couldn't find build state for {}/{:?}",
key.0, key.1))
internal(format!(
"couldn't find build state for {}/{:?}",
key.0, key.1
))
})?;
for path in output.library_paths.iter() {
rustc.arg("-L").arg(path);
@ -486,10 +537,12 @@ fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
// Add all custom environment variables present in `state` (after they've
// been put there by one of the `build_scripts`) to the command provided.
fn add_custom_env(rustc: &mut ProcessBuilder,
build_state: &BuildMap,
current_id: &PackageId,
kind: Kind) -> CargoResult<()> {
fn add_custom_env(
rustc: &mut ProcessBuilder,
build_state: &BuildMap,
current_id: &PackageId,
kind: Kind,
) -> CargoResult<()> {
let key = (current_id.clone(), kind);
if let Some(output) = build_state.get(&key) {
for &(ref name, ref value) in output.env.iter() {
@ -502,14 +555,18 @@ fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
/// Link the compiled target (often of form `foo-{metadata_hash}`) to the
/// final target. This must happen during both "Fresh" and "Compile"
fn link_targets<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
unit: &Unit<'a>,
fresh: bool) -> CargoResult<Work> {
fn link_targets<'a, 'cfg>(
cx: &mut Context<'a, 'cfg>,
unit: &Unit<'a>,
fresh: bool,
) -> CargoResult<Work> {
let filenames = cx.target_filenames(unit)?;
let package_id = unit.pkg.package_id().clone();
let target = unit.target.clone();
let profile = unit.profile.clone();
let features = cx.resolve.features_sorted(&package_id).into_iter()
let features = cx.resolve
.features_sorted(&package_id)
.into_iter()
.map(|s| s.to_owned())
.collect();
let json_messages = cx.build_config.json_messages;
@ -524,7 +581,7 @@ fn link_targets<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
// This may have been a `cargo rustc` command which changes the
// output, so the source may not actually exist.
if !src.exists() {
continue
continue;
}
let dst = match link_dst.as_ref() {
Some(dst) => dst,
@ -537,7 +594,7 @@ fn link_targets<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
debug!("linking {} to {}", src.display(), dst.display());
if is_same_file(src, dst).unwrap_or(false) {
continue
continue;
}
if dst.exists() {
paths::remove_file(&dst)?;
@ -563,8 +620,11 @@ fn link_targets<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
fs::copy(src, dst).map(|_| ())
})
.chain_err(|| {
format!("failed to link or copy `{}` to `{}`",
src.display(), dst.display())
format!(
"failed to link or copy `{}` to `{}`",
src.display(),
dst.display()
)
})?;
}
@ -589,21 +649,24 @@ fn load_build_deps(cx: &Context, unit: &Unit) -> Option<Arc<BuildScripts>> {
// For all plugin dependencies, add their -L paths (now calculated and
// present in `state`) to the dynamic library load path for the command to
// execute.
fn add_plugin_deps(rustc: &mut ProcessBuilder,
build_state: &BuildMap,
build_scripts: &BuildScripts,
root_output: &PathBuf)
-> CargoResult<()> {
fn add_plugin_deps(
rustc: &mut ProcessBuilder,
build_state: &BuildMap,
build_scripts: &BuildScripts,
root_output: &PathBuf,
) -> CargoResult<()> {
let var = util::dylib_path_envvar();
let search_path = rustc.get_env(var).unwrap_or_default();
let mut search_path = env::split_paths(&search_path).collect::<Vec<_>>();
for id in build_scripts.plugins.iter() {
let key = (id.clone(), Kind::Host);
let output = build_state.get(&key).ok_or_else(|| {
internal(format!("couldn't find libs for plugin dep {}", id))
})?;
search_path.append(&mut filter_dynamic_search_path(output.library_paths.iter(),
root_output));
let output = build_state
.get(&key)
.ok_or_else(|| internal(format!("couldn't find libs for plugin dep {}", id)))?;
search_path.append(&mut filter_dynamic_search_path(
output.library_paths.iter(),
root_output,
));
}
let search_path = join_paths(&search_path, var)?;
rustc.env(var, &search_path);
@ -615,19 +678,21 @@ fn add_plugin_deps(rustc: &mut ProcessBuilder,
// Strip off prefixes like "native=" or "framework=" and filter out directories
// *not* inside our output directory since they are likely spurious and can cause
// clashes with system shared libraries (issue #3366).
fn filter_dynamic_search_path<'a, I>(paths :I, root_output: &PathBuf) -> Vec<PathBuf>
where I: Iterator<Item=&'a PathBuf> {
fn filter_dynamic_search_path<'a, I>(paths: I, root_output: &PathBuf) -> Vec<PathBuf>
where
I: Iterator<Item = &'a PathBuf>,
{
let mut search_path = vec![];
for dir in paths {
let dir = match dir.to_str() {
Some(s) => {
let mut parts = s.splitn(2, '=');
match (parts.next(), parts.next()) {
(Some("native"), Some(path)) |
(Some("crate"), Some(path)) |
(Some("dependency"), Some(path)) |
(Some("framework"), Some(path)) |
(Some("all"), Some(path)) => path.into(),
(Some("native"), Some(path))
| (Some("crate"), Some(path))
| (Some("dependency"), Some(path))
| (Some("framework"), Some(path))
| (Some("all"), Some(path)) => path.into(),
_ => dir.clone(),
}
}
@ -636,16 +701,22 @@ fn filter_dynamic_search_path<'a, I>(paths :I, root_output: &PathBuf) -> Vec<Pat
if dir.starts_with(&root_output) {
search_path.push(dir);
} else {
debug!("Not including path {} in runtime library search path because it is \
outside target root {}", dir.display(), root_output.display());
debug!(
"Not including path {} in runtime library search path because it is \
outside target root {}",
dir.display(),
root_output.display()
);
}
}
search_path
}
fn prepare_rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
crate_types: &[&str],
unit: &Unit<'a>) -> CargoResult<ProcessBuilder> {
fn prepare_rustc<'a, 'cfg>(
cx: &mut Context<'a, 'cfg>,
crate_types: &[&str],
unit: &Unit<'a>,
) -> CargoResult<ProcessBuilder> {
let mut base = cx.compilation.rustc_process(unit.pkg)?;
base.inherit_jobserver(&cx.jobserver);
build_base_args(cx, &mut base, unit, crate_types)?;
@ -653,9 +724,7 @@ fn prepare_rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
Ok(base)
}
fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
unit: &Unit<'a>) -> CargoResult<Work> {
fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<Work> {
let mut rustdoc = cx.compilation.rustdoc_process(unit.pkg)?;
rustdoc.inherit_jobserver(&cx.jobserver);
rustdoc.arg("--crate-name").arg(&unit.target.crate_name());
@ -702,7 +771,9 @@ fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
}
}
state.running(&rustdoc);
rustdoc.exec().chain_err(|| format!("Could not document `{}`.", name))?;
rustdoc
.exec()
.chain_err(|| format!("Could not document `{}`.", name))?;
Ok(())
}))
}
@ -737,14 +808,27 @@ fn add_path_args(cx: &Context, unit: &Unit, cmd: &mut ProcessBuilder) {
cmd.cwd(cwd);
}
fn build_base_args<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
cmd: &mut ProcessBuilder,
unit: &Unit<'a>,
crate_types: &[&str]) -> CargoResult<()> {
fn build_base_args<'a, 'cfg>(
cx: &mut Context<'a, 'cfg>,
cmd: &mut ProcessBuilder,
unit: &Unit<'a>,
crate_types: &[&str],
) -> CargoResult<()> {
let Profile {
ref opt_level, ref lto, codegen_units, ref rustc_args, debuginfo,
debug_assertions, overflow_checks, rpath, test, doc: _doc,
run_custom_build, ref panic, check, ..
ref opt_level,
ref lto,
codegen_units,
ref rustc_args,
debuginfo,
debug_assertions,
overflow_checks,
rpath,
test,
doc: _doc,
run_custom_build,
ref panic,
check,
..
} = *unit.profile;
assert!(!run_custom_build);
@ -753,8 +837,12 @@ fn build_base_args<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
add_path_args(cx, unit, cmd);
match cx.config.shell().color_choice() {
ColorChoice::Always => { cmd.arg("--color").arg("always"); }
ColorChoice::Never => { cmd.arg("--color").arg("never"); }
ColorChoice::Always => {
cmd.arg("--color").arg("always");
}
ColorChoice::Never => {
cmd.arg("--color").arg("never");
}
ColorChoice::CargoAuto => {}
}
@ -774,10 +862,8 @@ fn build_base_args<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
cmd.arg("--emit=dep-info,link");
}
let prefer_dynamic = (unit.target.for_host() &&
!unit.target.is_custom_build()) ||
(crate_types.contains(&"dylib") &&
cx.ws.members().any(|p| p != unit.pkg));
let prefer_dynamic = (unit.target.for_host() && !unit.target.is_custom_build())
|| (crate_types.contains(&"dylib") && cx.ws.members().any(|p| p != unit.pkg));
if prefer_dynamic {
cmd.arg("-C").arg("prefer-dynamic");
}
@ -875,7 +961,8 @@ fn build_base_args<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
cmd.arg("-C").arg(&format!("extra-filename=-{}", m));
}
None => {
cmd.arg("-C").arg(&format!("metadata={}", cx.target_short_hash(unit)));
cmd.arg("-C")
.arg(&format!("metadata={}", cx.target_short_hash(unit)));
}
}
@ -885,8 +972,7 @@ fn build_base_args<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
cmd.arg("--out-dir").arg(&cx.out_dir(unit));
fn opt(cmd: &mut ProcessBuilder, key: &str, prefix: &str,
val: Option<&OsStr>) {
fn opt(cmd: &mut ProcessBuilder, key: &str, prefix: &str, val: Option<&OsStr>) {
if let Some(val) = val {
let mut joined = OsString::from(prefix);
joined.push(val);
@ -895,20 +981,31 @@ fn build_base_args<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
}
if unit.kind == Kind::Target {
opt(cmd, "--target", "", cx.requested_target().map(|s| s.as_ref()));
opt(
cmd,
"--target",
"",
cx.requested_target().map(|s| s.as_ref()),
);
}
opt(cmd, "-C", "ar=", cx.ar(unit.kind).map(|s| s.as_ref()));
opt(cmd, "-C", "linker=", cx.linker(unit.kind).map(|s| s.as_ref()));
opt(
cmd,
"-C",
"linker=",
cx.linker(unit.kind).map(|s| s.as_ref()),
);
cmd.args(&cx.incremental_args(unit)?);
Ok(())
}
fn build_deps_args<'a, 'cfg>(cmd: &mut ProcessBuilder,
cx: &mut Context<'a, 'cfg>,
unit: &Unit<'a>) -> CargoResult<()> {
fn build_deps_args<'a, 'cfg>(
cmd: &mut ProcessBuilder,
cx: &mut Context<'a, 'cfg>,
unit: &Unit<'a>,
) -> CargoResult<()> {
cmd.arg("-L").arg(&{
let mut deps = OsString::from("dependency=");
deps.push(cx.deps_dir(unit));
@ -930,17 +1027,24 @@ fn build_deps_args<'a, 'cfg>(cmd: &mut ProcessBuilder,
// If there is no linkable target but there should be one, rustc fails
// later on if there is an `extern crate` for it. This may turn into a
// hard error in the future, see PR #4797
if !dep_targets.iter().any(|u| !u.profile.doc && u.target.linkable()) {
if let Some(u) = dep_targets.iter()
.find(|u| !u.profile.doc && u.target.is_lib()) {
cx.config.shell().warn(format!("The package `{}` \
provides no linkable target. The compiler might raise an error while compiling \
`{}`. Consider adding 'dylib' or 'rlib' to key `crate-type` in `{}`'s \
Cargo.toml. This warning might turn into a hard error in the future.",
u.target.crate_name(),
unit.target.crate_name(),
u.target.crate_name()))?;
}
if !dep_targets
.iter()
.any(|u| !u.profile.doc && u.target.linkable())
{
if let Some(u) = dep_targets
.iter()
.find(|u| !u.profile.doc && u.target.is_lib())
{
cx.config.shell().warn(format!(
"The package `{}` \
provides no linkable target. The compiler might raise an error while compiling \
`{}`. Consider adding 'dylib' or 'rlib' to key `crate-type` in `{}`'s \
Cargo.toml. This warning might turn into a hard error in the future.",
u.target.crate_name(),
unit.target.crate_name(),
u.target.crate_name()
))?;
}
}
for dep in dep_targets {
@ -954,13 +1058,15 @@ Cargo.toml. This warning might turn into a hard error in the future.",
return Ok(());
fn link_to<'a, 'cfg>(cmd: &mut ProcessBuilder,
cx: &mut Context<'a, 'cfg>,
current: &Unit<'a>,
dep: &Unit<'a>) -> CargoResult<()> {
fn link_to<'a, 'cfg>(
cmd: &mut ProcessBuilder,
cx: &mut Context<'a, 'cfg>,
current: &Unit<'a>,
dep: &Unit<'a>,
) -> CargoResult<()> {
for &(ref dst, _, file_type) in cx.target_filenames(dep)?.iter() {
if file_type != TargetFileType::Linkable {
continue
continue;
}
let mut v = OsString::new();
@ -972,7 +1078,9 @@ Cargo.toml. This warning might turn into a hard error in the future.",
//
// This I believe mostly works out for now, but we'll likely want
// to tighten this up in the future.
let name = current.pkg.dependencies()
let name = current
.pkg
.dependencies()
.iter()
.filter(|d| d.matches_ignoring_source(dep.pkg.summary()))
.filter_map(|d| d.rename())
@ -991,9 +1099,9 @@ Cargo.toml. This warning might turn into a hard error in the future.",
fn envify(s: &str) -> String {
s.chars()
.flat_map(|c| c.to_uppercase())
.map(|c| if c == '-' {'_'} else {c})
.collect()
.flat_map(|c| c.to_uppercase())
.map(|c| if c == '-' { '_' } else { c })
.collect()
}
impl Kind {


@ -1,10 +1,10 @@
use std::collections::{HashSet, BTreeSet};
use std::io::{Write, BufWriter};
use std::collections::{BTreeSet, HashSet};
use std::io::{BufWriter, Write};
use std::fs::File;
use std::path::{Path, PathBuf};
use ops::{Context, Unit};
use util::{CargoResult, internal};
use util::{internal, CargoResult};
use util::paths;
use ops::cargo_rustc::fingerprint;
@ -15,9 +15,12 @@ fn render_filename<P: AsRef<Path>>(path: P, basedir: Option<&str>) -> CargoResul
Some(base) => match path.strip_prefix(base) {
Ok(relpath) => relpath,
_ => path,
}
},
};
relpath.to_str().ok_or_else(|| internal("path not utf-8")).map(|f| f.replace(" ", "\\ "))
relpath
.to_str()
.ok_or_else(|| internal("path not utf-8"))
.map(|f| f.replace(" ", "\\ "))
}
fn add_deps_for_unit<'a, 'b>(
@ -25,9 +28,7 @@ fn add_deps_for_unit<'a, 'b>(
context: &mut Context<'a, 'b>,
unit: &Unit<'a>,
visited: &mut HashSet<Unit<'a>>,
)
-> CargoResult<()>
{
) -> CargoResult<()> {
if !visited.insert(*unit) {
return Ok(());
}
@ -42,8 +43,11 @@ fn add_deps_for_unit<'a, 'b>(
deps.insert(path);
}
} else {
debug!("can't find dep_info for {:?} {:?}",
unit.pkg.package_id(), unit.profile);
debug!(
"can't find dep_info for {:?} {:?}",
unit.pkg.package_id(),
unit.profile
);
return Err(internal("dep_info missing"));
}
}
@ -73,8 +77,12 @@ pub fn output_depinfo<'a, 'b>(context: &mut Context<'a, 'b>, unit: &Unit<'a>) ->
let basedir_string;
let basedir = match context.config.get_path("build.dep-info-basedir")? {
Some(value) => {
basedir_string = value.val.as_os_str().to_str().
ok_or_else(|| internal("build.dep-info-basedir path not utf-8"))?.to_string();
basedir_string = value
.val
.as_os_str()
.to_str()
.ok_or_else(|| internal("build.dep-info-basedir path not utf-8"))?
.to_string();
Some(basedir_string.as_str())
}
None => None,
@ -92,10 +100,8 @@ pub fn output_depinfo<'a, 'b>(context: &mut Context<'a, 'b>, unit: &Unit<'a>) ->
// If nothing changed don't recreate the file which could alter
// its mtime
if let Ok(previous) = fingerprint::parse_rustc_dep_info(&output_path) {
if previous.len() == 1 &&
previous[0].0 == target_fn &&
previous[0].1 == deps {
continue
if previous.len() == 1 && previous[0].0 == target_fn && previous[0].1 == deps {
continue;
}
}


@ -1,7 +1,7 @@
use std::ffi::{OsString, OsStr};
use std::ffi::{OsStr, OsString};
use ops::{self, Compilation};
use util::{self, CargoTestError, Test, ProcessError};
use util::{self, CargoTestError, ProcessError, Test};
use util::errors::CargoResult;
use core::Workspace;
@ -12,13 +12,15 @@ pub struct TestOptions<'a> {
pub only_doc: bool,
}
pub fn run_tests(ws: &Workspace,
options: &TestOptions,
test_args: &[String]) -> CargoResult<Option<CargoTestError>> {
pub fn run_tests(
ws: &Workspace,
options: &TestOptions,
test_args: &[String],
) -> CargoResult<Option<CargoTestError>> {
let compilation = compile_tests(ws, options)?;
if options.no_run {
return Ok(None)
return Ok(None);
}
let (test, mut errors) = if options.only_doc {
assert!(options.compile_opts.filter.is_specific());
@ -29,7 +31,7 @@ pub fn run_tests(ws: &Workspace,
// If we have an error and want to fail fast, return
if !errors.is_empty() && !options.no_fail_fast {
return Ok(Some(CargoTestError::new(test, errors)))
return Ok(Some(CargoTestError::new(test, errors)));
}
// If a specific test was requested or we're not running any tests at all,
@ -37,7 +39,7 @@ pub fn run_tests(ws: &Workspace,
if options.compile_opts.filter.is_specific() {
match errors.len() {
0 => return Ok(None),
_ => return Ok(Some(CargoTestError::new(test, errors)))
_ => return Ok(Some(CargoTestError::new(test, errors))),
}
}
@ -51,15 +53,17 @@ pub fn run_tests(ws: &Workspace,
}
}
pub fn run_benches(ws: &Workspace,
options: &TestOptions,
args: &[String]) -> CargoResult<Option<CargoTestError>> {
pub fn run_benches(
ws: &Workspace,
options: &TestOptions,
args: &[String],
) -> CargoResult<Option<CargoTestError>> {
let mut args = args.to_vec();
args.push("--bench".to_string());
let compilation = compile_tests(ws, options)?;
if options.no_run {
return Ok(None)
return Ok(None);
}
let (test, errors) = run_unit_tests(options, &args, &compilation)?;
match errors.len() {
@ -68,21 +72,23 @@ pub fn run_benches(ws: &Workspace,
}
}
fn compile_tests<'a>(ws: &Workspace<'a>,
options: &TestOptions<'a>)
-> CargoResult<Compilation<'a>> {
fn compile_tests<'a>(
ws: &Workspace<'a>,
options: &TestOptions<'a>,
) -> CargoResult<Compilation<'a>> {
let mut compilation = ops::compile(ws, &options.compile_opts)?;
compilation.tests.sort_by(|a, b| {
(a.0.package_id(), &a.1, &a.2).cmp(&(b.0.package_id(), &b.1, &b.2))
});
compilation
.tests
.sort_by(|a, b| (a.0.package_id(), &a.1, &a.2).cmp(&(b.0.package_id(), &b.1, &b.2)));
Ok(compilation)
}
/// Run the unit and integration tests of a project.
fn run_unit_tests(options: &TestOptions,
test_args: &[String],
compilation: &Compilation)
-> CargoResult<(Test, Vec<ProcessError>)> {
fn run_unit_tests(
options: &TestOptions,
test_args: &[String],
compilation: &Compilation,
) -> CargoResult<(Test, Vec<ProcessError>)> {
let config = options.compile_opts.config;
let cwd = options.compile_opts.config.cwd();
@ -95,12 +101,12 @@ fn run_unit_tests(options: &TestOptions,
};
let mut cmd = compilation.target_process(exe, pkg)?;
cmd.args(test_args);
config.shell().concise(|shell| {
shell.status("Running", to_display.display().to_string())
})?;
config.shell().verbose(|shell| {
shell.status("Running", cmd.to_string())
})?;
config
.shell()
.concise(|shell| shell.status("Running", to_display.display().to_string()))?;
config
.shell()
.verbose(|shell| shell.status("Running", cmd.to_string()))?;
let result = cmd.exec();
@ -118,16 +124,27 @@ fn run_unit_tests(options: &TestOptions,
if errors.len() == 1 {
let (kind, name, pkg_name, e) = errors.pop().unwrap();
Ok((Test::UnitTest{kind, name, pkg_name}, vec![e]))
Ok((
Test::UnitTest {
kind,
name,
pkg_name,
},
vec![e],
))
} else {
Ok((Test::Multiple, errors.into_iter().map(|(_, _, _, e)| e).collect()))
Ok((
Test::Multiple,
errors.into_iter().map(|(_, _, _, e)| e).collect(),
))
}
}
fn run_doc_tests(options: &TestOptions,
test_args: &[String],
compilation: &Compilation)
-> CargoResult<(Test, Vec<ProcessError>)> {
fn run_doc_tests(
options: &TestOptions,
test_args: &[String],
compilation: &Compilation,
) -> CargoResult<(Test, Vec<ProcessError>)> {
let mut errors = Vec::new();
let config = options.compile_opts.config;
@ -137,16 +154,24 @@ fn run_doc_tests(options: &TestOptions,
}
let libs = compilation.to_doc_test.iter().map(|package| {
(package, package.targets().iter().filter(|t| t.doctested())
.map(|t| (t.src_path(), t.name(), t.crate_name())))
(
package,
package
.targets()
.iter()
.filter(|t| t.doctested())
.map(|t| (t.src_path(), t.name(), t.crate_name())),
)
});
for (package, tests) in libs {
for (lib, name, crate_name) in tests {
config.shell().status("Doc-tests", name)?;
let mut p = compilation.rustdoc_process(package)?;
p.arg("--test").arg(lib)
.arg("--crate-name").arg(&crate_name);
p.arg("--test")
.arg(lib)
.arg("--crate-name")
.arg(&crate_name);
for &rust_dep in &[&compilation.deps_output] {
let mut arg = OsString::from("dependency=");
@ -186,9 +211,8 @@ fn run_doc_tests(options: &TestOptions,
// dynamically as well, causing problems. As a result we only
// pass `--extern` for rlib deps and skip out on all other
// artifacts.
if lib.extension() != Some(OsStr::new("rlib")) &&
!target.for_host() {
continue
if lib.extension() != Some(OsStr::new("rlib")) && !target.for_host() {
continue;
}
let mut arg = OsString::from(target.crate_name());
arg.push("=");
@ -200,9 +224,9 @@ fn run_doc_tests(options: &TestOptions,
p.args(flags);
}
config.shell().verbose(|shell| {
shell.status("Running", p.to_string())
})?;
config
.shell()
.verbose(|shell| shell.status("Running", p.to_string()))?;
if let Err(e) = p.exec() {
let e = e.downcast::<ProcessError>()?;
errors.push(e);


@ -2,7 +2,7 @@ use std::io::prelude::*;
use toml;
use core::{Resolve, resolver, Workspace};
use core::{resolver, Resolve, Workspace};
use core::resolver::WorkspaceResolve;
use util::Filesystem;
use util::errors::{CargoResult, CargoResultExt};
@ -10,24 +10,23 @@ use util::toml as cargo_toml;
pub fn load_pkg_lockfile(ws: &Workspace) -> CargoResult<Option<Resolve>> {
if !ws.root().join("Cargo.lock").exists() {
return Ok(None)
return Ok(None);
}
let root = Filesystem::new(ws.root().to_path_buf());
let mut f = root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file")?;
let mut s = String::new();
f.read_to_string(&mut s).chain_err(|| {
format!("failed to read file: {}", f.path().display())
})?;
f.read_to_string(&mut s)
.chain_err(|| format!("failed to read file: {}", f.path().display()))?;
let resolve = (|| -> CargoResult<Option<Resolve>> {
let resolve : toml::Value = cargo_toml::parse(&s, f.path(), ws.config())?;
let v: resolver::EncodableResolve = resolve.try_into()?;
Ok(Some(v.into_resolve(ws)?))
})().chain_err(|| {
format!("failed to parse lock file at: {}", f.path().display())
})?;
let resolve =
(|| -> CargoResult<Option<Resolve>> {
let resolve: toml::Value = cargo_toml::parse(&s, f.path(), ws.config())?;
let v: resolver::EncodableResolve = resolve.try_into()?;
Ok(Some(v.into_resolve(ws)?))
})()
.chain_err(|| format!("failed to parse lock file at: {}", f.path().display()))?;
Ok(resolve)
}
@ -71,7 +70,7 @@ pub fn write_pkg_lockfile(ws: &Workspace, resolve: &Resolve) -> CargoResult<()>
// helpful on read-only filesystems.
if let Ok(orig) = orig {
if are_equal_lockfiles(orig, &out, ws) {
return Ok(())
return Ok(());
}
}
@ -80,20 +79,27 @@ pub fn write_pkg_lockfile(ws: &Workspace, resolve: &Resolve) -> CargoResult<()>
bail!("can't update in the offline mode");
}
let flag = if ws.config().network_allowed() {"--locked"} else {"--frozen"};
bail!("the lock file needs to be updated but {} was passed to \
prevent this", flag);
let flag = if ws.config().network_allowed() {
"--locked"
} else {
"--frozen"
};
bail!(
"the lock file needs to be updated but {} was passed to \
prevent this",
flag
);
}
// Ok, if that didn't work just write it out
ws_root.open_rw("Cargo.lock", ws.config(), "Cargo.lock file").and_then(|mut f| {
f.file().set_len(0)?;
f.write_all(out.as_bytes())?;
Ok(())
}).chain_err(|| {
format!("failed to write {}",
ws.root().join("Cargo.lock").display())
})?;
ws_root
.open_rw("Cargo.lock", ws.config(), "Cargo.lock file")
.and_then(|mut f| {
f.file().set_len(0)?;
f.write_all(out.as_bytes())?;
Ok(())
})
.chain_err(|| format!("failed to write {}", ws.root().join("Cargo.lock").display()))?;
Ok(())
}


@ -3,27 +3,28 @@ pub use self::cargo_compile::{compile, compile_with_exec, compile_ws, CompileOpt
pub use self::cargo_compile::{CompileFilter, CompileMode, FilterRule, MessageFormat, Packages};
pub use self::cargo_read_manifest::{read_package, read_packages};
pub use self::cargo_rustc::{compile_targets, Compilation, Kind, Unit};
pub use self::cargo_rustc::{Context, is_bad_artifact_name};
pub use self::cargo_rustc::{BuildOutput, BuildConfig, TargetConfig};
pub use self::cargo_rustc::{Executor, DefaultExecutor};
pub use self::cargo_rustc::{is_bad_artifact_name, Context};
pub use self::cargo_rustc::{BuildConfig, BuildOutput, TargetConfig};
pub use self::cargo_rustc::{DefaultExecutor, Executor};
pub use self::cargo_run::run;
pub use self::cargo_install::{install, install_list, uninstall};
pub use self::cargo_new::{new, init, NewOptions, VersionControl};
pub use self::cargo_new::{init, new, NewOptions, VersionControl};
pub use self::cargo_doc::{doc, DocOptions};
pub use self::cargo_generate_lockfile::{generate_lockfile};
pub use self::cargo_generate_lockfile::{update_lockfile};
pub use self::cargo_generate_lockfile::generate_lockfile;
pub use self::cargo_generate_lockfile::update_lockfile;
pub use self::cargo_generate_lockfile::UpdateOptions;
pub use self::lockfile::{load_pkg_lockfile, write_pkg_lockfile};
pub use self::cargo_test::{run_tests, run_benches, TestOptions};
pub use self::cargo_test::{run_benches, run_tests, TestOptions};
pub use self::cargo_package::{package, PackageOpts};
pub use self::registry::{publish, registry_configuration, RegistryConfig};
pub use self::registry::{registry_login, search, needs_custom_http_transport, http_handle};
pub use self::registry::{http_handle, needs_custom_http_transport, registry_login, search};
pub use self::registry::{modify_owners, yank, OwnersOptions, PublishOpts};
pub use self::registry::configure_http_handle;
pub use self::cargo_fetch::fetch;
pub use self::cargo_pkgid::pkgid;
pub use self::resolve::{resolve_ws, resolve_ws_precisely, resolve_ws_with_method, resolve_with_previous};
pub use self::cargo_output_metadata::{output_metadata, OutputMetadataOptions, ExportInfo};
pub use self::resolve::{resolve_with_previous, resolve_ws, resolve_ws_precisely,
resolve_ws_with_method};
pub use self::cargo_output_metadata::{output_metadata, ExportInfo, OutputMetadataOptions};
mod cargo_clean;
mod cargo_compile;


@ -5,7 +5,7 @@ use std::time::Duration;
use curl::easy::{Easy, SslOpt};
use git2;
use registry::{Registry, NewCrate, NewCrateDependency};
use registry::{NewCrate, NewCrateDependency, Registry};
use url::percent_encoding::{percent_encode, QUERY_ENCODE_SET};
@ -15,7 +15,7 @@ use core::{Package, SourceId, Workspace};
use core::dependency::Kind;
use core::manifest::ManifestMetadata;
use ops;
use sources::{RegistrySource};
use sources::RegistrySource;
use util::config::{self, Config};
use util::paths;
use util::ToUrl;
@ -53,8 +53,11 @@ pub fn publish(ws: &Workspace, opts: &PublishOpts) -> CargoResult<()> {
Some(ref registry) => allowed_registries.contains(registry),
None => false,
} {
bail!("some crates cannot be published.\n\
`{}` is marked as unpublishable", pkg.name());
bail!(
"some crates cannot be published.\n\
`{}` is marked as unpublishable",
pkg.name()
);
}
}
@ -62,40 +65,56 @@ pub fn publish(ws: &Workspace, opts: &PublishOpts) -> CargoResult<()> {
bail!("published crates cannot contain [patch] sections");
}
let (mut registry, reg_id) = registry(opts.config,
opts.token.clone(),
opts.index.clone(),
opts.registry.clone())?;
let (mut registry, reg_id) = registry(
opts.config,
opts.token.clone(),
opts.index.clone(),
opts.registry.clone(),
)?;
verify_dependencies(pkg, &reg_id)?;
// Prepare a tarball, with a non-suppressible warning if metadata
// is missing since this is being put online.
let tarball = ops::package(ws, &ops::PackageOpts {
config: opts.config,
verify: opts.verify,
list: false,
check_metadata: true,
allow_dirty: opts.allow_dirty,
target: opts.target.clone(),
jobs: opts.jobs,
registry: opts.registry.clone(),
})?.unwrap();
let tarball = ops::package(
ws,
&ops::PackageOpts {
config: opts.config,
verify: opts.verify,
list: false,
check_metadata: true,
allow_dirty: opts.allow_dirty,
target: opts.target.clone(),
jobs: opts.jobs,
registry: opts.registry.clone(),
},
)?.unwrap();
// Upload said tarball to the specified destination
opts.config.shell().status("Uploading", pkg.package_id().to_string())?;
transmit(opts.config, pkg, tarball.file(), &mut registry, &reg_id, opts.dry_run)?;
opts.config
.shell()
.status("Uploading", pkg.package_id().to_string())?;
transmit(
opts.config,
pkg,
tarball.file(),
&mut registry,
&reg_id,
opts.dry_run,
)?;
Ok(())
}
fn verify_dependencies(pkg: &Package, registry_src: &SourceId)
-> CargoResult<()> {
fn verify_dependencies(pkg: &Package, registry_src: &SourceId) -> CargoResult<()> {
for dep in pkg.dependencies().iter() {
if dep.source_id().is_path() {
if !dep.specified_req() {
bail!("all path dependencies must have a version specified \
when publishing.\ndependency `{}` does not specify \
a version", dep.name())
bail!(
"all path dependencies must have a version specified \
when publishing.\ndependency `{}` does not specify \
a version",
dep.name()
)
}
} else if dep.source_id() != registry_src {
if dep.source_id().is_registry() {
@ -108,58 +127,75 @@ fn verify_dependencies(pkg: &Package, registry_src: &SourceId)
(crate `{}` is pulled from {})", dep.name(), dep.name(), dep.source_id());
}
} else {
bail!("crates cannot be published to crates.io with dependencies sourced from \
a repository\neither publish `{}` as its own crate on crates.io and \
specify a crates.io version as a dependency or pull it into this \
repository and specify it with a path and version\n(crate `{}` has \
repository path `{}`)", dep.name(), dep.name(), dep.source_id());
bail!(
"crates cannot be published to crates.io with dependencies sourced from \
a repository\neither publish `{}` as its own crate on crates.io and \
specify a crates.io version as a dependency or pull it into this \
repository and specify it with a path and version\n(crate `{}` has \
repository path `{}`)",
dep.name(),
dep.name(),
dep.source_id()
);
}
}
}
Ok(())
}
fn transmit(config: &Config,
pkg: &Package,
tarball: &File,
registry: &mut Registry,
registry_id: &SourceId,
dry_run: bool) -> CargoResult<()> {
fn transmit(
config: &Config,
pkg: &Package,
tarball: &File,
registry: &mut Registry,
registry_id: &SourceId,
dry_run: bool,
) -> CargoResult<()> {
let deps = pkg.dependencies()
.iter()
.map(|dep| {
// If the dependency is from a different registry, then include the
// registry in the dependency.
let dep_registry_id = match dep.registry_id() {
Some(id) => id,
None => bail!("dependency missing registry ID"),
};
let dep_registry = if dep_registry_id != registry_id {
Some(dep_registry_id.url().to_string())
} else {
None
};
let deps = pkg.dependencies().iter().map(|dep| {
// If the dependency is from a different registry, then include the
// registry in the dependency.
let dep_registry_id = match dep.registry_id() {
Some(id) => id,
None => bail!("dependency missing registry ID"),
};
let dep_registry = if dep_registry_id != registry_id {
Some(dep_registry_id.url().to_string())
} else {
None
};
Ok(NewCrateDependency {
optional: dep.is_optional(),
default_features: dep.uses_default_features(),
name: dep.name().to_string(),
features: dep.features().to_vec(),
version_req: dep.version_req().to_string(),
target: dep.platform().map(|s| s.to_string()),
kind: match dep.kind() {
Kind::Normal => "normal",
Kind::Build => "build",
Kind::Development => "dev",
}.to_string(),
registry: dep_registry,
Ok(NewCrateDependency {
optional: dep.is_optional(),
default_features: dep.uses_default_features(),
name: dep.name().to_string(),
features: dep.features().to_vec(),
version_req: dep.version_req().to_string(),
target: dep.platform().map(|s| s.to_string()),
kind: match dep.kind() {
Kind::Normal => "normal",
Kind::Build => "build",
Kind::Development => "dev",
}.to_string(),
registry: dep_registry,
})
})
}).collect::<CargoResult<Vec<NewCrateDependency>>>()?;
.collect::<CargoResult<Vec<NewCrateDependency>>>()?;
let manifest = pkg.manifest();
let ManifestMetadata {
ref authors, ref description, ref homepage, ref documentation,
ref keywords, ref readme, ref repository, ref license, ref license_file,
ref categories, ref badges, ref links,
ref authors,
ref description,
ref homepage,
ref documentation,
ref keywords,
ref readme,
ref repository,
ref license,
ref license_file,
ref categories,
ref badges,
ref links,
} = *manifest.metadata();
let readme_content = match *readme {
Some(ref readme) => Some(paths::read(&pkg.root().join(readme))?),
@ -177,79 +213,91 @@ fn transmit(config: &Config,
return Ok(());
}
let publish = registry.publish(&NewCrate {
name: pkg.name().to_string(),
vers: pkg.version().to_string(),
deps,
features: pkg.summary().features().clone(),
authors: authors.clone(),
description: description.clone(),
homepage: homepage.clone(),
documentation: documentation.clone(),
keywords: keywords.clone(),
categories: categories.clone(),
readme: readme_content,
readme_file: readme.clone(),
repository: repository.clone(),
license: license.clone(),
license_file: license_file.clone(),
badges: badges.clone(),
links: links.clone(),
}, tarball);
let publish = registry.publish(
&NewCrate {
name: pkg.name().to_string(),
vers: pkg.version().to_string(),
deps,
features: pkg.summary().features().clone(),
authors: authors.clone(),
description: description.clone(),
homepage: homepage.clone(),
documentation: documentation.clone(),
keywords: keywords.clone(),
categories: categories.clone(),
readme: readme_content,
readme_file: readme.clone(),
repository: repository.clone(),
license: license.clone(),
license_file: license_file.clone(),
badges: badges.clone(),
links: links.clone(),
},
tarball,
);
match publish {
Ok(warnings) => {
if !warnings.invalid_categories.is_empty() {
let msg = format!("\
the following are not valid category slugs and were \
ignored: {}. Please see https://crates.io/category_slugs \
for the list of all category slugs. \
", warnings.invalid_categories.join(", "));
let msg = format!(
"\
the following are not valid category slugs and were \
ignored: {}. Please see https://crates.io/category_slugs \
for the list of all category slugs. \
",
warnings.invalid_categories.join(", ")
);
config.shell().warn(&msg)?;
}
if !warnings.invalid_badges.is_empty() {
let msg = format!("\
the following are not valid badges and were ignored: {}. \
Either the badge type specified is unknown or a required \
attribute is missing. Please see \
http://doc.crates.io/manifest.html#package-metadata \
for valid badge types and their required attributes.",
warnings.invalid_badges.join(", "));
let msg = format!(
"\
the following are not valid badges and were ignored: {}. \
Either the badge type specified is unknown or a required \
attribute is missing. Please see \
http://doc.crates.io/manifest.html#package-metadata \
for valid badge types and their required attributes.",
warnings.invalid_badges.join(", ")
);
config.shell().warn(&msg)?;
}
Ok(())
},
}
Err(e) => Err(e),
}
}
pub fn registry_configuration(config: &Config,
registry: Option<String>) -> CargoResult<RegistryConfig> {
pub fn registry_configuration(
config: &Config,
registry: Option<String>,
) -> CargoResult<RegistryConfig> {
let (index, token) = match registry {
Some(registry) => {
(Some(config.get_registry_index(&registry)?.to_string()),
config.get_string(&format!("registries.{}.token", registry))?.map(|p| p.val))
}
Some(registry) => (
Some(config.get_registry_index(&registry)?.to_string()),
config
.get_string(&format!("registries.{}.token", registry))?
.map(|p| p.val),
),
None => {
// Check for the default index and token
(config.get_string("registry.index")?.map(|p| p.val),
config.get_string("registry.token")?.map(|p| p.val))
(
config.get_string("registry.index")?.map(|p| p.val),
config.get_string("registry.token")?.map(|p| p.val),
)
}
};
Ok(RegistryConfig {
index,
token
})
Ok(RegistryConfig { index, token })
}
pub fn registry(config: &Config,
token: Option<String>,
index: Option<String>,
registry: Option<String>) -> CargoResult<(Registry, SourceId)> {
pub fn registry(
config: &Config,
token: Option<String>,
index: Option<String>,
registry: Option<String>,
) -> CargoResult<(Registry, SourceId)> {
// Parse all configuration options
let RegistryConfig {
token: token_config,
@ -263,9 +311,8 @@ pub fn registry(config: &Config,
};
let api_host = {
let mut src = RegistrySource::remote(&sid, config);
src.update().chain_err(|| {
format!("failed to update {}", sid)
})?;
src.update()
.chain_err(|| format!("failed to update {}", sid))?;
(src.config()?).unwrap().api.unwrap()
};
let handle = http_handle(config)?;
@ -275,8 +322,10 @@ pub fn registry(config: &Config,
/// Create a new HTTP handle with appropriate global configuration for cargo.
pub fn http_handle(config: &Config) -> CargoResult<Easy> {
if config.frozen() {
bail!("attempting to make an HTTP request, but --frozen was \
specified")
bail!(
"attempting to make an HTTP request, but --frozen was \
specified"
)
}
if !config.network_allowed() {
bail!("can't make HTTP request in the offline mode")
@ -332,11 +381,11 @@ pub fn configure_http_handle(config: &Config, handle: &mut Easy) -> CargoResult<
/// via environment variables are picked up by libcurl.
fn http_proxy(config: &Config) -> CargoResult<Option<String>> {
if let Some(s) = config.get_string("http.proxy")? {
return Ok(Some(s.val))
return Ok(Some(s.val));
}
if let Ok(cfg) = git2::Config::open_default() {
if let Ok(s) = cfg.get_str("http.proxy") {
return Ok(Some(s.to_string()))
return Ok(Some(s.to_string()));
}
}
Ok(None)
@ -356,24 +405,22 @@ fn http_proxy_exists(config: &Config) -> CargoResult<bool> {
if http_proxy(config)?.is_some() {
Ok(true)
} else {
Ok(["http_proxy", "HTTP_PROXY",
"https_proxy", "HTTPS_PROXY"].iter().any(|v| env::var(v).is_ok()))
Ok(["http_proxy", "HTTP_PROXY", "https_proxy", "HTTPS_PROXY"]
.iter()
.any(|v| env::var(v).is_ok()))
}
}
fn http_timeout(config: &Config) -> CargoResult<Option<i64>> {
if let Some(s) = config.get_i64("http.timeout")? {
return Ok(Some(s.val))
return Ok(Some(s.val));
}
Ok(env::var("HTTP_TIMEOUT").ok().and_then(|s| s.parse().ok()))
}
pub fn registry_login(config: &Config,
token: String,
registry: Option<String>) -> CargoResult<()> {
pub fn registry_login(config: &Config, token: String, registry: Option<String>) -> CargoResult<()> {
let RegistryConfig {
token: old_token,
..
token: old_token, ..
} = registry_configuration(config, registry.clone())?;
if let Some(old_token) = old_token {
@ -405,39 +452,41 @@ pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> {
}
};
let (mut registry, _) = registry(config,
opts.token.clone(),
opts.index.clone(),
opts.registry.clone())?;
let (mut registry, _) = registry(
config,
opts.token.clone(),
opts.index.clone(),
opts.registry.clone(),
)?;
if let Some(ref v) = opts.to_add {
let v = v.iter().map(|s| &s[..]).collect::<Vec<_>>();
let msg = registry.add_owners(&name, &v).map_err(|e| {
format_err!("failed to invite owners to crate {}: {}", name, e)
})?;
let msg = registry
.add_owners(&name, &v)
.map_err(|e| format_err!("failed to invite owners to crate {}: {}", name, e))?;
config.shell().status("Owner", msg)?;
}
if let Some(ref v) = opts.to_remove {
let v = v.iter().map(|s| &s[..]).collect::<Vec<_>>();
config.shell().status("Owner", format!("removing {:?} from crate {}",
v, name))?;
registry.remove_owners(&name, &v).chain_err(|| {
format!("failed to remove owners from crate {}", name)
})?;
config
.shell()
.status("Owner", format!("removing {:?} from crate {}", v, name))?;
registry
.remove_owners(&name, &v)
.chain_err(|| format!("failed to remove owners from crate {}", name))?;
}
if opts.list {
let owners = registry.list_owners(&name).chain_err(|| {
format!("failed to list owners of crate {}", name)
})?;
let owners = registry
.list_owners(&name)
.chain_err(|| format!("failed to list owners of crate {}", name))?;
for owner in owners.iter() {
print!("{}", owner.login);
match (owner.name.as_ref(), owner.email.as_ref()) {
(Some(name), Some(email)) => println!(" ({} <{}>)", name, email),
(Some(s), None) |
(None, Some(s)) => println!(" ({})", s),
(Some(s), None) | (None, Some(s)) => println!(" ({})", s),
(None, None) => println!(),
}
}
@ -446,13 +495,15 @@ pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> {
Ok(())
}
pub fn yank(config: &Config,
krate: Option<String>,
version: Option<String>,
token: Option<String>,
index: Option<String>,
undo: bool,
reg: Option<String>) -> CargoResult<()> {
pub fn yank(
config: &Config,
krate: Option<String>,
version: Option<String>,
token: Option<String>,
index: Option<String>,
undo: bool,
reg: Option<String>,
) -> CargoResult<()> {
let name = match krate {
Some(name) => name,
None => {
@ -463,31 +514,37 @@ pub fn yank(config: &Config,
};
let version = match version {
Some(v) => v,
None => bail!("a version must be specified to yank")
None => bail!("a version must be specified to yank"),
};
let (mut registry, _) = registry(config, token, index, reg)?;
if undo {
config.shell().status("Unyank", format!("{}:{}", name, version))?;
registry.unyank(&name, &version).chain_err(|| {
"failed to undo a yank"
})?;
config
.shell()
.status("Unyank", format!("{}:{}", name, version))?;
registry
.unyank(&name, &version)
.chain_err(|| "failed to undo a yank")?;
} else {
config.shell().status("Yank", format!("{}:{}", name, version))?;
registry.yank(&name, &version).chain_err(|| {
"failed to yank"
})?;
config
.shell()
.status("Yank", format!("{}:{}", name, version))?;
registry
.yank(&name, &version)
.chain_err(|| "failed to yank")?;
}
Ok(())
}
pub fn search(query: &str,
config: &Config,
index: Option<String>,
limit: u32,
reg: Option<String>) -> CargoResult<()> {
pub fn search(
query: &str,
config: &Config,
index: Option<String>,
limit: u32,
reg: Option<String>,
) -> CargoResult<()> {
fn truncate_with_ellipsis(s: &str, max_width: usize) -> String {
// We should truncate at grapheme-boundary and compute character-widths,
// yet the dependencies on unicode-segmentation and unicode-width are
@ -501,46 +558,51 @@ pub fn search(query: &str,
}
let (mut registry, _) = registry(config, None, index, reg)?;
let (crates, total_crates) = registry.search(query, limit).chain_err(|| {
"failed to retrieve search results from the registry"
})?;
let (crates, total_crates) = registry
.search(query, limit)
.chain_err(|| "failed to retrieve search results from the registry")?;
let names = crates.iter()
let names = crates
.iter()
.map(|krate| format!("{} = \"{}\"", krate.name, krate.max_version))
.collect::<Vec<String>>();
let description_margin = names.iter()
.map(|s| s.len() + 4)
.max()
.unwrap_or_default();
let description_margin = names.iter().map(|s| s.len() + 4).max().unwrap_or_default();
let description_length = cmp::max(80, 128 - description_margin);
let descriptions = crates.iter()
.map(|krate|
krate.description.as_ref().map(|desc|
truncate_with_ellipsis(&desc.replace("\n", " "), description_length)));
let descriptions = crates.iter().map(|krate| {
krate
.description
.as_ref()
.map(|desc| truncate_with_ellipsis(&desc.replace("\n", " "), description_length))
});
for (name, description) in names.into_iter().zip(descriptions) {
let line = match description {
Some(desc) => {
let space = repeat(' ').take(description_margin - name.len())
.collect::<String>();
let space = repeat(' ')
.take(description_margin - name.len())
.collect::<String>();
name + &space + "# " + &desc
}
None => name
None => name,
};
println!("{}", line);
}
let search_max_limit = 100;
if total_crates > u32::from(limit) && limit < search_max_limit {
println!("... and {} crates more (use --limit N to see more)",
total_crates - u32::from(limit));
println!(
"... and {} crates more (use --limit N to see more)",
total_crates - u32::from(limit)
);
} else if total_crates > u32::from(limit) && limit >= search_max_limit {
println!("... and {} crates more (go to http://crates.io/search?q={} to see more)",
total_crates - u32::from(limit),
percent_encode(query.as_bytes(), QUERY_ENCODE_SET));
println!(
"... and {} crates more (go to http://crates.io/search?q={} to see more)",
total_crates - u32::from(limit),
percent_encode(query.as_bytes(), QUERY_ENCODE_SET)
);
}
Ok(())


@ -2,7 +2,7 @@ use std::collections::HashSet;
use core::{PackageId, PackageIdSpec, PackageSet, Source, SourceId, Workspace};
use core::registry::PackageRegistry;
use core::resolver::{self, Resolve, Method};
use core::resolver::{self, Method, Resolve};
use sources::PathSource;
use ops;
use util::profile;
@ -22,13 +22,14 @@ pub fn resolve_ws<'a>(ws: &Workspace<'a>) -> CargoResult<(PackageSet<'a>, Resolv
/// Resolves dependencies for some packages of the workspace,
/// taking into account `paths` overrides and activated features.
pub fn resolve_ws_precisely<'a>(ws: &Workspace<'a>,
source: Option<Box<Source + 'a>>,
features: &[String],
all_features: bool,
no_default_features: bool,
specs: &[PackageIdSpec])
-> CargoResult<(PackageSet<'a>, Resolve)> {
pub fn resolve_ws_precisely<'a>(
ws: &Workspace<'a>,
source: Option<Box<Source + 'a>>,
features: &[String],
all_features: bool,
no_default_features: bool,
specs: &[PackageIdSpec],
) -> CargoResult<(PackageSet<'a>, Resolve)> {
let features = Method::split_features(features);
let method = if all_features {
Method::Everything
@ -43,11 +44,12 @@ pub fn resolve_ws_precisely<'a>(ws: &Workspace<'a>,
resolve_ws_with_method(ws, source, method, specs)
}
pub fn resolve_ws_with_method<'a>(ws: &Workspace<'a>,
source: Option<Box<Source + 'a>>,
method: Method,
specs: &[PackageIdSpec])
-> CargoResult<(PackageSet<'a>, Resolve)> {
pub fn resolve_ws_with_method<'a>(
ws: &Workspace<'a>,
source: Option<Box<Source + 'a>>,
method: Method,
specs: &[PackageIdSpec],
) -> CargoResult<(PackageSet<'a>, Resolve)> {
let mut registry = PackageRegistry::new(ws.config())?;
if let Some(source) = source {
registry.add_preloaded(source);
@ -68,10 +70,13 @@ pub fn resolve_ws_with_method<'a>(ws: &Workspace<'a>,
add_overrides(&mut registry, ws)?;
for &(ref replace_spec, ref dep) in ws.root_replace() {
if !resolve.iter().any(|r| replace_spec.matches(r) && !dep.matches_id(r)) {
ws.config().shell().warn(
format!("package replacement is not used: {}", replace_spec)
)?
if !resolve
.iter()
.any(|r| replace_spec.matches(r) && !dep.matches_id(r))
{
ws.config()
.shell()
.warn(format!("package replacement is not used: {}", replace_spec))?
}
}
@ -80,32 +85,38 @@ pub fn resolve_ws_with_method<'a>(ws: &Workspace<'a>,
ops::load_pkg_lockfile(ws)?
};
let resolved_with_overrides =
ops::resolve_with_previous(&mut registry,
ws,
method,
resolve.as_ref(),
None,
specs,
add_patches,
true)?;
let resolved_with_overrides = ops::resolve_with_previous(
&mut registry,
ws,
method,
resolve.as_ref(),
None,
specs,
add_patches,
true,
)?;
let packages = get_resolved_packages(&resolved_with_overrides, registry);
Ok((packages, resolved_with_overrides))
}
fn resolve_with_registry(ws: &Workspace, registry: &mut PackageRegistry, warn: bool)
-> CargoResult<Resolve> {
fn resolve_with_registry(
ws: &Workspace,
registry: &mut PackageRegistry,
warn: bool,
) -> CargoResult<Resolve> {
let prev = ops::load_pkg_lockfile(ws)?;
let resolve = resolve_with_previous(registry,
ws,
Method::Everything,
prev.as_ref(),
None,
&[],
true,
warn)?;
let resolve = resolve_with_previous(
registry,
ws,
Method::Everything,
prev.as_ref(),
None,
&[],
true,
warn,
)?;
if !ws.is_ephemeral() {
ops::write_pkg_lockfile(ws, &resolve)?;
@ -113,7 +124,6 @@ fn resolve_with_registry(ws: &Workspace, registry: &mut PackageRegistry, warn: b
Ok(resolve)
}
/// Resolve all dependencies for a package using an optional previous instance
/// of resolve to guide the resolution process.
///
@ -123,15 +133,16 @@ fn resolve_with_registry(ws: &Workspace, registry: &mut PackageRegistry, warn: b
///
/// The previous resolve normally comes from a lockfile. This function does not
/// read or write lockfiles from the filesystem.
pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
ws: &Workspace,
method: Method,
previous: Option<&'a Resolve>,
to_avoid: Option<&HashSet<&'a PackageId>>,
specs: &[PackageIdSpec],
register_patches: bool,
warn: bool)
-> CargoResult<Resolve> {
pub fn resolve_with_previous<'a>(
registry: &mut PackageRegistry,
ws: &Workspace,
method: Method,
previous: Option<&'a Resolve>,
to_avoid: Option<&HashSet<&'a PackageId>>,
specs: &[PackageIdSpec],
register_patches: bool,
warn: bool,
) -> CargoResult<Resolve> {
// Here we place an artificial limitation that all non-registry sources
// cannot be locked at more than one revision. This means that if a git
// repository provides more than one package, they must all be updated in
@ -141,9 +152,12 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
// different
let mut to_avoid_sources = HashSet::new();
if let Some(to_avoid) = to_avoid {
to_avoid_sources.extend(to_avoid.iter()
.map(|p| p.source_id())
.filter(|s| !s.is_registry()));
to_avoid_sources.extend(
to_avoid
.iter()
.map(|p| p.source_id())
.filter(|s| !s.is_registry()),
);
}
let ref keep = |p: &&'a PackageId| {
@ -177,9 +191,7 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
if let Some(r) = previous {
trace!("previous: {:?}", r);
for node in r.iter().filter(keep) {
let deps = r.deps_not_replaced(node)
.filter(keep)
.cloned().collect();
let deps = r.deps_not_replaced(node).filter(keep).cloned().collect();
registry.register_lock(node.clone(), deps);
}
}
@ -190,21 +202,24 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
Some(r) => r,
None => {
registry.patch(url, patches)?;
continue
continue;
}
};
let patches = patches.iter().map(|dep| {
let unused = previous.unused_patches();
let candidates = previous.iter().chain(unused);
match candidates.filter(keep).find(|id| dep.matches_id(id)) {
Some(id) => {
let mut dep = dep.clone();
dep.lock_to(id);
dep
let patches = patches
.iter()
.map(|dep| {
let unused = previous.unused_patches();
let candidates = previous.iter().chain(unused);
match candidates.filter(keep).find(|id| dep.matches_id(id)) {
Some(id) => {
let mut dep = dep.clone();
dep.lock_to(id);
dep
}
None => dep.clone(),
}
None => dep.clone(),
}
}).collect::<Vec<_>>();
})
.collect::<Vec<_>>();
registry.patch(url, &patches)?;
}
@ -248,7 +263,7 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
if specs.iter().any(|spec| spec.matches(member_id)) {
base
} else {
continue
continue;
}
}
}
@ -262,26 +277,23 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
let root_replace = ws.root_replace();
let replace = match previous {
Some(r) => {
root_replace.iter().map(|&(ref spec, ref dep)| {
Some(r) => root_replace
.iter()
.map(|&(ref spec, ref dep)| {
for (key, val) in r.replacements().iter() {
if spec.matches(key) && dep.matches_id(val) && keep(&val) {
let mut dep = dep.clone();
dep.lock_to(val);
return (spec.clone(), dep)
return (spec.clone(), dep);
}
}
(spec.clone(), dep.clone())
}).collect::<Vec<_>>()
}
})
.collect::<Vec<_>>(),
None => root_replace.to_vec(),
};
let mut resolved = resolver::resolve(&summaries,
&replace,
registry,
Some(ws.config()),
warn)?;
let mut resolved = resolver::resolve(&summaries, &replace, registry, Some(ws.config()), warn)?;
resolved.register_used_patches(registry.patches());
if let Some(previous) = previous {
resolved.merge_from(previous)?;
@ -291,11 +303,10 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
/// Read the `paths` configuration variable to discover all path overrides that
/// have been configured.
fn add_overrides<'a>(registry: &mut PackageRegistry<'a>,
ws: &Workspace<'a>) -> CargoResult<()> {
fn add_overrides<'a>(registry: &mut PackageRegistry<'a>, ws: &Workspace<'a>) -> CargoResult<()> {
let paths = match ws.config().get_list("paths")? {
Some(list) => list,
None => return Ok(())
None => return Ok(()),
};
let paths = paths.val.iter().map(|&(ref s, ref p)| {
@ -309,19 +320,19 @@ fn add_overrides<'a>(registry: &mut PackageRegistry<'a>,
let id = SourceId::for_path(&path)?;
let mut source = PathSource::new_recursive(&path, &id, ws.config());
source.update().chain_err(|| {
format!("failed to update path override `{}` \
(defined in `{}`)", path.display(),
definition.display())
format!(
"failed to update path override `{}` \
(defined in `{}`)",
path.display(),
definition.display()
)
})?;
registry.add_override(Box::new(source));
}
Ok(())
}
fn get_resolved_packages<'a>(resolve: &Resolve,
registry: PackageRegistry<'a>)
-> PackageSet<'a> {
fn get_resolved_packages<'a>(resolve: &Resolve, registry: PackageRegistry<'a>) -> PackageSet<'a> {
let ids: Vec<PackageId> = resolve.iter().cloned().collect();
registry.get(&ids)
}


@ -9,7 +9,7 @@ use std::path::{Path, PathBuf};
use url::Url;
use core::{Source, SourceId, GitReference};
use core::{GitReference, Source, SourceId};
use sources::ReplacedSource;
use util::{Config, ToUrl};
use util::config::ConfigValue;
@ -58,10 +58,13 @@ impl<'cfg> SourceConfigMap<'cfg> {
id2name: HashMap::new(),
config,
};
base.add("crates-io", SourceConfig {
id: SourceId::crates_io(config)?,
replace_with: None,
});
base.add(
"crates-io",
SourceConfig {
id: SourceId::crates_io(config)?,
replace_with: None,
},
);
Ok(base)
}
@ -81,10 +84,14 @@ impl<'cfg> SourceConfigMap<'cfg> {
loop {
let cfg = match self.cfgs.get(name) {
Some(cfg) => cfg,
None => bail!("could not find a configured source with the \
name `{}` when attempting to lookup `{}` \
(configuration in `{}`)",
name, orig_name, path.display()),
None => bail!(
"could not find a configured source with the \
name `{}` when attempting to lookup `{}` \
(configuration in `{}`)",
name,
orig_name,
path.display()
),
};
match cfg.replace_with {
Some((ref s, ref p)) => {
@ -93,37 +100,47 @@ impl<'cfg> SourceConfigMap<'cfg> {
}
None if *id == cfg.id => return Ok(id.load(self.config)?),
None => {
new_id = cfg.id.with_precise(id.precise()
.map(|s| s.to_string()));
break
new_id = cfg.id.with_precise(id.precise().map(|s| s.to_string()));
break;
}
}
debug!("following pointer to {}", name);
if name == orig_name {
bail!("detected a cycle of `replace-with` sources, the source \
`{}` is eventually replaced with itself \
(configuration in `{}`)", name, path.display())
bail!(
"detected a cycle of `replace-with` sources, the source \
`{}` is eventually replaced with itself \
(configuration in `{}`)",
name,
path.display()
)
}
}
let new_src = new_id.load(self.config)?;
let old_src = id.load(self.config)?;
if !new_src.supports_checksums() && old_src.supports_checksums() {
bail!("\
bail!(
"\
cannot replace `{orig}` with `{name}`, the source `{orig}` supports \
checksums, but `{name}` does not
a lock file compatible with `{orig}` cannot be generated in this situation
", orig = orig_name, name = name);
",
orig = orig_name,
name = name
);
}
if old_src.requires_precise() && id.precise().is_none() {
bail!("\
bail!(
"\
the source {orig} requires a lock file to be present first before it can be
used against vendored source code
remove the source replacement configuration, generate a lock file, and then
restore the source replacement configuration to continue the build
", orig = orig_name);
",
orig = orig_name
);
}
Ok(Box::new(ReplacedSource::new(id, &new_id, new_src)))
@ -142,8 +159,7 @@ restore the source replacement configuration to continue the build
srcs.push(SourceId::for_registry(&url)?);
}
if let Some(val) = table.get("local-registry") {
let (s, path) = val.string(&format!("source.{}.local-registry",
name))?;
let (s, path) = val.string(&format!("source.{}.local-registry", name))?;
let mut path = path.to_path_buf();
path.pop();
path.pop();
@ -151,8 +167,7 @@ restore the source replacement configuration to continue the build
srcs.push(SourceId::for_local_registry(&path)?);
}
if let Some(val) = table.get("directory") {
let (s, path) = val.string(&format!("source.{}.directory",
name))?;
let (s, path) = val.string(&format!("source.{}.directory", name))?;
let mut path = path.to_path_buf();
path.pop();
path.pop();
@ -171,17 +186,13 @@ restore the source replacement configuration to continue the build
};
let reference = match try("branch")? {
Some(b) => GitReference::Branch(b.0.to_string()),
None => {
match try("tag")? {
Some(b) => GitReference::Tag(b.0.to_string()),
None => {
match try("rev")? {
Some(b) => GitReference::Rev(b.0.to_string()),
None => GitReference::Branch("master".to_string()),
}
}
}
}
None => match try("tag")? {
Some(b) => GitReference::Tag(b.0.to_string()),
None => match try("rev")? {
Some(b) => GitReference::Rev(b.0.to_string()),
None => GitReference::Branch("master".to_string()),
},
},
};
srcs.push(SourceId::for_git(&url, reference)?);
}
@ -191,9 +202,11 @@ restore the source replacement configuration to continue the build
let mut srcs = srcs.into_iter();
let src = srcs.next().ok_or_else(|| {
format_err!("no source URL specified for `source.{}`, need \
either `registry` or `local-registry` defined",
name)
format_err!(
"no source URL specified for `source.{}`, need \
either `registry` or `local-registry` defined",
name
)
})?;
if srcs.next().is_some() {
bail!("more than one source URL specified for `source.{}`", name)
@ -201,24 +214,29 @@ restore the source replacement configuration to continue the build
let mut replace_with = None;
if let Some(val) = table.get("replace-with") {
let (s, path) = val.string(&format!("source.{}.replace-with",
name))?;
let (s, path) = val.string(&format!("source.{}.replace-with", name))?;
replace_with = Some((s.to_string(), path.to_path_buf()));
}
self.add(name, SourceConfig {
id: src,
replace_with,
});
self.add(
name,
SourceConfig {
id: src,
replace_with,
},
);
return Ok(());
fn url(cfg: &ConfigValue, key: &str) -> CargoResult<Url> {
let (url, path) = cfg.string(key)?;
let url = url.to_url().chain_err(|| {
format!("configuration key `{}` specified an invalid \
URL (in {})", key, path.display())
format!(
"configuration key `{}` specified an invalid \
URL (in {})",
key,
path.display()
)
})?;
Ok(url)
}


@ -8,7 +8,7 @@ use hex;
use serde_json;
use core::{Package, PackageId, Summary, SourceId, Source, Dependency, Registry};
use core::{Dependency, Package, PackageId, Registry, Source, SourceId, Summary};
use sources::PathSource;
use util::{Config, Sha256};
use util::errors::{CargoResult, CargoResultExt};
@ -28,8 +28,7 @@ struct Checksum {
}
impl<'cfg> DirectorySource<'cfg> {
pub fn new(path: &Path, id: &SourceId, config: &'cfg Config)
-> DirectorySource<'cfg> {
pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) -> DirectorySource<'cfg> {
DirectorySource {
source_id: id.clone(),
root: path.to_path_buf(),
@ -46,9 +45,7 @@ impl<'cfg> Debug for DirectorySource<'cfg> {
}
impl<'cfg> Registry for DirectorySource<'cfg> {
fn query(&mut self,
dep: &Dependency,
f: &mut FnMut(Summary)) -> CargoResult<()> {
fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
let packages = self.packages.values().map(|p| &p.0);
let matches = packages.filter(|pkg| dep.matches(pkg.summary()));
for summary in matches.map(|pkg| pkg.summary().clone()) {
@ -74,8 +71,10 @@ impl<'cfg> Source for DirectorySource<'cfg> {
fn update(&mut self) -> CargoResult<()> {
self.packages.clear();
let entries = self.root.read_dir().chain_err(|| {
format!("failed to read root of directory source: {}",
self.root.display())
format!(
"failed to read root of directory source: {}",
self.root.display()
)
})?;
for entry in entries {
@ -87,7 +86,7 @@ impl<'cfg> Source for DirectorySource<'cfg> {
// (rust-lang/cargo#3414).
if let Some(s) = path.file_name().and_then(|s| s.to_str()) {
if s.starts_with('.') {
continue
continue;
}
}
@ -107,7 +106,7 @@ impl<'cfg> Source for DirectorySource<'cfg> {
// downside of accidentally misconfigured vendor directories
// silently returning fewer crates.
if !path.join("Cargo.toml").exists() {
continue
continue;
}
let mut src = PathSource::new(&path, &self.source_id, self.config);
@ -116,17 +115,20 @@ impl<'cfg> Source for DirectorySource<'cfg> {
let cksum_file = path.join(".cargo-checksum.json");
let cksum = paths::read(&path.join(cksum_file)).chain_err(|| {
format!("failed to load checksum `.cargo-checksum.json` \
of {} v{}",
pkg.package_id().name(),
pkg.package_id().version())
format!(
"failed to load checksum `.cargo-checksum.json` \
of {} v{}",
pkg.package_id().name(),
pkg.package_id().version()
)
})?;
let cksum: Checksum = serde_json::from_str(&cksum).chain_err(|| {
format!("failed to decode `.cargo-checksum.json` of \
{} v{}",
pkg.package_id().name(),
pkg.package_id().version())
format!(
"failed to decode `.cargo-checksum.json` of \
{} v{}",
pkg.package_id().name(),
pkg.package_id().version()
)
})?;
let mut manifest = pkg.manifest().clone();
@ -143,9 +145,11 @@ impl<'cfg> Source for DirectorySource<'cfg> {
}
fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
self.packages.get(id).map(|p| &p.0).cloned().ok_or_else(|| {
format_err!("failed to find package with id: {}", id)
})
self.packages
.get(id)
.map(|p| &p.0)
.cloned()
.ok_or_else(|| format_err!("failed to find package with id: {}", id))
}
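
The `download` rewrite above shows the chain layout rustfmt falls back to once a call chain overflows one line: the receiver alone first, then one `.method()` per line. A self-contained sketch of the same idiom with a stand-in `HashMap` (not cargo's package map):

use std::collections::HashMap;

fn download(packages: &HashMap<u32, String>, id: u32) -> Result<String, String> {
    // Each adapter gets its own line once the chain no longer fits.
    packages
        .get(&id)
        .cloned()
        .ok_or_else(|| format!("failed to find package with id: {}", id))
}

fn main() {
    let mut packages = HashMap::new();
    packages.insert(1, "serde".to_string());
    assert!(download(&packages, 1).is_ok());
    assert!(download(&packages, 2).is_err());
}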
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
@ -155,8 +159,7 @@ impl<'cfg> Source for DirectorySource<'cfg> {
fn verify(&self, id: &PackageId) -> CargoResult<()> {
let (pkg, cksum) = match self.packages.get(id) {
Some(&(ref pkg, ref cksum)) => (pkg, cksum),
None => bail!("failed to find entry for `{}` in directory source",
id),
None => bail!("failed to find entry for `{}` in directory source", id),
};
let mut buf = [0; 16 * 1024];
@ -172,23 +175,26 @@ impl<'cfg> Source for DirectorySource<'cfg> {
n => h.update(&buf[..n]),
}
}
})().chain_err(|| {
format!("failed to calculate checksum of: {}",
file.display())
})?;
})()
.chain_err(|| format!("failed to calculate checksum of: {}", file.display()))?;
let actual = hex::encode(h.finish());
if &*actual != cksum {
bail!("\
the listed checksum of `{}` has changed:\n\
expected: {}\n\
actual: {}\n\
\n\
directory sources are not intended to be edited, if \
modifications are required then it is recommended \
that [replace] is used with a forked copy of the \
source\
", file.display(), cksum, actual);
bail!(
"\
the listed checksum of `{}` has changed:\n\
expected: {}\n\
actual: {}\n\
\n\
directory sources are not intended to be edited, if \
modifications are required then it is recommended \
that [replace] is used with a forked copy of the \
source\
",
file.display(),
cksum,
actual
);
}
}
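
The verification loop reads the file in 16 KiB chunks, feeding each chunk to the hasher before comparing against the recorded checksum. A self-contained sketch of the same loop, with the standard library's `DefaultHasher` standing in for cargo's `Sha256` wrapper:

use std::collections::hash_map::DefaultHasher;
use std::hash::Hasher;
use std::io::{self, Read};

fn checksum<R: Read>(mut reader: R) -> io::Result<u64> {
    let mut h = DefaultHasher::new();
    let mut buf = [0; 16 * 1024];
    loop {
        // read() returns 0 only at end of stream.
        match reader.read(&mut buf)? {
            0 => break,
            n => h.write(&buf[..n]),
        }
    }
    Ok(h.finish())
}

fn main() -> io::Result<()> {
    let data = b"directory sources are checksummed";
    println!("{:x}", checksum(&data[..])?);
    Ok(())
}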

View File

@ -1,4 +1,4 @@
pub use self::utils::{GitRemote, GitDatabase, GitCheckout, GitRevision, fetch};
pub use self::source::{GitSource, canonicalize_url};
pub use self::utils::{fetch, GitCheckout, GitDatabase, GitRemote, GitRevision};
pub use self::source::{canonicalize_url, GitSource};
mod utils;
mod source;

View File

@ -4,7 +4,7 @@ use url::Url;
use core::source::{Source, SourceId};
use core::GitReference;
use core::{Package, PackageId, Summary, Registry, Dependency};
use core::{Dependency, Package, PackageId, Registry, Summary};
use util::Config;
use util::errors::CargoResult;
use util::hex::short_hash;
@ -24,8 +24,7 @@ pub struct GitSource<'cfg> {
}
impl<'cfg> GitSource<'cfg> {
pub fn new(source_id: &SourceId,
config: &'cfg Config) -> CargoResult<GitSource<'cfg>> {
pub fn new(source_id: &SourceId, config: &'cfg Config) -> CargoResult<GitSource<'cfg>> {
assert!(source_id.is_git(), "id is not git, id={}", source_id);
let remote = GitRemote::new(source_id.url());
@ -49,7 +48,9 @@ impl<'cfg> GitSource<'cfg> {
Ok(source)
}
pub fn url(&self) -> &Url { self.remote.url() }
pub fn url(&self) -> &Url {
self.remote.url()
}
pub fn read_packages(&mut self) -> CargoResult<Vec<Package>> {
if self.path_source.is_none() {
@ -61,13 +62,11 @@ impl<'cfg> GitSource<'cfg> {
fn ident(url: &Url) -> CargoResult<String> {
let url = canonicalize_url(url)?;
let ident = url.path_segments().and_then(|mut s| s.next_back()).unwrap_or("");
let ident = url.path_segments()
.and_then(|mut s| s.next_back())
.unwrap_or("");
let ident = if ident == "" {
"_empty"
} else {
ident
};
let ident = if ident == "" { "_empty" } else { ident };
Ok(format!("{}-{}", ident, short_hash(&url)))
}
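
`ident` names the checkout directory after the canonical URL's last path segment, substituting `_empty` when the segment is missing, plus a short hash of the URL. A rough sketch of that scheme with the hash passed in as a stand-in (cargo's real `short_hash` computes it from the URL, and its segment handling differs slightly for pathless URLs):

fn ident_for(url: &str, short_hash: &str) -> String {
    // Take the last path segment of the URL, ignoring a trailing slash.
    let ident = url.trim_end_matches('/').rsplit('/').next().unwrap_or("");
    let ident = if ident.is_empty() { "_empty" } else { ident };
    format!("{}-{}", ident, short_hash)
}

fn main() {
    assert_eq!(
        ident_for("https://github.com/rust-lang/cargo", "5d6d1b9"),
        "cargo-5d6d1b9"
    );
}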
@ -79,7 +78,10 @@ pub fn canonicalize_url(url: &Url) -> CargoResult<Url> {
// cannot-be-a-base-urls are not supported
// e.g. github.com:rust-lang-nursery/rustfmt.git
if url.cannot_be_a_base() {
bail!("invalid url `{}`: cannot-be-a-base-URLs are not supported", url)
bail!(
"invalid url `{}`: cannot-be-a-base-URLs are not supported",
url
)
}
// Strip a trailing slash
@ -117,17 +119,16 @@ impl<'cfg> Debug for GitSource<'cfg> {
match self.reference.pretty_ref() {
Some(s) => write!(f, " ({})", s),
None => Ok(())
None => Ok(()),
}
}
}
impl<'cfg> Registry for GitSource<'cfg> {
fn query(&mut self,
dep: &Dependency,
f: &mut FnMut(Summary)) -> CargoResult<()> {
let src = self.path_source.as_mut()
.expect("BUG: update() must be called before query()");
fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
let src = self.path_source
.as_mut()
.expect("BUG: update() must be called before query()");
src.query(dep, f)
}
@ -146,14 +147,18 @@ impl<'cfg> Source for GitSource<'cfg> {
}
fn update(&mut self) -> CargoResult<()> {
let lock = self.config.git_path()
.open_rw(".cargo-lock-git", self.config, "the git checkouts")?;
let lock =
self.config
.git_path()
.open_rw(".cargo-lock-git", self.config, "the git checkouts")?;
let db_path = lock.parent().join("db").join(&self.ident);
if self.config.cli_unstable().offline && !db_path.exists() {
bail!("can't checkout from '{}': you are in the offline mode (-Z offline)",
self.remote.url());
bail!(
"can't checkout from '{}': you are in the offline mode (-Z offline)",
self.remote.url()
);
}
// Resolve our reference to an actual revision, and check if the
@ -161,16 +166,18 @@ impl<'cfg> Source for GitSource<'cfg> {
// database pinned at that revision, and if we don't, we issue an update
// to try to find the revision.
let actual_rev = self.remote.rev_for(&db_path, &self.reference);
let should_update = actual_rev.is_err() ||
self.source_id.precise().is_none();
let should_update = actual_rev.is_err() || self.source_id.precise().is_none();
let (db, actual_rev) = if should_update && !self.config.cli_unstable().offline {
self.config.shell().status("Updating",
format!("git repository `{}`", self.remote.url()))?;
self.config.shell().status(
"Updating",
format!("git repository `{}`", self.remote.url()),
)?;
trace!("updating git source `{:?}`", self.remote);
self.remote.checkout(&db_path, &self.reference, self.config)?
self.remote
.checkout(&db_path, &self.reference, self.config)?
} else {
(self.remote.db_at(&db_path)?, actual_rev.unwrap())
};
@ -180,8 +187,10 @@ impl<'cfg> Source for GitSource<'cfg> {
// https://github.com/servo/servo/pull/14397
let short_id = db.to_short_id(actual_rev.clone()).unwrap();
let checkout_path = lock.parent().join("checkouts")
.join(&self.ident).join(short_id.as_str());
let checkout_path = lock.parent()
.join("checkouts")
.join(&self.ident)
.join(short_id.as_str());
// Copy the database to the checkout location. After this we could drop
// the lock on the database as we no longer need it, but we leave it
@ -191,9 +200,7 @@ impl<'cfg> Source for GitSource<'cfg> {
db.copy_to(actual_rev.clone(), &checkout_path, self.config)?;
let source_id = self.source_id.with_precise(Some(actual_rev.to_string()));
let path_source = PathSource::new_recursive(&checkout_path,
&source_id,
self.config);
let path_source = PathSource::new_recursive(&checkout_path, &source_id, self.config);
self.path_source = Some(path_source);
self.rev = Some(actual_rev);
@ -201,11 +208,15 @@ impl<'cfg> Source for GitSource<'cfg> {
}
fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
trace!("getting packages for package id `{}` from `{:?}`", id,
self.remote);
self.path_source.as_mut()
.expect("BUG: update() must be called before get()")
.download(id)
trace!(
"getting packages for package id `{}` from `{:?}`",
id,
self.remote
);
self.path_source
.as_mut()
.expect("BUG: update() must be called before get()")
.download(id)
}
fn fingerprint(&self, _pkg: &Package) -> CargoResult<String> {

View File

@ -11,9 +11,9 @@ use serde::ser::{self, Serialize};
use url::Url;
use core::GitReference;
use util::{ToUrl, internal, Config, network, Progress};
use util::{internal, network, Config, Progress, ToUrl};
use util::paths;
use util::errors::{CargoResult, CargoResultExt, CargoError};
use util::errors::{CargoError, CargoResult, CargoResultExt};
#[derive(PartialEq, Clone, Debug)]
pub struct GitRevision(git2::Oid);
@ -25,8 +25,9 @@ impl ser::Serialize for GitRevision {
}
fn serialize_str<T, S>(t: &T, s: S) -> Result<S::Ok, S::Error>
where T: fmt::Display,
S: ser::Serializer,
where
T: fmt::Display,
S: ser::Serializer,
{
t.to_string().serialize(s)
}
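
`serialize_str` picks up rustfmt's `where` layout: the keyword on its own line, one bound per line, every bound ending in a comma. A small runnable sketch of that layout without the serde machinery (the function here is our own example):

use std::fmt;

fn display_all<T, I>(items: I) -> String
where
    T: fmt::Display,
    I: IntoIterator<Item = T>,
{
    items
        .into_iter()
        .map(|t| t.to_string())
        .collect::<Vec<_>>()
        .join(", ")
}

fn main() {
    println!("{}", display_all(vec![1, 2, 3]));
}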
@ -49,8 +50,7 @@ impl GitShortID {
/// `GitDatabase`.
#[derive(PartialEq, Clone, Debug, Serialize)]
pub struct GitRemote {
#[serde(serialize_with = "serialize_str")]
url: Url,
#[serde(serialize_with = "serialize_str")] url: Url,
}
/// `GitDatabase` is a local clone of a remote repository's database. Multiple
@ -59,8 +59,7 @@ pub struct GitRemote {
pub struct GitDatabase {
remote: GitRemote,
path: PathBuf,
#[serde(skip_serializing)]
repo: git2::Repository,
#[serde(skip_serializing)] repo: git2::Repository,
}
/// `GitCheckout` is a local checkout of a particular revision. Calling
@ -71,8 +70,7 @@ pub struct GitCheckout<'a> {
database: &'a GitDatabase,
location: PathBuf,
revision: GitRevision,
#[serde(skip_serializing)]
repo: git2::Repository,
#[serde(skip_serializing)] repo: git2::Repository,
}
// Implementations
@ -86,22 +84,20 @@ impl GitRemote {
&self.url
}
pub fn rev_for(&self, path: &Path, reference: &GitReference)
-> CargoResult<GitRevision> {
pub fn rev_for(&self, path: &Path, reference: &GitReference) -> CargoResult<GitRevision> {
reference.resolve(&self.db_at(path)?.repo)
}
pub fn checkout(&self,
into: &Path,
reference: &GitReference,
cargo_config: &Config)
-> CargoResult<(GitDatabase, GitRevision)>
{
pub fn checkout(
&self,
into: &Path,
reference: &GitReference,
cargo_config: &Config,
) -> CargoResult<(GitDatabase, GitRevision)> {
let mut repo_and_rev = None;
if let Ok(mut repo) = git2::Repository::open(into) {
self.fetch_into(&mut repo, cargo_config).chain_err(|| {
format!("failed to fetch into {}", into.display())
})?;
self.fetch_into(&mut repo, cargo_config)
.chain_err(|| format!("failed to fetch into {}", into.display()))?;
if let Ok(rev) = reference.resolve(&repo) {
repo_and_rev = Some((repo, rev));
}
@ -109,19 +105,21 @@ impl GitRemote {
let (repo, rev) = match repo_and_rev {
Some(pair) => pair,
None => {
let repo = self.clone_into(into, cargo_config).chain_err(|| {
format!("failed to clone into: {}", into.display())
})?;
let repo = self.clone_into(into, cargo_config)
.chain_err(|| format!("failed to clone into: {}", into.display()))?;
let rev = reference.resolve(&repo)?;
(repo, rev)
}
};
Ok((GitDatabase {
remote: self.clone(),
path: into.to_path_buf(),
repo,
}, rev))
Ok((
GitDatabase {
remote: self.clone(),
path: into.to_path_buf(),
repo,
},
rev,
))
}
pub fn db_at(&self, db_path: &Path) -> CargoResult<GitDatabase> {
@ -145,14 +143,23 @@ impl GitRemote {
}
fs::create_dir_all(dst)?;
let mut repo = git2::Repository::init_bare(dst)?;
fetch(&mut repo, &self.url, "refs/heads/*:refs/heads/*", cargo_config)?;
fetch(
&mut repo,
&self.url,
"refs/heads/*:refs/heads/*",
cargo_config,
)?;
Ok(repo)
}
}
impl GitDatabase {
pub fn copy_to(&self, rev: GitRevision, dest: &Path, cargo_config: &Config)
-> CargoResult<GitCheckout> {
pub fn copy_to(
&self,
rev: GitRevision,
dest: &Path,
cargo_config: &Config,
) -> CargoResult<GitCheckout> {
let mut checkout = None;
if let Ok(repo) = git2::Repository::open(dest) {
let mut co = GitCheckout::new(dest, self, rev.clone(), repo);
@ -193,26 +200,22 @@ impl GitDatabase {
impl GitReference {
fn resolve(&self, repo: &git2::Repository) -> CargoResult<GitRevision> {
let id = match *self {
GitReference::Tag(ref s) => {
(|| -> CargoResult<git2::Oid> {
let refname = format!("refs/tags/{}", s);
let id = repo.refname_to_id(&refname)?;
let obj = repo.find_object(id, None)?;
let obj = obj.peel(ObjectType::Commit)?;
Ok(obj.id())
})().chain_err(|| {
format!("failed to find tag `{}`", s)
})?
}
GitReference::Tag(ref s) => (|| -> CargoResult<git2::Oid> {
let refname = format!("refs/tags/{}", s);
let id = repo.refname_to_id(&refname)?;
let obj = repo.find_object(id, None)?;
let obj = obj.peel(ObjectType::Commit)?;
Ok(obj.id())
})()
.chain_err(|| format!("failed to find tag `{}`", s))?,
GitReference::Branch(ref s) => {
(|| {
let b = repo.find_branch(s, git2::BranchType::Local)?;
b.get().target().ok_or_else(|| {
format_err!("branch `{}` did not have a target", s)
})
})().chain_err(|| {
format!("failed to find branch `{}`", s)
})?
b.get()
.target()
.ok_or_else(|| format_err!("branch `{}` did not have a target", s))
})()
.chain_err(|| format!("failed to find branch `{}`", s))?
}
GitReference::Rev(ref s) => {
let obj = repo.revparse_single(s)?;
@ -227,10 +230,12 @@ impl GitReference {
}
impl<'a> GitCheckout<'a> {
fn new(path: &Path, database: &'a GitDatabase, revision: GitRevision,
repo: git2::Repository)
-> GitCheckout<'a>
{
fn new(
path: &Path,
database: &'a GitDatabase,
revision: GitRevision,
repo: git2::Repository,
) -> GitCheckout<'a> {
GitCheckout {
location: path.to_path_buf(),
database,
@ -239,16 +244,14 @@ impl<'a> GitCheckout<'a> {
}
}
fn clone_into(into: &Path,
database: &'a GitDatabase,
revision: GitRevision,
config: &Config)
-> CargoResult<GitCheckout<'a>>
{
fn clone_into(
into: &Path,
database: &'a GitDatabase,
revision: GitRevision,
config: &Config,
) -> CargoResult<GitCheckout<'a>> {
let dirname = into.parent().unwrap();
fs::create_dir_all(&dirname).chain_err(|| {
format!("Couldn't mkdir {}", dirname.display())
})?;
fs::create_dir_all(&dirname).chain_err(|| format!("Couldn't mkdir {}", dirname.display()))?;
if into.exists() {
paths::remove_dir_all(into)?;
}
@ -335,22 +338,25 @@ impl<'a> GitCheckout<'a> {
info!("update submodules for: {:?}", repo.workdir().unwrap());
for mut child in repo.submodules()? {
update_submodule(repo, &mut child, cargo_config)
.chain_err(|| {
format!("failed to update submodule `{}`",
child.name().unwrap_or(""))
})?;
update_submodule(repo, &mut child, cargo_config).chain_err(|| {
format!(
"failed to update submodule `{}`",
child.name().unwrap_or("")
)
})?;
}
Ok(())
}
fn update_submodule(parent: &git2::Repository,
child: &mut git2::Submodule,
cargo_config: &Config) -> CargoResult<()> {
fn update_submodule(
parent: &git2::Repository,
child: &mut git2::Submodule,
cargo_config: &Config,
) -> CargoResult<()> {
child.init(false)?;
let url = child.url().ok_or_else(|| {
internal("non-utf8 url for submodule")
})?;
let url = child
.url()
.ok_or_else(|| internal("non-utf8 url for submodule"))?;
// A submodule which is listed in .gitmodules but not actually
// checked out will not have a head id, so we should ignore it.
@ -370,7 +376,7 @@ impl<'a> GitCheckout<'a> {
let mut repo = match head_and_repo {
Ok((head, repo)) => {
if child.head_id() == head {
return update_submodules(&repo, cargo_config)
return update_submodules(&repo, cargo_config);
}
repo
}
@ -385,8 +391,11 @@ impl<'a> GitCheckout<'a> {
let refspec = "refs/heads/*:refs/heads/*";
let url = url.to_url()?;
fetch(&mut repo, &url, refspec, cargo_config).chain_err(|| {
internal(format!("failed to fetch submodule `{}` from {}",
child.name().unwrap_or(""), url))
internal(format!(
"failed to fetch submodule `{}` from {}",
child.name().unwrap_or(""),
url
))
})?;
let obj = repo.find_object(head, None)?;
@ -423,9 +432,9 @@ impl<'a> GitCheckout<'a> {
/// credentials until we give it a reason to not do so. To ensure we don't
/// just sit here looping forever we keep track of authentications we've
/// attempted and we don't try the same ones again.
fn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F)
-> CargoResult<T>
where F: FnMut(&mut git2::Credentials) -> CargoResult<T>
fn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F) -> CargoResult<T>
where
F: FnMut(&mut git2::Credentials) -> CargoResult<T>,
{
let mut cred_helper = git2::CredentialHelper::new(url);
cred_helper.config(cfg);
@ -459,7 +468,7 @@ fn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F)
if allowed.contains(git2::CredentialType::USERNAME) {
debug_assert!(username.is_none());
ssh_username_requested = true;
return Err(git2::Error::from_str("gonna try usernames later"))
return Err(git2::Error::from_str("gonna try usernames later"));
}
// An "SSH_KEY" authentication indicates that we need some sort of SSH
@ -479,7 +488,7 @@ fn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F)
let username = username.unwrap();
debug_assert!(!ssh_username_requested);
ssh_agent_attempts.push(username.to_string());
return git2::Cred::ssh_key_from_agent(username)
return git2::Cred::ssh_key_from_agent(username);
}
// Sometimes libgit2 will ask for a username/password in plaintext. This
@ -490,13 +499,13 @@ fn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F)
if allowed.contains(git2::CredentialType::USER_PASS_PLAINTEXT) {
let r = git2::Cred::credential_helper(cfg, url, username);
cred_helper_bad = Some(r.is_err());
return r
return r;
}
// I'm... not sure what the DEFAULT kind of authentication is, but seems
// easy to support?
if allowed.contains(git2::CredentialType::DEFAULT) {
return git2::Cred::default()
return git2::Cred::default();
}
// Whelp, we tried our best
@ -540,7 +549,7 @@ fn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F)
attempts += 1;
if attempts == 1 {
ssh_agent_attempts.push(s.to_string());
return git2::Cred::ssh_key_from_agent(&s)
return git2::Cred::ssh_key_from_agent(&s);
}
}
Err(git2::Error::from_str("no authentication available"))
@ -559,13 +568,13 @@ fn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F)
// errors happened). Otherwise something else is funny so we bail
// out.
if attempts != 2 {
break
break;
}
}
}
if res.is_ok() || !any_attempts {
return res.map_err(From::from)
return res.map_err(From::from);
}
// In the case of an authentication failure (where we tried something), we
// try to give a more helpful error message about precisely what we
// tried.
let res = res.map_err(CargoError::from).chain_err(|| {
let mut msg = "failed to authenticate when downloading \
repository".to_string();
repository"
.to_string();
if !ssh_agent_attempts.is_empty() {
let names = ssh_agent_attempts.iter()
.map(|s| format!("`{}`", s))
.collect::<Vec<_>>()
.join(", ");
msg.push_str(&format!("\nattempted ssh-agent authentication, but \
none of the usernames {} succeeded", names));
let names = ssh_agent_attempts
.iter()
.map(|s| format!("`{}`", s))
.collect::<Vec<_>>()
.join(", ");
msg.push_str(&format!(
"\nattempted ssh-agent authentication, but \
none of the usernames {} succeeded",
names
));
}
if let Some(failed_cred_helper) = cred_helper_bad {
if failed_cred_helper {
msg.push_str("\nattempted to find username/password via \
git's `credential.helper` support, but failed");
msg.push_str(
"\nattempted to find username/password via \
git's `credential.helper` support, but failed",
);
} else {
msg.push_str("\nattempted to find username/password via \
`credential.helper`, but maybe the found \
credentials were incorrect");
msg.push_str(
"\nattempted to find username/password via \
`credential.helper`, but maybe the found \
credentials were incorrect",
);
}
}
msg
@ -597,9 +615,7 @@ fn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F)
Ok(res)
}
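
The reformatted messages above lean on Rust's string continuation: a trailing `\` inside a literal splices the next line and strips its leading whitespace, so the multi-line source still renders a single-line message. A short sketch (the names and message text are ours):

fn main() {
    let names = "`alice`, `bob`";
    let msg = format!(
        "attempted ssh-agent authentication, but \
         none of the usernames {} succeeded",
        names
    );
    // The `\` removed the newline and the indentation after it.
    assert_eq!(
        msg,
        "attempted ssh-agent authentication, but none of the usernames \
         `alice`, `bob` succeeded"
    );
    println!("{}", msg);
}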
fn reset(repo: &git2::Repository,
obj: &git2::Object,
config: &Config) -> CargoResult<()> {
fn reset(repo: &git2::Repository, obj: &git2::Object, config: &Config) -> CargoResult<()> {
let mut pb = Progress::new("Checkout", config);
let mut opts = git2::build::CheckoutBuilder::new();
opts.progress(|_, cur, max| {
@ -609,12 +625,12 @@ fn reset(repo: &git2::Repository,
Ok(())
}
pub fn with_fetch_options(git_config: &git2::Config,
url: &Url,
config: &Config,
cb: &mut FnMut(git2::FetchOptions) -> CargoResult<()>)
-> CargoResult<()>
{
pub fn with_fetch_options(
git_config: &git2::Config,
url: &Url,
config: &Config,
cb: &mut FnMut(git2::FetchOptions) -> CargoResult<()>,
) -> CargoResult<()> {
let mut progress = Progress::new("Fetch", config);
network::with_retry(config, || {
with_authentication(url.as_str(), git_config, |f| {
@ -622,7 +638,9 @@ pub fn with_fetch_options(git_config: &git2::Config,
rcb.credentials(f);
rcb.transfer_progress(|stats| {
progress.tick(stats.indexed_objects(), stats.total_objects()).is_ok()
progress
.tick(stats.indexed_objects(), stats.total_objects())
.is_ok()
});
// Create a local anonymous remote in the repository to fetch the
@ -636,13 +654,17 @@ pub fn with_fetch_options(git_config: &git2::Config,
})
}
pub fn fetch(repo: &mut git2::Repository,
url: &Url,
refspec: &str,
config: &Config) -> CargoResult<()> {
pub fn fetch(
repo: &mut git2::Repository,
url: &Url,
refspec: &str,
config: &Config,
) -> CargoResult<()> {
if config.frozen() {
bail!("attempting to update a git repository, but --frozen \
was specified")
bail!(
"attempting to update a git repository, but --frozen \
was specified"
)
}
if !config.network_allowed() {
bail!("can't update a git repository in the offline mode")
@ -655,7 +677,7 @@ pub fn fetch(repo: &mut git2::Repository,
let mut handle = config.http()?.borrow_mut();
debug!("attempting github fast path for {}", url);
if github_up_to_date(&mut handle, url, &oid) {
return Ok(())
return Ok(());
} else {
debug!("fast path failed, falling back to a git fetch");
}
@ -694,14 +716,16 @@ pub fn fetch(repo: &mut git2::Repository,
if !repo_reinitialized && err.class() == git2::ErrorClass::Reference {
repo_reinitialized = true;
debug!("looks like this is a corrupt repository, reinitializing \
and trying again");
debug!(
"looks like this is a corrupt repository, reinitializing \
and trying again"
);
if reinitialize(repo).is_ok() {
continue
continue;
}
}
return Err(err.into())
return Err(err.into());
}
Ok(())
})
@ -727,31 +751,38 @@ fn maybe_gc_repo(repo: &mut git2::Repository) -> CargoResult<()> {
Ok(e) => e.count(),
Err(_) => {
debug!("skipping gc as pack dir appears gone");
return Ok(())
return Ok(());
}
};
let max = env::var("__CARGO_PACKFILE_LIMIT").ok()
let max = env::var("__CARGO_PACKFILE_LIMIT")
.ok()
.and_then(|s| s.parse::<usize>().ok())
.unwrap_or(100);
if entries < max {
debug!("skipping gc as there's only {} pack files", entries);
return Ok(())
return Ok(());
}
// First up, try a literal `git gc` by shelling out to git. This is pretty
// likely to fail though as we may not have `git` installed. Note that
// libgit2 doesn't currently implement the gc operation, so there's no
// equivalent there.
match Command::new("git").arg("gc").current_dir(repo.path()).output() {
match Command::new("git")
.arg("gc")
.current_dir(repo.path())
.output()
{
Ok(out) => {
debug!("git-gc status: {}\n\nstdout ---\n{}\nstderr ---\n{}",
out.status,
String::from_utf8_lossy(&out.stdout),
String::from_utf8_lossy(&out.stderr));
debug!(
"git-gc status: {}\n\nstdout ---\n{}\nstderr ---\n{}",
out.status,
String::from_utf8_lossy(&out.stdout),
String::from_utf8_lossy(&out.stderr)
);
if out.status.success() {
let new = git2::Repository::open(repo.path())?;
mem::replace(repo, new);
return Ok(())
return Ok(());
}
}
Err(e) => debug!("git-gc failed to spawn: {}", e),
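
`maybe_gc_repo` reads its pack-file threshold from `__CARGO_PACKFILE_LIMIT`, falling back to 100; rustfmt turns that fallback into one vertical chain. A minimal sketch of the same chain:

use std::env;

fn packfile_limit() -> usize {
    // Unset or unparsable values both fall through to the default.
    env::var("__CARGO_PACKFILE_LIMIT")
        .ok()
        .and_then(|s| s.parse::<usize>().ok())
        .unwrap_or(100)
}

fn main() {
    println!("gc threshold: {} pack files", packfile_limit());
}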
@ -774,7 +805,7 @@ fn reinitialize(repo: &mut git2::Repository) -> CargoResult<()> {
for entry in path.read_dir()? {
let entry = entry?;
if entry.file_name().to_str() == Some("tmp") {
continue
continue;
}
let path = entry.path();
drop(paths::remove_file(&path).or_else(|_| paths::remove_dir_all(&path)));
@ -818,11 +849,13 @@ fn github_up_to_date(handle: &mut Easy, url: &Url, oid: &git2::Oid) -> bool {
let username = try!(pieces.next());
let repo = try!(pieces.next());
if pieces.next().is_some() {
return false
return false;
}
let url = format!("https://api.github.com/repos/{}/{}/commits/master",
username, repo);
let url = format!(
"https://api.github.com/repos/{}/{}/commits/master",
username, repo
);
try!(handle.get(true).ok());
try!(handle.url(&url).ok());
try!(handle.useragent("cargo").ok());

View File

@ -8,9 +8,9 @@ use glob::Pattern;
use ignore::Match;
use ignore::gitignore::GitignoreBuilder;
use core::{Package, PackageId, Summary, SourceId, Source, Dependency, Registry};
use core::{Dependency, Package, PackageId, Registry, Source, SourceId, Summary};
use ops;
use util::{self, CargoResult, internal};
use util::{self, internal, CargoResult};
use util::Config;
pub struct PathSource<'cfg> {
@ -27,8 +27,7 @@ impl<'cfg> PathSource<'cfg> {
///
/// This source will only return the package at precisely the `path`
/// specified, and it will be an error if there's not a package at `path`.
pub fn new(path: &Path, id: &SourceId, config: &'cfg Config)
-> PathSource<'cfg> {
pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) -> PathSource<'cfg> {
PathSource {
source_id: id.clone(),
path: path.to_path_buf(),
@ -47,11 +46,10 @@ impl<'cfg> PathSource<'cfg> {
///
/// Note that this should be used with care and likely shouldn't be chosen
/// by default!
pub fn new_recursive(root: &Path, id: &SourceId, config: &'cfg Config)
-> PathSource<'cfg> {
pub fn new_recursive(root: &Path, id: &SourceId, config: &'cfg Config) -> PathSource<'cfg> {
PathSource {
recursive: true,
.. PathSource::new(root, id, config)
..PathSource::new(root, id, config)
}
}
@ -62,7 +60,7 @@ impl<'cfg> PathSource<'cfg> {
match self.packages.iter().find(|p| p.root() == &*self.path) {
Some(pkg) => Ok(pkg.clone()),
None => Err(internal("no package found in source"))
None => Err(internal("no package found in source")),
}
}
@ -115,9 +113,8 @@ impl<'cfg> PathSource<'cfg> {
} else {
p
};
Pattern::new(pattern).map_err(|e| {
format_err!("could not parse glob pattern `{}`: {}", p, e)
})
Pattern::new(pattern)
.map_err(|e| format_err!("could not parse glob pattern `{}`: {}", p, e))
};
let glob_exclude = pkg.manifest()
@ -134,7 +131,9 @@ impl<'cfg> PathSource<'cfg> {
let glob_should_package = |relative_path: &Path| -> bool {
fn glob_match(patterns: &Vec<Pattern>, relative_path: &Path) -> bool {
patterns.iter().any(|pattern| pattern.matches_path(relative_path))
patterns
.iter()
.any(|pattern| pattern.matches_path(relative_path))
}
// include and exclude options are mutually exclusive.
@ -162,10 +161,9 @@ impl<'cfg> PathSource<'cfg> {
let ignore_should_package = |relative_path: &Path| -> CargoResult<bool> {
// include and exclude options are mutually exclusive.
if no_include_option {
match ignore_exclude.matched_path_or_any_parents(
relative_path,
/* is_dir */ false,
) {
match ignore_exclude
.matched_path_or_any_parents(relative_path, /* is_dir */ false)
{
Match::None => Ok(true),
Match::Ignore(_) => Ok(false),
Match::Whitelist(pattern) => Err(format_err!(
@ -174,10 +172,9 @@ impl<'cfg> PathSource<'cfg> {
)),
}
} else {
match ignore_include.matched_path_or_any_parents(
relative_path,
/* is_dir */ false,
) {
match ignore_include
.matched_path_or_any_parents(relative_path, /* is_dir */ false)
{
Match::None => Ok(false),
Match::Ignore(_) => Ok(true),
Match::Whitelist(pattern) => Err(format_err!(
@ -198,42 +195,34 @@ impl<'cfg> PathSource<'cfg> {
if glob_should_package != ignore_should_package {
if glob_should_package {
if no_include_option {
self.config
.shell()
.warn(format!(
"Pattern matching for Cargo's include/exclude fields is changing and \
file `{}` WILL be excluded in a future Cargo version.\n\
See https://github.com/rust-lang/cargo/issues/4268 for more info",
relative_path.display()
))?;
self.config.shell().warn(format!(
"Pattern matching for Cargo's include/exclude fields is changing and \
file `{}` WILL be excluded in a future Cargo version.\n\
See https://github.com/rust-lang/cargo/issues/4268 for more info",
relative_path.display()
))?;
} else {
self.config
.shell()
.warn(format!(
"Pattern matching for Cargo's include/exclude fields is changing and \
file `{}` WILL NOT be included in a future Cargo version.\n\
See https://github.com/rust-lang/cargo/issues/4268 for more info",
relative_path.display()
))?;
self.config.shell().warn(format!(
"Pattern matching for Cargo's include/exclude fields is changing and \
file `{}` WILL NOT be included in a future Cargo version.\n\
See https://github.com/rust-lang/cargo/issues/4268 for more info",
relative_path.display()
))?;
}
} else if no_include_option {
self.config
.shell()
.warn(format!(
"Pattern matching for Cargo's include/exclude fields is changing and \
file `{}` WILL NOT be excluded in a future Cargo version.\n\
See https://github.com/rust-lang/cargo/issues/4268 for more info",
relative_path.display()
))?;
self.config.shell().warn(format!(
"Pattern matching for Cargo's include/exclude fields is changing and \
file `{}` WILL NOT be excluded in a future Cargo version.\n\
See https://github.com/rust-lang/cargo/issues/4268 for more info",
relative_path.display()
))?;
} else {
self.config
.shell()
.warn(format!(
"Pattern matching for Cargo's include/exclude fields is changing and \
file `{}` WILL be included in a future Cargo version.\n\
See https://github.com/rust-lang/cargo/issues/4268 for more info",
relative_path.display()
))?;
self.config.shell().warn(format!(
"Pattern matching for Cargo's include/exclude fields is changing and \
file `{}` WILL be included in a future Cargo version.\n\
See https://github.com/rust-lang/cargo/issues/4268 for more info",
relative_path.display()
))?;
}
}
@ -252,11 +241,12 @@ impl<'cfg> PathSource<'cfg> {
// Returns Some(_) if a sibling Cargo.toml and .git folder are found;
// otherwise the caller should fall back on the full file list.
fn discover_git_and_list_files(&self,
pkg: &Package,
root: &Path,
filter: &mut FnMut(&Path) -> CargoResult<bool>)
-> Option<CargoResult<Vec<PathBuf>>> {
fn discover_git_and_list_files(
&self,
pkg: &Package,
root: &Path,
filter: &mut FnMut(&Path) -> CargoResult<bool>,
) -> Option<CargoResult<Vec<PathBuf>>> {
// If this package is in a git repository, then we really do want to
// query the git repository as it takes into account items such as
// .gitignore. We're not quite sure where the git repository is,
@ -276,8 +266,7 @@ impl<'cfg> PathSource<'cfg> {
Ok(index) => index,
Err(err) => return Some(Err(err.into())),
};
let path = util::without_prefix(root, cur)
.unwrap().join("Cargo.toml");
let path = util::without_prefix(root, cur).unwrap().join("Cargo.toml");
if index.get_path(&path, 0).is_some() {
return Some(self.list_files_git(pkg, repo, filter));
}
@ -285,7 +274,7 @@ impl<'cfg> PathSource<'cfg> {
}
// don't cross submodule boundaries
if cur.join(".git").is_dir() {
break
break;
}
match cur.parent() {
Some(parent) => cur = parent,
@ -295,14 +284,16 @@ impl<'cfg> PathSource<'cfg> {
None
}
fn list_files_git(&self, pkg: &Package, repo: git2::Repository,
filter: &mut FnMut(&Path) -> CargoResult<bool>)
-> CargoResult<Vec<PathBuf>> {
fn list_files_git(
&self,
pkg: &Package,
repo: git2::Repository,
filter: &mut FnMut(&Path) -> CargoResult<bool>,
) -> CargoResult<Vec<PathBuf>> {
warn!("list_files_git {}", pkg.package_id());
let index = repo.index()?;
let root = repo.workdir().ok_or_else(|| {
internal("Can't list files on a bare repository.")
})?;
let root = repo.workdir()
.ok_or_else(|| internal("Can't list files on a bare repository."))?;
let pkg_path = pkg.root();
let mut ret = Vec::<PathBuf>::new();
@ -325,11 +316,9 @@ impl<'cfg> PathSource<'cfg> {
opts.pathspec(suffix);
}
let statuses = repo.statuses(Some(&mut opts))?;
let untracked = statuses.iter().filter_map(|entry| {
match entry.status() {
git2::Status::WT_NEW => Some((join(root, entry.path_bytes()), None)),
_ => None,
}
let untracked = statuses.iter().filter_map(|entry| match entry.status() {
git2::Status::WT_NEW => Some((join(root, entry.path_bytes()), None)),
_ => None,
});
let mut subpackages_found = Vec::new();
@ -341,15 +330,14 @@ impl<'cfg> PathSource<'cfg> {
// bit above via the `pathspec` function call, but we need to filter
// the entries in the index as well.
if !file_path.starts_with(pkg_path) {
continue
continue;
}
match file_path.file_name().and_then(|s| s.to_str()) {
// Always filter out Cargo.lock and target: we don't want to
// package a lock file no one will ever read, and we also avoid
// build artifacts.
Some("Cargo.lock") |
Some("target") => continue,
Some("Cargo.lock") | Some("target") => continue,
// Keep track of all sub-packages found and also strip out all
// matches we've found so far. Note, though, that if we find
@ -360,7 +348,7 @@ impl<'cfg> PathSource<'cfg> {
warn!("subpackage found: {}", path.display());
ret.retain(|p| !p.starts_with(path));
subpackages_found.push(path.to_path_buf());
continue
continue;
}
}
@ -370,15 +358,14 @@ impl<'cfg> PathSource<'cfg> {
// If this file is part of any other sub-package we've found so far,
// skip it.
if subpackages_found.iter().any(|p| file_path.starts_with(p)) {
continue
continue;
}
if is_dir.unwrap_or_else(|| file_path.is_dir()) {
warn!(" found submodule {}", file_path.display());
let rel = util::without_prefix(&file_path, root).unwrap();
let rel = rel.to_str().ok_or_else(|| {
format_err!("invalid utf-8 filename: {}", rel.display())
})?;
let rel = rel.to_str()
.ok_or_else(|| format_err!("invalid utf-8 filename: {}", rel.display()))?;
// Git submodules are currently only named through `/` path
// separators, explicitly not `\` which windows uses. Who knew?
let rel = rel.replace(r"\", "/");
@ -410,32 +397,39 @@ impl<'cfg> PathSource<'cfg> {
use std::str;
match str::from_utf8(data) {
Ok(s) => Ok(path.join(s)),
Err(..) => Err(internal("cannot process path in git with a non \
unicode filename")),
Err(..) => Err(internal(
"cannot process path in git with a non \
unicode filename",
)),
}
}
}
fn list_files_walk(&self, pkg: &Package, filter: &mut FnMut(&Path) -> CargoResult<bool>)
-> CargoResult<Vec<PathBuf>> {
fn list_files_walk(
&self,
pkg: &Package,
filter: &mut FnMut(&Path) -> CargoResult<bool>,
) -> CargoResult<Vec<PathBuf>> {
let mut ret = Vec::new();
PathSource::walk(pkg.root(), &mut ret, true, filter)?;
Ok(ret)
}
fn walk(path: &Path, ret: &mut Vec<PathBuf>,
is_root: bool, filter: &mut FnMut(&Path) -> CargoResult<bool>)
-> CargoResult<()>
{
fn walk(
path: &Path,
ret: &mut Vec<PathBuf>,
is_root: bool,
filter: &mut FnMut(&Path) -> CargoResult<bool>,
) -> CargoResult<()> {
if !fs::metadata(&path).map(|m| m.is_dir()).unwrap_or(false) {
if (*filter)(path)? {
ret.push(path.to_path_buf());
}
return Ok(())
return Ok(());
}
// Don't recurse into any sub-packages that we have
if !is_root && fs::metadata(&path.join("Cargo.toml")).is_ok() {
return Ok(())
return Ok(());
}
// For package integration tests, we need to sort the paths in a deterministic order to
@ -451,7 +445,7 @@ impl<'cfg> PathSource<'cfg> {
let name = path.file_name().and_then(|s| s.to_str());
// Skip dotfile directories
if name.map(|s| s.starts_with('.')) == Some(true) {
continue
continue;
}
if is_root {
// Skip cargo artifacts
@ -473,9 +467,7 @@ impl<'cfg> Debug for PathSource<'cfg> {
}
impl<'cfg> Registry for PathSource<'cfg> {
fn query(&mut self,
dep: &Dependency,
f: &mut FnMut(Summary)) -> CargoResult<()> {
fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
for s in self.packages.iter().map(|p| p.summary()) {
if dep.matches(s) {
f(s.clone())
@ -512,9 +504,8 @@ impl<'cfg> Source for PathSource<'cfg> {
trace!("getting packages; id={}", id);
let pkg = self.packages.iter().find(|pkg| pkg.package_id() == id);
pkg.cloned().ok_or_else(|| {
internal(format!("failed to find {} in path source", id))
})
pkg.cloned()
.ok_or_else(|| internal(format!("failed to find {} in path source", id)))
}
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
@ -530,9 +521,9 @@ impl<'cfg> Source for PathSource<'cfg> {
// condition where this path was rm'ed - either way,
// we can ignore the error and treat the path's mtime
// as 0.
let mtime = fs::metadata(&file).map(|meta| {
FileTime::from_last_modification_time(&meta)
}).unwrap_or(FileTime::zero());
let mtime = fs::metadata(&file)
.map(|meta| FileTime::from_last_modification_time(&meta))
.unwrap_or(FileTime::zero());
warn!("{} {}", mtime, file.display());
if mtime > max {
max = mtime;

View File

@ -6,10 +6,10 @@ use serde_json;
use semver::Version;
use core::dependency::Dependency;
use core::{SourceId, Summary, PackageId};
use core::{PackageId, SourceId, Summary};
use sources::registry::{RegistryPackage, INDEX_LOCK};
use sources::registry::RegistryData;
use util::{CargoResult, internal, Filesystem, Config};
use util::{internal, CargoResult, Config, Filesystem};
pub struct RegistryIndex<'cfg> {
source_id: SourceId,
@ -21,11 +21,12 @@ pub struct RegistryIndex<'cfg> {
}
impl<'cfg> RegistryIndex<'cfg> {
pub fn new(id: &SourceId,
path: &Filesystem,
config: &'cfg Config,
locked: bool)
-> RegistryIndex<'cfg> {
pub fn new(
id: &SourceId,
path: &Filesystem,
config: &'cfg Config,
locked: bool,
) -> RegistryIndex<'cfg> {
RegistryIndex {
source_id: id.clone(),
path: path.clone(),
@ -37,30 +38,30 @@ impl<'cfg> RegistryIndex<'cfg> {
}
/// Return the hash listed for a specified PackageId.
pub fn hash(&mut self,
pkg: &PackageId,
load: &mut RegistryData)
-> CargoResult<String> {
pub fn hash(&mut self, pkg: &PackageId, load: &mut RegistryData) -> CargoResult<String> {
let name = &*pkg.name();
let version = pkg.version();
if let Some(s) = self.hashes.get(name).and_then(|v| v.get(version)) {
return Ok(s.clone())
return Ok(s.clone());
}
// Ok, we're missing the key, so parse the index file to load it.
self.summaries(name, load)?;
self.hashes.get(name).and_then(|v| v.get(version)).ok_or_else(|| {
internal(format!("no hash listed for {}", pkg))
}).map(|s| s.clone())
self.hashes
.get(name)
.and_then(|v| v.get(version))
.ok_or_else(|| internal(format!("no hash listed for {}", pkg)))
.map(|s| s.clone())
}
/// Parse the on-disk metadata for the package provided
///
/// Returns a list of pairs of (summary, yanked) for the package name
/// specified.
pub fn summaries(&mut self,
name: &str,
load: &mut RegistryData)
-> CargoResult<&Vec<(Summary, bool)>> {
pub fn summaries(
&mut self,
name: &str,
load: &mut RegistryData,
) -> CargoResult<&Vec<(Summary, bool)>> {
if self.cache.contains_key(name) {
return Ok(&self.cache[name]);
}
@ -69,27 +70,25 @@ impl<'cfg> RegistryIndex<'cfg> {
Ok(&self.cache[name])
}
fn load_summaries(&mut self,
name: &str,
load: &mut RegistryData)
-> CargoResult<Vec<(Summary, bool)>> {
fn load_summaries(
&mut self,
name: &str,
load: &mut RegistryData,
) -> CargoResult<Vec<(Summary, bool)>> {
let (root, _lock) = if self.locked {
let lock = self.path.open_ro(Path::new(INDEX_LOCK),
self.config,
"the registry index");
let lock = self.path
.open_ro(Path::new(INDEX_LOCK), self.config, "the registry index");
match lock {
Ok(lock) => {
(lock.path().parent().unwrap().to_path_buf(), Some(lock))
}
Ok(lock) => (lock.path().parent().unwrap().to_path_buf(), Some(lock)),
Err(_) => return Ok(Vec::new()),
}
} else {
(self.path.clone().into_path_unlocked(), None)
};
let fs_name = name.chars().flat_map(|c| {
c.to_lowercase()
}).collect::<String>();
let fs_name = name.chars()
.flat_map(|c| c.to_lowercase())
.collect::<String>();
// see module comment for why this is structured the way it is
let path = match fs_name.len() {
@ -102,13 +101,10 @@ impl<'cfg> RegistryIndex<'cfg> {
let mut hit_closure = false;
let err = load.load(&root, Path::new(&path), &mut |contents| {
hit_closure = true;
let contents = str::from_utf8(contents).map_err(|_| {
format_err!("registry index file was not valid utf-8")
})?;
let contents = str::from_utf8(contents)
.map_err(|_| format_err!("registry index file was not valid utf-8"))?;
ret.reserve(contents.lines().count());
let lines = contents.lines()
.map(|s| s.trim())
.filter(|l| !l.is_empty());
let lines = contents.lines().map(|s| s.trim()).filter(|l| !l.is_empty());
let online = !self.config.cli_unstable().offline;
// Attempt forwards-compatibility on the index by ignoring
@ -117,7 +113,7 @@ impl<'cfg> RegistryIndex<'cfg> {
// interpretation of each line here and older cargo will simply
// ignore the new lines.
ret.extend(lines.filter_map(|line| {
self.parse_registry_package(line).ok().and_then(|v|{
self.parse_registry_package(line).ok().and_then(|v| {
if online || load.is_crate_downloaded(v.0.package_id()) {
Some(v)
} else {
@ -143,10 +139,15 @@ impl<'cfg> RegistryIndex<'cfg> {
/// package.
///
/// The returned boolean is whether or not the summary has been yanked.
fn parse_registry_package(&mut self, line: &str)
-> CargoResult<(Summary, bool)> {
fn parse_registry_package(&mut self, line: &str) -> CargoResult<(Summary, bool)> {
let RegistryPackage {
name, vers, cksum, deps, features, yanked, links
name,
vers,
cksum,
deps,
features,
yanked,
links,
} = super::DEFAULT_ID.set(&self.source_id, || {
serde_json::from_str::<RegistryPackage>(line)
})?;
@ -156,37 +157,37 @@ impl<'cfg> RegistryIndex<'cfg> {
if self.hashes.contains_key(&name[..]) {
self.hashes.get_mut(&name[..]).unwrap().insert(vers, cksum);
} else {
self.hashes.entry(name.into_owned())
self.hashes
.entry(name.into_owned())
.or_insert_with(HashMap::new)
.insert(vers, cksum);
}
Ok((summary, yanked.unwrap_or(false)))
}
pub fn query(&mut self,
dep: &Dependency,
load: &mut RegistryData,
f: &mut FnMut(Summary))
-> CargoResult<()> {
pub fn query(
&mut self,
dep: &Dependency,
load: &mut RegistryData,
f: &mut FnMut(Summary),
) -> CargoResult<()> {
let source_id = self.source_id.clone();
let summaries = self.summaries(&*dep.name(), load)?;
let summaries = summaries.iter().filter(|&&(_, yanked)| {
dep.source_id().precise().is_some() || !yanked
}).map(|s| s.0.clone());
let summaries = summaries
.iter()
.filter(|&&(_, yanked)| dep.source_id().precise().is_some() || !yanked)
.map(|s| s.0.clone());
// Handle `cargo update --precise` here. If specified, our own source
// will have a precise version listed of the form `<pkg>=<req>` where
// `<pkg>` is the name of a crate on this source and `<req>` is the
// version requested (argument to `--precise`).
let summaries = summaries.filter(|s| {
match source_id.precise() {
Some(p) if p.starts_with(&*dep.name()) &&
p[dep.name().len()..].starts_with('=') => {
let vers = &p[dep.name().len() + 1..];
s.version().to_string() == vers
}
_ => true,
let summaries = summaries.filter(|s| match source_id.precise() {
Some(p) if p.starts_with(&*dep.name()) && p[dep.name().len()..].starts_with('=') => {
let vers = &p[dep.name().len() + 1..];
s.version().to_string() == vers
}
_ => true,
});
for summary in summaries {
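
The comment above describes the `--precise` payload as `<pkg>=<req>`; the filter keeps a summary only when its version matches the requested one. A hedged sketch of that check using the same slicing as the diff (the crate names in `main` are examples):

fn matches_precise(precise: &str, name: &str, version: &str) -> bool {
    if precise.starts_with(name) && precise[name.len()..].starts_with('=') {
        // The payload names this crate: keep only the exact version.
        &precise[name.len() + 1..] == version
    } else {
        // Not a `--precise` override for this crate; keep the summary.
        true
    }
}

fn main() {
    assert!(matches_precise("serde=1.0.90", "serde", "1.0.90"));
    assert!(!matches_precise("serde=1.0.90", "serde", "1.0.89"));
    assert!(matches_precise("other", "serde", "0.9.0"));
}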

View File

@ -4,10 +4,10 @@ use std::path::Path;
use core::PackageId;
use hex;
use sources::registry::{RegistryData, RegistryConfig};
use sources::registry::{RegistryConfig, RegistryData};
use util::FileLock;
use util::paths;
use util::{Config, Sha256, Filesystem};
use util::{Config, Filesystem, Sha256};
use util::errors::{CargoResult, CargoResultExt};
pub struct LocalRegistry<'cfg> {
@ -18,9 +18,7 @@ pub struct LocalRegistry<'cfg> {
}
impl<'cfg> LocalRegistry<'cfg> {
pub fn new(root: &Path,
config: &'cfg Config,
name: &str) -> LocalRegistry<'cfg> {
pub fn new(root: &Path, config: &'cfg Config, name: &str) -> LocalRegistry<'cfg> {
LocalRegistry {
src_path: config.registry_source_path().join(name),
index_path: Filesystem::new(root.join("index")),
@ -35,10 +33,12 @@ impl<'cfg> RegistryData for LocalRegistry<'cfg> {
&self.index_path
}
fn load(&self,
root: &Path,
path: &Path,
data: &mut FnMut(&[u8]) -> CargoResult<()>) -> CargoResult<()> {
fn load(
&self,
root: &Path,
path: &Path,
data: &mut FnMut(&[u8]) -> CargoResult<()>,
) -> CargoResult<()> {
data(&paths::read_bytes(&root.join(path))?)
}
@ -54,29 +54,27 @@ impl<'cfg> RegistryData for LocalRegistry<'cfg> {
// these directories exist.
let root = self.root.clone().into_path_unlocked();
if !root.is_dir() {
bail!("local registry path is not a directory: {}",
root.display())
bail!("local registry path is not a directory: {}", root.display())
}
let index_path = self.index_path.clone().into_path_unlocked();
if !index_path.is_dir() {
bail!("local registry index path is not a directory: {}",
index_path.display())
bail!(
"local registry index path is not a directory: {}",
index_path.display()
)
}
Ok(())
}
fn download(&mut self, pkg: &PackageId, checksum: &str)
-> CargoResult<FileLock> {
fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<FileLock> {
let crate_file = format!("{}-{}.crate", pkg.name(), pkg.version());
let mut crate_file = self.root.open_ro(&crate_file,
self.config,
"crate file")?;
let mut crate_file = self.root.open_ro(&crate_file, self.config, "crate file")?;
// If we've already got an unpacked version of this crate, then skip the
// checksum below as it is in theory already verified.
let dst = format!("{}-{}", pkg.name(), pkg.version());
if self.src_path.join(dst).into_path_unlocked().exists() {
return Ok(crate_file)
return Ok(crate_file);
}
self.config.shell().status("Unpacking", pkg)?;
@ -86,11 +84,11 @@ impl<'cfg> RegistryData for LocalRegistry<'cfg> {
let mut state = Sha256::new();
let mut buf = [0; 64 * 1024];
loop {
let n = crate_file.read(&mut buf).chain_err(|| {
format!("failed to read `{}`", crate_file.path().display())
})?;
let n = crate_file
.read(&mut buf)
.chain_err(|| format!("failed to read `{}`", crate_file.path().display()))?;
if n == 0 {
break
break;
}
state.update(&buf[..n]);
}

View File

@ -162,17 +162,17 @@ use std::borrow::Cow;
use std::collections::BTreeMap;
use std::fmt;
use std::fs::File;
use std::path::{PathBuf, Path};
use std::path::{Path, PathBuf};
use flate2::read::GzDecoder;
use semver::Version;
use serde::de;
use tar::Archive;
use core::{Source, SourceId, PackageId, Package, Summary, Registry};
use core::{Package, PackageId, Registry, Source, SourceId, Summary};
use core::dependency::{Dependency, Kind};
use sources::PathSource;
use util::{CargoResult, Config, internal, FileLock, Filesystem};
use util::{internal, CargoResult, Config, FileLock, Filesystem};
use util::errors::CargoResultExt;
use util::hex;
use util::to_url::ToUrl;
@ -220,8 +220,7 @@ struct RegistryPackage<'a> {
features: BTreeMap<String, Vec<String>>,
cksum: String,
yanked: Option<bool>,
#[serde(default)]
links: Option<String>,
#[serde(default)] links: Option<String>,
}
struct DependencyList {
@ -242,17 +241,19 @@ struct RegistryDependency<'a> {
pub trait RegistryData {
fn index_path(&self) -> &Filesystem;
fn load(&self,
_root: &Path,
path: &Path,
data: &mut FnMut(&[u8]) -> CargoResult<()>) -> CargoResult<()>;
fn load(
&self,
_root: &Path,
path: &Path,
data: &mut FnMut(&[u8]) -> CargoResult<()>,
) -> CargoResult<()>;
fn config(&mut self) -> CargoResult<Option<RegistryConfig>>;
fn update_index(&mut self) -> CargoResult<()>;
fn download(&mut self,
pkg: &PackageId,
checksum: &str) -> CargoResult<FileLock>;
fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<FileLock>;
fn is_crate_downloaded(&self, _pkg: &PackageId) -> bool { true }
fn is_crate_downloaded(&self, _pkg: &PackageId) -> bool {
true
}
}
mod index;
@ -266,35 +267,31 @@ fn short_name(id: &SourceId) -> String {
}
impl<'cfg> RegistrySource<'cfg> {
pub fn remote(source_id: &SourceId,
config: &'cfg Config) -> RegistrySource<'cfg> {
pub fn remote(source_id: &SourceId, config: &'cfg Config) -> RegistrySource<'cfg> {
let name = short_name(source_id);
let ops = remote::RemoteRegistry::new(source_id, config, &name);
RegistrySource::new(source_id, config, &name, Box::new(ops), true)
}
pub fn local(source_id: &SourceId,
path: &Path,
config: &'cfg Config) -> RegistrySource<'cfg> {
pub fn local(source_id: &SourceId, path: &Path, config: &'cfg Config) -> RegistrySource<'cfg> {
let name = short_name(source_id);
let ops = local::LocalRegistry::new(path, config, &name);
RegistrySource::new(source_id, config, &name, Box::new(ops), false)
}
fn new(source_id: &SourceId,
config: &'cfg Config,
name: &str,
ops: Box<RegistryData + 'cfg>,
index_locked: bool) -> RegistrySource<'cfg> {
fn new(
source_id: &SourceId,
config: &'cfg Config,
name: &str,
ops: Box<RegistryData + 'cfg>,
index_locked: bool,
) -> RegistrySource<'cfg> {
RegistrySource {
src_path: config.registry_source_path().join(name),
config,
source_id: source_id.clone(),
updated: false,
index: index::RegistryIndex::new(source_id,
ops.index_path(),
config,
index_locked),
index: index::RegistryIndex::new(source_id, ops.index_path(), config, index_locked),
index_locked,
ops,
}
@ -311,12 +308,9 @@ impl<'cfg> RegistrySource<'cfg> {
/// compiled.
///
/// No action is taken if the source looks like it's already unpacked.
fn unpack_package(&self,
pkg: &PackageId,
tarball: &FileLock)
-> CargoResult<PathBuf> {
let dst = self.src_path.join(&format!("{}-{}", pkg.name(),
pkg.version()));
fn unpack_package(&self, pkg: &PackageId, tarball: &FileLock) -> CargoResult<PathBuf> {
let dst = self.src_path
.join(&format!("{}-{}", pkg.name(), pkg.version()));
dst.create_dir()?;
// Note that we've already got the `tarball` locked above, and that
// implies a lock on the unpacked destination as well, so this access
@ -324,7 +318,7 @@ impl<'cfg> RegistrySource<'cfg> {
let dst = dst.into_path_unlocked();
let ok = dst.join(".cargo-ok");
if ok.exists() {
return Ok(dst)
return Ok(dst);
}
let gz = GzDecoder::new(tarball.file());
@ -333,7 +327,8 @@ impl<'cfg> RegistrySource<'cfg> {
let parent = dst.parent().unwrap();
for entry in tar.entries()? {
let mut entry = entry.chain_err(|| "failed to iterate over archive")?;
let entry_path = entry.path()
let entry_path = entry
.path()
.chain_err(|| "failed to read entry path")?
.into_owned();
@ -344,15 +339,18 @@ impl<'cfg> RegistrySource<'cfg> {
// crates.io should also block uploads with these sorts of tarballs,
// but be extra sure by adding a check here as well.
if !entry_path.starts_with(prefix) {
bail!("invalid tarball downloaded, contains \
a file at {:?} which isn't under {:?}",
entry_path, prefix)
bail!(
"invalid tarball downloaded, contains \
a file at {:?} which isn't under {:?}",
entry_path,
prefix
)
}
// Once that's verified, unpack the entry as usual.
entry.unpack_in(parent).chain_err(|| {
format!("failed to unpack entry at `{}`", entry_path.display())
})?;
entry
.unpack_in(parent)
.chain_err(|| format!("failed to unpack entry at `{}`", entry_path.display()))?;
}
File::create(&ok)?;
Ok(dst.clone())
@ -361,18 +359,14 @@ impl<'cfg> RegistrySource<'cfg> {
fn do_update(&mut self) -> CargoResult<()> {
self.ops.update_index()?;
let path = self.ops.index_path();
self.index = index::RegistryIndex::new(&self.source_id,
path,
self.config,
self.index_locked);
self.index =
index::RegistryIndex::new(&self.source_id, path, self.config, self.index_locked);
Ok(())
}
}
impl<'cfg> Registry for RegistrySource<'cfg> {
fn query(&mut self,
dep: &Dependency,
f: &mut FnMut(Summary)) -> CargoResult<()> {
fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
// If this is a precise dependency, then it came from a lockfile and in
// theory the registry is known to contain this version. If, however, we
// come back with no summaries, then our registry may need to be
@ -384,7 +378,7 @@ impl<'cfg> Registry for RegistrySource<'cfg> {
f(s);
})?;
if called {
return Ok(())
return Ok(());
} else {
self.do_update()?;
}
@ -424,9 +418,8 @@ impl<'cfg> Source for RegistrySource<'cfg> {
fn download(&mut self, package: &PackageId) -> CargoResult<Package> {
let hash = self.index.hash(package, &mut *self.ops)?;
let path = self.ops.download(package, &hash)?;
let path = self.unpack_package(package, &path).chain_err(|| {
internal(format!("failed to unpack package `{}`", package))
})?;
let path = self.unpack_package(package, &path)
.chain_err(|| internal(format!("failed to unpack package `{}`", package)))?;
let mut src = PathSource::new(&path, &self.source_id, self.config);
src.update()?;
let pkg = src.download(package)?;
@ -436,9 +429,11 @@ impl<'cfg> Source for RegistrySource<'cfg> {
// *summary* loaded from the Cargo.toml we just downloaded with the one
// we loaded from the index.
let summaries = self.index.summaries(&*package.name(), &mut *self.ops)?;
let summary = summaries.iter().map(|s| &s.0).find(|s| {
s.package_id() == package
}).expect("summary not found");
let summary = summaries
.iter()
.map(|s| &s.0)
.find(|s| s.package_id() == package)
.expect("summary not found");
let mut manifest = pkg.manifest().clone();
manifest.set_summary(summary.clone());
Ok(Package::new(manifest, pkg.manifest_path()))
@ -463,7 +458,8 @@ scoped_thread_local!(static DEFAULT_ID: SourceId);
impl<'de> de::Deserialize<'de> for DependencyList {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where D: de::Deserializer<'de>,
where
D: de::Deserializer<'de>,
{
return deserializer.deserialize_seq(Visitor);
@ -477,16 +473,15 @@ impl<'de> de::Deserialize<'de> for DependencyList {
}
fn visit_seq<A>(self, mut seq: A) -> Result<DependencyList, A::Error>
where A: de::SeqAccess<'de>,
where
A: de::SeqAccess<'de>,
{
let mut ret = Vec::new();
if let Some(size) = seq.size_hint() {
ret.reserve(size);
}
while let Some(element) = seq.next_element::<RegistryDependency>()? {
ret.push(parse_registry_dependency(element).map_err(|e| {
de::Error::custom(e)
})?);
ret.push(parse_registry_dependency(element).map_err(|e| de::Error::custom(e))?);
}
Ok(DependencyList { inner: ret })
@ -496,18 +491,22 @@ impl<'de> de::Deserialize<'de> for DependencyList {
}
/// Converts an encoded dependency in the registry to a cargo dependency
fn parse_registry_dependency(dep: RegistryDependency)
-> CargoResult<Dependency> {
fn parse_registry_dependency(dep: RegistryDependency) -> CargoResult<Dependency> {
let RegistryDependency {
name, req, mut features, optional, default_features, target, kind, registry
name,
req,
mut features,
optional,
default_features,
target,
kind,
registry,
} = dep;
let id = if let Some(registry) = registry {
SourceId::for_registry(&registry.to_url()?)?
} else {
DEFAULT_ID.with(|id| {
id.clone()
})
DEFAULT_ID.with(|id| id.clone())
};
let mut dep = Dependency::parse_no_deprecated(&name, Some(&req), &id)?;
@ -530,10 +529,10 @@ fn parse_registry_dependency(dep: RegistryDependency)
features.retain(|s| !s.is_empty());
dep.set_optional(optional)
.set_default_features(default_features)
.set_features(features)
.set_platform(platform)
.set_kind(kind);
.set_default_features(default_features)
.set_features(features)
.set_platform(platform)
.set_kind(kind);
Ok(dep)
}

View File

@ -1,4 +1,4 @@
use std::cell::{RefCell, Ref, Cell};
use std::cell::{Cell, Ref, RefCell};
use std::fmt::Write as FmtWrite;
use std::io::SeekFrom;
use std::io::prelude::*;
@ -13,10 +13,10 @@ use lazycell::LazyCell;
use core::{PackageId, SourceId};
use sources::git;
use sources::registry::{RegistryData, RegistryConfig, INDEX_LOCK, CRATE_TEMPLATE, VERSION_TEMPLATE};
use sources::registry::{RegistryConfig, RegistryData, CRATE_TEMPLATE, INDEX_LOCK, VERSION_TEMPLATE};
use util::network;
use util::{FileLock, Filesystem};
use util::{Config, Sha256, ToUrl, Progress};
use util::{Config, Progress, Sha256, ToUrl};
use util::errors::{CargoResult, CargoResultExt, HttpNot200};
pub struct RemoteRegistry<'cfg> {
@ -30,8 +30,7 @@ pub struct RemoteRegistry<'cfg> {
}
impl<'cfg> RemoteRegistry<'cfg> {
pub fn new(source_id: &SourceId, config: &'cfg Config, name: &str)
-> RemoteRegistry<'cfg> {
pub fn new(source_id: &SourceId, config: &'cfg Config, name: &str) -> RemoteRegistry<'cfg> {
RemoteRegistry {
index_path: config.registry_index_path().join(name),
cache_path: config.registry_cache_path().join(name),
@ -49,13 +48,13 @@ impl<'cfg> RemoteRegistry<'cfg> {
// Fast path without a lock
if let Ok(repo) = git2::Repository::open(&path) {
return Ok(repo)
return Ok(repo);
}
// Ok, now we need to lock and try the whole thing over again.
let lock = self.index_path.open_rw(Path::new(INDEX_LOCK),
self.config,
"the registry index")?;
let lock =
self.index_path
.open_rw(Path::new(INDEX_LOCK), self.config, "the registry index")?;
match git2::Repository::open(&path) {
Ok(repo) => Ok(repo),
Err(_) => {
@ -90,7 +89,7 @@ impl<'cfg> RemoteRegistry<'cfg> {
{
let tree = self.tree.borrow();
if tree.is_some() {
return Ok(Ref::map(tree, |s| s.as_ref().unwrap()))
return Ok(Ref::map(tree, |s| s.as_ref().unwrap()));
}
}
let repo = self.repo()?;
@ -109,9 +108,7 @@ impl<'cfg> RemoteRegistry<'cfg> {
// (`RemoteRegistry`) so we then just need to ensure that the tree is
// destroyed first in the destructor, hence the destructor on
// `RemoteRegistry` below.
let tree = unsafe {
mem::transmute::<git2::Tree, git2::Tree<'static>>(tree)
};
let tree = unsafe { mem::transmute::<git2::Tree, git2::Tree<'static>>(tree) };
*self.tree.borrow_mut() = Some(tree);
Ok(Ref::map(self.tree.borrow(), |s| s.as_ref().unwrap()))
}
@ -122,10 +119,12 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
&self.index_path
}
fn load(&self,
_root: &Path,
path: &Path,
data: &mut FnMut(&[u8]) -> CargoResult<()>) -> CargoResult<()> {
fn load(
&self,
_root: &Path,
path: &Path,
data: &mut FnMut(&[u8]) -> CargoResult<()>,
) -> CargoResult<()> {
// Note that the index calls this method and the filesystem is locked
// in the index, so we don't need to worry about an `update_index`
// happening in a different process.
@ -142,9 +141,9 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
fn config(&mut self) -> CargoResult<Option<RegistryConfig>> {
self.repo()?; // create intermediate dirs and initialize the repo
let _lock = self.index_path.open_ro(Path::new(INDEX_LOCK),
self.config,
"the registry index")?;
let _lock =
self.index_path
.open_ro(Path::new(INDEX_LOCK), self.config, "the registry index")?;
let mut config = None;
self.load(Path::new(""), Path::new("config.json"), &mut |json| {
config = Some(serde_json::from_slice(json)?);
@ -172,23 +171,23 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
self.repo()?;
self.head.set(None);
*self.tree.borrow_mut() = None;
let _lock = self.index_path.open_rw(Path::new(INDEX_LOCK),
self.config,
"the registry index")?;
self.config.shell().status("Updating", self.source_id.display_registry())?;
let _lock =
self.index_path
.open_rw(Path::new(INDEX_LOCK), self.config, "the registry index")?;
self.config
.shell()
.status("Updating", self.source_id.display_registry())?;
// git fetch origin master
let url = self.source_id.url();
let refspec = "refs/heads/master:refs/remotes/origin/master";
let repo = self.repo.borrow_mut().unwrap();
git::fetch(repo, url, refspec, self.config).chain_err(|| {
format!("failed to fetch `{}`", url)
})?;
git::fetch(repo, url, refspec, self.config)
.chain_err(|| format!("failed to fetch `{}`", url))?;
Ok(())
}
fn download(&mut self, pkg: &PackageId, checksum: &str)
-> CargoResult<FileLock> {
fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<FileLock> {
let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
let path = Path::new(&filename);
@ -201,13 +200,13 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
if let Ok(dst) = self.cache_path.open_ro(path, self.config, &filename) {
let meta = dst.file().metadata()?;
if meta.len() > 0 {
return Ok(dst)
return Ok(dst);
}
}
let mut dst = self.cache_path.open_rw(path, self.config, &filename)?;
let meta = dst.file().metadata()?;
if meta.len() > 0 {
return Ok(dst)
return Ok(dst);
}
self.config.shell().status("Downloading", pkg)?;
@ -216,8 +215,7 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
if !url.contains(CRATE_TEMPLATE) && !url.contains(VERSION_TEMPLATE) {
write!(url, "/{}/{}/download", CRATE_TEMPLATE, VERSION_TEMPLATE).unwrap();
}
let url = url
.replace(CRATE_TEMPLATE, &*pkg.name())
let url = url.replace(CRATE_TEMPLATE, &*pkg.name())
.replace(VERSION_TEMPLATE, &pkg.version().to_string())
.to_url()?;
@ -251,7 +249,10 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
let code = handle.response_code()?;
if code != 200 && code != 0 {
let url = handle.effective_url()?.unwrap_or(&url);
Err(HttpNot200 { code, url: url.to_string() }.into())
Err(HttpNot200 {
code,
url: url.to_string(),
}.into())
} else {
Ok(())
}
@ -267,19 +268,17 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
Ok(dst)
}
fn is_crate_downloaded(&self, pkg: &PackageId) -> bool {
let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
let path = Path::new(&filename);
if let Ok(dst) = self.cache_path.open_ro(path, self.config, &filename) {
if let Ok(meta) = dst.file().metadata(){
if let Ok(meta) = dst.file().metadata() {
return meta.len() > 0;
}
}
false
}
}
impl<'cfg> Drop for RemoteRegistry<'cfg> {
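
The repo() method earlier in this file is a double-checked pattern: probe without the lock, and only on a miss take the index lock and probe again before initializing. A standalone sketch of that shape, where acquire_lock is a hypothetical stand-in for open_rw on the index lock:

use std::fs::{File, OpenOptions};
use std::io;
use std::path::Path;

fn acquire_lock() -> io::Result<()> {
    Ok(()) // stand-in: the real version blocks until the flock is held
}

fn open_or_create(path: &Path) -> io::Result<File> {
    // Fast path without a lock.
    if let Ok(f) = File::open(path) {
        return Ok(f);
    }
    // Lock, then check again: another process may have created the file
    // between the first probe and the lock acquisition.
    let _lock = acquire_lock()?;
    match File::open(path) {
        Ok(f) => Ok(f),
        Err(_) => OpenOptions::new().read(true).write(true).create(true).open(path),
    }
}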


@ -1,4 +1,4 @@
use core::{Source, Registry, PackageId, Package, Dependency, Summary, SourceId};
use core::{Dependency, Package, PackageId, Registry, Source, SourceId, Summary};
use util::errors::{CargoResult, CargoResultExt};
pub struct ReplacedSource<'cfg> {
@ -8,9 +8,11 @@ pub struct ReplacedSource<'cfg> {
}
impl<'cfg> ReplacedSource<'cfg> {
pub fn new(to_replace: &SourceId,
replace_with: &SourceId,
src: Box<Source + 'cfg>) -> ReplacedSource<'cfg> {
pub fn new(
to_replace: &SourceId,
replace_with: &SourceId,
src: Box<Source + 'cfg>,
) -> ReplacedSource<'cfg> {
ReplacedSource {
to_replace: to_replace.clone(),
replace_with: replace_with.clone(),
@ -20,18 +22,15 @@ impl<'cfg> ReplacedSource<'cfg> {
}
impl<'cfg> Registry for ReplacedSource<'cfg> {
fn query(&mut self,
dep: &Dependency,
f: &mut FnMut(Summary)) -> CargoResult<()> {
fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
let (replace_with, to_replace) = (&self.replace_with, &self.to_replace);
let dep = dep.clone().map_source(to_replace, replace_with);
self.inner.query(&dep, &mut |summary| {
f(summary.map_source(replace_with, to_replace))
}).chain_err(|| {
format!("failed to query replaced source {}",
self.to_replace)
})?;
self.inner
.query(&dep, &mut |summary| {
f(summary.map_source(replace_with, to_replace))
})
.chain_err(|| format!("failed to query replaced source {}", self.to_replace))?;
Ok(())
}
@ -50,19 +49,17 @@ impl<'cfg> Source for ReplacedSource<'cfg> {
}
fn update(&mut self) -> CargoResult<()> {
self.inner.update().chain_err(|| {
format!("failed to update replaced source {}",
self.to_replace)
})?;
self.inner
.update()
.chain_err(|| format!("failed to update replaced source {}", self.to_replace))?;
Ok(())
}
fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
let id = id.with_source_id(&self.replace_with);
let pkg = self.inner.download(&id).chain_err(|| {
format!("failed to download replaced source {}",
self.to_replace)
})?;
let pkg = self.inner
.download(&id)
.chain_err(|| format!("failed to download replaced source {}", self.to_replace))?;
Ok(pkg.map_source(&self.replace_with, &self.to_replace))
}


@ -77,8 +77,10 @@ impl FromStr for CfgExpr {
let mut p = Parser::new(s);
let e = p.expr()?;
if p.t.next().is_some() {
bail!("can only have one cfg-expression, consider using all() or \
any() explicitly")
bail!(
"can only have one cfg-expression, consider using all() or \
any() explicitly"
)
}
Ok(e)
}
@ -121,8 +123,7 @@ impl<'a> Parser<'a> {
fn expr(&mut self) -> CargoResult<CfgExpr> {
match self.t.peek() {
Some(&Ok(Token::Ident(op @ "all"))) |
Some(&Ok(Token::Ident(op @ "any"))) => {
Some(&Ok(Token::Ident(op @ "all"))) | Some(&Ok(Token::Ident(op @ "any"))) => {
self.t.next();
let mut e = Vec::new();
self.eat(Token::LeftParen)?;
@ -130,7 +131,7 @@ impl<'a> Parser<'a> {
e.push(self.expr()?);
if !self.try(Token::Comma) {
self.eat(Token::RightParen)?;
break
break;
}
}
if op == "all" {
@ -147,11 +148,11 @@ impl<'a> Parser<'a> {
Ok(CfgExpr::Not(Box::new(e)))
}
Some(&Ok(..)) => self.cfg().map(CfgExpr::Value),
Some(&Err(..)) => {
Err(self.t.next().unwrap().err().unwrap())
}
None => bail!("expected start of a cfg expression, \
found nothing"),
Some(&Err(..)) => Err(self.t.next().unwrap().err().unwrap()),
None => bail!(
"expected start of a cfg expression, \
found nothing"
),
}
}
@ -161,8 +162,7 @@ impl<'a> Parser<'a> {
let e = if self.try(Token::Equals) {
let val = match self.t.next() {
Some(Ok(Token::String(s))) => s,
Some(Ok(t)) => bail!("expected a string, found {}",
t.classify()),
Some(Ok(t)) => bail!("expected a string, found {}", t.classify()),
Some(Err(e)) => return Err(e),
None => bail!("expected a string, found nothing"),
};
@ -190,8 +190,7 @@ impl<'a> Parser<'a> {
fn eat(&mut self, token: Token<'a>) -> CargoResult<()> {
match self.t.next() {
Some(Ok(ref t)) if token == *t => Ok(()),
Some(Ok(t)) => bail!("expected {}, found {}", token.classify(),
t.classify()),
Some(Ok(t)) => bail!("expected {}, found {}", token.classify(), t.classify()),
Some(Err(e)) => Err(e),
None => bail!("expected {}, but cfg expr ended", token.classify()),
}
@ -212,28 +211,31 @@ impl<'a> Iterator for Tokenizer<'a> {
Some((start, '"')) => {
while let Some((end, ch)) = self.s.next() {
if ch == '"' {
return Some(Ok(Token::String(&self.orig[start+1..end])))
return Some(Ok(Token::String(&self.orig[start + 1..end])));
}
}
return Some(Err(format_err!("unterminated string in cfg")))
return Some(Err(format_err!("unterminated string in cfg")));
}
Some((start, ch)) if is_ident_start(ch) => {
while let Some(&(end, ch)) = self.s.peek() {
if !is_ident_rest(ch) {
return Some(Ok(Token::Ident(&self.orig[start..end])))
return Some(Ok(Token::Ident(&self.orig[start..end])));
} else {
self.s.next();
}
}
return Some(Ok(Token::Ident(&self.orig[start..])))
return Some(Ok(Token::Ident(&self.orig[start..])));
}
Some((_, ch)) => {
return Some(Err(format_err!("unexpected character in \
cfg `{}`, expected parens, \
a comma, an identifier, or \
a string", ch)))
return Some(Err(format_err!(
"unexpected character in \
cfg `{}`, expected parens, \
a comma, an identifier, or \
a string",
ch
)))
}
None => return None
None => return None,
}
}
}
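
Since CfgExpr implements FromStr, the parser above is driven with an ordinary parse() call. A small usage sketch, assuming the re-exported cargo::util::CfgExpr:

extern crate cargo;

use cargo::util::CfgExpr;

fn main() {
    // One top-level expression is accepted...
    let _e: CfgExpr = "all(unix, target_os = \"linux\")".parse().unwrap();
    // ...but a second expression trips the "can only have one" check.
    assert!("unix windows".parse::<CfgExpr>().is_err());
}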


@ -19,12 +19,12 @@ use toml;
use lazycell::LazyCell;
use core::shell::Verbosity;
use core::{Shell, CliUnstable, SourceId};
use core::{CliUnstable, Shell, SourceId};
use ops;
use url::Url;
use util::ToUrl;
use util::Rustc;
use util::errors::{CargoResult, CargoResultExt, CargoError, internal};
use util::errors::{internal, CargoError, CargoResult, CargoResultExt};
use util::paths;
use util::toml as cargo_toml;
use util::Filesystem;
@ -68,9 +68,7 @@ pub struct Config {
}
impl Config {
pub fn new(shell: Shell,
cwd: PathBuf,
homedir: PathBuf) -> Config {
pub fn new(shell: Shell, cwd: PathBuf, homedir: PathBuf) -> Config {
static mut GLOBAL_JOBSERVER: *mut jobserver::Client = 0 as *mut _;
static INIT: Once = ONCE_INIT;
@ -108,18 +106,21 @@ impl Config {
pub fn default() -> CargoResult<Config> {
let shell = Shell::new();
let cwd = env::current_dir().chain_err(|| {
"couldn't get the current directory of the process"
})?;
let cwd =
env::current_dir().chain_err(|| "couldn't get the current directory of the process")?;
let homedir = homedir(&cwd).ok_or_else(|| {
format_err!("Cargo couldn't find your home directory. \
This probably means that $HOME was not set.")
format_err!(
"Cargo couldn't find your home directory. \
This probably means that $HOME was not set."
)
})?;
Ok(Config::new(shell, cwd, homedir))
}
/// The user's cargo home directory (OS-dependent)
pub fn home(&self) -> &Filesystem { &self.home_path }
pub fn home(&self) -> &Filesystem {
&self.home_path
}
/// The cargo git directory (`<cargo_home>/git`)
pub fn git_path(&self) -> Filesystem {
@ -148,66 +149,74 @@ impl Config {
/// Get the path to the `rustdoc` executable
pub fn rustdoc(&self) -> CargoResult<&Path> {
self.rustdoc.try_borrow_with(|| self.get_tool("rustdoc")).map(AsRef::as_ref)
self.rustdoc
.try_borrow_with(|| self.get_tool("rustdoc"))
.map(AsRef::as_ref)
}
/// Get the path to the `rustc` executable
pub fn rustc(&self) -> CargoResult<&Rustc> {
self.rustc.try_borrow_with(|| Rustc::new(self.get_tool("rustc")?,
self.maybe_get_tool("rustc_wrapper")?))
self.rustc.try_borrow_with(|| {
Rustc::new(
self.get_tool("rustc")?,
self.maybe_get_tool("rustc_wrapper")?,
)
})
}
/// Get the path to the `cargo` executable
pub fn cargo_exe(&self) -> CargoResult<&Path> {
self.cargo_exe.try_borrow_with(|| {
fn from_current_exe() -> CargoResult<PathBuf> {
// Try fetching the path to `cargo` using env::current_exe().
// The method varies per operating system and might fail; in particular,
// it depends on /proc being mounted on Linux, and some environments
// (like containers or chroots) may not have that available.
let exe = env::current_exe()?.canonicalize()?;
Ok(exe)
}
fn from_argv() -> CargoResult<PathBuf> {
// Grab argv[0] and attempt to resolve it to an absolute path.
// If argv[0] has one component, it must have come from a PATH lookup,
// so probe PATH in that case.
// Otherwise, it has multiple components and is either:
// - a relative path (e.g. `./cargo`, `target/debug/cargo`), or
// - an absolute path (e.g. `/usr/local/bin/cargo`).
// In either case, Path::canonicalize will return the full absolute path
// to the target if it exists
let argv0 = env::args_os()
.map(PathBuf::from)
.next()
.ok_or(format_err!("no argv[0]"))?;
if argv0.components().count() == 1 {
probe_path(argv0)
} else {
Ok(argv0.canonicalize()?)
self.cargo_exe
.try_borrow_with(|| {
fn from_current_exe() -> CargoResult<PathBuf> {
// Try fetching the path to `cargo` using env::current_exe().
// The method varies per operating system and might fail; in particular,
// it depends on /proc being mounted on Linux, and some environments
// (like containers or chroots) may not have that available.
let exe = env::current_exe()?.canonicalize()?;
Ok(exe)
}
}
fn probe_path(argv0: PathBuf) -> CargoResult<PathBuf> {
let paths = env::var_os("PATH").ok_or(format_err!("no PATH"))?;
for path in env::split_paths(&paths) {
let candidate = PathBuf::from(path).join(&argv0);
if candidate.is_file() {
// PATH may have a component like "." in it, so we still need to
// canonicalize.
return Ok(candidate.canonicalize()?)
fn from_argv() -> CargoResult<PathBuf> {
// Grab argv[0] and attempt to resolve it to an absolute path.
// If argv[0] has one component, it must have come from a PATH lookup,
// so probe PATH in that case.
// Otherwise, it has multiple components and is either:
// - a relative path (e.g. `./cargo`, `target/debug/cargo`), or
// - an absolute path (e.g. `/usr/local/bin/cargo`).
// In either case, Path::canonicalize will return the full absolute path
// to the target if it exists
let argv0 = env::args_os()
.map(PathBuf::from)
.next()
.ok_or(format_err!("no argv[0]"))?;
if argv0.components().count() == 1 {
probe_path(argv0)
} else {
Ok(argv0.canonicalize()?)
}
}
bail!("no cargo executable candidate found in PATH")
}
fn probe_path(argv0: PathBuf) -> CargoResult<PathBuf> {
let paths = env::var_os("PATH").ok_or(format_err!("no PATH"))?;
for path in env::split_paths(&paths) {
let candidate = PathBuf::from(path).join(&argv0);
if candidate.is_file() {
// PATH may have a component like "." in it, so we still need to
// canonicalize.
return Ok(candidate.canonicalize()?);
}
}
let exe = from_current_exe()
.or_else(|_| from_argv())
.chain_err(|| "couldn't get the path to cargo executable")?;
Ok(exe)
}).map(AsRef::as_ref)
bail!("no cargo executable candidate found in PATH")
}
let exe = from_current_exe()
.or_else(|_| from_argv())
.chain_err(|| "couldn't get the path to cargo executable")?;
Ok(exe)
})
.map(AsRef::as_ref)
}
pub fn values(&self) -> CargoResult<&HashMap<String, ConfigValue>> {
@ -224,7 +233,9 @@ impl Config {
}
}
pub fn cwd(&self) -> &Path { &self.cwd }
pub fn cwd(&self) -> &Path {
&self.cwd
}
pub fn target_dir(&self) -> CargoResult<Option<Filesystem>> {
if let Some(dir) = env::var_os("CARGO_TARGET_DIR") {
@ -252,16 +263,19 @@ impl Config {
None => return Ok(None),
}
}
CV::Integer(_, ref path) |
CV::String(_, ref path) |
CV::List(_, ref path) |
CV::Boolean(_, ref path) => {
let idx = key.split('.').take(i)
.fold(0, |n, s| n + s.len()) + i - 1;
CV::Integer(_, ref path)
| CV::String(_, ref path)
| CV::List(_, ref path)
| CV::Boolean(_, ref path) => {
let idx = key.split('.').take(i).fold(0, |n, s| n + s.len()) + i - 1;
let key_so_far = &key[..idx];
bail!("expected table for configuration key `{}`, \
but found {} in {}",
key_so_far, val.desc(), path.display())
bail!(
"expected table for configuration key `{}`, \
but found {} in {}",
key_so_far,
val.desc(),
path.display()
)
}
}
}
@ -269,35 +283,32 @@ impl Config {
}
fn get_env<V: FromStr>(&self, key: &str) -> CargoResult<Option<Value<V>>>
where CargoError: From<V::Err>
where
CargoError: From<V::Err>,
{
let key = key.replace(".", "_")
.replace("-", "_")
.chars()
.flat_map(|c| c.to_uppercase())
.collect::<String>();
.replace("-", "_")
.chars()
.flat_map(|c| c.to_uppercase())
.collect::<String>();
match env::var(&format!("CARGO_{}", key)) {
Ok(value) => {
Ok(Some(Value {
val: value.parse()?,
definition: Definition::Environment,
}))
}
Ok(value) => Ok(Some(Value {
val: value.parse()?,
definition: Definition::Environment,
})),
Err(..) => Ok(None),
}
}
pub fn get_string(&self, key: &str) -> CargoResult<Option<Value<String>>> {
if let Some(v) = self.get_env(key)? {
return Ok(Some(v))
return Ok(Some(v));
}
match self.get(key)? {
Some(CV::String(i, path)) => {
Ok(Some(Value {
val: i,
definition: Definition::Path(path),
}))
}
Some(CV::String(i, path)) => Ok(Some(Value {
val: i,
definition: Definition::Path(path),
})),
Some(val) => self.expected("string", key, val),
None => Ok(None),
}
@ -305,23 +316,20 @@ impl Config {
pub fn get_bool(&self, key: &str) -> CargoResult<Option<Value<bool>>> {
if let Some(v) = self.get_env(key)? {
return Ok(Some(v))
return Ok(Some(v));
}
match self.get(key)? {
Some(CV::Boolean(b, path)) => {
Ok(Some(Value {
val: b,
definition: Definition::Path(path),
}))
}
Some(CV::Boolean(b, path)) => Ok(Some(Value {
val: b,
definition: Definition::Path(path),
})),
Some(val) => self.expected("bool", key, val),
None => Ok(None),
}
}
fn string_to_path(&self, value: String, definition: &Definition) -> PathBuf {
let is_path = value.contains('/') ||
(cfg!(windows) && value.contains('\\'));
let is_path = value.contains('/') || (cfg!(windows) && value.contains('\\'));
if is_path {
definition.root(self).join(value)
} else {
@ -334,79 +342,74 @@ impl Config {
if let Some(val) = self.get_string(key)? {
Ok(Some(Value {
val: self.string_to_path(val.val, &val.definition),
definition: val.definition
definition: val.definition,
}))
} else {
Ok(None)
}
}
pub fn get_path_and_args(&self, key: &str)
-> CargoResult<Option<Value<(PathBuf, Vec<String>)>>> {
pub fn get_path_and_args(
&self,
key: &str,
) -> CargoResult<Option<Value<(PathBuf, Vec<String>)>>> {
if let Some(mut val) = self.get_list_or_split_string(key)? {
if !val.val.is_empty() {
return Ok(Some(Value {
val: (self.string_to_path(val.val.remove(0), &val.definition), val.val),
definition: val.definition
val: (
self.string_to_path(val.val.remove(0), &val.definition),
val.val,
),
definition: val.definition,
}));
}
}
Ok(None)
}
pub fn get_list(&self, key: &str)
-> CargoResult<Option<Value<Vec<(String, PathBuf)>>>> {
pub fn get_list(&self, key: &str) -> CargoResult<Option<Value<Vec<(String, PathBuf)>>>> {
match self.get(key)? {
Some(CV::List(i, path)) => {
Ok(Some(Value {
val: i,
definition: Definition::Path(path),
}))
}
Some(CV::List(i, path)) => Ok(Some(Value {
val: i,
definition: Definition::Path(path),
})),
Some(val) => self.expected("list", key, val),
None => Ok(None),
}
}
pub fn get_list_or_split_string(&self, key: &str)
-> CargoResult<Option<Value<Vec<String>>>> {
pub fn get_list_or_split_string(&self, key: &str) -> CargoResult<Option<Value<Vec<String>>>> {
match self.get_env::<String>(key) {
Ok(Some(value)) =>
Ok(Some(value)) => {
return Ok(Some(Value {
val: value.val.split(' ').map(str::to_string).collect(),
definition: value.definition
})),
definition: value.definition,
}))
}
Err(err) => return Err(err),
Ok(None) => (),
}
match self.get(key)? {
Some(CV::List(i, path)) => {
Ok(Some(Value {
val: i.into_iter().map(|(s, _)| s).collect(),
definition: Definition::Path(path),
}))
}
Some(CV::String(i, path)) => {
Ok(Some(Value {
val: i.split(' ').map(str::to_string).collect(),
definition: Definition::Path(path),
}))
}
Some(CV::List(i, path)) => Ok(Some(Value {
val: i.into_iter().map(|(s, _)| s).collect(),
definition: Definition::Path(path),
})),
Some(CV::String(i, path)) => Ok(Some(Value {
val: i.split(' ').map(str::to_string).collect(),
definition: Definition::Path(path),
})),
Some(val) => self.expected("list or string", key, val),
None => Ok(None),
}
}
pub fn get_table(&self, key: &str)
-> CargoResult<Option<Value<HashMap<String, CV>>>> {
pub fn get_table(&self, key: &str) -> CargoResult<Option<Value<HashMap<String, CV>>>> {
match self.get(key)? {
Some(CV::Table(i, path)) => {
Ok(Some(Value {
val: i,
definition: Definition::Path(path),
}))
}
Some(CV::Table(i, path)) => Ok(Some(Value {
val: i,
definition: Definition::Path(path),
})),
Some(val) => self.expected("table", key, val),
None => Ok(None),
}
@ -414,15 +417,13 @@ impl Config {
pub fn get_i64(&self, key: &str) -> CargoResult<Option<Value<i64>>> {
if let Some(v) = self.get_env(key)? {
return Ok(Some(v))
return Ok(Some(v));
}
match self.get(key)? {
Some(CV::Integer(i, path)) => {
Ok(Some(Value {
val: i,
definition: Definition::Path(path),
}))
}
Some(CV::Integer(i, path)) => Ok(Some(Value {
val: i,
definition: Definition::Path(path),
})),
Some(val) => self.expected("integer", key, val),
None => Ok(None),
}
@ -433,8 +434,11 @@ impl Config {
Some(v) => {
let value = v.val;
if value < 0 {
bail!("net.retry must be positive, but found {} in {}",
v.val, v.definition)
bail!(
"net.retry must be positive, but found {} in {}",
v.val,
v.definition
)
} else {
Ok(value)
}
@ -444,20 +448,21 @@ impl Config {
}
pub fn expected<T>(&self, ty: &str, key: &str, val: CV) -> CargoResult<T> {
val.expected(ty, key).map_err(|e| {
format_err!("invalid configuration for key `{}`\n{}", key, e)
})
val.expected(ty, key)
.map_err(|e| format_err!("invalid configuration for key `{}`\n{}", key, e))
}
pub fn configure(&mut self,
verbose: u32,
quiet: Option<bool>,
color: &Option<String>,
frozen: bool,
locked: bool,
unstable_flags: &[String]) -> CargoResult<()> {
pub fn configure(
&mut self,
verbose: u32,
quiet: Option<bool>,
color: &Option<String>,
frozen: bool,
locked: bool,
unstable_flags: &[String],
) -> CargoResult<()> {
let extra_verbose = verbose >= 2;
let verbose = if verbose == 0 {None} else {Some(true)};
let verbose = if verbose == 0 { None } else { Some(true) };
// Ignore errors in the configuration files.
let cfg_verbose = self.get_bool("term.verbose").unwrap_or(None).map(|v| v.val);
@ -466,8 +471,7 @@ impl Config {
let color = color.as_ref().or_else(|| cfg_color.as_ref());
let verbosity = match (verbose, cfg_verbose, quiet) {
(Some(true), _, None) |
(None, Some(true), None) => Verbosity::Verbose,
(Some(true), _, None) | (None, Some(true), None) => Verbosity::Verbose,
// command line takes precedence over configuration, so ignore the
// configuration.
@ -482,11 +486,10 @@ impl Config {
// Can't actually get `Some(false)` as a value from the command
// line, so just ignore them here to appease exhaustiveness checking
// in match statements.
(Some(false), _, _) |
(_, _, Some(false)) |
(None, Some(false), None) |
(None, None, None) => Verbosity::Normal,
(Some(false), _, _)
| (_, _, Some(false))
| (None, Some(false), None)
| (None, None, None) => Verbosity::Normal,
};
self.shell().set_verbosity(verbosity);
@ -526,23 +529,19 @@ impl Config {
walk_tree(&self.cwd, |path| {
let mut contents = String::new();
let mut file = File::open(&path)?;
file.read_to_string(&mut contents).chain_err(|| {
format!("failed to read configuration file `{}`",
path.display())
})?;
let toml = cargo_toml::parse(&contents,
path,
self).chain_err(|| {
format!("could not parse TOML configuration in `{}`",
path.display())
file.read_to_string(&mut contents)
.chain_err(|| format!("failed to read configuration file `{}`", path.display()))?;
let toml = cargo_toml::parse(&contents, path, self).chain_err(|| {
format!("could not parse TOML configuration in `{}`", path.display())
})?;
let value = CV::from_toml(path, toml).chain_err(|| {
format!("failed to load TOML configuration from `{}`",
path.display())
})?;
cfg.merge(value).chain_err(|| {
format!("failed to merge configuration at `{}`", path.display())
format!(
"failed to load TOML configuration from `{}`",
path.display()
)
})?;
cfg.merge(value)
.chain_err(|| format!("failed to merge configuration at `{}`", path.display()))?;
Ok(())
}).chain_err(|| "Couldn't load Cargo configuration")?;
@ -555,16 +554,18 @@ impl Config {
/// Gets the index for a registry.
pub fn get_registry_index(&self, registry: &str) -> CargoResult<Url> {
Ok(match self.get_string(&format!("registries.{}.index", registry))? {
Some(index) => {
let url = index.val.to_url()?;
if url.username() != "" || url.password().is_some() {
bail!("Registry URLs may not contain credentials");
Ok(
match self.get_string(&format!("registries.{}.index", registry))? {
Some(index) => {
let url = index.val.to_url()?;
if url.username() != "" || url.password().is_some() {
bail!("Registry URLs may not contain credentials");
}
url
}
url
}
None => bail!("No index found for registry: `{}`", registry),
})
None => bail!("No index found for registry: `{}`", registry),
},
)
}
/// Loads credentials config from the credentials file into the ConfigValue object, if present.
@ -578,17 +579,24 @@ impl Config {
let mut contents = String::new();
let mut file = File::open(&credentials)?;
file.read_to_string(&mut contents).chain_err(|| {
format!("failed to read configuration file `{}`", credentials.display())
format!(
"failed to read configuration file `{}`",
credentials.display()
)
})?;
let toml = cargo_toml::parse(&contents,
&credentials,
self).chain_err(|| {
format!("could not parse TOML configuration in `{}`", credentials.display())
let toml = cargo_toml::parse(&contents, &credentials, self).chain_err(|| {
format!(
"could not parse TOML configuration in `{}`",
credentials.display()
)
})?;
let mut value = CV::from_toml(&credentials, toml).chain_err(|| {
format!("failed to load TOML configuration from `{}`", credentials.display())
format!(
"failed to load TOML configuration from `{}`",
credentials.display()
)
})?;
// backwards compatibility for old .cargo/credentials layout
@ -618,7 +626,9 @@ impl Config {
/// Look for a path for `tool` in an environment variable or config path, but return `None`
/// if it's not present.
fn maybe_get_tool(&self, tool: &str) -> CargoResult<Option<PathBuf>> {
let var = tool.chars().flat_map(|c| c.to_uppercase()).collect::<String>();
let var = tool.chars()
.flat_map(|c| c.to_uppercase())
.collect::<String>();
if let Some(tool_path) = env::var_os(&var) {
let maybe_relative = match tool_path.to_str() {
Some(s) => s.contains("/") || s.contains("\\"),
@ -629,7 +639,7 @@ impl Config {
} else {
PathBuf::from(tool_path)
};
return Ok(Some(path))
return Ok(Some(path));
}
let var = format!("build.{}", tool);
@ -652,9 +662,8 @@ impl Config {
}
pub fn http(&self) -> CargoResult<&RefCell<Easy>> {
let http = self.easy.try_borrow_with(|| {
ops::http_handle(self).map(RefCell::new)
})?;
let http = self.easy
.try_borrow_with(|| ops::http_handle(self).map(RefCell::new))?;
{
let mut http = http.borrow_mut();
http.reset();
@ -664,7 +673,8 @@ impl Config {
}
pub fn crates_io_source_id<F>(&self, f: F) -> CargoResult<SourceId>
where F: FnMut() -> CargoResult<SourceId>
where
F: FnMut() -> CargoResult<SourceId>,
{
Ok(self.crates_io_source_id.try_borrow_with(f)?.clone())
}
@ -673,10 +683,10 @@ impl Config {
#[derive(Eq, PartialEq, Clone, Copy)]
pub enum Location {
Project,
Global
Global,
}
#[derive(Eq,PartialEq,Clone,Deserialize)]
#[derive(Eq, PartialEq, Clone, Deserialize)]
pub enum ConfigValue {
Integer(i64, PathBuf),
String(String, PathBuf),
@ -698,16 +708,15 @@ pub enum Definition {
impl fmt::Debug for ConfigValue {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
CV::Integer(i, ref path) => write!(f, "{} (from {})", i,
path.display()),
CV::Boolean(b, ref path) => write!(f, "{} (from {})", b,
path.display()),
CV::String(ref s, ref path) => write!(f, "{} (from {})", s,
path.display()),
CV::Integer(i, ref path) => write!(f, "{} (from {})", i, path.display()),
CV::Boolean(b, ref path) => write!(f, "{} (from {})", b, path.display()),
CV::String(ref s, ref path) => write!(f, "{} (from {})", s, path.display()),
CV::List(ref list, ref path) => {
write!(f, "[")?;
for (i, &(ref s, ref path)) in list.iter().enumerate() {
if i > 0 { write!(f, ", ")?; }
if i > 0 {
write!(f, ", ")?;
}
write!(f, "{} (from {})", s, path.display())?;
}
write!(f, "] (from {})", path.display())
@ -738,25 +747,29 @@ impl ConfigValue {
toml::Value::String(val) => Ok(CV::String(val, path.to_path_buf())),
toml::Value::Boolean(b) => Ok(CV::Boolean(b, path.to_path_buf())),
toml::Value::Integer(i) => Ok(CV::Integer(i, path.to_path_buf())),
toml::Value::Array(val) => {
Ok(CV::List(val.into_iter().map(|toml| {
match toml {
toml::Value::Array(val) => Ok(CV::List(
val.into_iter()
.map(|toml| match toml {
toml::Value::String(val) => Ok((val, path.to_path_buf())),
v => bail!("expected string but found {} in list",
v.type_str()),
}
}).collect::<CargoResult<_>>()?, path.to_path_buf()))
}
toml::Value::Table(val) => {
Ok(CV::Table(val.into_iter().map(|(key, value)| {
let value = CV::from_toml(path, value).chain_err(|| {
format!("failed to parse key `{}`", key)
})?;
Ok((key, value))
}).collect::<CargoResult<_>>()?, path.to_path_buf()))
}
v => bail!("found TOML configuration value of unknown type `{}`",
v.type_str()),
v => bail!("expected string but found {} in list", v.type_str()),
})
.collect::<CargoResult<_>>()?,
path.to_path_buf(),
)),
toml::Value::Table(val) => Ok(CV::Table(
val.into_iter()
.map(|(key, value)| {
let value = CV::from_toml(path, value)
.chain_err(|| format!("failed to parse key `{}`", key))?;
Ok((key, value))
})
.collect::<CargoResult<_>>()?,
path.to_path_buf(),
)),
v => bail!(
"found TOML configuration value of unknown type `{}`",
v.type_str()
),
}
}
@ -765,21 +778,20 @@ impl ConfigValue {
CV::Boolean(s, _) => toml::Value::Boolean(s),
CV::String(s, _) => toml::Value::String(s),
CV::Integer(i, _) => toml::Value::Integer(i),
CV::List(l, _) => toml::Value::Array(l
.into_iter()
.map(|(s, _)| toml::Value::String(s))
.collect()),
CV::Table(l, _) => toml::Value::Table(l.into_iter()
.map(|(k, v)| (k, v.into_toml()))
.collect()),
CV::List(l, _) => {
toml::Value::Array(l.into_iter().map(|(s, _)| toml::Value::String(s)).collect())
}
CV::Table(l, _) => {
toml::Value::Table(l.into_iter().map(|(k, v)| (k, v.into_toml())).collect())
}
}
}
fn merge(&mut self, from: ConfigValue) -> CargoResult<()> {
match (self, from) {
(&mut CV::String(..), CV::String(..)) |
(&mut CV::Integer(..), CV::Integer(..)) |
(&mut CV::Boolean(..), CV::Boolean(..)) => {}
(&mut CV::String(..), CV::String(..))
| (&mut CV::Integer(..), CV::Integer(..))
| (&mut CV::Boolean(..), CV::Boolean(..)) => {}
(&mut CV::List(ref mut old, _), CV::List(ref mut new, _)) => {
let new = mem::replace(new, Vec::new());
old.extend(new.into_iter());
@ -792,23 +804,29 @@ impl ConfigValue {
let path = value.definition_path().to_path_buf();
let entry = entry.get_mut();
entry.merge(value).chain_err(|| {
format!("failed to merge key `{}` between \
files:\n \
file 1: {}\n \
file 2: {}",
key,
entry.definition_path().display(),
path.display())
format!(
"failed to merge key `{}` between \
files:\n \
file 1: {}\n \
file 2: {}",
key,
entry.definition_path().display(),
path.display()
)
})?;
}
Vacant(entry) => { entry.insert(value); }
Vacant(entry) => {
entry.insert(value);
}
};
}
}
(expected, found) => {
return Err(internal(format!("expected {}, but found {}",
expected.desc(), found.desc())))
return Err(internal(format!(
"expected {}, but found {}",
expected.desc(),
found.desc()
)))
}
}
@ -829,8 +847,7 @@ impl ConfigValue {
}
}
pub fn table(&self, key: &str)
-> CargoResult<(&HashMap<String, ConfigValue>, &Path)> {
pub fn table(&self, key: &str) -> CargoResult<(&HashMap<String, ConfigValue>, &Path)> {
match *self {
CV::Table(ref table, ref p) => Ok((table, p)),
_ => self.expected("table", key),
@ -862,19 +879,23 @@ impl ConfigValue {
}
pub fn definition_path(&self) -> &Path {
match *self {
CV::Boolean(_, ref p) |
CV::Integer(_, ref p) |
CV::String(_, ref p) |
CV::List(_, ref p) |
CV::Table(_, ref p) => p
match *self {
CV::Boolean(_, ref p)
| CV::Integer(_, ref p)
| CV::String(_, ref p)
| CV::List(_, ref p)
| CV::Table(_, ref p) => p,
}
}
pub fn expected<T>(&self, wanted: &str, key: &str) -> CargoResult<T> {
bail!("expected a {}, but found a {} for `{}` in {}",
wanted, self.desc(), key,
self.definition_path().display())
bail!(
"expected a {}, but found a {} for `{}` in {}",
wanted,
self.desc(),
key,
self.definition_path().display()
)
}
}
@ -901,7 +922,8 @@ pub fn homedir(cwd: &Path) -> Option<PathBuf> {
}
fn walk_tree<F>(pwd: &Path, mut walk: F) -> CargoResult<()>
where F: FnMut(&Path) -> CargoResult<()>
where
F: FnMut(&Path) -> CargoResult<()>,
{
let mut stash: HashSet<PathBuf> = HashSet::new();
@ -917,8 +939,10 @@ fn walk_tree<F>(pwd: &Path, mut walk: F) -> CargoResult<()>
// in our history to be sure we pick up that standard location for
// information.
let home = homedir(pwd).ok_or_else(|| {
format_err!("Cargo couldn't find your home directory. \
This probably means that $HOME was not set.")
format_err!(
"Cargo couldn't find your home directory. \
This probably means that $HOME was not set."
)
})?;
let config = home.join("config");
if !stash.contains(&config) && fs::metadata(&config).is_ok() {
@ -928,13 +952,11 @@ fn walk_tree<F>(pwd: &Path, mut walk: F) -> CargoResult<()>
Ok(())
}
pub fn save_credentials(cfg: &Config,
token: String,
registry: Option<String>) -> CargoResult<()> {
pub fn save_credentials(cfg: &Config, token: String, registry: Option<String>) -> CargoResult<()> {
let mut file = {
cfg.home_path.create_dir()?;
cfg.home_path.open_rw(Path::new("credentials"), cfg,
"credentials' config file")?
cfg.home_path
.open_rw(Path::new("credentials"), cfg, "credentials' config file")?
};
let (key, value) = {
@ -947,7 +969,10 @@ pub fn save_credentials(cfg: &Config,
if let Some(registry) = registry {
let mut map = HashMap::new();
map.insert(registry, table);
("registries".into(), CV::Table(map, file.path().to_path_buf()))
(
"registries".into(),
CV::Table(map, file.path().to_path_buf()),
)
} else {
("registry".into(), table)
}
@ -955,7 +980,10 @@ pub fn save_credentials(cfg: &Config,
let mut contents = String::new();
file.read_to_string(&mut contents).chain_err(|| {
format!("failed to read configuration file `{}`", file.path().display())
format!(
"failed to read configuration file `{}`",
file.path().display()
)
})?;
let mut toml = cargo_toml::parse(&contents, file.path(), cfg)?;
@ -964,12 +992,12 @@ pub fn save_credentials(cfg: &Config,
if let Some(token) = toml.as_table_mut().unwrap().remove("token") {
let mut map = HashMap::new();
map.insert("token".to_string(), token);
toml.as_table_mut().unwrap().insert("registry".into(), map.into());
toml.as_table_mut()
.unwrap()
.insert("registry".into(), map.into());
}
toml.as_table_mut()
.unwrap()
.insert(key, value.into_toml());
toml.as_table_mut().unwrap().insert(key, value.into_toml());
let contents = toml.to_string();
file.seek(SeekFrom::Start(0))?;
@ -980,7 +1008,7 @@ pub fn save_credentials(cfg: &Config,
return Ok(());
#[cfg(unix)]
fn set_permissions(file: & File, mode: u32) -> CargoResult<()> {
fn set_permissions(file: &File, mode: u32) -> CargoResult<()> {
use std::os::unix::fs::PermissionsExt;
let mut perms = file.metadata()?.permissions();
@ -991,7 +1019,7 @@ pub fn save_credentials(cfg: &Config,
#[cfg(not(unix))]
#[allow(unused)]
fn set_permissions(file: & File, mode: u32) -> CargoResult<()> {
fn set_permissions(file: &File, mode: u32) -> CargoResult<()> {
Ok(())
}
}
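
The environment fallback in get_env above derives the variable name mechanically from the config key. A sketch of just that mangling, mirroring the replace/uppercase chain in this file:

fn env_key(key: &str) -> String {
    let key = key.replace(".", "_")
        .replace("-", "_")
        .chars()
        .flat_map(|c| c.to_uppercase())
        .collect::<String>();
    format!("CARGO_{}", key)
}

fn main() {
    assert_eq!(env_key("build.rustc-wrapper"), "CARGO_BUILD_RUSTC_WRAPPER");
    assert_eq!(env_key("net.retry"), "CARGO_NET_RETRY");
}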


@ -8,7 +8,7 @@ use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::collections::{HashMap, HashSet};
use std::hash::Hash;
pub use self::Freshness::{Fresh, Dirty};
pub use self::Freshness::{Dirty, Fresh};
#[derive(Debug)]
pub struct DependencyQueue<K: Eq + Hash, V> {
@ -51,7 +51,10 @@ pub enum Freshness {
impl Freshness {
pub fn combine(&self, other: Freshness) -> Freshness {
match *self { Fresh => other, Dirty => Dirty }
match *self {
Fresh => other,
Dirty => Dirty,
}
}
}
@ -77,11 +80,7 @@ impl<K: Hash + Eq + Clone, V> DependencyQueue<K, V> {
///
/// It is assumed that any dependencies of this package will eventually also
/// be added to the dependency queue.
pub fn queue(&mut self,
fresh: Freshness,
key: K,
value: V,
dependencies: &[K]) -> &mut V {
pub fn queue(&mut self, fresh: Freshness, key: K, value: V, dependencies: &[K]) -> &mut V {
let slot = match self.dep_map.entry(key.clone()) {
Occupied(v) => return &mut v.into_mut().1,
Vacant(v) => v,
@ -94,8 +93,9 @@ impl<K: Hash + Eq + Clone, V> DependencyQueue<K, V> {
let mut my_dependencies = HashSet::new();
for dep in dependencies {
my_dependencies.insert(dep.clone());
let rev = self.reverse_dep_map.entry(dep.clone())
.or_insert_with(HashSet::new);
let rev = self.reverse_dep_map
.entry(dep.clone())
.or_insert_with(HashSet::new);
rev.insert(key.clone());
}
&mut slot.insert((my_dependencies, value)).1
@ -122,12 +122,13 @@ impl<K: Hash + Eq + Clone, V> DependencyQueue<K, V> {
results.insert(key.clone(), IN_PROGRESS);
let depth = 1 + map.get(&key)
.into_iter()
.flat_map(|it| it)
.map(|dep| depth(dep, map, results))
.max()
.unwrap_or(0);
let depth = 1
+ map.get(&key)
.into_iter()
.flat_map(|it| it)
.map(|dep| depth(dep, map, results))
.max()
.unwrap_or(0);
*results.get_mut(key).unwrap() = depth;
@ -150,16 +151,21 @@ impl<K: Hash + Eq + Clone, V> DependencyQueue<K, V> {
// TODO: it'd be best here to throw in a heuristic of crate size as
// well. For example how long did this crate historically take to
// compile? How large is its source code? etc.
let next = self.dep_map.iter()
let next = self.dep_map
.iter()
.filter(|&(_, &(ref deps, _))| deps.is_empty())
.map(|(key, _)| key.clone())
.max_by_key(|k| self.depth[k]);
let key = match next {
Some(key) => key,
None => return None
None => return None,
};
let (_, data) = self.dep_map.remove(&key).unwrap();
let fresh = if self.dirty.contains(&key) {Dirty} else {Fresh};
let fresh = if self.dirty.contains(&key) {
Dirty
} else {
Fresh
};
self.pending.insert(key.clone());
Some((fresh, key, data))
}
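
The depth calculation above memoizes a simple recurrence: a key's depth is one more than the deepest of its dependencies. A standalone sketch of the same recursion, minus the in-progress cycle guard:

use std::collections::{HashMap, HashSet};

fn depth<'a>(
    key: &'a str,
    deps: &HashMap<&'a str, HashSet<&'a str>>,
    results: &mut HashMap<&'a str, usize>,
) -> usize {
    if let Some(&d) = results.get(key) {
        return d; // already computed
    }
    let d = 1 + deps.get(key)
        .into_iter()
        .flat_map(|it| it)
        .map(|dep| depth(dep, deps, results))
        .max()
        .unwrap_or(0);
    results.insert(key, d);
    d
}

fn main() {
    let mut deps = HashMap::new();
    deps.insert("bin", ["lib"].iter().cloned().collect::<HashSet<_>>());
    deps.insert("lib", HashSet::new());
    let mut results = HashMap::new();
    assert_eq!(depth("bin", &deps, &mut results), 2); // leaf "lib" sits at depth 1
}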


@ -1,7 +1,7 @@
#![allow(unknown_lints)]
use std::fmt;
use std::process::{Output, ExitStatus};
use std::process::{ExitStatus, Output};
use std::str;
use core::{TargetKind, Workspace};
@ -13,16 +13,19 @@ pub type CargoResult<T> = Result<T, Error>;
pub trait CargoResultExt<T, E> {
fn chain_err<F, D>(self, f: F) -> Result<T, Context<D>>
where F: FnOnce() -> D,
D: fmt::Display + Send + Sync + 'static;
where
F: FnOnce() -> D,
D: fmt::Display + Send + Sync + 'static;
}
impl<T, E> CargoResultExt<T, E> for Result<T, E>
where E: Into<Error>,
where
E: Into<Error>,
{
fn chain_err<F, D>(self, f: F) -> Result<T, Context<D>>
where F: FnOnce() -> D,
D: fmt::Display + Send + Sync + 'static,
where
F: FnOnce() -> D,
D: fmt::Display + Send + Sync + 'static,
{
self.map_err(|failure| {
let context = f();
@ -93,7 +96,11 @@ pub struct CargoTestError {
pub enum Test {
Multiple,
Doc,
UnitTest{kind: TargetKind, name: String, pkg_name: String}
UnitTest {
kind: TargetKind,
name: String,
pkg_name: String,
},
}
impl CargoTestError {
@ -101,9 +108,11 @@ impl CargoTestError {
if errors.is_empty() {
panic!("Cannot create CargoTestError from empty Vec")
}
let desc = errors.iter().map(|error| error.desc.clone())
.collect::<Vec<String>>()
.join("\n");
let desc = errors
.iter()
.map(|error| error.desc.clone())
.collect::<Vec<String>>()
.join("\n");
CargoTestError {
test,
desc,
@ -114,7 +123,11 @@ impl CargoTestError {
pub fn hint(&self, ws: &Workspace) -> String {
match self.test {
Test::UnitTest{ref kind, ref name, ref pkg_name} => {
Test::UnitTest {
ref kind,
ref name,
ref pkg_name,
} => {
let pkg_info = if ws.members().count() > 1 && ws.is_virtual() {
format!("-p {} ", pkg_name)
} else {
@ -122,21 +135,24 @@ impl CargoTestError {
};
match *kind {
TargetKind::Bench =>
format!("test failed, to rerun pass '{}--bench {}'", pkg_info, name),
TargetKind::Bin =>
format!("test failed, to rerun pass '{}--bin {}'", pkg_info, name),
TargetKind::Lib(_) =>
format!("test failed, to rerun pass '{}--lib'", pkg_info),
TargetKind::Test =>
format!("test failed, to rerun pass '{}--test {}'", pkg_info, name),
TargetKind::ExampleBin | TargetKind::ExampleLib(_) =>
format!("test failed, to rerun pass '{}--example {}", pkg_info, name),
_ => "test failed.".into()
TargetKind::Bench => {
format!("test failed, to rerun pass '{}--bench {}'", pkg_info, name)
}
TargetKind::Bin => {
format!("test failed, to rerun pass '{}--bin {}'", pkg_info, name)
}
TargetKind::Lib(_) => format!("test failed, to rerun pass '{}--lib'", pkg_info),
TargetKind::Test => {
format!("test failed, to rerun pass '{}--test {}'", pkg_info, name)
}
TargetKind::ExampleBin | TargetKind::ExampleLib(_) => {
format!("test failed, to rerun pass '{}--example {}", pkg_info, name)
}
_ => "test failed.".into(),
}
},
}
Test::Doc => "test failed, to rerun pass '--doc'".into(),
_ => "test failed.".into()
_ => "test failed.".into(),
}
}
}
@ -150,17 +166,25 @@ pub type CliResult = Result<(), CliError>;
pub struct CliError {
pub error: Option<CargoError>,
pub unknown: bool,
pub exit_code: i32
pub exit_code: i32,
}
impl CliError {
pub fn new(error: CargoError, code: i32) -> CliError {
let unknown = error.downcast_ref::<Internal>().is_some();
CliError { error: Some(error), exit_code: code, unknown }
CliError {
error: Some(error),
exit_code: code,
unknown,
}
}
pub fn code(code: i32) -> CliError {
CliError { error: None, exit_code: code, unknown: false }
CliError {
error: None,
exit_code: code,
unknown: false,
}
}
}
@ -177,14 +201,14 @@ impl From<clap::Error> for CliError {
}
}
// =============================================================================
// Construction helpers
pub fn process_error(msg: &str,
status: Option<&ExitStatus>,
output: Option<&Output>) -> ProcessError
{
pub fn process_error(
msg: &str,
status: Option<&ExitStatus>,
output: Option<&Output>,
) -> ProcessError {
let exit = match status {
Some(s) => status_to_string(s),
None => "never executed".to_string(),


@ -1,16 +1,16 @@
use std::fs::{self, File, OpenOptions};
use std::io::{Seek, Read, Write, SeekFrom};
use std::io::{Read, Seek, SeekFrom, Write};
use std::io;
use std::path::{Path, PathBuf, Display};
use std::path::{Display, Path, PathBuf};
use termcolor::Color::Cyan;
use fs2::{FileExt, lock_contended_error};
use fs2::{lock_contended_error, FileExt};
#[allow(unused_imports)]
use libc;
use util::Config;
use util::paths;
use util::errors::{CargoResult, CargoResultExt, CargoError};
use util::errors::{CargoError, CargoResult, CargoResultExt};
pub struct FileLock {
f: Option<File>,
@ -55,7 +55,7 @@ impl FileLock {
for entry in path.parent().unwrap().read_dir()? {
let entry = entry?;
if Some(&entry.file_name()[..]) == path.file_name() {
continue
continue;
}
let kind = entry.file_type()?;
if kind.is_dir() {
@ -160,17 +160,17 @@ impl Filesystem {
///
/// The returned file can be accessed to look at the path and also has
/// read/write access to the underlying file.
pub fn open_rw<P>(&self,
path: P,
config: &Config,
msg: &str) -> CargoResult<FileLock>
where P: AsRef<Path>
pub fn open_rw<P>(&self, path: P, config: &Config, msg: &str) -> CargoResult<FileLock>
where
P: AsRef<Path>,
{
self.open(path.as_ref(),
OpenOptions::new().read(true).write(true).create(true),
State::Exclusive,
config,
msg)
self.open(
path.as_ref(),
OpenOptions::new().read(true).write(true).create(true),
State::Exclusive,
config,
msg,
)
}
/// Opens shared access to a file, returning the locked version of a file.
@ -182,55 +182,60 @@ impl Filesystem {
/// The returned file can be accessed to look at the path and also has read
/// access to the underlying file. Any writes to the file will return an
/// error.
pub fn open_ro<P>(&self,
path: P,
config: &Config,
msg: &str) -> CargoResult<FileLock>
where P: AsRef<Path>
pub fn open_ro<P>(&self, path: P, config: &Config, msg: &str) -> CargoResult<FileLock>
where
P: AsRef<Path>,
{
self.open(path.as_ref(),
OpenOptions::new().read(true),
State::Shared,
config,
msg)
self.open(
path.as_ref(),
OpenOptions::new().read(true),
State::Shared,
config,
msg,
)
}
fn open(&self,
path: &Path,
opts: &OpenOptions,
state: State,
config: &Config,
msg: &str) -> CargoResult<FileLock> {
fn open(
&self,
path: &Path,
opts: &OpenOptions,
state: State,
config: &Config,
msg: &str,
) -> CargoResult<FileLock> {
let path = self.root.join(path);
// If we want an exclusive lock then if we fail because of NotFound it's
// likely because an intermediate directory didn't exist, so try to
// create the directory and then continue.
let f = opts.open(&path).or_else(|e| {
if e.kind() == io::ErrorKind::NotFound && state == State::Exclusive {
create_dir_all(path.parent().unwrap())?;
opts.open(&path)
} else {
Err(e)
}
}).chain_err(|| {
format!("failed to open: {}", path.display())
})?;
let f = opts.open(&path)
.or_else(|e| {
if e.kind() == io::ErrorKind::NotFound && state == State::Exclusive {
create_dir_all(path.parent().unwrap())?;
opts.open(&path)
} else {
Err(e)
}
})
.chain_err(|| format!("failed to open: {}", path.display()))?;
match state {
State::Exclusive => {
acquire(config, msg, &path,
&|| f.try_lock_exclusive(),
&|| f.lock_exclusive())?;
acquire(config, msg, &path, &|| f.try_lock_exclusive(), &|| {
f.lock_exclusive()
})?;
}
State::Shared => {
acquire(config, msg, &path,
&|| f.try_lock_shared(),
&|| f.lock_shared())?;
acquire(config, msg, &path, &|| f.try_lock_shared(), &|| {
f.lock_shared()
})?;
}
State::Unlocked => {}
}
Ok(FileLock { f: Some(f), path, state })
Ok(FileLock {
f: Some(f),
path,
state,
})
}
}
@ -261,12 +266,13 @@ impl PartialEq<Filesystem> for Path {
///
/// Returns an error if the lock could not be acquired or if any error other
/// than a contention error happens.
fn acquire(config: &Config,
msg: &str,
path: &Path,
try: &Fn() -> io::Result<()>,
block: &Fn() -> io::Result<()>) -> CargoResult<()> {
fn acquire(
config: &Config,
msg: &str,
path: &Path,
try: &Fn() -> io::Result<()>,
block: &Fn() -> io::Result<()>,
) -> CargoResult<()> {
// File locking on Unix is currently implemented via `flock`, which is known
// to be broken on NFS. We could in theory just ignore errors that happen on
// NFS, but apparently the failure mode [1] for `flock` on NFS is **blocking
@ -278,7 +284,7 @@ fn acquire(config: &Config,
//
// [1]: https://github.com/rust-lang/cargo/issues/2615
if is_on_nfs_mount(path) {
return Ok(())
return Ok(());
}
match try() {
@ -289,25 +295,29 @@ fn acquire(config: &Config,
// implement file locking. We detect that here via the return value of
// locking (e.g. inspecting errno).
#[cfg(unix)]
Err(ref e) if e.raw_os_error() == Some(libc::ENOTSUP) => return Ok(()),
Err(ref e) if e.raw_os_error() == Some(libc::ENOTSUP) =>
{
return Ok(())
}
#[cfg(target_os = "linux")]
Err(ref e) if e.raw_os_error() == Some(libc::ENOSYS) => return Ok(()),
Err(ref e) if e.raw_os_error() == Some(libc::ENOSYS) =>
{
return Ok(())
}
Err(e) => {
if e.raw_os_error() != lock_contended_error().raw_os_error() {
let e = CargoError::from(e);
let cx = format!("failed to lock file: {}", path.display());
return Err(e.context(cx).into())
return Err(e.context(cx).into());
}
}
}
let msg = format!("waiting for file lock on {}", msg);
config.shell().status_with_color("Blocking", &msg, Cyan)?;
block().chain_err(|| {
format!("failed to lock file: {}", path.display())
})?;
block().chain_err(|| format!("failed to lock file: {}", path.display()))?;
return Ok(());
#[cfg(all(target_os = "linux", not(target_env = "musl")))]
@ -341,7 +351,7 @@ fn create_dir_all(path: &Path) -> io::Result<()> {
Err(e) => {
if e.kind() == io::ErrorKind::NotFound {
if let Some(p) = path.parent() {
return create_dir_all(p).and_then(|()| create_dir(path))
return create_dir_all(p).and_then(|()| create_dir(path));
}
}
Err(e)
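
The acquire() helper above follows a try-then-block shape: attempt a non-blocking lock, tell the user about contention, then fall back to the blocking call. A standalone sketch against the fs2 API:

extern crate fs2;

use std::fs::File;
use std::io;
use fs2::{lock_contended_error, FileExt};

fn lock_exclusive_noisily(f: &File, what: &str) -> io::Result<()> {
    match f.try_lock_exclusive() {
        Ok(()) => return Ok(()),
        Err(ref e) if e.raw_os_error() == lock_contended_error().raw_os_error() => {
            // Contention: warn, then wait below.
            eprintln!("Blocking: waiting for file lock on {}", what);
        }
        Err(e) => return Err(e), // a real error, not mere contention
    }
    f.lock_exclusive()
}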


@ -4,12 +4,12 @@ use std::collections::hash_set::{HashSet, Iter};
use std::collections::hash_map::{HashMap, Keys};
pub struct Graph<N> {
nodes: HashMap<N, HashSet<N>>
nodes: HashMap<N, HashSet<N>>,
}
enum Mark {
InProgress,
Done
Done,
}
pub type Nodes<'a, N> = Keys<'a, N, HashSet<N>>;
@ -17,17 +17,23 @@ pub type Edges<'a, N> = Iter<'a, N>;
impl<N: Eq + Hash + Clone> Graph<N> {
pub fn new() -> Graph<N> {
Graph { nodes: HashMap::new() }
Graph {
nodes: HashMap::new(),
}
}
pub fn add(&mut self, node: N, children: &[N]) {
self.nodes.entry(node)
self.nodes
.entry(node)
.or_insert_with(HashSet::new)
.extend(children.iter().cloned());
}
pub fn link(&mut self, node: N, child: N) {
self.nodes.entry(node).or_insert_with(HashSet::new).insert(child);
self.nodes
.entry(node)
.or_insert_with(HashSet::new)
.insert(child);
}
pub fn get_nodes(&self) -> &HashMap<N, HashSet<N>> {
@ -118,12 +124,16 @@ impl<N: fmt::Display + Eq + Hash> fmt::Debug for Graph<N> {
}
impl<N: Eq + Hash> PartialEq for Graph<N> {
fn eq(&self, other: &Graph<N>) -> bool { self.nodes.eq(&other.nodes) }
fn eq(&self, other: &Graph<N>) -> bool {
self.nodes.eq(&other.nodes)
}
}
impl<N: Eq + Hash> Eq for Graph<N> {}
impl<N: Eq + Hash + Clone> Clone for Graph<N> {
fn clone(&self) -> Graph<N> {
Graph { nodes: self.nodes.clone() }
Graph {
nodes: self.nodes.clone(),
}
}
}
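
Usage of the Graph API above, for reference (a sketch; Graph is assumed to be in scope):

fn main() {
    let mut g = Graph::new();
    g.add("top", &["a", "b"]); // node plus initial edges
    g.link("a", "b");          // single edge; creates "a" if absent
    assert!(g.get_nodes().contains_key("top"));
}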


@ -1,12 +1,12 @@
#![allow(deprecated)]
use hex;
use std::hash::{Hasher, Hash, SipHasher};
use std::hash::{Hash, Hasher, SipHasher};
pub fn to_hex(num: u64) -> String {
hex::encode(&[
(num >> 0) as u8,
(num >> 8) as u8,
(num >> 0) as u8,
(num >> 8) as u8,
(num >> 16) as u8,
(num >> 24) as u8,
(num >> 32) as u8,
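
The array continues through (num >> 56); the hunk is cut off above. The effect is a little-endian byte order before hex-encoding, so the low byte prints first (assuming to_hex from this file is in scope):

assert_eq!(to_hex(1), "0100000000000000");
assert_eq!(to_hex(0xdead_beef), "efbeadde00000000");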


@ -21,7 +21,7 @@ pub fn find_project_manifest(pwd: &Path, file: &str) -> CargoResult<PathBuf> {
loop {
let manifest = current.join(file);
if fs::metadata(&manifest).is_ok() {
return Ok(manifest)
return Ok(manifest);
}
match current.parent() {
@ -30,13 +30,15 @@ pub fn find_project_manifest(pwd: &Path, file: &str) -> CargoResult<PathBuf> {
}
}
bail!("could not find `{}` in `{}` or any parent directory",
file, pwd.display())
bail!(
"could not find `{}` in `{}` or any parent directory",
file,
pwd.display()
)
}
/// Find the root Cargo.toml
pub fn find_root_manifest_for_wd(manifest_path: Option<&str>, cwd: &Path)
-> CargoResult<PathBuf> {
pub fn find_root_manifest_for_wd(manifest_path: Option<&str>, cwd: &Path) -> CargoResult<PathBuf> {
match manifest_path {
Some(path) => {
let absolute_path = paths::normalize_path(&cwd.join(&path));
@ -47,7 +49,7 @@ pub fn find_root_manifest_for_wd(manifest_path: Option<&str>, cwd: &Path)
bail!("manifest path `{}` does not exist", path)
}
Ok(absolute_path)
},
}
None => find_project_manifest(cwd, "Cargo.toml"),
}
}


@ -86,7 +86,7 @@ mod imp {
let job = CreateJobObjectW(0 as *mut _, 0 as *const _);
if job.is_null() {
return None
return None;
}
let job = Handle { inner: job };
@ -96,14 +96,15 @@ mod imp {
// our children will reside in the job once we spawn a process.
let mut info: JOBOBJECT_EXTENDED_LIMIT_INFORMATION;
info = mem::zeroed();
info.BasicLimitInformation.LimitFlags =
JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
let r = SetInformationJobObject(job.inner,
JobObjectExtendedLimitInformation,
&mut info as *mut _ as LPVOID,
mem::size_of_val(&info) as DWORD);
info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
let r = SetInformationJobObject(
job.inner,
JobObjectExtendedLimitInformation,
&mut info as *mut _ as LPVOID,
mem::size_of_val(&info) as DWORD,
);
if r == 0 {
return None
return None;
}
// Assign our process to this job object, meaning that our children will
@ -111,7 +112,7 @@ mod imp {
let me = GetCurrentProcess();
let r = AssignProcessToJobObject(job.inner, me);
if r == 0 {
return None
return None;
}
Some(Setup { job })
@ -140,13 +141,13 @@ mod imp {
let mut info: JOBOBJECT_EXTENDED_LIMIT_INFORMATION;
info = mem::zeroed();
let r = SetInformationJobObject(
self.job.inner,
JobObjectExtendedLimitInformation,
&mut info as *mut _ as LPVOID,
mem::size_of_val(&info) as DWORD);
self.job.inner,
JobObjectExtendedLimitInformation,
&mut info as *mut _ as LPVOID,
mem::size_of_val(&info) as DWORD,
);
if r == 0 {
info!("failed to configure job object to defaults: {}",
last_err());
info!("failed to configure job object to defaults: {}", last_err());
}
}
}
@ -162,14 +163,15 @@ mod imp {
let mut jobs: Jobs = mem::zeroed();
let r = QueryInformationJobObject(
self.job.inner,
JobObjectBasicProcessIdList,
&mut jobs as *mut _ as LPVOID,
mem::size_of_val(&jobs) as DWORD,
0 as *mut _);
self.job.inner,
JobObjectBasicProcessIdList,
&mut jobs as *mut _ as LPVOID,
mem::size_of_val(&jobs) as DWORD,
0 as *mut _,
);
if r == 0 {
info!("failed to query job object: {}", last_err());
return false
return false;
}
let mut killed = false;
@ -177,46 +179,44 @@ mod imp {
assert!(list.len() > 0);
info!("found {} remaining processes", list.len() - 1);
let list = list.iter().filter(|&&id| {
// let's not kill ourselves
id as DWORD != GetCurrentProcessId()
}).filter_map(|&id| {
// Open the process with the necessary rights, and if this
// fails then we probably raced with the process exiting so we
// ignore the problem.
let flags = PROCESS_QUERY_INFORMATION |
PROCESS_TERMINATE |
SYNCHRONIZE;
let p = OpenProcess(flags, FALSE, id as DWORD);
if p.is_null() {
None
} else {
Some(Handle { inner: p })
}
}).filter(|p| {
// Test if this process was actually in the job object or not.
// If it's not then we likely raced with something else
// recycling this PID, so we just skip this step.
let mut res = 0;
let r = IsProcessInJob(p.inner, self.job.inner, &mut res);
if r == 0 {
info!("failed to test is process in job: {}", last_err());
return false
}
res == TRUE
});
let list = list.iter()
.filter(|&&id| {
// let's not kill ourselves
id as DWORD != GetCurrentProcessId()
})
.filter_map(|&id| {
// Open the process with the necessary rights, and if this
// fails then we probably raced with the process exiting so we
// ignore the problem.
let flags = PROCESS_QUERY_INFORMATION | PROCESS_TERMINATE | SYNCHRONIZE;
let p = OpenProcess(flags, FALSE, id as DWORD);
if p.is_null() {
None
} else {
Some(Handle { inner: p })
}
})
.filter(|p| {
// Test if this process was actually in the job object or not.
// If it's not then we likely raced with something else
// recycling this PID, so we just skip this step.
let mut res = 0;
let r = IsProcessInJob(p.inner, self.job.inner, &mut res);
if r == 0 {
info!("failed to test is process in job: {}", last_err());
return false;
}
res == TRUE
});
for p in list {
// Load the file which this process was spawned from. We then
// later use this for identification purposes.
let mut buf = [0; 1024];
let r = GetProcessImageFileNameW(p.inner,
buf.as_mut_ptr(),
buf.len() as DWORD);
let r = GetProcessImageFileNameW(p.inner, buf.as_mut_ptr(), buf.len() as DWORD);
if r == 0 {
info!("failed to get image name: {}", last_err());
continue
continue;
}
let s = OsString::from_wide(&buf[..r as usize]);
info!("found remaining: {:?}", s);
@ -235,7 +235,7 @@ mod imp {
if let Some(s) = s.to_str() {
if s.contains("mspdbsrv") {
info!("\toops, this is mspdbsrv");
continue
continue;
}
}
@ -252,7 +252,7 @@ mod imp {
let r = WaitForSingleObject(p.inner, INFINITE);
if r != 0 {
info!("failed to wait for process to die: {}", last_err());
return false
return false;
}
killed = true;
}
@ -263,7 +263,9 @@ mod imp {
impl Drop for Handle {
fn drop(&mut self) {
unsafe { CloseHandle(self.inner); }
unsafe {
CloseHandle(self.inner);
}
}
}
}


@ -1,19 +1,21 @@
use std::cmp;
pub fn lev_distance(me: &str, t: &str) -> usize {
if me.is_empty() { return t.chars().count(); }
if t.is_empty() { return me.chars().count(); }
if me.is_empty() {
return t.chars().count();
}
if t.is_empty() {
return me.chars().count();
}
let mut dcol = (0..t.len() + 1).collect::<Vec<_>>();
let mut t_last = 0;
for (i, sc) in me.chars().enumerate() {
let mut current = i;
dcol[0] = current + 1;
for (j, tc) in t.chars().enumerate() {
let next = dcol[j + 1];
if sc == tc {
@ -33,11 +35,12 @@ pub fn lev_distance(me: &str, t: &str) -> usize {
#[test]
fn test_lev_distance() {
use std::char::{ from_u32, MAX };
use std::char::{from_u32, MAX};
// Test bytelength agnosticity
for c in (0u32..MAX as u32)
.filter_map(|i| from_u32(i))
.map(|i| i.to_string()) {
.filter_map(|i| from_u32(i))
.map(|i| i.to_string())
{
assert_eq!(lev_distance(&c, &c), 0);
}
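
Worked examples for the distance above (plain Levenshtein, so an insertion costs 1 and a transposition costs 2; assumes lev_distance is in scope):

assert_eq!(lev_distance("test", "tests"), 1);
assert_eq!(lev_distance("build", "biuld"), 2);
assert_eq!(lev_distance("", "cargo"), 5); // early return: length of the other string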

View File

@ -1,7 +1,7 @@
use serde::ser;
use serde_json::{self, Value};
use core::{PackageId, Target, Profile};
use core::{PackageId, Profile, Target};
pub trait Message: ser::Serialize {
fn reason(&self) -> &str;
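
The Message trait above ties a serde-serializable payload to a machine-readable reason tag. A hypothetical implementor might look like the sketch below; BuildFinished and its field are invented for illustration and do not appear in the diff.

#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate serde_json;

use serde::ser;

pub trait Message: ser::Serialize {
    fn reason(&self) -> &str;
}

#[derive(Serialize)]
struct BuildFinished {
    success: bool,
}

impl Message for BuildFinished {
    fn reason(&self) -> &str {
        // Consumers key on this tag to decide how to interpret the
        // rest of the JSON payload.
        "build-finished"
    }
}

fn main() {
    let msg = BuildFinished { success: true };
    // Prints: build-finished {"success":true}
    println!("{} {}", msg.reason(), serde_json::to_string(&msg).unwrap());
}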

View File

@ -1,21 +1,21 @@
pub use self::cfg::{Cfg, CfgExpr};
pub use self::config::{Config, ConfigValue, homedir};
pub use self::dependency_queue::{DependencyQueue, Fresh, Dirty, Freshness};
pub use self::errors::{CargoResult, CargoResultExt, CargoError, Test, CliResult};
pub use self::errors::{CliError, ProcessError, CargoTestError};
pub use self::errors::{process_error, internal};
pub use self::config::{homedir, Config, ConfigValue};
pub use self::dependency_queue::{DependencyQueue, Dirty, Fresh, Freshness};
pub use self::errors::{CargoError, CargoResult, CargoResultExt, CliResult, Test};
pub use self::errors::{CargoTestError, CliError, ProcessError};
pub use self::errors::{internal, process_error};
pub use self::flock::{FileLock, Filesystem};
pub use self::graph::Graph;
pub use self::hex::{to_hex, short_hash, hash_u64};
pub use self::lev_distance::{lev_distance};
pub use self::paths::{join_paths, path2bytes, bytes2path, dylib_path};
pub use self::paths::{normalize_path, dylib_path_envvar, without_prefix};
pub use self::hex::{short_hash, to_hex, hash_u64};
pub use self::lev_distance::lev_distance;
pub use self::paths::{dylib_path, join_paths, bytes2path, path2bytes};
pub use self::paths::{dylib_path_envvar, normalize_path, without_prefix};
pub use self::process_builder::{process, ProcessBuilder};
pub use self::rustc::Rustc;
pub use self::sha256::Sha256;
pub use self::to_semver::ToSemver;
pub use self::to_url::ToUrl;
pub use self::vcs::{GitRepo, HgRepo, PijulRepo, FossilRepo};
pub use self::vcs::{FossilRepo, GitRepo, HgRepo, PijulRepo};
pub use self::read2::read2;
pub use self::progress::Progress;
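
These re-exports form a facade: callers name everything through cargo::util while the submodule layout stays an implementation detail. A tiny sketch of the shape, with placeholder module and item names rather than cargo's:

mod util {
    mod lev_distance {
        // Placeholder body; see the full sketch earlier in this note.
        pub fn lev_distance(_me: &str, _t: &str) -> usize {
            0
        }
    }
    pub use self::lev_distance::lev_distance;
}

fn main() {
    // The private submodule never appears at the call site.
    let _ = util::lev_distance("foo", "bar");
}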

View File

@ -10,23 +10,21 @@ fn maybe_spurious(err: &Error) -> bool {
for e in err.causes() {
if let Some(git_err) = e.downcast_ref::<git2::Error>() {
match git_err.class() {
git2::ErrorClass::Net |
git2::ErrorClass::Os => return true,
_ => ()
git2::ErrorClass::Net | git2::ErrorClass::Os => return true,
_ => (),
}
}
if let Some(curl_err) = e.downcast_ref::<curl::Error>() {
if curl_err.is_couldnt_connect() ||
curl_err.is_couldnt_resolve_proxy() ||
curl_err.is_couldnt_resolve_host() ||
curl_err.is_operation_timedout() ||
curl_err.is_recv_error() {
return true
if curl_err.is_couldnt_connect() || curl_err.is_couldnt_resolve_proxy()
|| curl_err.is_couldnt_resolve_host()
|| curl_err.is_operation_timedout() || curl_err.is_recv_error()
{
return true;
}
}
if let Some(not_200) = e.downcast_ref::<HttpNot200>() {
if 500 <= not_200.code && not_200.code < 600 {
return true
return true;
}
}
}
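
maybe_spurious walks the whole cause chain of a failure, so a retryable error stays retryable even when wrapped in extra context. A minimal sketch of the same walk, using std's source() chain in place of the failure crate's causes() and a hand-rolled HttpNot200 stand-in rather than cargo's type:

use std::error::Error;
use std::fmt;

#[derive(Debug)]
struct HttpNot200 {
    code: u32,
    url: String,
}

impl fmt::Display for HttpNot200 {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "got HTTP {} from `{}`", self.code, self.url)
    }
}

impl Error for HttpNot200 {}

fn maybe_spurious(err: &(dyn Error + 'static)) -> bool {
    let mut cur = Some(err);
    while let Some(e) = cur {
        if let Some(not_200) = e.downcast_ref::<HttpNot200>() {
            // Server-side failures (5xx) are worth retrying; client
            // errors (4xx) are not.
            if 500 <= not_200.code && not_200.code < 600 {
                return true;
            }
        }
        cur = e.source();
    }
    false
}

fn main() {
    let err = HttpNot200 {
        code: 502,
        url: "https://example.com".to_string(),
    };
    assert!(maybe_spurious(&err));
}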
@ -47,15 +45,19 @@ fn maybe_spurious(err: &Error) -> bool {
/// cargo_result = network::with_retry(&config, || something.download());
/// ```
pub fn with_retry<T, F>(config: &Config, mut callback: F) -> CargoResult<T>
where F: FnMut() -> CargoResult<T>
where
F: FnMut() -> CargoResult<T>,
{
let mut remaining = config.net_retry()?;
loop {
match callback() {
Ok(ret) => return Ok(ret),
Err(ref e) if maybe_spurious(e) && remaining > 0 => {
let msg = format!("spurious network error ({} tries \
remaining): {}", remaining, e);
let msg = format!(
"spurious network error ({} tries \
remaining): {}",
remaining, e
);
config.shell().warn(msg)?;
remaining -= 1;
}
@ -67,8 +69,14 @@ pub fn with_retry<T, F>(config: &Config, mut callback: F) -> CargoResult<T>
#[test]
fn with_retry_repeats_the_call_then_works() {
//Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry
let error1 = HttpNot200 { code: 501, url: "Uri".to_string() }.into();
let error2 = HttpNot200 { code: 502, url: "Uri".to_string() }.into();
let error1 = HttpNot200 {
code: 501,
url: "Uri".to_string(),
}.into();
let error2 = HttpNot200 {
code: 502,
url: "Uri".to_string(),
}.into();
let mut results: Vec<CargoResult<()>> = vec![Ok(()), Err(error1), Err(error2)];
let config = Config::default().unwrap();
let result = with_retry(&config, || results.pop().unwrap());
@ -81,9 +89,15 @@ fn with_retry_finds_nested_spurious_errors() {
//Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry
//String error messages are not considered spurious
let error1 = CargoError::from(HttpNot200 { code: 501, url: "Uri".to_string() });
let error1 = CargoError::from(HttpNot200 {
code: 501,
url: "Uri".to_string(),
});
let error1 = CargoError::from(error1.context("A non-spurious wrapping err"));
let error2 = CargoError::from(HttpNot200 { code: 502, url: "Uri".to_string() });
let error2 = CargoError::from(HttpNot200 {
code: 502,
url: "Uri".to_string(),
});
let error2 = CargoError::from(error2.context("A second chained error"));
let mut results: Vec<CargoResult<()>> = vec![Ok(()), Err(error1), Err(error2)];
let config = Config::default().unwrap();
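
The tests above drive with_retry by popping canned results off a Vec. Stripped of the cargo-specific Config and CargoResult types, the retry loop reduces to the self-contained sketch below; the signature, the numeric error type, and the spuriousness predicate are all invented for illustration (the real version consults maybe_spurious and warns through the shell).

fn with_retry<T, E, F, S>(mut remaining: u32, is_spurious: S, mut callback: F) -> Result<T, E>
where
    F: FnMut() -> Result<T, E>,
    S: Fn(&E) -> bool,
{
    loop {
        match callback() {
            Ok(ret) => return Ok(ret),
            // Spurious failures are retried while budget remains.
            Err(ref e) if is_spurious(e) && remaining > 0 => remaining -= 1,
            // Anything else, or an exhausted budget, is surfaced.
            Err(e) => return Err(e),
        }
    }
}

fn main() {
    let mut attempts = 0;
    let result: Result<&str, u32> = with_retry(
        2,
        |&code| 500 <= code && code < 600,
        || {
            attempts += 1;
            if attempts < 3 {
                Err(502) // transient, like the 501/502 in the tests above
            } else {
                Ok("downloaded")
            }
        },
    );
    assert_eq!(result, Ok("downloaded"));
    assert_eq!(attempts, 3);
}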

Some files were not shown because too many files have changed in this diff