cargo fmt

Alex Crichton 2018-03-14 08:17:44 -07:00
parent a13a33c33b
commit 1e6828485e
186 changed files with 31579 additions and 16776 deletions

View file

@@ -1,14 +1,14 @@
 extern crate cargo;
+extern crate clap;
 extern crate env_logger;
 #[macro_use]
 extern crate failure;
 extern crate git2_curl;
-extern crate toml;
 extern crate log;
 #[macro_use]
 extern crate serde_derive;
 extern crate serde_json;
-extern crate clap;
+extern crate toml;
 
 use std::env;
 use std::fs;
@@ -16,14 +16,13 @@ use std::path::{Path, PathBuf};
 use std::collections::BTreeSet;
 
 use cargo::core::shell::Shell;
-use cargo::util::{self, CliResult, lev_distance, Config, CargoResult};
+use cargo::util::{self, lev_distance, CargoResult, CliResult, Config};
 use cargo::util::{CliError, ProcessError};
 
-
 mod cli;
 mod command_prelude;
 mod commands;
 
 fn main() {
     env_logger::init();
@@ -53,7 +52,8 @@ fn aliased_command(config: &Config, command: &str) -> CargoResult<Option<Vec<String>>> {
     match config.get_string(&alias_name) {
         Ok(value) => {
             if let Some(record) = value {
-                let alias_commands = record.val
+                let alias_commands = record
+                    .val
                     .split_whitespace()
                     .map(|s| s.to_string())
                     .collect();
@@ -63,10 +63,8 @@ fn aliased_command(config: &Config, command: &str) -> CargoResult<Option<Vec<String>>> {
         Err(_) => {
             let value = config.get_list(&alias_name)?;
             if let Some(record) = value {
-                let alias_commands: Vec<String> = record.val
-                    .iter()
-                    .map(|s| s.0.to_string())
-                    .collect();
+                let alias_commands: Vec<String> =
+                    record.val.iter().map(|s| s.0.to_string()).collect();
                 result = Ok(Some(alias_commands));
             }
         }
@@ -95,10 +93,10 @@ fn list_commands(config: &Config) -> BTreeSet<(String, Option<String>)> {
             }
             if is_executable(entry.path()) {
                 let end = filename.len() - suffix.len();
-                commands.insert(
-                    (filename[prefix.len()..end].to_string(),
-                     Some(path.display().to_string()))
-                );
+                commands.insert((
+                    filename[prefix.len()..end].to_string(),
+                    Some(path.display().to_string()),
+                ));
             }
         }
     }
@@ -110,7 +108,6 @@ fn list_commands(config: &Config) -> BTreeSet<(String, Option<String>)> {
     commands
 }
 
-
 fn find_closest(config: &Config, cmd: &str) -> Option<String> {
     let cmds = list_commands(config);
     // Only consider candidates with a lev_distance of 3 or less so we don't
@@ -133,14 +130,14 @@ fn execute_external_subcommand(config: &Config, cmd: &str, args: &[&str]) -> CliResult {
         Some(command) => command,
         None => {
             let err = match find_closest(config, cmd) {
-                Some(closest) => {
-                    format_err!("no such subcommand: `{}`\n\n\tDid you mean `{}`?\n",
-                                cmd,
-                                closest)
-                }
+                Some(closest) => format_err!(
+                    "no such subcommand: `{}`\n\n\tDid you mean `{}`?\n",
+                    cmd,
+                    closest
+                ),
                 None => format_err!("no such subcommand: `{}`", cmd),
             };
-            return Err(CliError::new(err, 101))
+            return Err(CliError::new(err, 101));
         }
     };
 
@@ -148,7 +145,8 @@ fn execute_external_subcommand(config: &Config, cmd: &str, args: &[&str]) -> CliResult {
     let err = match util::process(&command)
         .env(cargo::CARGO_ENV, cargo_exe)
        .args(&args[1..])
-        .exec_replace() {
+        .exec_replace()
+    {
         Ok(()) => return Ok(()),
         Err(e) => e,
     };
@@ -170,7 +168,9 @@ fn is_executable<P: AsRef<Path>>(path: P) -> bool {
 }
 
 #[cfg(windows)]
 fn is_executable<P: AsRef<Path>>(path: P) -> bool {
-    fs::metadata(path).map(|metadata| metadata.is_file()).unwrap_or(false)
+    fs::metadata(path)
+        .map(|metadata| metadata.is_file())
+        .unwrap_or(false)
 }
 
 fn search_directories(config: &Config) -> Vec<PathBuf> {

View file

@@ -2,7 +2,7 @@ extern crate clap;
 
 use clap::{AppSettings, Arg, ArgMatches};
 
-use cargo::{self, Config, CliResult};
+use cargo::{self, CliResult, Config};
 use super::list_commands;
 use super::commands;
 
@@ -15,10 +15,10 @@ pub fn main(config: &mut Config) -> CliResult {
         let version = cargo::version();
         println!("{}", version);
         if is_verbose {
-            println!("release: {}.{}.{}",
-                     version.major,
-                     version.minor,
-                     version.patch);
+            println!(
+                "release: {}.{}.{}",
+                version.major, version.minor, version.patch
+            );
             if let Some(ref cfg) = version.cfg_info {
                 if let Some(ref ci) = cfg.commit_info {
                     println!("commit-hash: {}", ci.commit_hash);
@@ -51,8 +51,7 @@ pub fn main(config: &mut Config) -> CliResult {
         return Ok(());
     }
 
-    if args.subcommand_name().is_none() {
-    }
+    if args.subcommand_name().is_none() {}
 
     execute_subcommand(config, args)
 }
@@ -60,11 +59,16 @@ pub fn main(config: &mut Config) -> CliResult {
 fn execute_subcommand(config: &mut Config, args: ArgMatches) -> CliResult {
     config.configure(
         args.occurrences_of("verbose") as u32,
-        if args.is_present("quiet") { Some(true) } else { None },
+        if args.is_present("quiet") {
+            Some(true)
+        } else {
+            None
+        },
         &args.value_of("color").map(|s| s.to_string()),
         args.is_present("frozen"),
         args.is_present("locked"),
-        &args.values_of_lossy("unstable-features").unwrap_or_default(),
+        &args.values_of_lossy("unstable-features")
+            .unwrap_or_default(),
     )?;
 
     let (cmd, args) = match args.subcommand() {
@@ -80,7 +84,11 @@ fn execute_subcommand(config: &mut Config, args: ArgMatches) -> CliResult {
     }
 
     if let Some(mut alias) = super::aliased_command(config, cmd)? {
-        alias.extend(args.values_of("").unwrap_or_default().map(|s| s.to_string()));
+        alias.extend(
+            args.values_of("")
+                .unwrap_or_default()
+                .map(|s| s.to_string()),
+        );
         let args = cli()
             .setting(AppSettings::NoBinaryName)
             .get_matches_from_safe(alias)?;
@@ -91,7 +99,6 @@ fn execute_subcommand(config: &mut Config, args: ArgMatches) -> CliResult {
     super::execute_external_subcommand(config, cmd, &ext_args)
 }
 
-
 fn cli() -> App {
     let app = App::new("cargo")
         .settings(&[
@@ -101,7 +108,8 @@ fn cli() -> App {
             AppSettings::AllowExternalSubcommands,
         ])
         .about("")
-        .template("\
+        .template(
+            "\
 Rust's package manager
 
 USAGE:
@@ -126,44 +134,39 @@ Some common cargo commands are (see all commands with --list):
     install     Install a Rust binary
     uninstall   Uninstall a Rust binary
 
-See 'cargo help <command>' for more information on a specific command."
+See 'cargo help <command>' for more information on a specific command.",
         )
+        .arg(opt("version", "Print version info and exit").short("V"))
+        .arg(opt("list", "List installed commands"))
+        .arg(opt("explain", "Run `rustc --explain CODE`").value_name("CODE"))
         .arg(
-            opt("version", "Print version info and exit")
-                .short("V")
-        )
-        .arg(
-            opt("list", "List installed commands")
-        )
-        .arg(
-            opt("explain", "Run `rustc --explain CODE`")
-                .value_name("CODE")
-        )
-        .arg(
-            opt("verbose", "Use verbose output (-vv very verbose/build.rs output)")
-                .short("v").multiple(true).global(true)
+            opt(
+                "verbose",
+                "Use verbose output (-vv very verbose/build.rs output)",
+            ).short("v")
+                .multiple(true)
+                .global(true),
         )
         .arg(
             opt("quiet", "No output printed to stdout")
-                .short("q").global(true)
+                .short("q")
+                .global(true),
        )
         .arg(
             opt("color", "Coloring: auto, always, never")
-                .value_name("WHEN").global(true)
+                .value_name("WHEN")
+                .global(true),
         )
+        .arg(opt("frozen", "Require Cargo.lock and cache are up to date").global(true))
+        .arg(opt("locked", "Require Cargo.lock is up to date").global(true))
         .arg(
-            opt("frozen", "Require Cargo.lock and cache are up to date")
-                .global(true)
+            Arg::with_name("unstable-features")
+                .help("Unstable (nightly-only) flags to Cargo")
+                .short("Z")
+                .value_name("FLAG")
+                .multiple(true)
+                .global(true),
         )
-        .arg(
-            opt("locked", "Require Cargo.lock is up to date")
-                .global(true)
-        )
-        .arg(
-            Arg::with_name("unstable-features").help("Unstable (nightly-only) flags to Cargo")
-                .short("Z").value_name("FLAG").multiple(true).global(true)
-        )
-        .subcommands(commands::builtin())
-        ;
+        .subcommands(commands::builtin());
 
     app
 }

View file

@@ -3,12 +3,12 @@ use std::path::PathBuf;
 use clap::{self, SubCommand};
 use cargo::CargoResult;
 use cargo::core::Workspace;
-use cargo::ops::{CompileMode, CompileOptions, CompileFilter, Packages, MessageFormat,
-                 VersionControl, NewOptions};
+use cargo::ops::{CompileFilter, CompileMode, CompileOptions, MessageFormat, NewOptions, Packages,
+                 VersionControl};
 use cargo::util::important_paths::find_root_manifest_for_wd;
 
-pub use clap::{Arg, ArgMatches, AppSettings};
-pub use cargo::{Config, CliResult, CliError};
+pub use clap::{AppSettings, Arg, ArgMatches};
+pub use cargo::{CliError, CliResult, Config};
 
 pub type App = clap::App<'static, 'static>;
 
@@ -16,8 +16,12 @@ pub trait AppExt: Sized {
     fn _arg(self, arg: Arg<'static, 'static>) -> Self;
 
     fn arg_package(self, package: &'static str, all: &'static str, exclude: &'static str) -> Self {
-        self._arg(opt("package", package).short("p").value_name("SPEC").multiple(true))
-            ._arg(opt("all", all))
+        self._arg(
+            opt("package", package)
+                .short("p")
+                .value_name("SPEC")
+                .multiple(true),
+        )._arg(opt("all", all))
            ._arg(opt("exclude", exclude).value_name("SPEC").multiple(true))
     }
 
@@ -28,7 +32,8 @@ pub trait AppExt: Sized {
     fn arg_jobs(self) -> Self {
         self._arg(
             opt("jobs", "Number of parallel jobs, defaults to # of CPUs")
-                .short("j").value_name("N")
+                .short("j")
+                .value_name("N"),
         )
     }
 
@@ -55,12 +60,7 @@ pub trait AppExt: Sized {
             ._arg(opt("all-targets", all))
     }
 
-    fn arg_targets_lib_bin(
-        self,
-        lib: &'static str,
-        bin: &'static str,
-        bins: &'static str,
-    ) -> Self {
+    fn arg_targets_lib_bin(self, lib: &'static str, bin: &'static str, bins: &'static str) -> Self {
         self._arg(opt("lib", lib))
             ._arg(opt("bin", bin).value_name("NAME").multiple(true))
             ._arg(opt("bins", bins))
@@ -79,23 +79,19 @@ pub trait AppExt: Sized {
             ._arg(opt("examples", examples))
     }
 
-    fn arg_targets_bin_example(
-        self,
-        bin: &'static str,
-        example: &'static str,
-    ) -> Self {
+    fn arg_targets_bin_example(self, bin: &'static str, example: &'static str) -> Self {
         self._arg(opt("bin", bin).value_name("NAME").multiple(true))
             ._arg(opt("example", example).value_name("NAME").multiple(true))
     }
 
     fn arg_features(self) -> Self {
-        self
-            ._arg(
-                opt("features", "Space-separated list of features to activate")
-                    .value_name("FEATURES")
-            )
-            ._arg(opt("all-features", "Activate all available features"))
-            ._arg(opt("no-default-features", "Do not activate the `default` feature"))
+        self._arg(
+            opt("features", "Space-separated list of features to activate").value_name("FEATURES"),
+        )._arg(opt("all-features", "Activate all available features"))
+            ._arg(opt(
+                "no-default-features",
+                "Do not activate the `default` feature",
+            ))
     }
 
     fn arg_release(self, release: &'static str) -> Self {
@@ -115,38 +111,38 @@ pub trait AppExt: Sized {
             opt("message-format", "Error format")
                 .value_name("FMT")
                 .case_insensitive(true)
-                .possible_values(&["human", "json"]).default_value("human")
+                .possible_values(&["human", "json"])
+                .default_value("human"),
         )
     }
 
     fn arg_new_opts(self) -> Self {
         self._arg(
-            opt("vcs", "\
-Initialize a new repository for the given version \
-control system (git, hg, pijul, or fossil) or do not \
-initialize any version control at all (none), overriding \
-a global configuration.")
-                .value_name("VCS")
-                .possible_values(&["git", "hg", "pijul", "fossil", "none"])
-        )
-            ._arg(opt("bin", "Use a binary (application) template [default]"))
+            opt(
+                "vcs",
+                "\
+Initialize a new repository for the given version \
+control system (git, hg, pijul, or fossil) or do not \
+initialize any version control at all (none), overriding \
+a global configuration.",
+            ).value_name("VCS")
+                .possible_values(&["git", "hg", "pijul", "fossil", "none"]),
+        )._arg(opt("bin", "Use a binary (application) template [default]"))
             ._arg(opt("lib", "Use a library template"))
             ._arg(
-                opt("name", "Set the resulting package name, defaults to the directory name")
-                    .value_name("NAME")
+                opt(
+                    "name",
+                    "Set the resulting package name, defaults to the directory name",
+                ).value_name("NAME"),
             )
     }
 
     fn arg_index(self) -> Self {
-        self
-            ._arg(
-                opt("index", "Registry index to upload the package to")
-                    .value_name("INDEX")
-            )
+        self._arg(opt("index", "Registry index to upload the package to").value_name("INDEX"))
             ._arg(
                 opt("host", "DEPRECATED, renamed to '--index'")
                     .value_name("HOST")
-                    .hidden(true)
+                    .hidden(true),
             )
     }
 }
@@ -162,24 +158,20 @@ pub fn opt(name: &'static str, help: &'static str) -> Arg<'static, 'static> {
 }
 
 pub fn subcommand(name: &'static str) -> App {
-    SubCommand::with_name(name)
-        .settings(&[
-            AppSettings::UnifiedHelpMessage,
-            AppSettings::DeriveDisplayOrder,
-            AppSettings::DontCollapseArgsInUsage,
-        ])
+    SubCommand::with_name(name).settings(&[
+        AppSettings::UnifiedHelpMessage,
+        AppSettings::DeriveDisplayOrder,
+        AppSettings::DontCollapseArgsInUsage,
+    ])
 }
 
-
 pub trait ArgMatchesExt {
     fn value_of_u32(&self, name: &str) -> CargoResult<Option<u32>> {
         let arg = match self._value_of(name) {
             None => None,
             Some(arg) => Some(arg.parse::<u32>().map_err(|_| {
-                clap::Error::value_validation_auto(
-                    format!("could not parse `{}` as a number", arg)
-                )
-            })?)
+                clap::Error::value_validation_auto(format!("could not parse `{}` as a number", arg))
+            })?),
         };
         Ok(arg)
     }
@@ -209,7 +201,7 @@ pub trait ArgMatchesExt {
     fn compile_options<'a>(
         &self,
         config: &'a Config,
-        mode: CompileMode
+        mode: CompileMode,
     ) -> CargoResult<CompileOptions<'a>> {
         let spec = Packages::from_flags(
             self._is_present("all"),
@@ -240,12 +232,18 @@ pub trait ArgMatchesExt {
             spec,
             mode,
             release: self._is_present("release"),
-            filter: CompileFilter::new(self._is_present("lib"),
-                                       self._values_of("bin"), self._is_present("bins"),
-                                       self._values_of("test"), self._is_present("tests"),
-                                       self._values_of("example"), self._is_present("examples"),
-                                       self._values_of("bench"), self._is_present("benches"),
-                                       self._is_present("all-targets")),
+            filter: CompileFilter::new(
+                self._is_present("lib"),
+                self._values_of("bin"),
+                self._is_present("bins"),
+                self._values_of("test"),
+                self._is_present("tests"),
+                self._values_of("example"),
+                self._is_present("examples"),
+                self._values_of("bench"),
+                self._is_present("benches"),
+                self._is_present("all-targets"),
+            ),
             message_format,
             target_rustdoc_args: None,
             target_rustc_args: None,
@@ -256,7 +254,7 @@ pub trait ArgMatchesExt {
     fn compile_options_for_single_package<'a>(
         &self,
         config: &'a Config,
-        mode: CompileMode
+        mode: CompileMode,
     ) -> CargoResult<CompileOptions<'a>> {
         let mut compile_opts = self.compile_options(config, mode)?;
         compile_opts.spec = Packages::Packages(self._values_of("package"));
@@ -272,19 +270,23 @@ pub trait ArgMatchesExt {
             "none" => VersionControl::NoVcs,
             vcs => panic!("Impossible vcs: {:?}", vcs),
         });
-        NewOptions::new(vcs,
-                        self._is_present("bin"),
-                        self._is_present("lib"),
-                        self._value_of("path").unwrap().to_string(),
-                        self._value_of("name").map(|s| s.to_string()))
+        NewOptions::new(
+            vcs,
+            self._is_present("bin"),
+            self._is_present("lib"),
+            self._value_of("path").unwrap().to_string(),
+            self._value_of("name").map(|s| s.to_string()),
+        )
     }
 
     fn registry(&self, config: &Config) -> CargoResult<Option<String>> {
         match self._value_of("registry") {
             Some(registry) => {
                 if !config.cli_unstable().unstable_options {
-                    return Err(format_err!("registry option is an unstable feature and \
-                                            requires -Zunstable-options to use.").into());
+                    return Err(format_err!(
+                        "registry option is an unstable feature and \
+                         requires -Zunstable-options to use."
+                    ).into());
                 }
                 Ok(Some(registry.to_string()))
             }
@@ -313,7 +315,7 @@ about this warning.";
                 config.shell().warn(&msg)?;
                 Some(host.to_string())
             }
-            None => self._value_of("index").map(|s| s.to_string())
+            None => self._value_of("index").map(|s| s.to_string()),
         };
         Ok(index)
     }
@@ -331,7 +333,8 @@ impl<'a> ArgMatchesExt for ArgMatches<'a> {
     }
 
     fn _values_of(&self, name: &str) -> Vec<String> {
-        self.values_of(name).unwrap_or_default()
+        self.values_of(name)
+            .unwrap_or_default()
            .map(|s| s.to_string())
             .collect()
     }
@@ -342,7 +345,8 @@ impl<'a> ArgMatchesExt for ArgMatches<'a> {
 }
 
 pub fn values(args: &ArgMatches, name: &str) -> Vec<String> {
-    args.values_of(name).unwrap_or_default()
+    args.values_of(name)
+        .unwrap_or_default()
         .map(|s| s.to_string())
         .collect()
 }

View file

@@ -7,16 +7,15 @@ pub fn cli() -> App {
         .setting(AppSettings::TrailingVarArg)
         .about("Execute all benchmarks of a local package")
         .arg(
-            Arg::with_name("BENCHNAME").help(
-                "If specified, only run benches containing this string in their names"
-            )
+            Arg::with_name("BENCHNAME")
+                .help("If specified, only run benches containing this string in their names"),
         )
         .arg(
-            Arg::with_name("args").help(
-                "Arguments for the bench binary"
-            ).multiple(true).last(true)
+            Arg::with_name("args")
+                .help("Arguments for the bench binary")
+                .multiple(true)
+                .last(true),
         )
         .arg_targets_all(
             "Benchmark only this package's library",
             "Benchmark only the specified binary",
@@ -29,10 +28,7 @@ pub fn cli() -> App {
             "Benchmark all benches",
             "Benchmark all targets (default)",
         )
-        .arg(
-            opt("no-run", "Compile, but don't run benchmarks")
-        )
+        .arg(opt("no-run", "Compile, but don't run benchmarks"))
         .arg_package(
             "Package to run benchmarks for",
             "Benchmark all packages in the workspace",
@@ -43,10 +39,12 @@ pub fn cli() -> App {
         .arg_target_triple("Build for the target triple")
         .arg_manifest_path()
         .arg_message_format()
-        .arg(
-            opt("no-fail-fast", "Run all benchmarks regardless of failure")
-        )
-        .after_help("\
+        .arg(opt(
+            "no-fail-fast",
+            "Run all benchmarks regardless of failure",
+        ))
+        .after_help(
+            "\
 All of the trailing arguments are passed to the benchmark binaries generated
 for filtering benchmarks and generally providing options configuring how they
 run.
@@ -64,7 +62,8 @@ The --jobs argument affects the building of the benchmark executable but does
 not affect how many jobs are used when running the benchmarks.
 
 Compilation can be customized with the `bench` profile in the manifest.
-")
+",
+        )
 }
 
 pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
@@ -80,17 +79,23 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
     };
 
     let mut bench_args = vec![];
-    bench_args.extend(args.value_of("BENCHNAME").into_iter().map(|s| s.to_string()));
-    bench_args.extend(args.values_of("args").unwrap_or_default().map(|s| s.to_string()));
+    bench_args.extend(
+        args.value_of("BENCHNAME")
+            .into_iter()
+            .map(|s| s.to_string()),
+    );
+    bench_args.extend(
+        args.values_of("args")
+            .unwrap_or_default()
+            .map(|s| s.to_string()),
+    );
 
     let err = ops::run_benches(&ws, &ops, &bench_args)?;
     match err {
         None => Ok(()),
-        Some(err) => {
-            Err(match err.exit.as_ref().and_then(|e| e.code()) {
-                Some(i) => CliError::new(format_err!("bench failed"), i),
-                None => CliError::new(err.into(), 101)
-            })
-        }
+        Some(err) => Err(match err.exit.as_ref().and_then(|e| e.code()) {
+            Some(i) => CliError::new(format_err!("bench failed"), i),
+            None => CliError::new(err.into(), 101),
+        }),
     }
 }

View file

@@ -3,7 +3,8 @@ use command_prelude::*;
 
 use cargo::ops::{self, CompileMode};
 
 pub fn cli() -> App {
-    subcommand("build").alias("b")
+    subcommand("build")
+        .alias("b")
         .about("Compile a local package and all of its dependencies")
         .arg_package(
             "Package to build",
@@ -28,7 +29,8 @@ pub fn cli() -> App {
         .arg_target_triple("Build for the target triple")
         .arg_manifest_path()
         .arg_message_format()
-        .after_help("\
+        .after_help(
+            "\
 If the --package argument is given, then SPEC is a package id specification
 which indicates which package should be built. If it is not given, then the
 current package is built. For more information on SPEC and its format, see the
@@ -41,8 +43,8 @@ Note that `--exclude` has to be specified in conjunction with the `--all` flag.
 Compilation can be configured via the use of profiles which are configured in
 the manifest. The default profile for this command is `dev`, but passing
 the --release flag will use the `release` profile instead.
-")
+",
+        )
 }
 
 pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {

View file

@@ -24,15 +24,13 @@ pub fn cli() -> App {
             "Check all targets (lib and bin targets by default)",
         )
         .arg_release("Check artifacts in release mode, with optimizations")
-        .arg(
-            opt("profile", "Profile to build the selected target for")
-                .value_name("PROFILE")
-        )
+        .arg(opt("profile", "Profile to build the selected target for").value_name("PROFILE"))
         .arg_features()
         .arg_target_triple("Check for the target triple")
         .arg_manifest_path()
         .arg_message_format()
-        .after_help("\
+        .after_help(
+            "\
 If the --package argument is given, then SPEC is a package id specification
 which indicates which package should be built. If it is not given, then the
 current package is built. For more information on SPEC and its format, see the
@@ -48,7 +46,8 @@ the --release flag will use the `release` profile instead.
 The `--profile test` flag can be used to check unit tests with the
 `#[cfg(test)]` attribute.
-")
+",
+        )
 }
 
 pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
@@ -57,8 +56,11 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
         Some("test") => true,
         None => false,
         Some(profile) => {
-            let err = format_err!("unknown profile: `{}`, only `test` is \
-                                   currently supported", profile);
+            let err = format_err!(
+                "unknown profile: `{}`, only `test` is \
+                 currently supported",
+                profile
+            );
             return Err(CliError::new(err, 101));
         }
     };

View file

@@ -7,17 +7,21 @@ pub fn cli() -> App {
         .about("Remove artifacts that cargo has generated in the past")
         .arg(
             opt("package", "Package to clean artifacts for")
-                .short("p").value_name("SPEC").multiple(true)
+                .short("p")
+                .value_name("SPEC")
+                .multiple(true),
         )
         .arg_manifest_path()
         .arg_target_triple("Target triple to clean output for (default all)")
         .arg_release("Whether or not to clean release artifacts")
-        .after_help("\
+        .after_help(
+            "\
 If the --package argument is given, then SPEC is a package id specification
 which indicates which package's artifacts should be cleaned out. If it is not
 given, then all packages' artifacts are removed. For more information on SPEC
 and its format, see the `cargo help pkgid` command.
-")
+",
+        )
 }
 
 pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {

View file

@@ -5,17 +5,16 @@ use cargo::ops::{self, CompileMode, DocOptions};
 pub fn cli() -> App {
     subcommand("doc")
         .about("Build a package's documentation")
-        .arg(
-            opt("open", "Opens the docs in a browser after the operation")
-        )
+        .arg(opt(
+            "open",
+            "Opens the docs in a browser after the operation",
+        ))
         .arg_package(
             "Package to document",
             "Document all packages in the workspace",
             "Exclude packages from the build",
         )
-        .arg(
-            opt("no-deps", "Don't build documentation for dependencies")
-        )
+        .arg(opt("no-deps", "Don't build documentation for dependencies"))
         .arg_jobs()
         .arg_targets_lib_bin(
             "Document only this package's library",
@@ -27,7 +26,8 @@ pub fn cli() -> App {
         .arg_target_triple("Build for the target triple")
         .arg_manifest_path()
         .arg_message_format()
-        .after_help("\
+        .after_help(
+            "\
 By default the documentation for the local package and all dependencies is
 built. The output is all placed in `target/doc` in rustdoc's usual format.
@@ -39,12 +39,15 @@ If the --package argument is given, then SPEC is a package id specification
 which indicates which package should be documented. If it is not given, then the
 current package is documented. For more information on SPEC and its format, see
 the `cargo help pkgid` command.
-")
+",
+        )
 }
 
 pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
     let ws = args.workspace(config)?;
-    let mode = CompileMode::Doc { deps: !args.is_present("no-deps") };
+    let mode = CompileMode::Doc {
+        deps: !args.is_present("no-deps"),
+    };
     let compile_opts = args.compile_options(config, mode)?;
     let doc_opts = DocOptions {
         open_result: args.is_present("open"),
View file

@@ -6,7 +6,8 @@ pub fn cli() -> App {
     subcommand("fetch")
         .about("Fetch dependencies of a package from the network")
         .arg_manifest_path()
-        .after_help("\
+        .after_help(
+            "\
 If a lockfile is available, this command will ensure that all of the git
 dependencies and/or registries dependencies are downloaded and locally
 available. The network is never touched after a `cargo fetch` unless
@@ -15,7 +16,8 @@ the lockfile changes.
 If the lockfile is not available, then this is the equivalent of
 `cargo generate-lockfile`. A lockfile is generated and dependencies are also
 all updated.
-")
+",
+        )
 }
 
 pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {

View file

@@ -6,7 +6,8 @@ pub fn cli() -> App {
     subcommand("generate-lockfile")
         .about("Generate the lockfile for a project")
         .arg_manifest_path()
-        .after_help("\
+        .after_help(
+            "\
 If a lockfile is available, this command will ensure that all of the git
 dependencies and/or registries dependencies are downloaded and locally
 available. The network is never touched after a `cargo fetch` unless
@@ -15,7 +16,8 @@ the lockfile changes.
 If the lockfile is not available, then this is the equivalent of
 `cargo generate-lockfile`. A lockfile is generated and dependencies are also
 all updated.
-")
+",
+        )
 }
 
 pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {

View file

@@ -1,14 +1,24 @@
 use command_prelude::*;
 
-use cargo::core::{GitReference, SourceId, Source};
+use cargo::core::{GitReference, Source, SourceId};
 use cargo::sources::GitSource;
 use cargo::util::ToUrl;
 
 pub fn cli() -> App {
     subcommand("git-checkout")
         .about("Checkout a copy of a Git repository")
-        .arg(Arg::with_name("url").long("url").value_name("URL").required(true))
-        .arg(Arg::with_name("reference").long("reference").value_name("REF").required(true))
+        .arg(
+            Arg::with_name("url")
+                .long("url")
+                .value_name("URL")
+                .required(true),
+        )
+        .arg(
+            Arg::with_name("reference")
+                .long("reference")
+                .value_name("REF")
+                .required(true),
+        )
 }
 
 pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {

View file

@@ -12,6 +12,8 @@ pub fn cli() -> App {
 pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
     let opts = args.new_options()?;
     ops::init(&opts, config)?;
-    config.shell().status("Created", format!("{} project", opts.kind))?;
+    config
+        .shell()
+        .status("Created", format!("{} project", opts.kind))?;
     Ok(())
 }

View file

@@ -8,39 +8,22 @@ pub fn cli() -> App {
     subcommand("install")
         .about("Create a new cargo package in an existing directory")
         .arg(Arg::with_name("crate").multiple(true))
         .arg(
             opt("version", "Specify a version to install from crates.io")
-                .alias("vers").value_name("VERSION")
+                .alias("vers")
+                .value_name("VERSION"),
         )
-        .arg(
-            opt("git", "Git URL to install the specified crate from")
-                .value_name("URL")
-        )
-        .arg(
-            opt("branch", "Branch to use when installing from git")
-                .value_name("BRANCH")
-        )
-        .arg(
-            opt("tag", "Tag to use when installing from git")
-                .value_name("TAG")
-        )
-        .arg(
-            opt("rev", "Specific commit to use when installing from git")
-                .value_name("SHA")
-        )
-        .arg(
-            opt("path", "Filesystem path to local crate to install")
-                .value_name("PATH")
-        )
-        .arg(opt("list", "list all installed packages and their versions"))
+        .arg(opt("git", "Git URL to install the specified crate from").value_name("URL"))
+        .arg(opt("branch", "Branch to use when installing from git").value_name("BRANCH"))
+        .arg(opt("tag", "Tag to use when installing from git").value_name("TAG"))
+        .arg(opt("rev", "Specific commit to use when installing from git").value_name("SHA"))
+        .arg(opt("path", "Filesystem path to local crate to install").value_name("PATH"))
+        .arg(opt(
+            "list",
+            "list all installed packages and their versions",
+        ))
         .arg_jobs()
-        .arg(
-            opt("force", "Force overwriting existing crates or binaries")
-                .short("f")
-        )
+        .arg(opt("force", "Force overwriting existing crates or binaries").short("f"))
         .arg_features()
         .arg(opt("debug", "Build in debug mode instead of release mode"))
         .arg_targets_bins_examples(
@@ -49,11 +32,9 @@ pub fn cli() -> App {
             "Install only the specified example",
             "Install all examples",
         )
-        .arg(
-            opt("root", "Directory to install packages into")
-                .value_name("DIR")
-        )
-        .after_help("\
+        .arg(opt("root", "Directory to install packages into").value_name("DIR"))
+        .after_help(
+            "\
 This command manages Cargo's local set of installed binary crates. Only packages
 which have [[bin]] targets can be installed, and all binaries are installed into
 the installation root's `bin` folder. The installation root is determined, in
@@ -86,14 +67,17 @@ If the source is crates.io or `--git` then by default the crate will be built
 in a temporary target directory. To avoid this, the target directory can be
 specified by setting the `CARGO_TARGET_DIR` environment variable to a relative
 path. In particular, this can be useful for caching build artifacts on
-continuous integration systems.")
+continuous integration systems.",
+        )
 }
 
 pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
     let mut compile_opts = args.compile_options(config, CompileMode::Build)?;
     compile_opts.release = !args.is_present("debug");
 
-    let krates = args.values_of("crate").unwrap_or_default().collect::<Vec<_>>();
+    let krates = args.values_of("crate")
+        .unwrap_or_default()
+        .collect::<Vec<_>>();
 
     let source = if let Some(url) = args.value_of("git") {
         let url = url.to_url()?;
@@ -121,7 +105,14 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
     if args.is_present("list") {
         ops::install_list(root, config)?;
     } else {
-        ops::install(root, krates, &source, version, &compile_opts, args.is_present("force"))?;
+        ops::install(
+            root,
+            krates,
+            &source,
+            version,
+            &compile_opts,
+            args.is_present("force"),
+        )?;
     }
     Ok(())
 }

View file

@@ -10,15 +10,19 @@ pub fn cli() -> App {
 
 #[derive(Serialize)]
 pub struct ProjectLocation {
-    root: String
+    root: String,
 }
 
 pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
     let root = args.root_manifest(config)?;
 
     let root = root.to_str()
-        .ok_or_else(|| format_err!("your project path contains characters \
-                                    not representable in Unicode"))
+        .ok_or_else(|| {
+            format_err!(
+                "your project path contains characters \
+                 not representable in Unicode"
+            )
+        })
         .map_err(|e| CliError::new(e, 1))?
         .to_string();

View file

@@ -2,15 +2,17 @@ use command_prelude::*;
 
 use std::io::{self, BufRead};
 
-use cargo::core::{SourceId, Source};
+use cargo::core::{Source, SourceId};
 use cargo::sources::RegistrySource;
 use cargo::util::{CargoError, CargoResultExt};
 use cargo::ops;
 
 pub fn cli() -> App {
     subcommand("login")
-        .about("Save an api token from the registry locally. \
-                If token is not specified, it will be read from stdin.")
+        .about(
+            "Save an api token from the registry locally. \
+             If token is not specified, it will be read from stdin.",
+        )
         .arg(Arg::with_name("token"))
         .arg(opt("host", "Host to set the token for").value_name("HOST"))
         .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
@@ -24,24 +26,29 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
         None => {
             let host = match registry {
                 Some(ref _registry) => {
-                    return Err(format_err!("token must be provided when \
-                                            --registry is provided.").into());
+                    return Err(format_err!(
+                        "token must be provided when \
+                         --registry is provided."
+                    ).into());
                 }
                 None => {
                     let src = SourceId::crates_io(config)?;
                     let mut src = RegistrySource::remote(&src, config);
                     src.update()?;
                     let config = src.config()?.unwrap();
-                    args.value_of("host").map(|s| s.to_string())
+                    args.value_of("host")
+                        .map(|s| s.to_string())
                         .unwrap_or(config.api.unwrap())
                 }
             };
             println!("please visit {}me and paste the API Token below", host);
             let mut line = String::new();
             let input = io::stdin();
-            input.lock().read_line(&mut line).chain_err(|| {
-                "failed to read stdin"
-            }).map_err(CargoError::from)?;
+            input
+                .lock()
+                .read_line(&mut line)
+                .chain_err(|| "failed to read stdin")
+                .map_err(CargoError::from)?;
             line.trim().to_string()
         }
     };

View file

@@ -5,18 +5,22 @@ use cargo::print_json;
 pub fn cli() -> App {
     subcommand("metadata")
-        .about("Output the resolved dependencies of a project, \
-                the concrete used versions including overrides, \
-                in machine-readable format")
+        .about(
+            "Output the resolved dependencies of a project, \
+             the concrete used versions including overrides, \
+             in machine-readable format",
+        )
         .arg_features()
-        .arg(
-            opt("no-deps", "Output information only about the root package \
-                            and don't fetch dependencies")
-        )
+        .arg(opt(
+            "no-deps",
+            "Output information only about the root package \
+             and don't fetch dependencies",
+        ))
         .arg_manifest_path()
         .arg(
             opt("format-version", "Format version")
-                .value_name("VERSION").possible_value("1")
+                .value_name("VERSION")
+                .possible_value("1"),
         )
 }
@@ -25,9 +29,10 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
     let version = match args.value_of("format-version") {
         None => {
-            config.shell().warn("\
-                please specify `--format-version` flag explicitly \
-                to avoid compatibility problems"
+            config.shell().warn(
+                "\
+                 please specify `--format-version` flag explicitly \
+                 to avoid compatibility problems",
             )?;
             1
         }

View file

@@ -34,7 +34,7 @@ pub fn builtin() -> Vec<App> {
     ]
 }
 
-pub fn builtin_exec(cmd: & str) -> Option<fn(&mut Config, &ArgMatches) -> CliResult> {
+pub fn builtin_exec(cmd: &str) -> Option<fn(&mut Config, &ArgMatches) -> CliResult> {
     let f = match cmd {
         "bench" => bench::exec,
         "build" => build::exec,

View file

@@ -13,6 +13,8 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
     let opts = args.new_options()?;
     ops::new(&opts, config)?;
     let path = args.value_of("path").unwrap();
-    config.shell().status("Created", format!("{} `{}` project", opts.kind, path))?;
+    config
+        .shell()
+        .status("Created", format!("{} `{}` project", opts.kind, path))?;
     Ok(())
 }

View file

@@ -8,17 +8,22 @@ pub fn cli() -> App {
         .arg(Arg::with_name("crate"))
         .arg(
             opt("add", "Name of a user or team to add as an owner")
-                .short("a").value_name("LOGIN").multiple(true)
+                .short("a")
+                .value_name("LOGIN")
+                .multiple(true),
         )
         .arg(
             opt("remove", "Name of a user or team to remove as an owner")
-                .short("r").value_name("LOGIN").multiple(true)
+                .short("r")
+                .value_name("LOGIN")
+                .multiple(true),
         )
         .arg(opt("list", "List owners of a crate").short("l"))
         .arg(opt("index", "Registry index to modify owners for").value_name("INDEX"))
         .arg(opt("token", "API token to use when authenticating").value_name("TOKEN"))
         .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
-        .after_help("\
+        .after_help(
+            "\
 This command will modify the owners for a package
 on the specified registry(or
 default).Note that owners of a package can upload new versions, yank old
@@ -26,7 +31,8 @@ pub fn cli() -> App {
 caution!
 
 See http://doc.crates.io/crates-io.html#cargo-owner for detailed documentation
-and troubleshooting.")
+and troubleshooting.",
+        )
 }
 
 pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {

View file

@@ -2,14 +2,27 @@ use command_prelude::*;
 
 use cargo::ops::{self, PackageOpts};
 
 pub fn cli() -> App {
     subcommand("package")
         .about("Assemble the local package into a distributable tarball")
-        .arg(opt("list", "Print files included in a package without making one").short("l"))
-        .arg(opt("no-verify", "Don't verify the contents by building them"))
-        .arg(opt("no-metadata", "Ignore warnings about a lack of human-usable metadata"))
-        .arg(opt("allow-dirty", "Allow dirty working directories to be packaged"))
+        .arg(
+            opt(
+                "list",
+                "Print files included in a package without making one",
+            ).short("l"),
+        )
+        .arg(opt(
+            "no-verify",
+            "Don't verify the contents by building them",
+        ))
+        .arg(opt(
+            "no-metadata",
+            "Ignore warnings about a lack of human-usable metadata",
+        ))
+        .arg(opt(
+            "allow-dirty",
+            "Allow dirty working directories to be packaged",
+        ))
         .arg_target_triple("Build for the target triple")
         .arg_manifest_path()
         .arg_jobs()
@@ -17,15 +30,18 @@ pub fn cli() -> App {
 pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
     let ws = args.workspace(config)?;
-    ops::package(&ws, &PackageOpts {
-        config,
-        verify: !args.is_present("no-verify"),
-        list: args.is_present("list"),
-        check_metadata: !args.is_present("no-metadata"),
-        allow_dirty: args.is_present("allow-dirty"),
-        target: args.target(),
-        jobs: args.jobs()?,
-        registry: None,
-    })?;
+    ops::package(
+        &ws,
+        &PackageOpts {
+            config,
+            verify: !args.is_present("no-verify"),
+            list: args.is_present("list"),
+            check_metadata: !args.is_present("no-metadata"),
+            allow_dirty: args.is_present("allow-dirty"),
+            target: args.target(),
+            jobs: args.jobs()?,
+            registry: None,
+        },
+    )?;
     Ok(())
 }

View file

@@ -8,7 +8,8 @@ pub fn cli() -> App {
         .arg(Arg::with_name("spec"))
         .arg_single_package("Argument to get the package id specifier for")
         .arg_manifest_path()
-        .after_help("\
+        .after_help(
+            "\
 Given a <spec> argument, print out the fully qualified package id specifier.
 This command will generate an error if <spec> is ambiguous as to which package
 it refers to in the dependency graph. If no <spec> is given, then the pkgid for
@@ -27,7 +28,8 @@ Example Package IDs
   crates.io/foo#1.2.3        | foo  | 1.2.3  | *://crates.io/foo
   crates.io/bar#foo:1.2.3    | foo  | 1.2.3  | *://crates.io/bar
   http://crates.io/foo#1.2.3 | foo  | 1.2.3  | http://crates.io/foo
-")
+",
+        )
 }
 
 pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {

View file

@@ -7,14 +7,18 @@ pub fn cli() -> App {
         .about("Upload a package to the registry")
         .arg_index()
         .arg(opt("token", "Token to use when uploading").value_name("TOKEN"))
-        .arg(opt("no-verify", "Don't verify the contents by building them"))
-        .arg(opt("allow-dirty", "Allow dirty working directories to be packaged"))
+        .arg(opt(
+            "no-verify",
+            "Don't verify the contents by building them",
+        ))
+        .arg(opt(
+            "allow-dirty",
+            "Allow dirty working directories to be packaged",
+        ))
         .arg_target_triple("Build for the target triple")
         .arg_manifest_path()
         .arg_jobs()
-        .arg(
-            opt("dry-run", "Perform all checks without uploading")
-        )
+        .arg(opt("dry-run", "Perform all checks without uploading"))
         .arg(opt("registry", "Registry to publish to").value_name("REGISTRY"))
 }
@@ -23,16 +27,19 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
     let ws = args.workspace(config)?;
     let index = args.index(config)?;
-    ops::publish(&ws, &PublishOpts {
-        config,
-        token: args.value_of("token").map(|s| s.to_string()),
-        index,
-        verify: !args.is_present("no-verify"),
-        allow_dirty: args.is_present("allow-dirty"),
-        target: args.target(),
-        jobs: args.jobs()?,
-        dry_run: args.is_present("dry-run"),
-        registry,
-    })?;
+    ops::publish(
+        &ws,
+        &PublishOpts {
+            config,
+            token: args.value_of("token").map(|s| s.to_string()),
+            index,
+            verify: !args.is_present("no-verify"),
+            allow_dirty: args.is_present("allow-dirty"),
+            target: args.target(),
+            jobs: args.jobs()?,
+            dry_run: args.is_present("dry-run"),
+            registry,
+        },
+    )?;
     Ok(())
 }

View file

@@ -5,8 +5,10 @@ use cargo::print_json;
 pub fn cli() -> App {
     subcommand("read-manifest")
-        .about("Deprecated, use `cargo metadata --no-deps` instead.
-Print a JSON representation of a Cargo.toml manifest.")
+        .about(
+            "Deprecated, use `cargo metadata --no-deps` instead.
+Print a JSON representation of a Cargo.toml manifest.",
+        )
         .arg_manifest_path()
 }

View file

@@ -1,10 +1,11 @@
 use command_prelude::*;
 
 use cargo::core::Verbosity;
-use cargo::ops::{self, CompileMode, CompileFilter};
+use cargo::ops::{self, CompileFilter, CompileMode};
 
 pub fn cli() -> App {
-    subcommand("run").alias("r")
+    subcommand("run")
+        .alias("r")
         .setting(AppSettings::TrailingVarArg)
         .about("Run the main binary of the local package (src/main.rs)")
         .arg(Arg::with_name("args").multiple(true))
@@ -19,7 +20,8 @@ pub fn cli() -> App {
         .arg_target_triple("Build for the target triple")
         .arg_manifest_path()
         .arg_message_format()
-        .after_help("\
+        .after_help(
+            "\
 If neither `--bin` nor `--example` are given, then if the project only has one
 bin target it will be run. Otherwise `--bin` specifies the bin target to run,
 and `--example` specifies the example target to run. At most one of `--bin` or
@@ -28,15 +30,14 @@ and `--example` specifies the example target to run. At most one of `--bin` or
 All of the trailing arguments are passed to the binary to run. If you're passing
 arguments to both Cargo and the binary, the ones after `--` go to the binary,
 the ones before go to Cargo.
-")
+",
+        )
 }
 
 pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
     let ws = args.workspace(config)?;
-    let mut compile_opts = args.compile_options_for_single_package(
-        config, CompileMode::Build,
-    )?;
+    let mut compile_opts = args.compile_options_for_single_package(config, CompileMode::Build)?;
 
     if !args.is_present("example") && !args.is_present("bin") {
         compile_opts.filter = CompileFilter::Default {
             required_features_filterable: false,

View file

@ -22,15 +22,13 @@ pub fn cli() -> App {
"Build all targets (lib and bin targets by default)", "Build all targets (lib and bin targets by default)",
) )
.arg_release("Build artifacts in release mode, with optimizations") .arg_release("Build artifacts in release mode, with optimizations")
.arg( .arg(opt("profile", "Profile to build the selected target for").value_name("PROFILE"))
opt("profile", "Profile to build the selected target for")
.value_name("PROFILE")
)
.arg_features() .arg_features()
.arg_target_triple("Target triple which compiles will be for") .arg_target_triple("Target triple which compiles will be for")
.arg_manifest_path() .arg_manifest_path()
.arg_message_format() .arg_message_format()
.after_help("\ .after_help(
"\
The specified target for the current package (or package specified by SPEC if The specified target for the current package (or package specified by SPEC if
provided) will be compiled along with all of its dependencies. The specified provided) will be compiled along with all of its dependencies. The specified
<args>... will all be passed to the final compiler invocation, not any of the <args>... will all be passed to the final compiler invocation, not any of the
@ -43,7 +41,8 @@ target is available for the current package the filters of --lib, --bin, etc,
must be used to select which target is compiled. To pass flags to all compiler must be used to select which target is compiled. To pass flags to all compiler
processes spawned by Cargo, use the $RUSTFLAGS environment variable or the processes spawned by Cargo, use the $RUSTFLAGS environment variable or the
`build.rustflags` configuration option. `build.rustflags` configuration option.
") ",
)
} }
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
@@ -54,14 +53,15 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
Some("bench") => CompileMode::Bench, Some("bench") => CompileMode::Bench,
Some("check") => CompileMode::Check { test: false }, Some("check") => CompileMode::Check { test: false },
Some(mode) => { Some(mode) => {
let err = format_err!("unknown profile: `{}`, use dev, let err = format_err!(
test, or bench", mode); "unknown profile: `{}`, use dev,
test, or bench",
mode
);
return Err(CliError::new(err, 101)); return Err(CliError::new(err, 101));
} }
}; };
let mut compile_opts = args.compile_options_for_single_package( let mut compile_opts = args.compile_options_for_single_package(config, mode)?;
config, mode,
)?;
compile_opts.target_rustc_args = Some(values(args, "args")); compile_opts.target_rustc_args = Some(values(args, "args"));
ops::compile(&ws, &compile_opts)?; ops::compile(&ws, &compile_opts)?;
Ok(()) Ok(())
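
rustfmt applies the inverse rule too: a call that was manually wrapped but fits within the width limit is joined back onto a single line, as with `compile_options_for_single_package(config, mode)` above. A hedged sketch with hypothetical names:

    struct Args;

    impl Args {
        // Hypothetical stand-in for a method like
        // `compile_options_for_single_package`.
        fn options_for(&self, profile: &str, mode: u8) -> (String, u8) {
            (profile.to_string(), mode)
        }
    }

    fn main() {
        let args = Args;
        // Previously split across three lines; joined because the whole
        // call now fits on one line.
        let opts = args.options_for("dev", 0);
        println!("{:?}", opts);
    }
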
View file
@@ -7,7 +7,10 @@ pub fn cli() -> App {
.setting(AppSettings::TrailingVarArg) .setting(AppSettings::TrailingVarArg)
.about("Build a package's documentation, using specified custom flags.") .about("Build a package's documentation, using specified custom flags.")
.arg(Arg::with_name("args").multiple(true)) .arg(Arg::with_name("args").multiple(true))
.arg(opt("open", "Opens the docs in a browser after the operation")) .arg(opt(
"open",
"Opens the docs in a browser after the operation",
))
.arg_single_package("Package to document") .arg_single_package("Package to document")
.arg_jobs() .arg_jobs()
.arg_targets_all( .arg_targets_all(
@@ -25,7 +28,8 @@ pub fn cli() -> App {
.arg_release("Build artifacts in release mode, with optimizations") .arg_release("Build artifacts in release mode, with optimizations")
.arg_manifest_path() .arg_manifest_path()
.arg_message_format() .arg_message_format()
.after_help("\ .after_help(
"\
The specified target for the current package (or package specified by SPEC if The specified target for the current package (or package specified by SPEC if
provided) will be documented with the specified <opts>... being passed to the provided) will be documented with the specified <opts>... being passed to the
final rustdoc invocation. Dependencies will not be documented as part of this final rustdoc invocation. Dependencies will not be documented as part of this
@@ -37,14 +41,14 @@ If the --package argument is given, then SPEC is a package id specification
which indicates which package should be documented. If it is not given, then the which indicates which package should be documented. If it is not given, then the
current package is documented. For more information on SPEC and its format, see current package is documented. For more information on SPEC and its format, see
the `cargo help pkgid` command. the `cargo help pkgid` command.
") ",
)
} }
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let ws = args.workspace(config)?; let ws = args.workspace(config)?;
let mut compile_opts = args.compile_options_for_single_package( let mut compile_opts =
config, CompileMode::Doc { deps: false }, args.compile_options_for_single_package(config, CompileMode::Doc { deps: false })?;
)?;
compile_opts.target_rustdoc_args = Some(values(args, "args")); compile_opts.target_rustdoc_args = Some(values(args, "args"));
let doc_opts = DocOptions { let doc_opts = DocOptions {
open_result: args.is_present("open"), open_result: args.is_present("open"),
View file
@@ -10,8 +10,10 @@ pub fn cli() -> App {
.arg(Arg::with_name("query").multiple(true)) .arg(Arg::with_name("query").multiple(true))
.arg_index() .arg_index()
.arg( .arg(
opt("limit", "Limit the number of results (default: 10, max: 100)") opt(
.value_name("LIMIT") "limit",
"Limit the number of results (default: 10, max: 100)",
).value_name("LIMIT"),
) )
.arg(opt("registry", "Registry to use").value_name("REGISTRY")) .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
} }
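
When a nested call such as `opt(..)` must itself wrap, any follow-on method hangs off the closing parenthesis, which produces the `).value_name("LIMIT"),` line above. A self-contained sketch of that shape (this `Opt` builder is hypothetical, standing in for clap's):

    struct Opt {
        name: String,
        help: String,
        value_name: Option<String>,
    }

    fn opt(name: &str, help: &str) -> Opt {
        Opt {
            name: name.to_string(),
            help: help.to_string(),
            value_name: None,
        }
    }

    impl Opt {
        fn value_name(mut self, v: &str) -> Opt {
            self.value_name = Some(v.to_string());
            self
        }
    }

    fn main() {
        // The wrapped inner call, with the chained method attached to `)`.
        let limit = opt(
            "limit",
            "Limit the number of results (default: 10, max: 100)",
        ).value_name("LIMIT");
        println!("--{} <{}>: {}", limit.name, limit.value_name.unwrap(), limit.help);
    }
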
View file
@@ -3,18 +3,19 @@ use command_prelude::*;
use cargo::ops::{self, CompileMode}; use cargo::ops::{self, CompileMode};
pub fn cli() -> App { pub fn cli() -> App {
subcommand("test").alias("t") subcommand("test")
.alias("t")
.setting(AppSettings::TrailingVarArg) .setting(AppSettings::TrailingVarArg)
.about("Execute all unit and integration tests of a local package") .about("Execute all unit and integration tests of a local package")
.arg( .arg(
Arg::with_name("TESTNAME").help( Arg::with_name("TESTNAME")
"If specified, only run tests containing this string in their names" .help("If specified, only run tests containing this string in their names"),
)
) )
.arg( .arg(
Arg::with_name("args").help( Arg::with_name("args")
"Arguments for the test binary" .help("Arguments for the test binary")
).multiple(true).last(true) .multiple(true)
.last(true),
) )
.arg_targets_all( .arg_targets_all(
"Test only this package's library", "Test only this package's library",
@@ -29,12 +30,8 @@ pub fn cli() -> App {
"Test all targets (default)", "Test all targets (default)",
) )
.arg(opt("doc", "Test only this library's documentation")) .arg(opt("doc", "Test only this library's documentation"))
.arg( .arg(opt("no-run", "Compile, but don't run tests"))
opt("no-run", "Compile, but don't run tests") .arg(opt("no-fail-fast", "Run all tests regardless of failure"))
)
.arg(
opt("no-fail-fast", "Run all tests regardless of failure")
)
.arg_package( .arg_package(
"Package to run tests for", "Package to run tests for",
"Test all packages in the workspace", "Test all packages in the workspace",
@@ -46,7 +43,8 @@ pub fn cli() -> App {
.arg_target_triple("Build for the target triple") .arg_target_triple("Build for the target triple")
.arg_manifest_path() .arg_manifest_path()
.arg_message_format() .arg_message_format()
.after_help("\ .after_help(
"\
All of the trailing arguments are passed to the test binaries generated for All of the trailing arguments are passed to the test binaries generated for
filtering tests and generally providing options configuring how they run. For filtering tests and generally providing options configuring how they run. For
example, this will run all tests with the name `foo` in their name: example, this will run all tests with the name `foo` in their name:
@@ -81,7 +79,8 @@ by passing `--nocapture` to the test binaries:
To get the list of all options available for the test binaries use this: To get the list of all options available for the test binaries use this:
cargo test -- --help cargo test -- --help
") ",
)
} }
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
@@ -91,12 +90,18 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let doc = args.is_present("doc"); let doc = args.is_present("doc");
if doc { if doc {
compile_opts.mode = ops::CompileMode::Doctest; compile_opts.mode = ops::CompileMode::Doctest;
compile_opts.filter = ops::CompileFilter::new(true, compile_opts.filter = ops::CompileFilter::new(
Vec::new(), false, true,
Vec::new(), false, Vec::new(),
Vec::new(), false, false,
Vec::new(), false, Vec::new(),
false); false,
Vec::new(),
false,
Vec::new(),
false,
false,
);
} }
let ops = ops::TestOptions { let ops = ops::TestOptions {
@@ -110,16 +115,18 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
// important so we explicitly mention it and reconfigure // important so we explicitly mention it and reconfigure
let mut test_args = vec![]; let mut test_args = vec![];
test_args.extend(args.value_of("TESTNAME").into_iter().map(|s| s.to_string())); test_args.extend(args.value_of("TESTNAME").into_iter().map(|s| s.to_string()));
test_args.extend(args.values_of("args").unwrap_or_default().map(|s| s.to_string())); test_args.extend(
args.values_of("args")
.unwrap_or_default()
.map(|s| s.to_string()),
);
let err = ops::run_tests(&ws, &ops, &test_args)?; let err = ops::run_tests(&ws, &ops, &test_args)?;
return match err { return match err {
None => Ok(()), None => Ok(()),
Some(err) => { Some(err) => Err(match err.exit.as_ref().and_then(|e| e.code()) {
Err(match err.exit.as_ref().and_then(|e| e.code()) { Some(i) => CliError::new(format_err!("{}", err.hint(&ws)), i),
Some(i) => CliError::new(format_err!("{}", err.hint(&ws)), i), None => CliError::new(err.into(), 101),
None => CliError::new(err.into(), 101), }),
})
}
}; };
} }
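
The `CompileFilter::new` call shows the layout once an argument list overflows: one argument per line, in order, each with a trailing comma. A sketch under the same rule (`filter_targets` is a hypothetical function, not Cargo's API):

    fn filter_targets(
        lib_only: bool,
        bins: Vec<String>,
        all_bins: bool,
        tests: Vec<String>,
        all_tests: bool,
    ) -> usize {
        bins.len() + tests.len() + usize::from(lib_only) + usize::from(all_bins)
            + usize::from(all_tests)
    }

    fn main() {
        // Too wide to stay flat, so every argument gets its own line.
        let n = filter_targets(
            true,
            vec!["cargo".to_string(), "build".to_string(), "check".to_string()],
            false,
            vec!["integration".to_string(), "doc".to_string()],
            false,
        );
        println!("{}", n);
    }
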
View file
@@ -8,18 +8,18 @@ pub fn cli() -> App {
.arg(Arg::with_name("spec").multiple(true)) .arg(Arg::with_name("spec").multiple(true))
.arg( .arg(
opt("bin", "Only uninstall the binary NAME") opt("bin", "Only uninstall the binary NAME")
.value_name("NAME").multiple(true) .value_name("NAME")
.multiple(true),
) )
.arg( .arg(opt("root", "Directory to uninstall packages from").value_name("DIR"))
opt("root", "Directory to uninstall packages from") .after_help(
.value_name("DIR") "\
)
.after_help("\
The argument SPEC is a package id specification (see `cargo help pkgid`) to The argument SPEC is a package id specification (see `cargo help pkgid`) to
specify which crate should be uninstalled. By default all binaries are specify which crate should be uninstalled. By default all binaries are
uninstalled for a crate but the `--bin` and `--example` flags can be used to uninstalled for a crate but the `--bin` and `--example` flags can be used to
only uninstall particular binaries. only uninstall particular binaries.
") ",
)
} }
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
View file
@@ -7,15 +7,18 @@ pub fn cli() -> App {
.about("Update dependencies as recorded in the local lock file") .about("Update dependencies as recorded in the local lock file")
.arg( .arg(
opt("package", "Package to clean artifacts for") opt("package", "Package to clean artifacts for")
.short("p").value_name("SPEC").multiple(true) .short("p")
) .value_name("SPEC")
.arg(opt("aggressive", "Force updating all dependencies of <name> as well")) .multiple(true),
.arg(
opt("precise", "Update a single dependency to exactly PRECISE")
.value_name("PRECISE")
) )
.arg(opt(
"aggressive",
"Force updating all dependencies of <name> as well",
))
.arg(opt("precise", "Update a single dependency to exactly PRECISE").value_name("PRECISE"))
.arg_manifest_path() .arg_manifest_path()
.after_help("\ .after_help(
"\
This command requires that a `Cargo.lock` already exists as generated by This command requires that a `Cargo.lock` already exists as generated by
`cargo build` or related commands. `cargo build` or related commands.
@@ -35,7 +38,8 @@ If SPEC is not given, then all dependencies will be re-resolved and
updated. updated.
For more information about package id specifications, see `cargo help pkgid`. For more information about package id specifications, see `cargo help pkgid`.
") ",
)
} }
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
View file
@@ -32,7 +32,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let file = File::open(&filename); let file = File::open(&filename);
match file.and_then(|mut f| f.read_to_string(&mut contents)) { match file.and_then(|mut f| f.read_to_string(&mut contents)) {
Ok(_) => {} Ok(_) => {}
Err(e) => fail("invalid", &format!("error reading file: {}", e)) Err(e) => fail("invalid", &format!("error reading file: {}", e)),
}; };
if contents.parse::<toml::Value>().is_err() { if contents.parse::<toml::Value>().is_err() {
fail("invalid", "invalid-format"); fail("invalid", "invalid-format");
View file
@@ -3,8 +3,7 @@ use command_prelude::*;
use cargo; use cargo;
pub fn cli() -> App { pub fn cli() -> App {
subcommand("version") subcommand("version").about("Show version information")
.about("Show version information")
} }
pub fn exec(_config: &mut Config, _args: &ArgMatches) -> CliResult { pub fn exec(_config: &mut Config, _args: &ArgMatches) -> CliResult {
View file
@@ -6,14 +6,16 @@ pub fn cli() -> App {
subcommand("yank") subcommand("yank")
.about("Remove a pushed crate from the index") .about("Remove a pushed crate from the index")
.arg(Arg::with_name("crate")) .arg(Arg::with_name("crate"))
.arg( .arg(opt("vers", "The version to yank or un-yank").value_name("VERSION"))
opt("vers", "The version to yank or un-yank").value_name("VERSION") .arg(opt(
) "undo",
.arg(opt("undo", "Undo a yank, putting a version back into the index")) "Undo a yank, putting a version back into the index",
))
.arg(opt("index", "Registry index to yank from").value_name("INDEX")) .arg(opt("index", "Registry index to yank from").value_name("INDEX"))
.arg(opt("token", "API token to use when authenticating").value_name("TOKEN")) .arg(opt("token", "API token to use when authenticating").value_name("TOKEN"))
.arg(opt("registry", "Registry to use").value_name("REGISTRY")) .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
.after_help("\ .after_help(
"\
The yank command removes a previously pushed crate's version from the server's The yank command removes a previously pushed crate's version from the server's
index. This command does not delete any data, and the crate will still be index. This command does not delete any data, and the crate will still be
available for download via the registry's download link. available for download via the registry's download link.
@@ -21,18 +23,21 @@ available for download via the registry's download link.
Note that existing crates locked to a yanked version will still be able to Note that existing crates locked to a yanked version will still be able to
download the yanked version to use it. Cargo will, however, not allow any new download the yanked version to use it. Cargo will, however, not allow any new
crates to be locked to any yanked version. crates to be locked to any yanked version.
") ",
)
} }
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let registry = args.registry(config)?; let registry = args.registry(config)?;
ops::yank(config, ops::yank(
args.value_of("crate").map(|s| s.to_string()), config,
args.value_of("vers").map(|s| s.to_string()), args.value_of("crate").map(|s| s.to_string()),
args.value_of("token").map(|s| s.to_string()), args.value_of("vers").map(|s| s.to_string()),
args.value_of("index").map(|s| s.to_string()), args.value_of("token").map(|s| s.to_string()),
args.is_present("undo"), args.value_of("index").map(|s| s.to_string()),
registry)?; args.is_present("undo"),
registry,
)?;
Ok(()) Ok(())
} }
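
Each argument to `ops::yank` is the same `Option<&str>` to `Option<String>` conversion via `.map(|s| s.to_string())`, and after formatting the call reads as a vertical list of those conversions. A standalone sketch of the idiom (no clap; `value_of` here is a hypothetical stand-in for `ArgMatches::value_of`):

    fn value_of<'a>(flags: &'a [(&str, &str)], key: &str) -> Option<&'a str> {
        flags.iter().find(|(k, _)| *k == key).map(|(_, v)| *v)
    }

    fn main() {
        let flags = [("vers", "1.0.2"), ("token", "secret")];
        // Borrowed lookups turned into owned values, one per line,
        // mirroring the formatted `ops::yank(...)` call.
        let vers = value_of(&flags, "vers").map(|s| s.to_string());
        let token = value_of(&flags, "token").map(|s| s.to_string());
        let index = value_of(&flags, "index").map(|s| s.to_string());
        println!("{:?} {:?} {:?}", vers, token, index);
    }
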
View file
@@ -6,10 +6,10 @@ use semver::VersionReq;
use semver::ReqParseError; use semver::ReqParseError;
use serde::ser; use serde::ser;
use core::{SourceId, Summary, PackageId}; use core::{PackageId, SourceId, Summary};
use core::interning::InternedString; use core::interning::InternedString;
use util::{Cfg, CfgExpr, Config}; use util::{Cfg, CfgExpr, Config};
use util::errors::{CargoResult, CargoResultExt, CargoError}; use util::errors::{CargoError, CargoResult, CargoResultExt};
/// Information about a dependency requested by a Cargo manifest. /// Information about a dependency requested by a Cargo manifest.
/// Cheap to copy. /// Cheap to copy.
@@ -61,7 +61,8 @@ struct SerializedDependency<'a> {
impl ser::Serialize for Dependency { impl ser::Serialize for Dependency {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer, where
S: ser::Serializer,
{ {
SerializedDependency { SerializedDependency {
name: &*self.name(), name: &*self.name(),
@@ -84,9 +85,10 @@ pub enum Kind {
Build, Build,
} }
fn parse_req_with_deprecated(req: &str, fn parse_req_with_deprecated(
extra: Option<(&PackageId, &Config)>) req: &str,
-> CargoResult<VersionReq> { extra: Option<(&PackageId, &Config)>,
) -> CargoResult<VersionReq> {
match VersionReq::parse(req) { match VersionReq::parse(req) {
Err(e) => { Err(e) => {
let (inside, config) = match extra { let (inside, config) = match extra {
@@ -95,7 +97,8 @@ fn parse_req_with_deprecated(req: &str,
}; };
match e { match e {
ReqParseError::DeprecatedVersionRequirement(requirement) => { ReqParseError::DeprecatedVersionRequirement(requirement) => {
let msg = format!("\ let msg = format!(
"\
parsed version requirement `{}` is no longer valid parsed version requirement `{}` is no longer valid
Previous versions of Cargo accepted this malformed requirement, Previous versions of Cargo accepted this malformed requirement,
@@ -106,21 +109,26 @@ This will soon become a hard error, so it's either recommended to
update to a fixed version or contact the upstream maintainer about update to a fixed version or contact the upstream maintainer about
this warning. this warning.
", ",
req, inside.name(), inside.version(), requirement); req,
inside.name(),
inside.version(),
requirement
);
config.shell().warn(&msg)?; config.shell().warn(&msg)?;
Ok(requirement) Ok(requirement)
} }
e => Err(e.into()), e => Err(e.into()),
} }
}, }
Ok(v) => Ok(v), Ok(v) => Ok(v),
} }
} }
impl ser::Serialize for Kind { impl ser::Serialize for Kind {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer, where
S: ser::Serializer,
{ {
match *self { match *self {
Kind::Normal => None, Kind::Normal => None,
@@ -132,15 +140,17 @@ impl ser::Serialize for Kind {
impl Dependency { impl Dependency {
/// Attempt to create a `Dependency` from an entry in the manifest. /// Attempt to create a `Dependency` from an entry in the manifest.
pub fn parse(name: &str, pub fn parse(
version: Option<&str>, name: &str,
source_id: &SourceId, version: Option<&str>,
inside: &PackageId, source_id: &SourceId,
config: &Config) -> CargoResult<Dependency> { inside: &PackageId,
config: &Config,
) -> CargoResult<Dependency> {
let arg = Some((inside, config)); let arg = Some((inside, config));
let (specified_req, version_req) = match version { let (specified_req, version_req) = match version {
Some(v) => (true, parse_req_with_deprecated(v, arg)?), Some(v) => (true, parse_req_with_deprecated(v, arg)?),
None => (false, VersionReq::any()) None => (false, VersionReq::any()),
}; };
let mut ret = Dependency::new_override(name, source_id); let mut ret = Dependency::new_override(name, source_id);
@@ -154,12 +164,14 @@ impl Dependency {
} }
/// Attempt to create a `Dependency` from an entry in the manifest. /// Attempt to create a `Dependency` from an entry in the manifest.
pub fn parse_no_deprecated(name: &str, pub fn parse_no_deprecated(
version: Option<&str>, name: &str,
source_id: &SourceId) -> CargoResult<Dependency> { version: Option<&str>,
source_id: &SourceId,
) -> CargoResult<Dependency> {
let (specified_req, version_req) = match version { let (specified_req, version_req) = match version {
Some(v) => (true, parse_req_with_deprecated(v, None)?), Some(v) => (true, parse_req_with_deprecated(v, None)?),
None => (false, VersionReq::any()) None => (false, VersionReq::any()),
}; };
let mut ret = Dependency::new_override(name, source_id); let mut ret = Dependency::new_override(name, source_id);
@@ -279,11 +291,13 @@ impl Dependency {
pub fn lock_to(&mut self, id: &PackageId) -> &mut Dependency { pub fn lock_to(&mut self, id: &PackageId) -> &mut Dependency {
assert_eq!(self.inner.source_id, *id.source_id()); assert_eq!(self.inner.source_id, *id.source_id());
assert!(self.inner.req.matches(id.version())); assert!(self.inner.req.matches(id.version()));
trace!("locking dep from `{}` with `{}` at {} to {}", trace!(
self.name(), "locking dep from `{}` with `{}` at {} to {}",
self.version_req(), self.name(),
self.source_id(), self.version_req(),
id); self.source_id(),
id
);
self.set_version_req(VersionReq::exact(id.version())) self.set_version_req(VersionReq::exact(id.version()))
.set_source_id(id.source_id().clone()) .set_source_id(id.source_id().clone())
} }
@@ -330,19 +344,19 @@ impl Dependency {
/// Returns true if the package (`sum`) can fulfill this dependency request. /// Returns true if the package (`sum`) can fulfill this dependency request.
pub fn matches_ignoring_source(&self, sum: &Summary) -> bool { pub fn matches_ignoring_source(&self, sum: &Summary) -> bool {
self.name() == sum.package_id().name() && self.name() == sum.package_id().name()
self.version_req().matches(sum.package_id().version()) && self.version_req().matches(sum.package_id().version())
} }
/// Returns true if the package (`id`) can fulfill this dependency request. /// Returns true if the package (`id`) can fulfill this dependency request.
pub fn matches_id(&self, id: &PackageId) -> bool { pub fn matches_id(&self, id: &PackageId) -> bool {
self.inner.name == id.name() && self.inner.name == id.name()
(self.inner.only_match_name || (self.inner.req.matches(id.version()) && && (self.inner.only_match_name
&self.inner.source_id == id.source_id())) || (self.inner.req.matches(id.version())
&& &self.inner.source_id == id.source_id()))
} }
pub fn map_source(mut self, to_replace: &SourceId, replace_with: &SourceId) pub fn map_source(mut self, to_replace: &SourceId, replace_with: &SourceId) -> Dependency {
-> Dependency {
if self.source_id() != to_replace { if self.source_id() != to_replace {
self self
} else { } else {
@@ -356,19 +370,18 @@ impl Platform {
pub fn matches(&self, name: &str, cfg: Option<&[Cfg]>) -> bool { pub fn matches(&self, name: &str, cfg: Option<&[Cfg]>) -> bool {
match *self { match *self {
Platform::Name(ref p) => p == name, Platform::Name(ref p) => p == name,
Platform::Cfg(ref p) => { Platform::Cfg(ref p) => match cfg {
match cfg { Some(cfg) => p.matches(cfg),
Some(cfg) => p.matches(cfg), None => false,
None => false, },
}
}
} }
} }
} }
impl ser::Serialize for Platform { impl ser::Serialize for Platform {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer, where
S: ser::Serializer,
{ {
self.to_string().serialize(s) self.to_string().serialize(s)
} }
@@ -379,10 +392,10 @@ impl FromStr for Platform {
fn from_str(s: &str) -> CargoResult<Platform> { fn from_str(s: &str) -> CargoResult<Platform> {
if s.starts_with("cfg(") && s.ends_with(')') { if s.starts_with("cfg(") && s.ends_with(')') {
let s = &s[4..s.len()-1]; let s = &s[4..s.len() - 1];
let p = s.parse().map(Platform::Cfg).chain_err(|| { let p = s.parse()
format_err!("failed to parse `{}` as a cfg expression", s) .map(Platform::Cfg)
})?; .chain_err(|| format_err!("failed to parse `{}` as a cfg expression", s))?;
Ok(p) Ok(p)
} else { } else {
Ok(Platform::Name(s.to_string())) Ok(Platform::Name(s.to_string()))
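
Three conventions dominate the `dependency.rs` changes: `where` bounds drop onto their own indented lines; overflowing boolean expressions break before the operator so continuation lines lead with `&&`/`||` (as in `matches_id`); and a match arm whose body is itself a match is collapsed onto the arm (`Platform::Cfg(ref p) => match cfg { .. }`). A hedged illustration of all three:

    use std::fmt::Display;

    // `where` bounds formatted one per indented line.
    fn describe<T>(value: T) -> String
    where
        T: Display,
    {
        value.to_string()
    }

    fn matches(name: &str, version_ok: bool, source_ok: bool, only_name: bool) -> bool {
        // Continuation lines lead with the operator, mirroring `matches_id`.
        name == "cargo"
            && (only_name || (version_ok && source_ok))
    }

    fn main() {
        let cfg: Option<&str> = Some("unix");
        // The nested match sits directly on the arm instead of in a block.
        let hit = match cfg {
            Some(c) => match c {
                "unix" => true,
                _ => false,
            },
            None => false,
        };
        println!("{} {} {}", describe(1), matches("cargo", true, true, false), hit);
    }
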
View file
@@ -46,8 +46,7 @@ use std::str::FromStr;
use util::errors::CargoResult; use util::errors::CargoResult;
/// The epoch of the compiler (RFC 2052) /// The epoch of the compiler (RFC 2052)
#[derive(Clone, Copy, Debug, Hash, PartialOrd, Ord, Eq, PartialEq)] #[derive(Clone, Copy, Debug, Hash, PartialOrd, Ord, Eq, PartialEq, Serialize, Deserialize)]
#[derive(Serialize, Deserialize)]
pub enum Epoch { pub enum Epoch {
/// The 2015 epoch /// The 2015 epoch
Epoch2015, Epoch2015,
@@ -69,7 +68,7 @@ impl FromStr for Epoch {
match s { match s {
"2015" => Ok(Epoch::Epoch2015), "2015" => Ok(Epoch::Epoch2015),
"2018" => Ok(Epoch::Epoch2018), "2018" => Ok(Epoch::Epoch2018),
_ => Err(()) _ => Err(()),
} }
} }
} }
@@ -174,8 +173,7 @@ pub struct Feature {
} }
impl Features { impl Features {
pub fn new(features: &[String], pub fn new(features: &[String], warnings: &mut Vec<String>) -> CargoResult<Features> {
warnings: &mut Vec<String>) -> CargoResult<Features> {
let mut ret = Features::default(); let mut ret = Features::default();
for feature in features { for feature in features {
ret.add(feature, warnings)?; ret.add(feature, warnings)?;
@@ -196,17 +194,20 @@ impl Features {
match status { match status {
Status::Stable => { Status::Stable => {
let warning = format!("the cargo feature `{}` is now stable \ let warning = format!(
and is no longer necessary to be listed \ "the cargo feature `{}` is now stable \
in the manifest", feature); and is no longer necessary to be listed \
in the manifest",
feature
);
warnings.push(warning); warnings.push(warning);
} }
Status::Unstable if !nightly_features_allowed() => { Status::Unstable if !nightly_features_allowed() => bail!(
bail!("the cargo feature `{}` requires a nightly version of \ "the cargo feature `{}` requires a nightly version of \
Cargo, but this is the `{}` channel", Cargo, but this is the `{}` channel",
feature, feature,
channel()) channel()
} ),
Status::Unstable => {} Status::Unstable => {}
} }
@@ -227,15 +228,20 @@ impl Features {
let mut msg = format!("feature `{}` is required", feature); let mut msg = format!("feature `{}` is required", feature);
if nightly_features_allowed() { if nightly_features_allowed() {
let s = format!("\n\nconsider adding `cargo-features = [\"{0}\"]` \ let s = format!(
to the manifest", feature); "\n\nconsider adding `cargo-features = [\"{0}\"]` \
to the manifest",
feature
);
msg.push_str(&s); msg.push_str(&s);
} else { } else {
let s = format!("\n\n\ let s = format!(
this Cargo does not support nightly features, but if you\n\ "\n\n\
switch to nightly channel you can add\n\ this Cargo does not support nightly features, but if you\n\
`cargo-features = [\"{}\"]` to enable this feature", switch to nightly channel you can add\n\
feature); `cargo-features = [\"{}\"]` to enable this feature",
feature
);
msg.push_str(&s); msg.push_str(&s);
} }
bail!("{}", msg); bail!("{}", msg);
@@ -299,8 +305,7 @@ impl CliUnstable {
fn parse_bool(value: Option<&str>) -> CargoResult<bool> { fn parse_bool(value: Option<&str>) -> CargoResult<bool> {
match value { match value {
None | None | Some("yes") => Ok(true),
Some("yes") => Ok(true),
Some("no") => Ok(false), Some("no") => Ok(false),
Some(s) => bail!("expected `no` or `yes`, found: {}", s), Some(s) => bail!("expected `no` or `yes`, found: {}", s),
} }
@@ -321,7 +326,9 @@ impl CliUnstable {
fn channel() -> String { fn channel() -> String {
env::var("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS").unwrap_or_else(|_| { env::var("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS").unwrap_or_else(|_| {
::version().cfg_info.map(|c| c.release_channel) ::version()
.cfg_info
.map(|c| c.release_channel)
.unwrap_or_else(|| String::from("dev")) .unwrap_or_else(|| String::from("dev"))
}) })
} }
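
Two smaller rules appear in `features.rs`: `#[derive(..)]` attributes merge into one list when they fit, and pattern alternatives that fit share a line, turning the split `None |` / `Some("yes") => Ok(true)` into a single arm. A sketch of the `parse_bool` shape (error type simplified to `String`):

    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    enum Flag {
        On,
        Off,
    }

    // Alternatives share a line once they fit within the width limit.
    fn parse_bool(value: Option<&str>) -> Result<Flag, String> {
        match value {
            None | Some("yes") => Ok(Flag::On),
            Some("no") => Ok(Flag::Off),
            Some(s) => Err(format!("expected `no` or `yes`, found: {}", s)),
        }
    }

    fn main() {
        println!("{:?}", parse_bool(None));
        println!("{:?}", parse_bool(Some("no")));
        println!("{:?}", parse_bool(Some("maybe")));
    }
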
View file
@@ -34,11 +34,17 @@ impl InternedString {
pub fn new(str: &str) -> InternedString { pub fn new(str: &str) -> InternedString {
let mut cache = STRING_CASHE.write().unwrap(); let mut cache = STRING_CASHE.write().unwrap();
if let Some(&s) = cache.get(str) { if let Some(&s) = cache.get(str) {
return InternedString { ptr: s.as_ptr(), len: s.len() }; return InternedString {
ptr: s.as_ptr(),
len: s.len(),
};
} }
let s = leek(str.to_string()); let s = leek(str.to_string());
cache.insert(s); cache.insert(s);
InternedString { ptr: s.as_ptr(), len: s.len() } InternedString {
ptr: s.as_ptr(),
len: s.len(),
}
} }
pub fn to_inner(&self) -> &'static str { pub fn to_inner(&self) -> &'static str {
unsafe { unsafe {
@@ -87,4 +93,4 @@ impl PartialOrd for InternedString {
} }
unsafe impl Send for InternedString {} unsafe impl Send for InternedString {}
unsafe impl Sync for InternedString {} unsafe impl Sync for InternedString {}
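
The `InternedString` hunks show the struct-literal rule: a literal that no longer fits on one line is expanded to one field per line, which is why `InternedString { ptr: .., len: .. }` grows to four lines inside the already-indented `if`. A standalone sketch:

    #[derive(Debug)]
    struct Interned {
        ptr: *const u8,
        len: usize,
    }

    fn intern(s: &'static str) -> Interned {
        // Field-per-line layout once the literal overflows, matching the
        // formatted `InternedString { ptr, len }` construction sites.
        Interned {
            ptr: s.as_ptr(),
            len: s.len(),
        }
    }

    fn main() {
        println!("{:?}", intern("cargo"));
    }
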
View file
@@ -1,6 +1,6 @@
use std::collections::{HashMap, BTreeMap}; use std::collections::{BTreeMap, HashMap};
use std::fmt; use std::fmt;
use std::path::{PathBuf, Path}; use std::path::{Path, PathBuf};
use std::rc::Rc; use std::rc::Rc;
use std::hash::{Hash, Hasher}; use std::hash::{Hash, Hasher};
@@ -8,8 +8,8 @@ use semver::Version;
use serde::ser; use serde::ser;
use url::Url; use url::Url;
use core::{Dependency, PackageId, Summary, SourceId, PackageIdSpec}; use core::{Dependency, PackageId, PackageIdSpec, SourceId, Summary};
use core::{WorkspaceConfig, Epoch, Features, Feature}; use core::{Epoch, Feature, Features, WorkspaceConfig};
use core::interning::InternedString; use core::interning::InternedString;
use util::Config; use util::Config;
use util::toml::TomlManifest; use util::toml::TomlManifest;
@@ -48,7 +48,7 @@ pub struct Manifest {
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct DelayedWarning { pub struct DelayedWarning {
pub message: String, pub message: String,
pub is_critical: bool pub is_critical: bool,
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@@ -74,11 +74,11 @@ pub struct ManifestMetadata {
pub categories: Vec<String>, pub categories: Vec<String>,
pub license: Option<String>, pub license: Option<String>,
pub license_file: Option<String>, pub license_file: Option<String>,
pub description: Option<String>, // not markdown pub description: Option<String>, // not markdown
pub readme: Option<String>, // file, not contents pub readme: Option<String>, // file, not contents
pub homepage: Option<String>, // url pub homepage: Option<String>, // url
pub repository: Option<String>, // url pub repository: Option<String>, // url
pub documentation: Option<String>, // url pub documentation: Option<String>, // url
pub badges: BTreeMap<String, BTreeMap<String, String>>, pub badges: BTreeMap<String, BTreeMap<String, String>>,
pub links: Option<String>, pub links: Option<String>,
} }
@@ -116,10 +116,7 @@ impl LibKind {
pub fn linkable(&self) -> bool { pub fn linkable(&self) -> bool {
match *self { match *self {
LibKind::Lib | LibKind::Lib | LibKind::Rlib | LibKind::Dylib | LibKind::ProcMacro => true,
LibKind::Rlib |
LibKind::Dylib |
LibKind::ProcMacro => true,
LibKind::Other(..) => false, LibKind::Other(..) => false,
} }
} }
@@ -138,7 +135,8 @@ pub enum TargetKind {
impl ser::Serialize for TargetKind { impl ser::Serialize for TargetKind {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer, where
S: ser::Serializer,
{ {
use self::TargetKind::*; use self::TargetKind::*;
match *self { match *self {
@@ -147,42 +145,31 @@ impl ser::Serialize for TargetKind {
ExampleBin | ExampleLib(_) => vec!["example"], ExampleBin | ExampleLib(_) => vec!["example"],
Test => vec!["test"], Test => vec!["test"],
CustomBuild => vec!["custom-build"], CustomBuild => vec!["custom-build"],
Bench => vec!["bench"] Bench => vec!["bench"],
}.serialize(s) }.serialize(s)
} }
} }
// Note that most of the fields here are skipped when serializing because we // Note that most of the fields here are skipped when serializing because we
// don't want to export them just yet (becomes a public API of Cargo). Others // don't want to export them just yet (becomes a public API of Cargo). Others
// though are definitely needed! // though are definitely needed!
#[derive(Clone, PartialEq, Eq, Debug, Hash, Serialize)] #[derive(Clone, PartialEq, Eq, Debug, Hash, Serialize)]
pub struct Profile { pub struct Profile {
pub opt_level: String, pub opt_level: String,
#[serde(skip_serializing)] #[serde(skip_serializing)] pub lto: Lto,
pub lto: Lto, #[serde(skip_serializing)] pub codegen_units: Option<u32>, // None = use rustc default
#[serde(skip_serializing)] #[serde(skip_serializing)] pub rustc_args: Option<Vec<String>>,
pub codegen_units: Option<u32>, // None = use rustc default #[serde(skip_serializing)] pub rustdoc_args: Option<Vec<String>>,
#[serde(skip_serializing)]
pub rustc_args: Option<Vec<String>>,
#[serde(skip_serializing)]
pub rustdoc_args: Option<Vec<String>>,
pub debuginfo: Option<u32>, pub debuginfo: Option<u32>,
pub debug_assertions: bool, pub debug_assertions: bool,
pub overflow_checks: bool, pub overflow_checks: bool,
#[serde(skip_serializing)] #[serde(skip_serializing)] pub rpath: bool,
pub rpath: bool,
pub test: bool, pub test: bool,
#[serde(skip_serializing)] #[serde(skip_serializing)] pub doc: bool,
pub doc: bool, #[serde(skip_serializing)] pub run_custom_build: bool,
#[serde(skip_serializing)] #[serde(skip_serializing)] pub check: bool,
pub run_custom_build: bool, #[serde(skip_serializing)] pub panic: Option<String>,
#[serde(skip_serializing)] #[serde(skip_serializing)] pub incremental: bool,
pub check: bool,
#[serde(skip_serializing)]
pub panic: Option<String>,
#[serde(skip_serializing)]
pub incremental: bool,
} }
#[derive(Clone, PartialEq, Eq, Debug, Hash)] #[derive(Clone, PartialEq, Eq, Debug, Hash)]
@@ -261,22 +248,24 @@ impl ser::Serialize for Target {
} }
impl Manifest { impl Manifest {
pub fn new(summary: Summary, pub fn new(
targets: Vec<Target>, summary: Summary,
exclude: Vec<String>, targets: Vec<Target>,
include: Vec<String>, exclude: Vec<String>,
links: Option<String>, include: Vec<String>,
metadata: ManifestMetadata, links: Option<String>,
profiles: Profiles, metadata: ManifestMetadata,
publish: Option<Vec<String>>, profiles: Profiles,
publish_lockfile: bool, publish: Option<Vec<String>>,
replace: Vec<(PackageIdSpec, Dependency)>, publish_lockfile: bool,
patch: HashMap<Url, Vec<Dependency>>, replace: Vec<(PackageIdSpec, Dependency)>,
workspace: WorkspaceConfig, patch: HashMap<Url, Vec<Dependency>>,
features: Features, workspace: WorkspaceConfig,
epoch: Epoch, features: Features,
im_a_teapot: Option<bool>, epoch: Epoch,
original: Rc<TomlManifest>) -> Manifest { im_a_teapot: Option<bool>,
original: Rc<TomlManifest>,
) -> Manifest {
Manifest { Manifest {
summary, summary,
targets, targets,
@@ -298,22 +287,54 @@ impl Manifest {
} }
} }
pub fn dependencies(&self) -> &[Dependency] { self.summary.dependencies() } pub fn dependencies(&self) -> &[Dependency] {
pub fn exclude(&self) -> &[String] { &self.exclude } self.summary.dependencies()
pub fn include(&self) -> &[String] { &self.include } }
pub fn metadata(&self) -> &ManifestMetadata { &self.metadata } pub fn exclude(&self) -> &[String] {
pub fn name(&self) -> InternedString { self.package_id().name() } &self.exclude
pub fn package_id(&self) -> &PackageId { self.summary.package_id() } }
pub fn summary(&self) -> &Summary { &self.summary } pub fn include(&self) -> &[String] {
pub fn targets(&self) -> &[Target] { &self.targets } &self.include
pub fn version(&self) -> &Version { self.package_id().version() } }
pub fn warnings(&self) -> &[DelayedWarning] { &self.warnings } pub fn metadata(&self) -> &ManifestMetadata {
pub fn profiles(&self) -> &Profiles { &self.profiles } &self.metadata
pub fn publish(&self) -> &Option<Vec<String>> { &self.publish } }
pub fn publish_lockfile(&self) -> bool { self.publish_lockfile } pub fn name(&self) -> InternedString {
pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] { &self.replace } self.package_id().name()
pub fn original(&self) -> &TomlManifest { &self.original } }
pub fn patch(&self) -> &HashMap<Url, Vec<Dependency>> { &self.patch } pub fn package_id(&self) -> &PackageId {
self.summary.package_id()
}
pub fn summary(&self) -> &Summary {
&self.summary
}
pub fn targets(&self) -> &[Target] {
&self.targets
}
pub fn version(&self) -> &Version {
self.package_id().version()
}
pub fn warnings(&self) -> &[DelayedWarning] {
&self.warnings
}
pub fn profiles(&self) -> &Profiles {
&self.profiles
}
pub fn publish(&self) -> &Option<Vec<String>> {
&self.publish
}
pub fn publish_lockfile(&self) -> bool {
self.publish_lockfile
}
pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] {
&self.replace
}
pub fn original(&self) -> &TomlManifest {
&self.original
}
pub fn patch(&self) -> &HashMap<Url, Vec<Dependency>> {
&self.patch
}
pub fn links(&self) -> Option<&str> { pub fn links(&self) -> Option<&str> {
self.links.as_ref().map(|s| &s[..]) self.links.as_ref().map(|s| &s[..])
} }
@@ -327,19 +348,24 @@ impl Manifest {
} }
pub fn add_warning(&mut self, s: String) { pub fn add_warning(&mut self, s: String) {
self.warnings.push(DelayedWarning { message: s, is_critical: false }) self.warnings.push(DelayedWarning {
message: s,
is_critical: false,
})
} }
pub fn add_critical_warning(&mut self, s: String) { pub fn add_critical_warning(&mut self, s: String) {
self.warnings.push(DelayedWarning { message: s, is_critical: true }) self.warnings.push(DelayedWarning {
message: s,
is_critical: true,
})
} }
pub fn set_summary(&mut self, summary: Summary) { pub fn set_summary(&mut self, summary: Summary) {
self.summary = summary; self.summary = summary;
} }
pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) -> Manifest {
-> Manifest {
Manifest { Manifest {
summary: self.summary.map_source(to_replace, replace_with), summary: self.summary.map_source(to_replace, replace_with),
..self ..self
@@ -348,10 +374,14 @@ impl Manifest {
pub fn feature_gate(&self) -> CargoResult<()> { pub fn feature_gate(&self) -> CargoResult<()> {
if self.im_a_teapot.is_some() { if self.im_a_teapot.is_some() {
self.features.require(Feature::test_dummy_unstable()).chain_err(|| { self.features
format_err!("the `im-a-teapot` manifest key is unstable and may \ .require(Feature::test_dummy_unstable())
not work properly in England") .chain_err(|| {
})?; format_err!(
"the `im-a-teapot` manifest key is unstable and may \
not work properly in England"
)
})?;
} }
Ok(()) Ok(())
@@ -372,10 +402,12 @@ impl Manifest {
} }
impl VirtualManifest { impl VirtualManifest {
pub fn new(replace: Vec<(PackageIdSpec, Dependency)>, pub fn new(
patch: HashMap<Url, Vec<Dependency>>, replace: Vec<(PackageIdSpec, Dependency)>,
workspace: WorkspaceConfig, patch: HashMap<Url, Vec<Dependency>>,
profiles: Profiles) -> VirtualManifest { workspace: WorkspaceConfig,
profiles: Profiles,
) -> VirtualManifest {
VirtualManifest { VirtualManifest {
replace, replace,
patch, patch,
@@ -418,9 +450,7 @@ impl Target {
} }
} }
pub fn lib_target(name: &str, pub fn lib_target(name: &str, crate_targets: Vec<LibKind>, src_path: PathBuf) -> Target {
crate_targets: Vec<LibKind>,
src_path: PathBuf) -> Target {
Target { Target {
kind: TargetKind::Lib(crate_targets), kind: TargetKind::Lib(crate_targets),
name: name.to_string(), name: name.to_string(),
@@ -430,8 +460,11 @@ impl Target {
} }
} }
pub fn bin_target(name: &str, src_path: PathBuf, pub fn bin_target(
required_features: Option<Vec<String>>) -> Target { name: &str,
src_path: PathBuf,
required_features: Option<Vec<String>>,
) -> Target {
Target { Target {
kind: TargetKind::Bin, kind: TargetKind::Bin,
name: name.to_string(), name: name.to_string(),
@@ -453,10 +486,12 @@ impl Target {
} }
} }
pub fn example_target(name: &str, pub fn example_target(
crate_targets: Vec<LibKind>, name: &str,
src_path: PathBuf, crate_targets: Vec<LibKind>,
required_features: Option<Vec<String>>) -> Target { src_path: PathBuf,
required_features: Option<Vec<String>>,
) -> Target {
let kind = if crate_targets.is_empty() { let kind = if crate_targets.is_empty() {
TargetKind::ExampleBin TargetKind::ExampleBin
} else { } else {
@@ -472,8 +507,11 @@ impl Target {
} }
} }
pub fn test_target(name: &str, src_path: PathBuf, pub fn test_target(
required_features: Option<Vec<String>>) -> Target { name: &str,
src_path: PathBuf,
required_features: Option<Vec<String>>,
) -> Target {
Target { Target {
kind: TargetKind::Test, kind: TargetKind::Test,
name: name.to_string(), name: name.to_string(),
@@ -483,8 +521,11 @@ impl Target {
} }
} }
pub fn bench_target(name: &str, src_path: PathBuf, pub fn bench_target(
required_features: Option<Vec<String>>) -> Target { name: &str,
src_path: PathBuf,
required_features: Option<Vec<String>>,
) -> Target {
Target { Target {
kind: TargetKind::Bench, kind: TargetKind::Bench,
name: name.to_string(), name: name.to_string(),
@@ -494,26 +535,42 @@ impl Target {
} }
} }
pub fn name(&self) -> &str { &self.name } pub fn name(&self) -> &str {
pub fn crate_name(&self) -> String { self.name.replace("-", "_") } &self.name
pub fn src_path(&self) -> &Path { &self.src_path.path } }
pub fn required_features(&self) -> Option<&Vec<String>> { self.required_features.as_ref() } pub fn crate_name(&self) -> String {
pub fn kind(&self) -> &TargetKind { &self.kind } self.name.replace("-", "_")
pub fn tested(&self) -> bool { self.tested } }
pub fn harness(&self) -> bool { self.harness } pub fn src_path(&self) -> &Path {
pub fn documented(&self) -> bool { self.doc } &self.src_path.path
pub fn for_host(&self) -> bool { self.for_host } }
pub fn benched(&self) -> bool { self.benched } pub fn required_features(&self) -> Option<&Vec<String>> {
self.required_features.as_ref()
}
pub fn kind(&self) -> &TargetKind {
&self.kind
}
pub fn tested(&self) -> bool {
self.tested
}
pub fn harness(&self) -> bool {
self.harness
}
pub fn documented(&self) -> bool {
self.doc
}
pub fn for_host(&self) -> bool {
self.for_host
}
pub fn benched(&self) -> bool {
self.benched
}
pub fn doctested(&self) -> bool { pub fn doctested(&self) -> bool {
self.doctest && match self.kind { self.doctest && match self.kind {
TargetKind::Lib(ref kinds) => { TargetKind::Lib(ref kinds) => kinds
kinds.iter().any(|k| { .iter()
*k == LibKind::Rlib || .any(|k| *k == LibKind::Rlib || *k == LibKind::Lib || *k == LibKind::ProcMacro),
*k == LibKind::Lib ||
*k == LibKind::ProcMacro
})
}
_ => false, _ => false,
} }
} }
@@ -525,46 +582,43 @@ impl Target {
pub fn is_lib(&self) -> bool { pub fn is_lib(&self) -> bool {
match self.kind { match self.kind {
TargetKind::Lib(_) => true, TargetKind::Lib(_) => true,
_ => false _ => false,
} }
} }
pub fn is_dylib(&self) -> bool { pub fn is_dylib(&self) -> bool {
match self.kind { match self.kind {
TargetKind::Lib(ref libs) => libs.iter().any(|l| *l == LibKind::Dylib), TargetKind::Lib(ref libs) => libs.iter().any(|l| *l == LibKind::Dylib),
_ => false _ => false,
} }
} }
pub fn is_cdylib(&self) -> bool { pub fn is_cdylib(&self) -> bool {
let libs = match self.kind { let libs = match self.kind {
TargetKind::Lib(ref libs) => libs, TargetKind::Lib(ref libs) => libs,
_ => return false _ => return false,
}; };
libs.iter().any(|l| { libs.iter().any(|l| match *l {
match *l { LibKind::Other(ref s) => s == "cdylib",
LibKind::Other(ref s) => s == "cdylib", _ => false,
_ => false,
}
}) })
} }
pub fn linkable(&self) -> bool { pub fn linkable(&self) -> bool {
match self.kind { match self.kind {
TargetKind::Lib(ref kinds) => { TargetKind::Lib(ref kinds) => kinds.iter().any(|k| k.linkable()),
kinds.iter().any(|k| k.linkable()) _ => false,
}
_ => false
} }
} }
pub fn is_bin(&self) -> bool { self.kind == TargetKind::Bin } pub fn is_bin(&self) -> bool {
self.kind == TargetKind::Bin
}
pub fn is_example(&self) -> bool { pub fn is_example(&self) -> bool {
match self.kind { match self.kind {
TargetKind::ExampleBin | TargetKind::ExampleBin | TargetKind::ExampleLib(..) => true,
TargetKind::ExampleLib(..) => true, _ => false,
_ => false
} }
} }
@@ -572,35 +626,39 @@ impl Target {
// Needed for --all-examples in contexts where only runnable examples make sense // Needed for --all-examples in contexts where only runnable examples make sense
match self.kind { match self.kind {
TargetKind::ExampleBin => true, TargetKind::ExampleBin => true,
_ => false _ => false,
} }
} }
pub fn is_test(&self) -> bool { self.kind == TargetKind::Test } pub fn is_test(&self) -> bool {
pub fn is_bench(&self) -> bool { self.kind == TargetKind::Bench } self.kind == TargetKind::Test
pub fn is_custom_build(&self) -> bool { self.kind == TargetKind::CustomBuild } }
pub fn is_bench(&self) -> bool {
self.kind == TargetKind::Bench
}
pub fn is_custom_build(&self) -> bool {
self.kind == TargetKind::CustomBuild
}
/// Returns the arguments suitable for `--crate-type` to pass to rustc. /// Returns the arguments suitable for `--crate-type` to pass to rustc.
pub fn rustc_crate_types(&self) -> Vec<&str> { pub fn rustc_crate_types(&self) -> Vec<&str> {
match self.kind { match self.kind {
TargetKind::Lib(ref kinds) | TargetKind::Lib(ref kinds) | TargetKind::ExampleLib(ref kinds) => {
TargetKind::ExampleLib(ref kinds) => {
kinds.iter().map(LibKind::crate_type).collect() kinds.iter().map(LibKind::crate_type).collect()
} }
TargetKind::CustomBuild | TargetKind::CustomBuild
TargetKind::Bench | | TargetKind::Bench
TargetKind::Test | | TargetKind::Test
TargetKind::ExampleBin | | TargetKind::ExampleBin
TargetKind::Bin => vec!["bin"], | TargetKind::Bin => vec!["bin"],
} }
} }
pub fn can_lto(&self) -> bool { pub fn can_lto(&self) -> bool {
match self.kind { match self.kind {
TargetKind::Lib(ref v) => { TargetKind::Lib(ref v) => {
!v.contains(&LibKind::Rlib) && !v.contains(&LibKind::Rlib) && !v.contains(&LibKind::Dylib)
!v.contains(&LibKind::Dylib) && && !v.contains(&LibKind::Lib)
!v.contains(&LibKind::Lib)
} }
_ => true, _ => true,
} }
@@ -639,8 +697,9 @@ impl fmt::Display for Target {
TargetKind::Bin => write!(f, "Target(bin: {})", self.name), TargetKind::Bin => write!(f, "Target(bin: {})", self.name),
TargetKind::Test => write!(f, "Target(test: {})", self.name), TargetKind::Test => write!(f, "Target(test: {})", self.name),
TargetKind::Bench => write!(f, "Target(bench: {})", self.name), TargetKind::Bench => write!(f, "Target(bench: {})", self.name),
TargetKind::ExampleBin | TargetKind::ExampleBin | TargetKind::ExampleLib(..) => {
TargetKind::ExampleLib(..) => write!(f, "Target(example: {})", self.name), write!(f, "Target(example: {})", self.name)
}
TargetKind::CustomBuild => write!(f, "Target(script)"), TargetKind::CustomBuild => write!(f, "Target(script)"),
} }
} }
@@ -752,6 +811,5 @@ impl fmt::Display for Profile {
} else { } else {
write!(f, "Profile(build)") write!(f, "Profile(build)")
} }
} }
} }
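
Most of the churn in `manifest.rs` is one rule: rustfmt's defaults at the time never keep `fn name(&self) -> T { expr }` on a single line, so every one-line getter becomes a three-line block. A sketch (this `Manifestish` type is hypothetical):

    struct Manifestish {
        name: String,
        publish: bool,
    }

    impl Manifestish {
        // Before: `fn name(&self) -> &str { &self.name }` on one line.
        // After formatting, the body always gets its own line.
        fn name(&self) -> &str {
            &self.name
        }

        fn publish(&self) -> bool {
            self.publish
        }
    }

    fn main() {
        let m = Manifestish {
            name: "demo".to_string(),
            publish: true,
        };
        println!("{} {}", m.name(), m.publish());
    }
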
View file
@@ -1,14 +1,14 @@
pub use self::dependency::Dependency; pub use self::dependency::Dependency;
pub use self::features::{Epoch, Features, Feature, CliUnstable}; pub use self::features::{CliUnstable, Epoch, Feature, Features};
pub use self::manifest::{EitherManifest, VirtualManifest}; pub use self::manifest::{EitherManifest, VirtualManifest};
pub use self::manifest::{Manifest, Target, TargetKind, Profile, LibKind, Profiles}; pub use self::manifest::{LibKind, Manifest, Profile, Profiles, Target, TargetKind};
pub use self::package::{Package, PackageSet}; pub use self::package::{Package, PackageSet};
pub use self::package_id::PackageId; pub use self::package_id::PackageId;
pub use self::package_id_spec::PackageIdSpec; pub use self::package_id_spec::PackageIdSpec;
pub use self::registry::Registry; pub use self::registry::Registry;
pub use self::resolver::Resolve; pub use self::resolver::Resolve;
pub use self::shell::{Shell, Verbosity}; pub use self::shell::{Shell, Verbosity};
pub use self::source::{Source, SourceId, SourceMap, GitReference}; pub use self::source::{GitReference, Source, SourceId, SourceMap};
pub use self::summary::Summary; pub use self::summary::Summary;
pub use self::workspace::{Members, Workspace, WorkspaceConfig, WorkspaceRootConfig}; pub use self::workspace::{Members, Workspace, WorkspaceConfig, WorkspaceRootConfig};
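
The `mod.rs` hunk is purely import sorting: rustfmt orders the names inside each brace list alphabetically without changing what is re-exported. Illustration:

    mod ids {
        pub struct Dependency;
        pub struct PackageId;
        pub struct SourceId;
        pub struct Summary;
    }

    // Sorted brace list; the unsorted original
    // `{SourceId, Summary, Dependency, PackageId}` imports the same set.
    use ids::{Dependency, PackageId, SourceId, Summary};

    fn main() {
        let _ = (Dependency, PackageId, SourceId, Summary);
    }
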
View file
@@ -1,5 +1,5 @@
use std::cell::{Ref, RefCell}; use std::cell::{Ref, RefCell};
use std::collections::{HashMap, BTreeMap}; use std::collections::{BTreeMap, HashMap};
use std::fmt; use std::fmt;
use std::hash; use std::hash;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
@@ -10,10 +10,10 @@ use toml;
use lazycell::LazyCell; use lazycell::LazyCell;
use core::{Dependency, Manifest, PackageId, SourceId, Target}; use core::{Dependency, Manifest, PackageId, SourceId, Target};
use core::{Summary, SourceMap}; use core::{SourceMap, Summary};
use core::interning::InternedString; use core::interning::InternedString;
use ops; use ops;
use util::{Config, internal, lev_distance}; use util::{internal, lev_distance, Config};
use util::errors::{CargoResult, CargoResultExt}; use util::errors::{CargoResult, CargoResultExt};
/// Information about a package that is available somewhere in the file system. /// Information about a package that is available somewhere in the file system.
@@ -46,7 +46,8 @@ struct SerializedPackage<'a> {
impl ser::Serialize for Package { impl ser::Serialize for Package {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer, where
S: ser::Serializer,
{ {
let summary = self.manifest.summary(); let summary = self.manifest.summary();
let package_id = summary.package_id(); let package_id = summary.package_id();
@@ -73,8 +74,7 @@ impl ser::Serialize for Package {
impl Package { impl Package {
/// Create a package from a manifest and its location /// Create a package from a manifest and its location
pub fn new(manifest: Manifest, pub fn new(manifest: Manifest, manifest_path: &Path) -> Package {
manifest_path: &Path) -> Package {
Package { Package {
manifest, manifest,
manifest_path: manifest_path.to_path_buf(), manifest_path: manifest_path.to_path_buf(),
@@ -90,46 +90,71 @@ impl Package {
} }
/// Get the manifest dependencies /// Get the manifest dependencies
pub fn dependencies(&self) -> &[Dependency] { self.manifest.dependencies() } pub fn dependencies(&self) -> &[Dependency] {
self.manifest.dependencies()
}
/// Get the manifest /// Get the manifest
pub fn manifest(&self) -> &Manifest { &self.manifest } pub fn manifest(&self) -> &Manifest {
&self.manifest
}
/// Get the path to the manifest /// Get the path to the manifest
pub fn manifest_path(&self) -> &Path { &self.manifest_path } pub fn manifest_path(&self) -> &Path {
&self.manifest_path
}
/// Get the name of the package /// Get the name of the package
pub fn name(&self) -> InternedString { self.package_id().name() } pub fn name(&self) -> InternedString {
self.package_id().name()
}
/// Get the PackageId object for the package (fully defines a package) /// Get the PackageId object for the package (fully defines a package)
pub fn package_id(&self) -> &PackageId { self.manifest.package_id() } pub fn package_id(&self) -> &PackageId {
self.manifest.package_id()
}
/// Get the root folder of the package /// Get the root folder of the package
pub fn root(&self) -> &Path { self.manifest_path.parent().unwrap() } pub fn root(&self) -> &Path {
self.manifest_path.parent().unwrap()
}
/// Get the summary for the package /// Get the summary for the package
pub fn summary(&self) -> &Summary { self.manifest.summary() } pub fn summary(&self) -> &Summary {
self.manifest.summary()
}
/// Get the targets specified in the manifest /// Get the targets specified in the manifest
pub fn targets(&self) -> &[Target] { self.manifest.targets() } pub fn targets(&self) -> &[Target] {
self.manifest.targets()
}
/// Get the current package version /// Get the current package version
pub fn version(&self) -> &Version { self.package_id().version() } pub fn version(&self) -> &Version {
self.package_id().version()
}
/// Get the package authors /// Get the package authors
pub fn authors(&self) -> &Vec<String> { &self.manifest.metadata().authors } pub fn authors(&self) -> &Vec<String> {
&self.manifest.metadata().authors
}
/// Whether the package is set to publish /// Whether the package is set to publish
pub fn publish(&self) -> &Option<Vec<String>> { self.manifest.publish() } pub fn publish(&self) -> &Option<Vec<String>> {
self.manifest.publish()
}
/// Whether the package uses a custom build script for any target /// Whether the package uses a custom build script for any target
pub fn has_custom_build(&self) -> bool { pub fn has_custom_build(&self) -> bool {
self.targets().iter().any(|t| t.is_custom_build()) self.targets().iter().any(|t| t.is_custom_build())
} }
pub fn find_closest_target(&self, pub fn find_closest_target(
target: &str, &self,
is_expected_kind: fn(&Target)-> bool) -> Option<&Target> { target: &str,
is_expected_kind: fn(&Target) -> bool,
) -> Option<&Target> {
let targets = self.targets(); let targets = self.targets();
let matches = targets.iter().filter(|t| is_expected_kind(t)) let matches = targets
.map(|t| (lev_distance(target, t.name()), t)) .iter()
.filter(|&(d, _)| d < 4); .filter(|t| is_expected_kind(t))
.map(|t| (lev_distance(target, t.name()), t))
.filter(|&(d, _)| d < 4);
matches.min_by_key(|t| t.0).map(|t| t.1) matches.min_by_key(|t| t.0).map(|t| t.1)
} }
pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) -> Package {
-> Package {
Package { Package {
manifest: self.manifest.map_source(to_replace, replace_with), manifest: self.manifest.map_source(to_replace, replace_with),
manifest_path: self.manifest_path, manifest_path: self.manifest_path,
@@ -139,21 +164,24 @@ impl Package {
pub fn to_registry_toml(&self, config: &Config) -> CargoResult<String> { pub fn to_registry_toml(&self, config: &Config) -> CargoResult<String> {
let manifest = self.manifest().original().prepare_for_publish(config)?; let manifest = self.manifest().original().prepare_for_publish(config)?;
let toml = toml::to_string(&manifest)?; let toml = toml::to_string(&manifest)?;
Ok(format!("\ Ok(format!(
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO\n\ "\
#\n\ # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO\n\
# When uploading crates to the registry Cargo will automatically\n\ #\n\
# \"normalize\" Cargo.toml files for maximal compatibility\n\ # When uploading crates to the registry Cargo will automatically\n\
# with all versions of Cargo and also rewrite `path` dependencies\n\ # \"normalize\" Cargo.toml files for maximal compatibility\n\
# to registry (e.g. crates.io) dependencies\n\ # with all versions of Cargo and also rewrite `path` dependencies\n\
#\n\ # to registry (e.g. crates.io) dependencies\n\
# If you believe there's an error in this file please file an\n\ #\n\
# issue against the rust-lang/cargo repository. If you're\n\ # If you believe there's an error in this file please file an\n\
# editing this file be aware that the upstream Cargo.toml\n\ # issue against the rust-lang/cargo repository. If you're\n\
# will likely look very different (and much more reasonable)\n\ # editing this file be aware that the upstream Cargo.toml\n\
\n\ # will likely look very different (and much more reasonable)\n\
{}\ \n\
", toml)) {}\
",
toml
))
} }
} }
@@ -183,34 +211,34 @@ pub struct PackageSet<'cfg> {
} }
impl<'cfg> PackageSet<'cfg> { impl<'cfg> PackageSet<'cfg> {
pub fn new(package_ids: &[PackageId], pub fn new(package_ids: &[PackageId], sources: SourceMap<'cfg>) -> PackageSet<'cfg> {
sources: SourceMap<'cfg>) -> PackageSet<'cfg> {
PackageSet { PackageSet {
packages: package_ids.iter().map(|id| { packages: package_ids
(id.clone(), LazyCell::new()) .iter()
}).collect(), .map(|id| (id.clone(), LazyCell::new()))
.collect(),
sources: RefCell::new(sources), sources: RefCell::new(sources),
} }
} }
pub fn package_ids<'a>(&'a self) -> Box<Iterator<Item=&'a PackageId> + 'a> { pub fn package_ids<'a>(&'a self) -> Box<Iterator<Item = &'a PackageId> + 'a> {
Box::new(self.packages.keys()) Box::new(self.packages.keys())
} }
pub fn get(&self, id: &PackageId) -> CargoResult<&Package> { pub fn get(&self, id: &PackageId) -> CargoResult<&Package> {
let slot = self.packages.get(id).ok_or_else(|| { let slot = self.packages
internal(format!("couldn't find `{}` in package set", id)) .get(id)
})?; .ok_or_else(|| internal(format!("couldn't find `{}` in package set", id)))?;
if let Some(pkg) = slot.borrow() { if let Some(pkg) = slot.borrow() {
return Ok(pkg) return Ok(pkg);
} }
let mut sources = self.sources.borrow_mut(); let mut sources = self.sources.borrow_mut();
let source = sources.get_mut(id.source_id()).ok_or_else(|| { let source = sources
internal(format!("couldn't find source for `{}`", id)) .get_mut(id.source_id())
})?; .ok_or_else(|| internal(format!("couldn't find source for `{}`", id)))?;
let pkg = source.download(id).chain_err(|| { let pkg = source
format_err!("unable to get packages from source") .download(id)
})?; .chain_err(|| format_err!("unable to get packages from source"))?;
assert!(slot.fill(pkg).is_ok()); assert!(slot.fill(pkg).is_ok());
Ok(slot.borrow().unwrap()) Ok(slot.borrow().unwrap())
} }
View file
@@ -28,18 +28,22 @@ struct PackageIdInner {
impl ser::Serialize for PackageId { impl ser::Serialize for PackageId {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer where
S: ser::Serializer,
{ {
s.collect_str(&format_args!("{} {} ({})", s.collect_str(&format_args!(
self.inner.name, "{} {} ({})",
self.inner.version, self.inner.name,
self.inner.source_id.to_url())) self.inner.version,
self.inner.source_id.to_url()
))
} }
} }
impl<'de> de::Deserialize<'de> for PackageId { impl<'de> de::Deserialize<'de> for PackageId {
fn deserialize<D>(d: D) -> Result<PackageId, D::Error> fn deserialize<D>(d: D) -> Result<PackageId, D::Error>
where D: de::Deserializer<'de> where
D: de::Deserializer<'de>,
{ {
let string = String::deserialize(d)?; let string = String::deserialize(d)?;
let mut s = string.splitn(3, ' '); let mut s = string.splitn(3, ' ');
@@ -48,8 +52,7 @@ impl<'de> de::Deserialize<'de> for PackageId {
Some(s) => s, Some(s) => s,
None => return Err(de::Error::custom("invalid serialized PackageId")), None => return Err(de::Error::custom("invalid serialized PackageId")),
}; };
let version = semver::Version::parse(version) let version = semver::Version::parse(version).map_err(de::Error::custom)?;
.map_err(de::Error::custom)?;
let url = match s.next() { let url = match s.next() {
Some(s) => s, Some(s) => s,
None => return Err(de::Error::custom("invalid serialized PackageId")), None => return Err(de::Error::custom("invalid serialized PackageId")),
@@ -57,8 +60,7 @@ impl<'de> de::Deserialize<'de> for PackageId {
let url = if url.starts_with('(') && url.ends_with(')') { let url = if url.starts_with('(') && url.ends_with(')') {
&url[1..url.len() - 1] &url[1..url.len() - 1]
} else { } else {
return Err(de::Error::custom("invalid serialized PackageId")) return Err(de::Error::custom("invalid serialized PackageId"));
}; };
let source_id = SourceId::from_url(url).map_err(de::Error::custom)?; let source_id = SourceId::from_url(url).map_err(de::Error::custom)?;
@@ -98,8 +100,7 @@ impl Ord for PackageId {
} }
impl PackageId { impl PackageId {
pub fn new<T: ToSemver>(name: &str, version: T, pub fn new<T: ToSemver>(name: &str, version: T, sid: &SourceId) -> CargoResult<PackageId> {
sid: &SourceId) -> CargoResult<PackageId> {
let v = version.to_semver()?; let v = version.to_semver()?;
Ok(PackageId { Ok(PackageId {
inner: Arc::new(PackageIdInner { inner: Arc::new(PackageIdInner {
@@ -110,9 +111,15 @@ impl PackageId {
}) })
} }
pub fn name(&self) -> InternedString { self.inner.name } pub fn name(&self) -> InternedString {
pub fn version(&self) -> &semver::Version { &self.inner.version } self.inner.name
pub fn source_id(&self) -> &SourceId { &self.inner.source_id } }
pub fn version(&self) -> &semver::Version {
&self.inner.version
}
pub fn source_id(&self) -> &SourceId {
&self.inner.source_id
}
pub fn with_precise(&self, precise: Option<String>) -> PackageId { pub fn with_precise(&self, precise: Option<String>) -> PackageId {
PackageId { PackageId {
@@ -164,10 +171,10 @@ impl fmt::Display for PackageId {
impl fmt::Debug for PackageId { impl fmt::Debug for PackageId {
fn fmt(&self, f: &mut Formatter) -> fmt::Result { fn fmt(&self, f: &mut Formatter) -> fmt::Result {
f.debug_struct("PackageId") f.debug_struct("PackageId")
.field("name", &self.inner.name) .field("name", &self.inner.name)
.field("version", &self.inner.version.to_string()) .field("version", &self.inner.version.to_string())
.field("source", &self.inner.source_id.to_string()) .field("source", &self.inner.source_id.to_string())
.finish() .finish()
} }
} }


@@ -5,7 +5,7 @@ use semver::Version;
use url::Url; use url::Url;
use core::PackageId; use core::PackageId;
use util::{ToUrl, ToSemver}; use util::{ToSemver, ToUrl};
use util::errors::{CargoResult, CargoResultExt}; use util::errors::{CargoResult, CargoResultExt};
#[derive(Clone, PartialEq, Eq, Debug)] #[derive(Clone, PartialEq, Eq, Debug)]
@@ -46,11 +46,11 @@ impl PackageIdSpec {
} }
pub fn query_str<'a, I>(spec: &str, i: I) -> CargoResult<&'a PackageId> pub fn query_str<'a, I>(spec: &str, i: I) -> CargoResult<&'a PackageId>
where I: IntoIterator<Item=&'a PackageId> where
I: IntoIterator<Item = &'a PackageId>,
{ {
let spec = PackageIdSpec::parse(spec).chain_err(|| { let spec = PackageIdSpec::parse(spec)
format_err!("invalid package id specification: `{}`", spec) .chain_err(|| format_err!("invalid package id specification: `{}`", spec))?;
})?;
spec.query(i) spec.query(i)
} }
@@ -69,12 +69,14 @@ impl PackageIdSpec {
let frag = url.fragment().map(|s| s.to_owned()); let frag = url.fragment().map(|s| s.to_owned());
url.set_fragment(None); url.set_fragment(None);
let (name, version) = { let (name, version) = {
let mut path = url.path_segments().ok_or_else(|| { let mut path = url.path_segments()
format_err!("pkgid urls must have a path: {}", url) .ok_or_else(|| format_err!("pkgid urls must have a path: {}", url))?;
})?;
let path_name = path.next_back().ok_or_else(|| { let path_name = path.next_back().ok_or_else(|| {
format_err!("pkgid urls must have at least one path \ format_err!(
component: {}", url) "pkgid urls must have at least one path \
component: {}",
url
)
})?; })?;
match frag { match frag {
Some(fragment) => { Some(fragment) => {
@@ -86,8 +88,7 @@ impl PackageIdSpec {
(name_or_version.to_string(), Some(version)) (name_or_version.to_string(), Some(version))
} }
None => { None => {
if name_or_version.chars().next().unwrap() if name_or_version.chars().next().unwrap().is_alphabetic() {
.is_alphabetic() {
(name_or_version.to_string(), None) (name_or_version.to_string(), None)
} else { } else {
let version = name_or_version.to_semver()?; let version = name_or_version.to_semver()?;
@@ -106,16 +107,24 @@ impl PackageIdSpec {
}) })
} }
pub fn name(&self) -> &str { &self.name } pub fn name(&self) -> &str {
pub fn version(&self) -> Option<&Version> { self.version.as_ref() } &self.name
pub fn url(&self) -> Option<&Url> { self.url.as_ref() } }
pub fn version(&self) -> Option<&Version> {
self.version.as_ref()
}
pub fn url(&self) -> Option<&Url> {
self.url.as_ref()
}
pub fn set_url(&mut self, url: Url) { pub fn set_url(&mut self, url: Url) {
self.url = Some(url); self.url = Some(url);
} }
pub fn matches(&self, package_id: &PackageId) -> bool { pub fn matches(&self, package_id: &PackageId) -> bool {
if self.name() != &*package_id.name() { return false } if self.name() != &*package_id.name() {
return false;
}
if let Some(ref v) = self.version { if let Some(ref v) = self.version {
if v != package_id.version() { if v != package_id.version() {
@@ -125,50 +134,53 @@ impl PackageIdSpec {
match self.url { match self.url {
Some(ref u) => u == package_id.source_id().url(), Some(ref u) => u == package_id.source_id().url(),
None => true None => true,
} }
} }
pub fn query<'a, I>(&self, i: I) -> CargoResult<&'a PackageId> pub fn query<'a, I>(&self, i: I) -> CargoResult<&'a PackageId>
where I: IntoIterator<Item=&'a PackageId> where
I: IntoIterator<Item = &'a PackageId>,
{ {
let mut ids = i.into_iter().filter(|p| self.matches(*p)); let mut ids = i.into_iter().filter(|p| self.matches(*p));
let ret = match ids.next() { let ret = match ids.next() {
Some(id) => id, Some(id) => id,
None => bail!("package id specification `{}` \ None => bail!(
matched no packages", self), "package id specification `{}` \
matched no packages",
self
),
}; };
return match ids.next() { return match ids.next() {
Some(other) => { Some(other) => {
let mut msg = format!("There are multiple `{}` packages in \ let mut msg = format!(
your project, and the specification \ "There are multiple `{}` packages in \
`{}` is ambiguous.\n\ your project, and the specification \
Please re-run this command \ `{}` is ambiguous.\n\
with `-p <spec>` where `<spec>` is one \ Please re-run this command \
of the following:", with `-p <spec>` where `<spec>` is one \
self.name(), self); of the following:",
self.name(),
self
);
let mut vec = vec![ret, other]; let mut vec = vec![ret, other];
vec.extend(ids); vec.extend(ids);
minimize(&mut msg, &vec, self); minimize(&mut msg, &vec, self);
Err(format_err!("{}", msg)) Err(format_err!("{}", msg))
} }
None => Ok(ret) None => Ok(ret),
}; };
fn minimize(msg: &mut String, fn minimize(msg: &mut String, ids: &[&PackageId], spec: &PackageIdSpec) {
ids: &[&PackageId],
spec: &PackageIdSpec) {
let mut version_cnt = HashMap::new(); let mut version_cnt = HashMap::new();
for id in ids { for id in ids {
*version_cnt.entry(id.version()).or_insert(0) += 1; *version_cnt.entry(id.version()).or_insert(0) += 1;
} }
for id in ids { for id in ids {
if version_cnt[id.version()] == 1 { if version_cnt[id.version()] == 1 {
msg.push_str(&format!("\n {}:{}", spec.name(), msg.push_str(&format!("\n {}:{}", spec.name(), id.version()));
id.version()));
} else { } else {
msg.push_str(&format!("\n {}", msg.push_str(&format!("\n {}", PackageIdSpec::from_package_id(*id)));
PackageIdSpec::from_package_id(*id)));
} }
} }
} }
@@ -190,10 +202,13 @@ impl fmt::Display for PackageIdSpec {
write!(f, "#{}", self.name)?; write!(f, "#{}", self.name)?;
} }
} }
None => { printed_name = true; write!(f, "{}", self.name)? } None => {
printed_name = true;
write!(f, "{}", self.name)?
}
} }
if let Some(ref v) = self.version { if let Some(ref v) = self.version {
write!(f, "{}{}", if printed_name {":"} else {"#"}, v)?; write!(f, "{}{}", if printed_name { ":" } else { "#" }, v)?;
} }
Ok(()) Ok(())
} }
@@ -214,46 +229,70 @@ mod tests {
assert_eq!(parsed.to_string(), spec); assert_eq!(parsed.to_string(), spec);
} }
ok("http://crates.io/foo#1.2.3", PackageIdSpec { ok(
name: "foo".to_string(), "http://crates.io/foo#1.2.3",
version: Some(Version::parse("1.2.3").unwrap()), PackageIdSpec {
url: Some(Url::parse("http://crates.io/foo").unwrap()), name: "foo".to_string(),
}); version: Some(Version::parse("1.2.3").unwrap()),
ok("http://crates.io/foo#bar:1.2.3", PackageIdSpec { url: Some(Url::parse("http://crates.io/foo").unwrap()),
name: "bar".to_string(), },
version: Some(Version::parse("1.2.3").unwrap()), );
url: Some(Url::parse("http://crates.io/foo").unwrap()), ok(
}); "http://crates.io/foo#bar:1.2.3",
ok("crates.io/foo", PackageIdSpec { PackageIdSpec {
name: "foo".to_string(), name: "bar".to_string(),
version: None, version: Some(Version::parse("1.2.3").unwrap()),
url: Some(Url::parse("cargo://crates.io/foo").unwrap()), url: Some(Url::parse("http://crates.io/foo").unwrap()),
}); },
ok("crates.io/foo#1.2.3", PackageIdSpec { );
name: "foo".to_string(), ok(
version: Some(Version::parse("1.2.3").unwrap()), "crates.io/foo",
url: Some(Url::parse("cargo://crates.io/foo").unwrap()), PackageIdSpec {
}); name: "foo".to_string(),
ok("crates.io/foo#bar", PackageIdSpec { version: None,
name: "bar".to_string(), url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
version: None, },
url: Some(Url::parse("cargo://crates.io/foo").unwrap()), );
}); ok(
ok("crates.io/foo#bar:1.2.3", PackageIdSpec { "crates.io/foo#1.2.3",
name: "bar".to_string(), PackageIdSpec {
version: Some(Version::parse("1.2.3").unwrap()), name: "foo".to_string(),
url: Some(Url::parse("cargo://crates.io/foo").unwrap()), version: Some(Version::parse("1.2.3").unwrap()),
}); url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
ok("foo", PackageIdSpec { },
name: "foo".to_string(), );
version: None, ok(
url: None, "crates.io/foo#bar",
}); PackageIdSpec {
ok("foo:1.2.3", PackageIdSpec { name: "bar".to_string(),
name: "foo".to_string(), version: None,
version: Some(Version::parse("1.2.3").unwrap()), url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
url: None, },
}); );
ok(
"crates.io/foo#bar:1.2.3",
PackageIdSpec {
name: "bar".to_string(),
version: Some(Version::parse("1.2.3").unwrap()),
url: Some(Url::parse("cargo://crates.io/foo").unwrap()),
},
);
ok(
"foo",
PackageIdSpec {
name: "foo".to_string(),
version: None,
url: None,
},
);
ok(
"foo:1.2.3",
PackageIdSpec {
name: "foo".to_string(),
version: Some(Version::parse("1.2.3").unwrap()),
url: None,
},
);
} }
#[test] #[test]
@@ -272,9 +311,9 @@ mod tests {
let foo = PackageId::new("foo", "1.2.3", &sid).unwrap(); let foo = PackageId::new("foo", "1.2.3", &sid).unwrap();
let bar = PackageId::new("bar", "1.2.3", &sid).unwrap(); let bar = PackageId::new("bar", "1.2.3", &sid).unwrap();
assert!( PackageIdSpec::parse("foo").unwrap().matches(&foo)); assert!(PackageIdSpec::parse("foo").unwrap().matches(&foo));
assert!(!PackageIdSpec::parse("foo").unwrap().matches(&bar)); assert!(!PackageIdSpec::parse("foo").unwrap().matches(&bar));
assert!( PackageIdSpec::parse("foo:1.2.3").unwrap().matches(&foo)); assert!(PackageIdSpec::parse("foo:1.2.3").unwrap().matches(&foo));
assert!(!PackageIdSpec::parse("foo:1.2.2").unwrap().matches(&foo)); assert!(!PackageIdSpec::parse("foo:1.2.2").unwrap().matches(&foo));
} }
} }


@@ -3,9 +3,9 @@ use std::collections::HashMap;
use semver::VersionReq; use semver::VersionReq;
use url::Url; use url::Url;
use core::{Source, SourceId, SourceMap, Summary, Dependency, PackageId}; use core::{Dependency, PackageId, Source, SourceId, SourceMap, Summary};
use core::PackageSet; use core::PackageSet;
use util::{Config, profile}; use util::{profile, Config};
use util::errors::{CargoResult, CargoResultExt}; use util::errors::{CargoResult, CargoResultExt};
use sources::config::SourceConfigMap; use sources::config::SourceConfigMap;
@@ -14,9 +14,7 @@ use sources::config::SourceConfigMap;
/// See also `core::Source`. /// See also `core::Source`.
pub trait Registry { pub trait Registry {
/// Attempt to find the packages that match a dependency request. /// Attempt to find the packages that match a dependency request.
fn query(&mut self, fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()>;
dep: &Dependency,
f: &mut FnMut(Summary)) -> CargoResult<()>;
fn query_vec(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> { fn query_vec(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
let mut ret = Vec::new(); let mut ret = Vec::new();
@@ -34,9 +32,7 @@ pub trait Registry {
} }
impl<'a, T: ?Sized + Registry + 'a> Registry for Box<T> { impl<'a, T: ?Sized + Registry + 'a> Registry for Box<T> {
fn query(&mut self, fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
dep: &Dependency,
f: &mut FnMut(Summary)) -> CargoResult<()> {
(**self).query(dep, f) (**self).query(dep, f)
} }
@@ -130,14 +126,14 @@ impl<'cfg> PackageRegistry<'cfg> {
// slightly different precise version listed. // slightly different precise version listed.
Some(&(_, Kind::Locked)) => { Some(&(_, Kind::Locked)) => {
debug!("load/locked {}", namespace); debug!("load/locked {}", namespace);
return Ok(()) return Ok(());
} }
// If the previous source was not a precise source, then we can be // If the previous source was not a precise source, then we can be
// sure that it's already been updated if we've already loaded it. // sure that it's already been updated if we've already loaded it.
Some(&(ref previous, _)) if previous.precise().is_none() => { Some(&(ref previous, _)) if previous.precise().is_none() => {
debug!("load/precise {}", namespace); debug!("load/precise {}", namespace);
return Ok(()) return Ok(());
} }
// If the previous source has the same precise version as we do, // If the previous source has the same precise version as we do,
@@ -146,7 +142,7 @@ impl<'cfg> PackageRegistry<'cfg> {
Some(&(ref previous, _)) => { Some(&(ref previous, _)) => {
if previous.precise() == namespace.precise() { if previous.precise() == namespace.precise() {
debug!("load/match {}", namespace); debug!("load/match {}", namespace);
return Ok(()) return Ok(());
} }
debug!("load/mismatch {}", namespace); debug!("load/mismatch {}", namespace);
} }
@@ -186,10 +182,12 @@ impl<'cfg> PackageRegistry<'cfg> {
for dep in deps.iter() { for dep in deps.iter() {
trace!("\t-> {}", dep); trace!("\t-> {}", dep);
} }
let sub_map = self.locked.entry(id.source_id().clone()) let sub_map = self.locked
.or_insert_with(HashMap::new); .entry(id.source_id().clone())
let sub_vec = sub_map.entry(id.name().to_string()) .or_insert_with(HashMap::new);
.or_insert_with(Vec::new); let sub_vec = sub_map
.entry(id.name().to_string())
.or_insert_with(Vec::new);
sub_vec.push((id, deps)); sub_vec.push((id, deps));
} }
@@ -219,53 +217,65 @@ impl<'cfg> PackageRegistry<'cfg> {
// Remember that each dependency listed in `[patch]` has to resolve to // Remember that each dependency listed in `[patch]` has to resolve to
// precisely one package, so that's why we're just creating a flat list // precisely one package, so that's why we're just creating a flat list
// of summaries which should be the same length as `deps` above. // of summaries which should be the same length as `deps` above.
let unlocked_summaries = deps.iter().map(|dep| { let unlocked_summaries = deps.iter()
debug!("registring a patch for `{}` with `{}`", .map(|dep| {
url, debug!("registring a patch for `{}` with `{}`", url, dep.name());
dep.name());
// Go straight to the source for resolving `dep`. Load it as we // Go straight to the source for resolving `dep`. Load it as we
// normally would and then ask it directly for the list of summaries // normally would and then ask it directly for the list of summaries
// corresponding to this `dep`. // corresponding to this `dep`.
self.ensure_loaded(dep.source_id(), Kind::Normal).chain_err(|| { self.ensure_loaded(dep.source_id(), Kind::Normal)
format_err!("failed to load source for a dependency \ .chain_err(|| {
on `{}`", dep.name()) format_err!(
})?; "failed to load source for a dependency \
on `{}`",
dep.name()
)
})?;
let mut summaries = self.sources.get_mut(dep.source_id()) let mut summaries = self.sources
.expect("loaded source not present") .get_mut(dep.source_id())
.query_vec(dep)? .expect("loaded source not present")
.into_iter(); .query_vec(dep)?
.into_iter();
let summary = match summaries.next() { let summary = match summaries.next() {
Some(summary) => summary, Some(summary) => summary,
None => { None => bail!(
bail!("patch for `{}` in `{}` did not resolve to any crates. If this is \ "patch for `{}` in `{}` did not resolve to any crates. If this is \
unexpected, you may wish to consult: \ unexpected, you may wish to consult: \
https://github.com/rust-lang/cargo/issues/4678", https://github.com/rust-lang/cargo/issues/4678",
dep.name(), url) dep.name(),
url
),
};
if summaries.next().is_some() {
bail!(
"patch for `{}` in `{}` resolved to more than one candidate",
dep.name(),
url
)
} }
}; if summary.package_id().source_id().url() == url {
if summaries.next().is_some() { bail!(
bail!("patch for `{}` in `{}` resolved to more than one candidate", "patch for `{}` in `{}` points to the same source, but \
dep.name(), url) patches must point to different sources",
} dep.name(),
if summary.package_id().source_id().url() == url { url
bail!("patch for `{}` in `{}` points to the same source, but \ );
patches must point to different sources", }
dep.name(), url); Ok(summary)
} })
Ok(summary) .collect::<CargoResult<Vec<_>>>()
}).collect::<CargoResult<Vec<_>>>().chain_err(|| { .chain_err(|| format_err!("failed to resolve patches for `{}`", url))?;
format_err!("failed to resolve patches for `{}`", url)
})?;
// Note that we do not use `lock` here to lock summaries! That step // Note that we do not use `lock` here to lock summaries! That step
// happens later once `lock_patches` is invoked. In the meantime though // happens later once `lock_patches` is invoked. In the meantime though
// we want to fill in the `patches_available` map (later used in the // we want to fill in the `patches_available` map (later used in the
// `lock` method) and otherwise store the unlocked summaries in // `lock` method) and otherwise store the unlocked summaries in
// `patches` to get locked in a future call to `lock_patches`. // `patches` to get locked in a future call to `lock_patches`.
let ids = unlocked_summaries.iter() let ids = unlocked_summaries
.iter()
.map(|s| s.package_id()) .map(|s| s.package_id())
.cloned() .cloned()
.collect(); .collect();
@@ -309,18 +319,18 @@ impl<'cfg> PackageRegistry<'cfg> {
// Ensure the source has fetched all necessary remote data. // Ensure the source has fetched all necessary remote data.
let _p = profile::start(format!("updating: {}", source_id)); let _p = profile::start(format!("updating: {}", source_id));
self.sources.get_mut(source_id).unwrap().update() self.sources.get_mut(source_id).unwrap().update()
})().chain_err(|| format_err!("Unable to update {}", source_id))?; })()
.chain_err(|| format_err!("Unable to update {}", source_id))?;
Ok(()) Ok(())
} }
fn query_overrides(&mut self, dep: &Dependency) fn query_overrides(&mut self, dep: &Dependency) -> CargoResult<Option<Summary>> {
-> CargoResult<Option<Summary>> {
for s in self.overrides.iter() { for s in self.overrides.iter() {
let src = self.sources.get_mut(s).unwrap(); let src = self.sources.get_mut(s).unwrap();
let dep = Dependency::new_override(&*dep.name(), s); let dep = Dependency::new_override(&*dep.name(), s);
let mut results = src.query_vec(&dep)?; let mut results = src.query_vec(&dep)?;
if !results.is_empty() { if !results.is_empty() {
return Ok(Some(results.remove(0))) return Ok(Some(results.remove(0)));
} }
} }
Ok(None) Ok(None)
@@ -348,9 +358,11 @@ impl<'cfg> PackageRegistry<'cfg> {
lock(&self.locked, &self.patches_available, summary) lock(&self.locked, &self.patches_available, summary)
} }
fn warn_bad_override(&self, fn warn_bad_override(
override_summary: &Summary, &self,
real_summary: &Summary) -> CargoResult<()> { override_summary: &Summary,
real_summary: &Summary,
) -> CargoResult<()> {
let mut real_deps = real_summary.dependencies().iter().collect::<Vec<_>>(); let mut real_deps = real_summary.dependencies().iter().collect::<Vec<_>>();
let boilerplate = "\ let boilerplate = "\
@@ -369,24 +381,34 @@ http://doc.crates.io/specifying-dependencies.html#overriding-dependencies
for dep in override_summary.dependencies() { for dep in override_summary.dependencies() {
if let Some(i) = real_deps.iter().position(|d| dep == *d) { if let Some(i) = real_deps.iter().position(|d| dep == *d) {
real_deps.remove(i); real_deps.remove(i);
continue continue;
} }
let msg = format!("\ let msg = format!(
path override for crate `{}` has altered the original list of\n\ "\
dependencies; the dependency on `{}` was either added or\n\ path override for crate `{}` has altered the original list of\n\
modified to not match the previously resolved version\n\n\ dependencies; the dependency on `{}` was either added or\n\
{}", override_summary.package_id().name(), dep.name(), boilerplate); modified to not match the previously resolved version\n\n\
{}",
override_summary.package_id().name(),
dep.name(),
boilerplate
);
self.source_config.config().shell().warn(&msg)?; self.source_config.config().shell().warn(&msg)?;
return Ok(()) return Ok(());
} }
if let Some(id) = real_deps.get(0) { if let Some(id) = real_deps.get(0) {
let msg = format!("\ let msg = format!(
"\
path override for crate `{}` has altered the original list of path override for crate `{}` has altered the original list of
dependencies; the dependency on `{}` was removed\n\n dependencies; the dependency on `{}` was removed\n\n
{}", override_summary.package_id().name(), id.name(), boilerplate); {}",
override_summary.package_id().name(),
id.name(),
boilerplate
);
self.source_config.config().shell().warn(&msg)?; self.source_config.config().shell().warn(&msg)?;
return Ok(()) return Ok(());
} }
Ok(()) Ok(())
@@ -394,9 +416,7 @@ http://doc.crates.io/specifying-dependencies.html#overriding-dependencies
} }
impl<'cfg> Registry for PackageRegistry<'cfg> { impl<'cfg> Registry for PackageRegistry<'cfg> {
fn query(&mut self, fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
dep: &Dependency,
f: &mut FnMut(Summary)) -> CargoResult<()> {
assert!(self.patches_locked); assert!(self.patches_locked);
let (override_summary, n, to_warn) = { let (override_summary, n, to_warn) = {
// Look for an override and get ready to query the real source. // Look for an override and get ready to query the real source.
@@ -411,9 +431,12 @@ impl<'cfg> Registry for PackageRegistry<'cfg> {
// what we really care about is the name/version match. // what we really care about is the name/version match.
let mut patches = Vec::<Summary>::new(); let mut patches = Vec::<Summary>::new();
if let Some(extra) = self.patches.get(dep.source_id().url()) { if let Some(extra) = self.patches.get(dep.source_id().url()) {
patches.extend(extra.iter().filter(|s| { patches.extend(
dep.matches_ignoring_source(s) extra
}).cloned()); .iter()
.filter(|s| dep.matches_ignoring_source(s))
.cloned(),
);
} }
// A crucial feature of the `[patch]` feature is that we *don't* // A crucial feature of the `[patch]` feature is that we *don't*
@@ -427,24 +450,31 @@ impl<'cfg> Registry for PackageRegistry<'cfg> {
Some(summary) => (summary, 1, Some(patch)), Some(summary) => (summary, 1, Some(patch)),
None => { None => {
f(patch); f(patch);
return Ok(()) return Ok(());
} }
} }
} else { } else {
if !patches.is_empty() { if !patches.is_empty() {
debug!("found {} patches with an unlocked dep on `{}` at {} \ debug!(
with `{}`, \ "found {} patches with an unlocked dep on `{}` at {} \
looking at sources", patches.len(), with `{}`, \
dep.name(), looking at sources",
dep.source_id(), patches.len(),
dep.version_req()); dep.name(),
dep.source_id(),
dep.version_req()
);
} }
// Ensure the requested source_id is loaded // Ensure the requested source_id is loaded
self.ensure_loaded(dep.source_id(), Kind::Normal).chain_err(|| { self.ensure_loaded(dep.source_id(), Kind::Normal)
format_err!("failed to load source for a dependency \ .chain_err(|| {
on `{}`", dep.name()) format_err!(
})?; "failed to load source for a dependency \
on `{}`",
dep.name()
)
})?;
let source = self.sources.get_mut(dep.source_id()); let source = self.sources.get_mut(dep.source_id());
match (override_summary, source) { match (override_summary, source) {
@@ -471,11 +501,11 @@ impl<'cfg> Registry for PackageRegistry<'cfg> {
for patch in patches.iter() { for patch in patches.iter() {
let patch = patch.package_id().version(); let patch = patch.package_id().version();
if summary.package_id().version() == patch { if summary.package_id().version() == patch {
return return;
} }
} }
f(lock(locked, all_patches, summary)) f(lock(locked, all_patches, summary))
}) });
} }
// If we have an override summary then we query the source // If we have an override summary then we query the source
@@ -515,14 +545,11 @@ impl<'cfg> Registry for PackageRegistry<'cfg> {
} }
} }
fn lock(locked: &LockedMap, fn lock(locked: &LockedMap, patches: &HashMap<Url, Vec<PackageId>>, summary: Summary) -> Summary {
patches: &HashMap<Url, Vec<PackageId>>, let pair = locked
summary: Summary) -> Summary { .get(summary.source_id())
let pair = locked.get(summary.source_id()).and_then(|map| { .and_then(|map| map.get(&*summary.name()))
map.get(&*summary.name()) .and_then(|vec| vec.iter().find(|&&(ref id, _)| id == summary.package_id()));
}).and_then(|vec| {
vec.iter().find(|&&(ref id, _)| id == summary.package_id())
});
trace!("locking summary of {}", summary.package_id()); trace!("locking summary of {}", summary.package_id());
@@ -532,8 +559,7 @@ fn lock(locked: &LockedMap,
None => summary, None => summary,
}; };
summary.map_dependencies(|dep| { summary.map_dependencies(|dep| {
trace!("\t{}/{}/{}", dep.name(), dep.version_req(), trace!("\t{}/{}/{}", dep.name(), dep.version_req(), dep.source_id());
dep.source_id());
// If we've got a known set of overrides for this summary, then // If we've got a known set of overrides for this summary, then
// one of a few cases can arise: // one of a few cases can arise:
@@ -560,23 +586,22 @@ fn lock(locked: &LockedMap,
trace!("\tfirst hit on {}", locked); trace!("\tfirst hit on {}", locked);
let mut dep = dep.clone(); let mut dep = dep.clone();
dep.lock_to(locked); dep.lock_to(locked);
return dep return dep;
} }
} }
// If this dependency did not have a locked version, then we query // If this dependency did not have a locked version, then we query
// all known locked packages to see if they match this dependency. // all known locked packages to see if they match this dependency.
// If anything does then we lock it to that and move on. // If anything does then we lock it to that and move on.
let v = locked.get(dep.source_id()).and_then(|map| { let v = locked
map.get(&*dep.name()) .get(dep.source_id())
}).and_then(|vec| { .and_then(|map| map.get(&*dep.name()))
vec.iter().find(|&&(ref id, _)| dep.matches_id(id)) .and_then(|vec| vec.iter().find(|&&(ref id, _)| dep.matches_id(id)));
});
if let Some(&(ref id, _)) = v { if let Some(&(ref id, _)) = v {
trace!("\tsecond hit on {}", id); trace!("\tsecond hit on {}", id);
let mut dep = dep.clone(); let mut dep = dep.clone();
dep.lock_to(id); dep.lock_to(id);
return dep return dep;
} }
// Finally we check to see if any registered patches correspond to // Finally we check to see if any registered patches correspond to
@@ -584,26 +609,25 @@ fn lock(locked: &LockedMap,
let v = patches.get(dep.source_id().url()).map(|vec| { let v = patches.get(dep.source_id().url()).map(|vec| {
let dep2 = dep.clone(); let dep2 = dep.clone();
let mut iter = vec.iter().filter(move |p| { let mut iter = vec.iter().filter(move |p| {
dep2.name() == p.name() && dep2.name() == p.name() && dep2.version_req().matches(p.version())
dep2.version_req().matches(p.version())
}); });
(iter.next(), iter) (iter.next(), iter)
}); });
if let Some((Some(patch_id), mut remaining)) = v { if let Some((Some(patch_id), mut remaining)) = v {
assert!(remaining.next().is_none()); assert!(remaining.next().is_none());
let patch_source = patch_id.source_id(); let patch_source = patch_id.source_id();
let patch_locked = locked.get(patch_source).and_then(|m| { let patch_locked = locked
m.get(&*patch_id.name()) .get(patch_source)
}).map(|list| { .and_then(|m| m.get(&*patch_id.name()))
list.iter().any(|&(ref id, _)| id == patch_id) .map(|list| list.iter().any(|&(ref id, _)| id == patch_id))
}).unwrap_or(false); .unwrap_or(false);
if patch_locked { if patch_locked {
trace!("\tthird hit on {}", patch_id); trace!("\tthird hit on {}", patch_id);
let req = VersionReq::exact(patch_id.version()); let req = VersionReq::exact(patch_id.version());
let mut dep = dep.clone(); let mut dep = dep.clone();
dep.set_version_req(req); dep.set_version_req(req);
return dep return dep;
} }
} }
@@ -614,17 +638,20 @@ fn lock(locked: &LockedMap,
#[cfg(test)] #[cfg(test)]
pub mod test { pub mod test {
use core::{Summary, Registry, Dependency}; use core::{Dependency, Registry, Summary};
use util::CargoResult; use util::CargoResult;
pub struct RegistryBuilder { pub struct RegistryBuilder {
summaries: Vec<Summary>, summaries: Vec<Summary>,
overrides: Vec<Summary> overrides: Vec<Summary>,
} }
impl RegistryBuilder { impl RegistryBuilder {
pub fn new() -> RegistryBuilder { pub fn new() -> RegistryBuilder {
RegistryBuilder { summaries: vec![], overrides: vec![] } RegistryBuilder {
summaries: vec![],
overrides: vec![],
}
} }
pub fn summary(mut self, summary: Summary) -> RegistryBuilder { pub fn summary(mut self, summary: Summary) -> RegistryBuilder {
@@ -648,7 +675,8 @@ pub mod test {
} }
fn query_overrides(&self, dep: &Dependency) -> Vec<Summary> { fn query_overrides(&self, dep: &Dependency) -> Vec<Summary> {
self.overrides.iter() self.overrides
.iter()
.filter(|s| s.name() == dep.name()) .filter(|s| s.name() == dep.name())
.map(|s| s.clone()) .map(|s| s.clone())
.collect() .collect()
@@ -656,9 +684,7 @@ pub mod test {
} }
impl Registry for RegistryBuilder { impl Registry for RegistryBuilder {
fn query(&mut self, fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
dep: &Dependency,
f: &mut FnMut(Summary)) -> CargoResult<()> {
debug!("querying; dep={:?}", dep); debug!("querying; dep={:?}", dep);
let overrides = self.query_overrides(dep); let overrides = self.query_overrides(dep);


@@ -1,13 +1,13 @@
use std::collections::{HashMap, HashSet, BTreeMap}; use std::collections::{BTreeMap, HashMap, HashSet};
use std::fmt; use std::fmt;
use std::str::FromStr; use std::str::FromStr;
use serde::ser; use serde::ser;
use serde::de; use serde::de;
use core::{Package, PackageId, SourceId, Workspace, Dependency}; use core::{Dependency, Package, PackageId, SourceId, Workspace};
use util::{Graph, Config, internal}; use util::{internal, Config, Graph};
use util::errors::{CargoResult, CargoResultExt, CargoError}; use util::errors::{CargoError, CargoResult, CargoResultExt};
use super::Resolve; use super::Resolve;
@@ -18,8 +18,7 @@ pub struct EncodableResolve {
root: Option<EncodableDependency>, root: Option<EncodableDependency>,
metadata: Option<Metadata>, metadata: Option<Metadata>,
#[serde(default, skip_serializing_if = "Patch::is_empty")] #[serde(default, skip_serializing_if = "Patch::is_empty")] patch: Patch,
patch: Patch,
} }
#[derive(Serialize, Deserialize, Debug, Default)] #[derive(Serialize, Deserialize, Debug, Default)]
@@ -54,19 +53,19 @@ impl EncodableResolve {
}; };
if !all_pkgs.insert(enc_id.clone()) { if !all_pkgs.insert(enc_id.clone()) {
return Err(internal(format!("package `{}` is specified twice in the lockfile", return Err(internal(format!(
pkg.name))); "package `{}` is specified twice in the lockfile",
pkg.name
)));
} }
let id = match pkg.source.as_ref().or_else(|| path_deps.get(&pkg.name)) { let id = match pkg.source.as_ref().or_else(|| path_deps.get(&pkg.name)) {
// We failed to find a local package in the workspace. // We failed to find a local package in the workspace.
// It must have been removed and should be ignored. // It must have been removed and should be ignored.
None => { None => {
debug!("path dependency now missing {} v{}", debug!("path dependency now missing {} v{}", pkg.name, pkg.version);
pkg.name, continue;
pkg.version);
continue
} }
Some(source) => PackageId::new(&pkg.name, &pkg.version, source)? Some(source) => PackageId::new(&pkg.name, &pkg.version, source)?,
}; };
assert!(live_pkgs.insert(enc_id, (id, pkg)).is_none()) assert!(live_pkgs.insert(enc_id, (id, pkg)).is_none())
@@ -82,9 +81,12 @@ impl EncodableResolve {
// no longer a member of the workspace. // no longer a member of the workspace.
Ok(None) Ok(None)
} else { } else {
Err(internal(format!("package `{}` is specified as a dependency, \ Err(internal(format!(
but is missing from the package list", enc_id))) "package `{}` is specified as a dependency, \
} but is missing from the package list",
enc_id
)))
},
} }
}; };
@@ -98,7 +100,7 @@ impl EncodableResolve {
for &(ref id, pkg) in live_pkgs.values() { for &(ref id, pkg) in live_pkgs.values() {
let deps = match pkg.dependencies { let deps = match pkg.dependencies {
Some(ref deps) => deps, Some(ref deps) => deps,
None => continue None => continue,
}; };
for edge in deps.iter() { for edge in deps.iter() {
@@ -146,9 +148,8 @@ impl EncodableResolve {
for (k, v) in metadata.iter().filter(|p| p.0.starts_with(prefix)) { for (k, v) in metadata.iter().filter(|p| p.0.starts_with(prefix)) {
to_remove.push(k.to_string()); to_remove.push(k.to_string());
let k = &k[prefix.len()..]; let k = &k[prefix.len()..];
let enc_id: EncodablePackageId = k.parse().chain_err(|| { let enc_id: EncodablePackageId = k.parse()
internal("invalid encoding of checksum in lockfile") .chain_err(|| internal("invalid encoding of checksum in lockfile"))?;
})?;
let id = match lookup_id(&enc_id) { let id = match lookup_id(&enc_id) {
Ok(Some(id)) => id, Ok(Some(id)) => id,
_ => continue, _ => continue,
@@ -192,21 +193,23 @@ fn build_path_deps(ws: &Workspace) -> HashMap<String, SourceId> {
// such as `cargo install` with a lock file from a remote dependency. In // such as `cargo install` with a lock file from a remote dependency. In
// that case we don't need to fixup any path dependencies (as they're not // that case we don't need to fixup any path dependencies (as they're not
// actually path dependencies any more), so we ignore them. // actually path dependencies any more), so we ignore them.
let members = ws.members().filter(|p| { let members = ws.members()
p.package_id().source_id().is_path() .filter(|p| p.package_id().source_id().is_path())
}).collect::<Vec<_>>(); .collect::<Vec<_>>();
let mut ret = HashMap::new(); let mut ret = HashMap::new();
let mut visited = HashSet::new(); let mut visited = HashSet::new();
for member in members.iter() { for member in members.iter() {
ret.insert(member.package_id().name().to_string(), ret.insert(
member.package_id().source_id().clone()); member.package_id().name().to_string(),
member.package_id().source_id().clone(),
);
visited.insert(member.package_id().source_id().clone()); visited.insert(member.package_id().source_id().clone());
} }
for member in members.iter() { for member in members.iter() {
build_pkg(member, ws.config(), &mut ret, &mut visited); build_pkg(member, ws.config(), &mut ret, &mut visited);
} }
for deps in ws.root_patch().values() { for deps in ws.root_patch().values() {
for dep in deps { for dep in deps {
build_dep(dep, ws.config(), &mut ret, &mut visited); build_dep(dep, ws.config(), &mut ret, &mut visited);
} }
@@ -217,22 +220,26 @@ fn build_path_deps(ws: &Workspace) -> HashMap<String, SourceId> {
return ret; return ret;
fn build_pkg(pkg: &Package, fn build_pkg(
config: &Config, pkg: &Package,
ret: &mut HashMap<String, SourceId>, config: &Config,
visited: &mut HashSet<SourceId>) { ret: &mut HashMap<String, SourceId>,
visited: &mut HashSet<SourceId>,
) {
for dep in pkg.dependencies() { for dep in pkg.dependencies() {
build_dep(dep, config, ret, visited); build_dep(dep, config, ret, visited);
} }
} }
fn build_dep(dep: &Dependency, fn build_dep(
config: &Config, dep: &Dependency,
ret: &mut HashMap<String, SourceId>, config: &Config,
visited: &mut HashSet<SourceId>) { ret: &mut HashMap<String, SourceId>,
visited: &mut HashSet<SourceId>,
) {
let id = dep.source_id(); let id = dep.source_id();
if visited.contains(id) || !id.is_path() { if visited.contains(id) || !id.is_path() {
return return;
} }
let path = match id.url().to_file_path() { let path = match id.url().to_file_path() {
Ok(p) => p.join("Cargo.toml"), Ok(p) => p.join("Cargo.toml"),
@@ -242,8 +249,7 @@ fn build_path_deps(ws: &Workspace) -> HashMap<String, SourceId> {
Ok(p) => p, Ok(p) => p,
Err(_) => return, Err(_) => return,
}; };
ret.insert(pkg.name().to_string(), ret.insert(pkg.name().to_string(), pkg.package_id().source_id().clone());
pkg.package_id().source_id().clone());
visited.insert(pkg.package_id().source_id().clone()); visited.insert(pkg.package_id().source_id().clone());
build_pkg(&pkg, config, ret, visited); build_pkg(&pkg, config, ret, visited);
} }
@@ -268,7 +274,7 @@ pub struct EncodableDependency {
pub struct EncodablePackageId { pub struct EncodablePackageId {
name: String, name: String,
version: String, version: String,
source: Option<SourceId> source: Option<SourceId>,
} }
impl fmt::Display for EncodablePackageId { impl fmt::Display for EncodablePackageId {
@@ -287,9 +293,8 @@ impl FromStr for EncodablePackageId {
fn from_str(s: &str) -> CargoResult<EncodablePackageId> { fn from_str(s: &str) -> CargoResult<EncodablePackageId> {
let mut s = s.splitn(3, ' '); let mut s = s.splitn(3, ' ');
let name = s.next().unwrap(); let name = s.next().unwrap();
let version = s.next().ok_or_else(|| { let version = s.next()
internal("invalid serialized PackageId") .ok_or_else(|| internal("invalid serialized PackageId"))?;
})?;
let source_id = match s.next() { let source_id = match s.next() {
Some(s) => { Some(s) => {
if s.starts_with('(') && s.ends_with(')') { if s.starts_with('(') && s.ends_with(')') {
@@ -304,14 +309,15 @@ impl FromStr for EncodablePackageId {
Ok(EncodablePackageId { Ok(EncodablePackageId {
name: name.to_string(), name: name.to_string(),
version: version.to_string(), version: version.to_string(),
source: source_id source: source_id,
}) })
} }
} }
impl ser::Serialize for EncodablePackageId { impl ser::Serialize for EncodablePackageId {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer, where
S: ser::Serializer,
{ {
s.collect_str(self) s.collect_str(self)
} }
@@ -319,11 +325,13 @@ impl ser::Serialize for EncodablePackageId {
impl<'de> de::Deserialize<'de> for EncodablePackageId { impl<'de> de::Deserialize<'de> for EncodablePackageId {
fn deserialize<D>(d: D) -> Result<EncodablePackageId, D::Error> fn deserialize<D>(d: D) -> Result<EncodablePackageId, D::Error>
where D: de::Deserializer<'de>, where
D: de::Deserializer<'de>,
{ {
String::deserialize(d).and_then(|string| { String::deserialize(d).and_then(|string| {
string.parse::<EncodablePackageId>() string
.map_err(de::Error::custom) .parse::<EncodablePackageId>()
.map_err(de::Error::custom)
}) })
} }
} }
@@ -335,14 +343,15 @@ pub struct WorkspaceResolve<'a, 'cfg: 'a> {
impl<'a, 'cfg> ser::Serialize for WorkspaceResolve<'a, 'cfg> { impl<'a, 'cfg> ser::Serialize for WorkspaceResolve<'a, 'cfg> {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer, where
S: ser::Serializer,
{ {
let mut ids: Vec<&PackageId> = self.resolve.graph.iter().collect(); let mut ids: Vec<&PackageId> = self.resolve.graph.iter().collect();
ids.sort(); ids.sort();
let encodable = ids.iter().filter_map(|&id| { let encodable = ids.iter()
Some(encodable_resolve_node(id, self.resolve)) .filter_map(|&id| Some(encodable_resolve_node(id, self.resolve)))
}).collect::<Vec<_>>(); .collect::<Vec<_>>();
let mut metadata = self.resolve.metadata.clone(); let mut metadata = self.resolve.metadata.clone();
@@ -352,22 +361,27 @@ impl<'a, 'cfg> ser::Serialize for WorkspaceResolve<'a, 'cfg> {
None => "<none>", None => "<none>",
}; };
let id = encodable_package_id(id); let id = encodable_package_id(id);
metadata.insert(format!("checksum {}", id.to_string()), metadata.insert(format!("checksum {}", id.to_string()), checksum.to_string());
checksum.to_string());
} }
let metadata = if metadata.is_empty() { None } else { Some(metadata) }; let metadata = if metadata.is_empty() {
None
} else {
Some(metadata)
};
let patch = Patch { let patch = Patch {
unused: self.resolve.unused_patches().iter().map(|id| { unused: self.resolve
EncodableDependency { .unused_patches()
.iter()
.map(|id| EncodableDependency {
name: id.name().to_string(), name: id.name().to_string(),
version: id.version().to_string(), version: id.version().to_string(),
source: encode_source(id.source_id()), source: encode_source(id.source_id()),
dependencies: None, dependencies: None,
replace: None, replace: None,
} })
}).collect(), .collect(),
}; };
EncodableResolve { EncodableResolve {
package: Some(encodable), package: Some(encodable),
@@ -378,17 +392,17 @@ impl<'a, 'cfg> ser::Serialize for WorkspaceResolve<'a, 'cfg> {
} }
} }
fn encodable_resolve_node(id: &PackageId, resolve: &Resolve) fn encodable_resolve_node(id: &PackageId, resolve: &Resolve) -> EncodableDependency {
-> EncodableDependency {
let (replace, deps) = match resolve.replacement(id) { let (replace, deps) = match resolve.replacement(id) {
Some(id) => { Some(id) => (Some(encodable_package_id(id)), None),
(Some(encodable_package_id(id)), None)
}
None => { None => {
let mut deps = resolve.graph.edges(id) let mut deps = resolve
.into_iter().flat_map(|a| a) .graph
.map(encodable_package_id) .edges(id)
.collect::<Vec<_>>(); .into_iter()
.flat_map(|a| a)
.map(encodable_package_id)
.collect::<Vec<_>>();
deps.sort(); deps.sort();
(None, Some(deps)) (None, Some(deps))
} }

File diff suppressed because it is too large


@@ -2,8 +2,8 @@ use std::fmt;
use std::io::prelude::*; use std::io::prelude::*;
use atty; use atty;
use termcolor::Color::{Green, Red, Yellow, Cyan}; use termcolor::Color::{Cyan, Green, Red, Yellow};
use termcolor::{self, StandardStream, Color, ColorSpec, WriteColor}; use termcolor::{self, Color, ColorSpec, StandardStream, WriteColor};
use util::errors::CargoResult; use util::errors::CargoResult;
@@ -12,7 +12,7 @@ use util::errors::CargoResult;
pub enum Verbosity { pub enum Verbosity {
Verbose, Verbose,
Normal, Normal,
Quiet Quiet,
} }
/// An abstraction around a `Write`able object that remembers preferences for output verbosity and /// An abstraction around a `Write`able object that remembers preferences for output verbosity and
@ -28,17 +28,13 @@ pub struct Shell {
impl fmt::Debug for Shell { impl fmt::Debug for Shell {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self.err { match self.err {
ShellOut::Write(_) => { ShellOut::Write(_) => f.debug_struct("Shell")
f.debug_struct("Shell") .field("verbosity", &self.verbosity)
.field("verbosity", &self.verbosity) .finish(),
.finish() ShellOut::Stream { color_choice, .. } => f.debug_struct("Shell")
} .field("verbosity", &self.verbosity)
ShellOut::Stream { color_choice, .. } => { .field("color_choice", &color_choice)
f.debug_struct("Shell") .finish(),
.field("verbosity", &self.verbosity)
.field("color_choice", &color_choice)
.finish()
}
} }
} }
} }
@@ -90,16 +86,16 @@ impl Shell {
/// Print a message, where the status will have `color` color, and can be justified. The /// Print a message, where the status will have `color` color, and can be justified. The
/// messages follows without color. /// messages follows without color.
fn print(&mut self, fn print(
status: &fmt::Display, &mut self,
message: Option<&fmt::Display>, status: &fmt::Display,
color: Color, message: Option<&fmt::Display>,
justified: bool) -> CargoResult<()> { color: Color,
justified: bool,
) -> CargoResult<()> {
match self.verbosity { match self.verbosity {
Verbosity::Quiet => Ok(()), Verbosity::Quiet => Ok(()),
_ => { _ => self.err.print(status, message, color, justified),
self.err.print(status, message, color, justified)
}
} }
} }
@@ -126,44 +122,53 @@ impl Shell {
/// Shortcut to right-align and color green a status message. /// Shortcut to right-align and color green a status message.
pub fn status<T, U>(&mut self, status: T, message: U) -> CargoResult<()> pub fn status<T, U>(&mut self, status: T, message: U) -> CargoResult<()>
where T: fmt::Display, U: fmt::Display where
T: fmt::Display,
U: fmt::Display,
{ {
self.print(&status, Some(&message), Green, true) self.print(&status, Some(&message), Green, true)
} }
pub fn status_header<T>(&mut self, status: T) -> CargoResult<()> pub fn status_header<T>(&mut self, status: T) -> CargoResult<()>
where T: fmt::Display, where
T: fmt::Display,
{ {
self.print(&status, None, Cyan, true) self.print(&status, None, Cyan, true)
} }
/// Shortcut to right-align a status message. /// Shortcut to right-align a status message.
pub fn status_with_color<T, U>(&mut self, pub fn status_with_color<T, U>(
status: T, &mut self,
message: U, status: T,
color: Color) -> CargoResult<()> message: U,
where T: fmt::Display, U: fmt::Display color: Color,
) -> CargoResult<()>
where
T: fmt::Display,
U: fmt::Display,
{ {
self.print(&status, Some(&message), color, true) self.print(&status, Some(&message), color, true)
} }
/// Run the callback only if we are in verbose mode /// Run the callback only if we are in verbose mode
pub fn verbose<F>(&mut self, mut callback: F) -> CargoResult<()> pub fn verbose<F>(&mut self, mut callback: F) -> CargoResult<()>
where F: FnMut(&mut Shell) -> CargoResult<()> where
F: FnMut(&mut Shell) -> CargoResult<()>,
{ {
match self.verbosity { match self.verbosity {
Verbosity::Verbose => callback(self), Verbosity::Verbose => callback(self),
_ => Ok(()) _ => Ok(()),
} }
} }
/// Run the callback if we are not in verbose mode. /// Run the callback if we are not in verbose mode.
pub fn concise<F>(&mut self, mut callback: F) -> CargoResult<()> pub fn concise<F>(&mut self, mut callback: F) -> CargoResult<()>
where F: FnMut(&mut Shell) -> CargoResult<()> where
F: FnMut(&mut Shell) -> CargoResult<()>,
{ {
match self.verbosity { match self.verbosity {
Verbosity::Verbose => Ok(()), Verbosity::Verbose => Ok(()),
_ => callback(self) _ => callback(self),
} }
} }
@@ -192,16 +197,23 @@ impl Shell {
/// Update the color choice (always, never, or auto) from a string. /// Update the color choice (always, never, or auto) from a string.
pub fn set_color_choice(&mut self, color: Option<&str>) -> CargoResult<()> { pub fn set_color_choice(&mut self, color: Option<&str>) -> CargoResult<()> {
if let ShellOut::Stream { ref mut stream, ref mut color_choice, .. } = self.err { if let ShellOut::Stream {
ref mut stream,
ref mut color_choice,
..
} = self.err
{
let cfg = match color { let cfg = match color {
Some("always") => ColorChoice::Always, Some("always") => ColorChoice::Always,
Some("never") => ColorChoice::Never, Some("never") => ColorChoice::Never,
Some("auto") | Some("auto") | None => ColorChoice::CargoAuto,
None => ColorChoice::CargoAuto,
Some(arg) => bail!("argument for --color must be auto, always, or \ Some(arg) => bail!(
never, but found `{}`", arg), "argument for --color must be auto, always, or \
never, but found `{}`",
arg
),
}; };
*color_choice = cfg; *color_choice = cfg;
*stream = StandardStream::stderr(cfg.to_termcolor_color_choice()); *stream = StandardStream::stderr(cfg.to_termcolor_color_choice());
@@ -230,17 +242,17 @@ impl Default for Shell {
impl ShellOut { impl ShellOut {
/// Print out a message with a status. The status comes first and is bold + the given color. /// Print out a message with a status. The status comes first and is bold + the given color.
/// The status can be justified, in which case the max width that will right align is 12 chars. /// The status can be justified, in which case the max width that will right align is 12 chars.
fn print(&mut self, fn print(
status: &fmt::Display, &mut self,
message: Option<&fmt::Display>, status: &fmt::Display,
color: Color, message: Option<&fmt::Display>,
justified: bool) -> CargoResult<()> { color: Color,
justified: bool,
) -> CargoResult<()> {
match *self { match *self {
ShellOut::Stream { ref mut stream, .. } => { ShellOut::Stream { ref mut stream, .. } => {
stream.reset()?; stream.reset()?;
stream.set_color(ColorSpec::new() stream.set_color(ColorSpec::new().set_bold(true).set_fg(Some(color)))?;
.set_bold(true)
.set_fg(Some(color)))?;
if justified { if justified {
write!(stream, "{:>12}", status)?; write!(stream, "{:>12}", status)?;
} else { } else {
@@ -303,7 +315,7 @@ mod imp {
unsafe { unsafe {
let mut winsize: libc::winsize = mem::zeroed(); let mut winsize: libc::winsize = mem::zeroed();
if libc::ioctl(libc::STDERR_FILENO, libc::TIOCGWINSZ, &mut winsize) < 0 { if libc::ioctl(libc::STDERR_FILENO, libc::TIOCGWINSZ, &mut winsize) < 0 {
return None return None;
} }
if winsize.ws_col > 0 { if winsize.ws_col > 0 {
Some(winsize.ws_col as usize) Some(winsize.ws_col as usize)
@@ -335,7 +347,7 @@ mod imp {
let stdout = GetStdHandle(STD_ERROR_HANDLE); let stdout = GetStdHandle(STD_ERROR_HANDLE);
let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed(); let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed();
if GetConsoleScreenBufferInfo(stdout, &mut csbi) == 0 { if GetConsoleScreenBufferInfo(stdout, &mut csbi) == 0 {
return None return None;
} }
Some((csbi.srWindow.Right - csbi.srWindow.Left) as usize) Some((csbi.srWindow.Right - csbi.srWindow.Left) as usize)
} }


@@ -1,11 +1,11 @@
use std::collections::hash_map::{HashMap, Values, IterMut}; use std::collections::hash_map::{HashMap, IterMut, Values};
use core::{Package, PackageId, Registry}; use core::{Package, PackageId, Registry};
use util::CargoResult; use util::CargoResult;
mod source_id; mod source_id;
pub use self::source_id::{SourceId, GitReference}; pub use self::source_id::{GitReference, SourceId};
/// A Source finds and downloads remote packages based on names and /// A Source finds and downloads remote packages based on names and
/// versions. /// versions.
@@ -89,7 +89,9 @@ pub struct SourcesMut<'a, 'src: 'a> {
impl<'src> SourceMap<'src> { impl<'src> SourceMap<'src> {
/// Create an empty map /// Create an empty map
pub fn new() -> SourceMap<'src> { pub fn new() -> SourceMap<'src> {
SourceMap { map: HashMap::new() } SourceMap {
map: HashMap::new(),
}
} }
/// Like `HashMap::contains_key` /// Like `HashMap::contains_key`
@@ -144,7 +146,9 @@ impl<'src> SourceMap<'src> {
/// Like `HashMap::iter_mut` /// Like `HashMap::iter_mut`
pub fn sources_mut<'a>(&'a mut self) -> SourcesMut<'a, 'src> { pub fn sources_mut<'a>(&'a mut self) -> SourcesMut<'a, 'src> {
SourcesMut { inner: self.map.iter_mut() } SourcesMut {
inner: self.map.iter_mut(),
}
} }
} }
@@ -154,4 +158,3 @@ impl<'a, 'src> Iterator for SourcesMut<'a, 'src> {
self.inner.next().map(|(a, b)| (a, &mut **b)) self.inner.next().map(|(a, b)| (a, &mut **b))
} }
} }


@@ -12,9 +12,9 @@ use url::Url;
use ops; use ops;
use sources::git; use sources::git;
use sources::{PathSource, GitSource, RegistrySource, CRATES_IO}; use sources::{GitSource, PathSource, RegistrySource, CRATES_IO};
use sources::DirectorySource; use sources::DirectorySource;
use util::{Config, CargoResult, ToUrl}; use util::{CargoResult, Config, ToUrl};
/// Unique identifier for a source of packages. /// Unique identifier for a source of packages.
#[derive(Clone, Eq, Debug)] #[derive(Clone, Eq, Debug)]
@@ -93,7 +93,9 @@ impl SourceId {
pub fn from_url(string: &str) -> CargoResult<SourceId> { pub fn from_url(string: &str) -> CargoResult<SourceId> {
let mut parts = string.splitn(2, '+'); let mut parts = string.splitn(2, '+');
let kind = parts.next().unwrap(); let kind = parts.next().unwrap();
let url = parts.next().ok_or_else(|| format_err!("invalid source `{}`", string))?; let url = parts
.next()
.ok_or_else(|| format_err!("invalid source `{}`", string))?;
match kind { match kind {
"git" => { "git" => {
@@ -102,8 +104,7 @@ impl SourceId {
for (k, v) in url.query_pairs() { for (k, v) in url.query_pairs() {
match &k[..] { match &k[..] {
// map older 'ref' to branch // map older 'ref' to branch
"branch" | "branch" | "ref" => reference = GitReference::Branch(v.into_owned()),
"ref" => reference = GitReference::Branch(v.into_owned()),
"rev" => reference = GitReference::Rev(v.into_owned()), "rev" => reference = GitReference::Rev(v.into_owned()),
"tag" => reference = GitReference::Tag(v.into_owned()), "tag" => reference = GitReference::Tag(v.into_owned()),
@@ -114,23 +115,24 @@ impl SourceId {
url.set_fragment(None); url.set_fragment(None);
url.set_query(None); url.set_query(None);
Ok(SourceId::for_git(&url, reference)?.with_precise(precise)) Ok(SourceId::for_git(&url, reference)?.with_precise(precise))
}, }
"registry" => { "registry" => {
let url = url.to_url()?; let url = url.to_url()?;
Ok(SourceId::new(Kind::Registry, url)? Ok(SourceId::new(Kind::Registry, url)?.with_precise(Some("locked".to_string())))
.with_precise(Some("locked".to_string())))
} }
"path" => { "path" => {
let url = url.to_url()?; let url = url.to_url()?;
SourceId::new(Kind::Path, url) SourceId::new(Kind::Path, url)
} }
kind => Err(format_err!("unsupported source protocol: {}", kind)) kind => Err(format_err!("unsupported source protocol: {}", kind)),
} }
} }
/// A view of the `SourceId` that can be `Display`ed as a URL /// A view of the `SourceId` that can be `Display`ed as a URL
pub fn to_url(&self) -> SourceIdToUrl { pub fn to_url(&self) -> SourceIdToUrl {
SourceIdToUrl { inner: &*self.inner } SourceIdToUrl {
inner: &*self.inner,
}
} }
/// Create a SourceId from a filesystem path. /// Create a SourceId from a filesystem path.
@@ -173,10 +175,12 @@ impl SourceId {
let url = if let Some(ref index) = cfg.index { let url = if let Some(ref index) = cfg.index {
static WARNED: AtomicBool = ATOMIC_BOOL_INIT; static WARNED: AtomicBool = ATOMIC_BOOL_INIT;
if !WARNED.swap(true, SeqCst) { if !WARNED.swap(true, SeqCst) {
config.shell().warn("custom registry support via \ config.shell().warn(
the `registry.index` configuration is \ "custom registry support via \
being removed, this functionality \ the `registry.index` configuration is \
will not work in the future")?; being removed, this functionality \
will not work in the future",
)?;
} }
&index[..] &index[..]
} else { } else {
@@ -218,7 +222,7 @@ impl SourceId {
pub fn is_registry(&self) -> bool { pub fn is_registry(&self) -> bool {
match self.inner.kind { match self.inner.kind {
Kind::Registry | Kind::LocalRegistry => true, Kind::Registry | Kind::LocalRegistry => true,
_ => false, _ => false,
} }
} }
@@ -284,7 +288,7 @@ impl SourceId {
inner: Arc::new(SourceIdInner { inner: Arc::new(SourceIdInner {
precise: v, precise: v,
..(*self.inner).clone() ..(*self.inner).clone()
}) }),
} }
} }
@@ -303,10 +307,15 @@ impl SourceId {
/// same hash in different locations. /// same hash in different locations.
pub fn stable_hash<S: hash::Hasher>(&self, workspace: &Path, into: &mut S) { pub fn stable_hash<S: hash::Hasher>(&self, workspace: &Path, into: &mut S) {
if self.is_path() { if self.is_path() {
if let Ok(p) = self.inner.url.to_file_path().unwrap().strip_prefix(workspace) { if let Ok(p) = self.inner
.url
.to_file_path()
.unwrap()
.strip_prefix(workspace)
{
self.inner.kind.hash(into); self.inner.kind.hash(into);
p.to_str().unwrap().hash(into); p.to_str().unwrap().hash(into);
return return;
} }
} }
self.hash(into) self.hash(into)
@@ -333,7 +342,8 @@ impl Ord for SourceId {
impl ser::Serialize for SourceId { impl ser::Serialize for SourceId {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer, where
S: ser::Serializer,
{ {
if self.is_path() { if self.is_path() {
None::<String>.serialize(s) None::<String>.serialize(s)
@@ -345,7 +355,8 @@ impl ser::Serialize for SourceId {
impl<'de> de::Deserialize<'de> for SourceId { impl<'de> de::Deserialize<'de> for SourceId {
fn deserialize<D>(d: D) -> Result<SourceId, D::Error> fn deserialize<D>(d: D) -> Result<SourceId, D::Error>
where D: de::Deserializer<'de>, where
D: de::Deserializer<'de>,
{ {
let string = String::deserialize(d)?; let string = String::deserialize(d)?;
SourceId::from_url(&string).map_err(de::Error::custom) SourceId::from_url(&string).map_err(de::Error::custom)
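Both serde impls above pick up rustfmt's `where`-clause layout: `where` alone on its line, one bound per line, each with a trailing comma. A small free-standing illustration of the convention (a generic helper, nothing Cargo-specific):

use std::fmt::Display;

fn join_all<I>(items: I, sep: &str) -> String
where
    I: IntoIterator,
    I::Item: Display,
{
    items
        .into_iter()
        .map(|item| item.to_string())
        .collect::<Vec<_>>()
        .join(sep)
}

Here `join_all(vec![1, 2, 3], ", ")` evaluates to `"1, 2, 3"`; the body also shows the one-adapter-per-line chain style used throughout these hunks.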
@ -355,11 +366,17 @@ impl<'de> de::Deserialize<'de> for SourceId {
impl fmt::Display for SourceId { impl fmt::Display for SourceId {
fn fmt(&self, f: &mut Formatter) -> fmt::Result { fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match *self.inner { match *self.inner {
SourceIdInner { kind: Kind::Path, ref url, .. } => { SourceIdInner {
fmt::Display::fmt(url, f) kind: Kind::Path,
} ref url,
SourceIdInner { kind: Kind::Git(ref reference), ref url, ..
ref precise, .. } => { } => fmt::Display::fmt(url, f),
SourceIdInner {
kind: Kind::Git(ref reference),
ref url,
ref precise,
..
} => {
write!(f, "{}", url)?; write!(f, "{}", url)?;
if let Some(pretty) = reference.pretty_ref() { if let Some(pretty) = reference.pretty_ref() {
write!(f, "?{}", pretty)?; write!(f, "?{}", pretty)?;
@ -371,13 +388,21 @@ impl fmt::Display for SourceId {
} }
Ok(()) Ok(())
} }
SourceIdInner { kind: Kind::Registry, ref url, .. } | SourceIdInner {
SourceIdInner { kind: Kind::LocalRegistry, ref url, .. } => { kind: Kind::Registry,
write!(f, "registry `{}`", url) ref url,
} ..
SourceIdInner { kind: Kind::Directory, ref url, .. } => {
write!(f, "dir {}", url)
} }
| SourceIdInner {
kind: Kind::LocalRegistry,
ref url,
..
} => write!(f, "registry `{}`", url),
SourceIdInner {
kind: Kind::Directory,
ref url,
..
} => write!(f, "dir {}", url),
} }
} }
} }
@ -438,9 +463,11 @@ impl Hash for SourceId {
fn hash<S: hash::Hasher>(&self, into: &mut S) { fn hash<S: hash::Hasher>(&self, into: &mut S) {
self.inner.kind.hash(into); self.inner.kind.hash(into);
match *self.inner { match *self.inner {
SourceIdInner { kind: Kind::Git(..), ref canonical_url, .. } => { SourceIdInner {
canonical_url.as_str().hash(into) kind: Kind::Git(..),
} ref canonical_url,
..
} => canonical_url.as_str().hash(into),
_ => self.inner.url.as_str().hash(into), _ => self.inner.url.as_str().hash(into),
} }
} }
@ -454,11 +481,16 @@ pub struct SourceIdToUrl<'a> {
impl<'a> fmt::Display for SourceIdToUrl<'a> { impl<'a> fmt::Display for SourceIdToUrl<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self.inner { match *self.inner {
SourceIdInner { kind: Kind::Path, ref url, .. } => {
write!(f, "path+{}", url)
}
SourceIdInner { SourceIdInner {
kind: Kind::Git(ref reference), ref url, ref precise, .. kind: Kind::Path,
ref url,
..
} => write!(f, "path+{}", url),
SourceIdInner {
kind: Kind::Git(ref reference),
ref url,
ref precise,
..
} => { } => {
write!(f, "git+{}", url)?; write!(f, "git+{}", url)?;
if let Some(pretty) = reference.pretty_ref() { if let Some(pretty) = reference.pretty_ref() {
@ -469,15 +501,21 @@ impl<'a> fmt::Display for SourceIdToUrl<'a> {
} }
Ok(()) Ok(())
} }
SourceIdInner { kind: Kind::Registry, ref url, .. } => { SourceIdInner {
write!(f, "registry+{}", url) kind: Kind::Registry,
} ref url,
SourceIdInner { kind: Kind::LocalRegistry, ref url, .. } => { ..
write!(f, "local-registry+{}", url) } => write!(f, "registry+{}", url),
} SourceIdInner {
SourceIdInner { kind: Kind::Directory, ref url, .. } => { kind: Kind::LocalRegistry,
write!(f, "directory+{}", url) ref url,
} ..
} => write!(f, "local-registry+{}", url),
SourceIdInner {
kind: Kind::Directory,
ref url,
..
} => write!(f, "directory+{}", url),
} }
} }
} }
@ -510,7 +548,7 @@ impl<'a> fmt::Display for PrettyRef<'a> {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::{SourceId, Kind, GitReference}; use super::{GitReference, Kind, SourceId};
use util::ToUrl; use util::ToUrl;
#[test] #[test]
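For orientation: the `Display` impls above serialize a source as `<kind>+<url>`, and `SourceId::from_url` recovers the kind by splitting on the first `+`. A toy round-trip of that scheme, with an illustrative enum rather than Cargo's types:

#[derive(Debug, PartialEq)]
enum Kind {
    Path,
    Git,
    Registry,
}

// Encode in the "<kind>+<url>" shape the Display impls above emit.
fn encode(kind: &Kind, url: &str) -> String {
    let prefix = match *kind {
        Kind::Path => "path",
        Kind::Git => "git",
        Kind::Registry => "registry",
    };
    format!("{}+{}", prefix, url)
}

// Split on the first '+' to recover the kind, as from_url does.
fn decode(s: &str) -> Option<(Kind, &str)> {
    let mut parts = s.splitn(2, '+');
    let kind = match parts.next()? {
        "path" => Kind::Path,
        "git" => Kind::Git,
        "registry" => Kind::Registry,
        _ => return None,
    };
    Some((kind, parts.next()?))
}

A round-trip like `decode(&encode(&Kind::Git, url))` returns the original kind and URL pair.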


@ -27,18 +27,25 @@ struct Inner {
} }
impl Summary { impl Summary {
pub fn new(pkg_id: PackageId, pub fn new(
dependencies: Vec<Dependency>, pkg_id: PackageId,
features: BTreeMap<String, Vec<String>>, dependencies: Vec<Dependency>,
links: Option<String>) -> CargoResult<Summary> { features: BTreeMap<String, Vec<String>>,
links: Option<String>,
) -> CargoResult<Summary> {
for dep in dependencies.iter() { for dep in dependencies.iter() {
if features.get(&*dep.name()).is_some() { if features.get(&*dep.name()).is_some() {
bail!("Features and dependencies cannot have the \ bail!(
same name: `{}`", dep.name()) "Features and dependencies cannot have the \
same name: `{}`",
dep.name()
)
} }
if dep.is_optional() && !dep.is_transitive() { if dep.is_optional() && !dep.is_transitive() {
bail!("Dev-dependencies are not allowed to be optional: `{}`", bail!(
dep.name()) "Dev-dependencies are not allowed to be optional: `{}`",
dep.name()
)
} }
} }
for (feature, list) in features.iter() { for (feature, list) in features.iter() {
@ -46,23 +53,34 @@ impl Summary {
let mut parts = dep.splitn(2, '/'); let mut parts = dep.splitn(2, '/');
let dep = parts.next().unwrap(); let dep = parts.next().unwrap();
let is_reexport = parts.next().is_some(); let is_reexport = parts.next().is_some();
if !is_reexport && features.get(dep).is_some() { continue } if !is_reexport && features.get(dep).is_some() {
continue;
}
match dependencies.iter().find(|d| &*d.name() == dep) { match dependencies.iter().find(|d| &*d.name() == dep) {
Some(d) => { Some(d) => {
if d.is_optional() || is_reexport { continue } if d.is_optional() || is_reexport {
bail!("Feature `{}` depends on `{}` which is not an \ continue;
optional dependency.\nConsider adding \ }
`optional = true` to the dependency", bail!(
feature, dep) "Feature `{}` depends on `{}` which is not an \
} optional dependency.\nConsider adding \
None if is_reexport => { `optional = true` to the dependency",
bail!("Feature `{}` requires a feature of `{}` which is not a \ feature,
dependency", feature, dep) dep
} )
None => {
bail!("Feature `{}` includes `{}` which is neither \
a dependency nor another feature", feature, dep)
} }
None if is_reexport => bail!(
"Feature `{}` requires a feature of `{}` which is not a \
dependency",
feature,
dep
),
None => bail!(
"Feature `{}` includes `{}` which is neither \
a dependency nor another feature",
feature,
dep
),
} }
} }
} }
@ -77,12 +95,24 @@ impl Summary {
}) })
} }
pub fn package_id(&self) -> &PackageId { &self.inner.package_id } pub fn package_id(&self) -> &PackageId {
pub fn name(&self) -> InternedString { self.package_id().name() } &self.inner.package_id
pub fn version(&self) -> &Version { self.package_id().version() } }
pub fn source_id(&self) -> &SourceId { self.package_id().source_id() } pub fn name(&self) -> InternedString {
pub fn dependencies(&self) -> &[Dependency] { &self.inner.dependencies } self.package_id().name()
pub fn features(&self) -> &BTreeMap<String, Vec<String>> { &self.inner.features } }
pub fn version(&self) -> &Version {
self.package_id().version()
}
pub fn source_id(&self) -> &SourceId {
self.package_id().source_id()
}
pub fn dependencies(&self) -> &[Dependency] {
&self.inner.dependencies
}
pub fn features(&self) -> &BTreeMap<String, Vec<String>> {
&self.inner.features
}
pub fn checksum(&self) -> Option<&str> { pub fn checksum(&self) -> Option<&str> {
self.inner.checksum.as_ref().map(|s| &s[..]) self.inner.checksum.as_ref().map(|s| &s[..])
} }
@ -101,7 +131,9 @@ impl Summary {
} }
pub fn map_dependencies<F>(mut self, f: F) -> Summary pub fn map_dependencies<F>(mut self, f: F) -> Summary
where F: FnMut(Dependency) -> Dependency { where
F: FnMut(Dependency) -> Dependency,
{
{ {
let slot = &mut Rc::make_mut(&mut self.inner).dependencies; let slot = &mut Rc::make_mut(&mut self.inner).dependencies;
let deps = mem::replace(slot, Vec::new()); let deps = mem::replace(slot, Vec::new());
@ -110,17 +142,14 @@ impl Summary {
self self
} }
pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) -> Summary {
-> Summary {
let me = if self.package_id().source_id() == to_replace { let me = if self.package_id().source_id() == to_replace {
let new_id = self.package_id().with_source_id(replace_with); let new_id = self.package_id().with_source_id(replace_with);
self.override_id(new_id) self.override_id(new_id)
} else { } else {
self self
}; };
me.map_dependencies(|dep| { me.map_dependencies(|dep| dep.map_source(to_replace, replace_with))
dep.map_source(to_replace, replace_with)
})
} }
} }
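`Summary::new` above shows rustfmt's signature convention: when a parameter list overflows, each parameter gets its own line with a trailing comma, and the return type follows the closing parenthesis. A toy signature in the same shape (placeholder types and a hypothetical function, only the layout is the point):

use std::collections::BTreeMap;

fn new_summary(
    pkg_id: u32,
    dependencies: Vec<String>,
    features: BTreeMap<String, Vec<String>>,
    links: Option<String>,
) -> Result<usize, String> {
    if links.is_some() && dependencies.is_empty() {
        // Multi-argument bail!-style error, one argument per line as above.
        return Err(format!(
            "package {} declares links but lists no dependencies",
            pkg_id
        ));
    }
    Ok(features.len())
}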


@ -1,4 +1,4 @@
use std::collections::hash_map::{HashMap, Entry}; use std::collections::hash_map::{Entry, HashMap};
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::slice; use std::slice;
@ -6,8 +6,8 @@ use std::slice;
use glob::glob; use glob::glob;
use url::Url; use url::Url;
use core::{Package, VirtualManifest, EitherManifest, SourceId}; use core::{EitherManifest, Package, SourceId, VirtualManifest};
use core::{PackageIdSpec, Dependency, Profile, Profiles}; use core::{Dependency, PackageIdSpec, Profile, Profiles};
use util::{Config, Filesystem}; use util::{Config, Filesystem};
use util::errors::{CargoResult, CargoResultExt}; use util::errors::{CargoResult, CargoResultExt};
use util::paths; use util::paths;
@ -121,8 +121,7 @@ impl<'cfg> Workspace<'cfg> {
/// This function will construct the entire workspace by determining the /// This function will construct the entire workspace by determining the
/// root and all member packages. It will then validate the workspace /// root and all member packages. It will then validate the workspace
/// before returning it, so `Ok` is only returned for valid workspaces. /// before returning it, so `Ok` is only returned for valid workspaces.
pub fn new(manifest_path: &Path, config: &'cfg Config) pub fn new(manifest_path: &Path, config: &'cfg Config) -> CargoResult<Workspace<'cfg>> {
-> CargoResult<Workspace<'cfg>> {
let target_dir = config.target_dir()?; let target_dir = config.target_dir()?;
let mut ws = Workspace { let mut ws = Workspace {
@ -154,10 +153,12 @@ impl<'cfg> Workspace<'cfg> {
/// ///
/// This is currently only used in niche situations like `cargo install` or /// This is currently only used in niche situations like `cargo install` or
/// `cargo package`. /// `cargo package`.
pub fn ephemeral(package: Package, pub fn ephemeral(
config: &'cfg Config, package: Package,
target_dir: Option<Filesystem>, config: &'cfg Config,
require_optional_deps: bool) -> CargoResult<Workspace<'cfg>> { target_dir: Option<Filesystem>,
require_optional_deps: bool,
) -> CargoResult<Workspace<'cfg>> {
let mut ws = Workspace { let mut ws = Workspace {
config, config,
current_manifest: package.manifest_path().to_path_buf(), current_manifest: package.manifest_path().to_path_buf(),
@ -194,9 +195,12 @@ impl<'cfg> Workspace<'cfg> {
/// indicating that something else should be passed. /// indicating that something else should be passed.
pub fn current(&self) -> CargoResult<&Package> { pub fn current(&self) -> CargoResult<&Package> {
let pkg = self.current_opt().ok_or_else(|| { let pkg = self.current_opt().ok_or_else(|| {
format_err!("manifest path `{}` is a virtual manifest, but this \ format_err!(
command requires running against an actual package in \ "manifest path `{}` is a virtual manifest, but this \
this workspace", self.current_manifest.display()) command requires running against an actual package in \
this workspace",
self.current_manifest.display()
)
})?; })?;
Ok(pkg) Ok(pkg)
} }
@ -204,14 +208,14 @@ impl<'cfg> Workspace<'cfg> {
pub fn current_opt(&self) -> Option<&Package> { pub fn current_opt(&self) -> Option<&Package> {
match *self.packages.get(&self.current_manifest) { match *self.packages.get(&self.current_manifest) {
MaybePackage::Package(ref p) => Some(p), MaybePackage::Package(ref p) => Some(p),
MaybePackage::Virtual(..) => None MaybePackage::Virtual(..) => None,
} }
} }
pub fn is_virtual(&self) -> bool { pub fn is_virtual(&self) -> bool {
match *self.packages.get(&self.current_manifest) { match *self.packages.get(&self.current_manifest) {
MaybePackage::Package(..) => false, MaybePackage::Package(..) => false,
MaybePackage::Virtual(..) => true MaybePackage::Virtual(..) => true,
} }
} }
@ -221,7 +225,9 @@ impl<'cfg> Workspace<'cfg> {
} }
pub fn profiles(&self) -> &Profiles { pub fn profiles(&self) -> &Profiles {
let root = self.root_manifest.as_ref().unwrap_or(&self.current_manifest); let root = self.root_manifest
.as_ref()
.unwrap_or(&self.current_manifest);
match *self.packages.get(root) { match *self.packages.get(root) {
MaybePackage::Package(ref p) => p.manifest().profiles(), MaybePackage::Package(ref p) => p.manifest().profiles(),
MaybePackage::Virtual(ref vm) => vm.profiles(), MaybePackage::Virtual(ref vm) => vm.profiles(),
@ -235,14 +241,15 @@ impl<'cfg> Workspace<'cfg> {
pub fn root(&self) -> &Path { pub fn root(&self) -> &Path {
match self.root_manifest { match self.root_manifest {
Some(ref p) => p, Some(ref p) => p,
None => &self.current_manifest None => &self.current_manifest,
}.parent().unwrap() }.parent()
.unwrap()
} }
pub fn target_dir(&self) -> Filesystem { pub fn target_dir(&self) -> Filesystem {
self.target_dir.clone().unwrap_or_else(|| { self.target_dir
Filesystem::new(self.root().join("target")) .clone()
}) .unwrap_or_else(|| Filesystem::new(self.root().join("target")))
} }
/// Returns the root [replace] section of this workspace. /// Returns the root [replace] section of this workspace.
@ -297,7 +304,10 @@ impl<'cfg> Workspace<'cfg> {
self.require_optional_deps self.require_optional_deps
} }
pub fn set_require_optional_deps<'a>(&'a mut self, require_optional_deps: bool) -> &mut Workspace<'cfg> { pub fn set_require_optional_deps<'a>(
&'a mut self,
require_optional_deps: bool,
) -> &mut Workspace<'cfg> {
self.require_optional_deps = require_optional_deps; self.require_optional_deps = require_optional_deps;
self self
} }
@ -311,10 +321,11 @@ impl<'cfg> Workspace<'cfg> {
/// ///
/// Returns an error if `manifest_path` isn't actually a valid manifest or /// Returns an error if `manifest_path` isn't actually a valid manifest or
/// if some other transient error happens. /// if some other transient error happens.
fn find_root(&mut self, manifest_path: &Path) fn find_root(&mut self, manifest_path: &Path) -> CargoResult<Option<PathBuf>> {
-> CargoResult<Option<PathBuf>> {
fn read_root_pointer(member_manifest: &Path, root_link: &str) -> CargoResult<PathBuf> { fn read_root_pointer(member_manifest: &Path, root_link: &str) -> CargoResult<PathBuf> {
let path = member_manifest.parent().unwrap() let path = member_manifest
.parent()
.unwrap()
.join(root_link) .join(root_link)
.join("Cargo.toml"); .join("Cargo.toml");
debug!("find_root - pointer {}", path.display()); debug!("find_root - pointer {}", path.display());
@ -326,11 +337,11 @@ impl<'cfg> Workspace<'cfg> {
match *current.workspace_config() { match *current.workspace_config() {
WorkspaceConfig::Root(_) => { WorkspaceConfig::Root(_) => {
debug!("find_root - is root {}", manifest_path.display()); debug!("find_root - is root {}", manifest_path.display());
return Ok(Some(manifest_path.to_path_buf())) return Ok(Some(manifest_path.to_path_buf()));
}
WorkspaceConfig::Member { root: Some(ref path_to_root) } => {
return Ok(Some(read_root_pointer(manifest_path, path_to_root)?))
} }
WorkspaceConfig::Member {
root: Some(ref path_to_root),
} => return Ok(Some(read_root_pointer(manifest_path, path_to_root)?)),
WorkspaceConfig::Member { root: None } => {} WorkspaceConfig::Member { root: None } => {}
} }
} }
@ -344,12 +355,14 @@ impl<'cfg> Workspace<'cfg> {
debug!("find_root - found a root checking exclusion"); debug!("find_root - found a root checking exclusion");
if !ances_root_config.is_excluded(manifest_path) { if !ances_root_config.is_excluded(manifest_path) {
debug!("find_root - found!"); debug!("find_root - found!");
return Ok(Some(ances_manifest_path)) return Ok(Some(ances_manifest_path));
} }
} }
WorkspaceConfig::Member { root: Some(ref path_to_root) } => { WorkspaceConfig::Member {
root: Some(ref path_to_root),
} => {
debug!("find_root - found pointer"); debug!("find_root - found pointer");
return Ok(Some(read_root_pointer(&ances_manifest_path, path_to_root)?)) return Ok(Some(read_root_pointer(&ances_manifest_path, path_to_root)?));
} }
WorkspaceConfig::Member { .. } => {} WorkspaceConfig::Member { .. } => {}
} }
@ -361,7 +374,7 @@ impl<'cfg> Workspace<'cfg> {
// current project, but we don't want to mistakenly try to put // current project, but we don't want to mistakenly try to put
// crates.io crates into the workspace. // crates.io crates into the workspace.
if self.config.home() == path { if self.config.home() == path {
break break;
} }
} }
@ -382,7 +395,7 @@ impl<'cfg> Workspace<'cfg> {
debug!("find_members - only me as a member"); debug!("find_members - only me as a member");
self.members.push(self.current_manifest.clone()); self.members.push(self.current_manifest.clone());
self.default_members.push(self.current_manifest.clone()); self.default_members.push(self.current_manifest.clone());
return Ok(()) return Ok(());
} }
}; };
@ -392,17 +405,18 @@ impl<'cfg> Workspace<'cfg> {
let root_package = self.packages.load(&root_manifest_path)?; let root_package = self.packages.load(&root_manifest_path)?;
match *root_package.workspace_config() { match *root_package.workspace_config() {
WorkspaceConfig::Root(ref root_config) => { WorkspaceConfig::Root(ref root_config) => {
members_paths = root_config.members_paths( members_paths =
root_config.members.as_ref().unwrap_or(&vec![]) root_config.members_paths(root_config.members.as_ref().unwrap_or(&vec![]))?;
)?;
default_members_paths = if let Some(ref default) = root_config.default_members { default_members_paths = if let Some(ref default) = root_config.default_members {
Some(root_config.members_paths(default)?) Some(root_config.members_paths(default)?)
} else { } else {
None None
} }
} }
_ => bail!("root of a workspace inferred but wasn't a root: {}", _ => bail!(
root_manifest_path.display()), "root of a workspace inferred but wasn't a root: {}",
root_manifest_path.display()
),
} }
} }
@ -414,9 +428,11 @@ impl<'cfg> Workspace<'cfg> {
for path in default { for path in default {
let manifest_path = paths::normalize_path(&path.join("Cargo.toml")); let manifest_path = paths::normalize_path(&path.join("Cargo.toml"));
if !self.members.contains(&manifest_path) { if !self.members.contains(&manifest_path) {
bail!("package `{}` is listed in workspaces default-members \ bail!(
but is not a member.", "package `{}` is listed in workspaces default-members \
path.display()) but is not a member.",
path.display()
)
} }
self.default_members.push(manifest_path) self.default_members.push(manifest_path)
} }
@ -429,25 +445,29 @@ impl<'cfg> Workspace<'cfg> {
self.find_path_deps(&root_manifest_path, &root_manifest_path, false) self.find_path_deps(&root_manifest_path, &root_manifest_path, false)
} }
fn find_path_deps(&mut self, fn find_path_deps(
manifest_path: &Path, &mut self,
root_manifest: &Path, manifest_path: &Path,
is_path_dep: bool) -> CargoResult<()> { root_manifest: &Path,
is_path_dep: bool,
) -> CargoResult<()> {
let manifest_path = paths::normalize_path(manifest_path); let manifest_path = paths::normalize_path(manifest_path);
if self.members.contains(&manifest_path) { if self.members.contains(&manifest_path) {
return Ok(()) return Ok(());
} }
if is_path_dep if is_path_dep && !manifest_path.parent().unwrap().starts_with(self.root())
&& !manifest_path.parent().unwrap().starts_with(self.root()) && self.find_root(&manifest_path)? != self.root_manifest
&& self.find_root(&manifest_path)? != self.root_manifest { {
// If `manifest_path` is a path dependency outside of the workspace, // If `manifest_path` is a path dependency outside of the workspace,
// don't add it, or any of its dependencies, as members. // don't add it, or any of its dependencies, as members.
return Ok(()) return Ok(());
} }
if let WorkspaceConfig::Root(ref root_config) = *self.packages.load(root_manifest)?.workspace_config() { if let WorkspaceConfig::Root(ref root_config) =
*self.packages.load(root_manifest)?.workspace_config()
{
if root_config.is_excluded(&manifest_path) { if root_config.is_excluded(&manifest_path) {
return Ok(()) return Ok(());
} }
} }
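The early return in `find_path_deps` above also shows how an overflowing `if` condition is broken: before each `&&`, with the opening brace on its own line. A sketch under the same shape, with hypothetical names rather than Cargo's API:

use std::path::{Path, PathBuf};

// True when a path dependency lives outside the workspace and belongs to a
// different root, so the caller should skip it (toy logic, not Cargo's).
fn is_foreign_path_dep(
    is_path_dep: bool,
    manifest_path: &Path,
    workspace_root: &Path,
    found_root: Option<&PathBuf>,
    root_manifest: Option<&PathBuf>,
) -> bool {
    if is_path_dep && !manifest_path.parent().unwrap().starts_with(workspace_root)
        && found_root != root_manifest
    {
        return true;
    }
    false
}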
@ -460,12 +480,12 @@ impl<'cfg> Workspace<'cfg> {
MaybePackage::Virtual(_) => return Ok(()), MaybePackage::Virtual(_) => return Ok(()),
}; };
pkg.dependencies() pkg.dependencies()
.iter() .iter()
.map(|d| d.source_id()) .map(|d| d.source_id())
.filter(|d| d.is_path()) .filter(|d| d.is_path())
.filter_map(|d| d.url().to_file_path().ok()) .filter_map(|d| d.url().to_file_path().ok())
.map(|p| p.join("Cargo.toml")) .map(|p| p.join("Cargo.toml"))
.collect::<Vec<_>>() .collect::<Vec<_>>()
}; };
for candidate in candidates { for candidate in candidates {
self.find_path_deps(&candidate, root_manifest, true)?; self.find_path_deps(&candidate, root_manifest, true)?;
@ -480,7 +500,7 @@ impl<'cfg> Workspace<'cfg> {
/// 3. The current crate is a member of this workspace. /// 3. The current crate is a member of this workspace.
fn validate(&mut self) -> CargoResult<()> { fn validate(&mut self) -> CargoResult<()> {
if self.root_manifest.is_none() { if self.root_manifest.is_none() {
return Ok(()) return Ok(());
} }
let mut roots = Vec::new(); let mut roots = Vec::new();
@ -499,52 +519,64 @@ impl<'cfg> Workspace<'cfg> {
MaybePackage::Virtual(_) => continue, MaybePackage::Virtual(_) => continue,
}; };
if let Some(prev) = names.insert(name, member) { if let Some(prev) = names.insert(name, member) {
bail!("two packages named `{}` in this workspace:\n\ bail!(
- {}\n\ "two packages named `{}` in this workspace:\n\
- {}", name, prev.display(), member.display()); - {}\n\
- {}",
name,
prev.display(),
member.display()
);
} }
} }
} }
match roots.len() { match roots.len() {
0 => { 0 => bail!(
bail!("`package.workspace` configuration points to a crate \ "`package.workspace` configuration points to a crate \
which is not configured with [workspace]: \n\ which is not configured with [workspace]: \n\
configuration at: {}\n\ configuration at: {}\n\
points to: {}", points to: {}",
self.current_manifest.display(), self.current_manifest.display(),
self.root_manifest.as_ref().unwrap().display()) self.root_manifest.as_ref().unwrap().display()
} ),
1 => {} 1 => {}
_ => { _ => {
bail!("multiple workspace roots found in the same workspace:\n{}", bail!(
roots.iter() "multiple workspace roots found in the same workspace:\n{}",
.map(|r| format!(" {}", r.display())) roots
.collect::<Vec<_>>() .iter()
.join("\n")); .map(|r| format!(" {}", r.display()))
.collect::<Vec<_>>()
.join("\n")
);
} }
} }
for member in self.members.clone() { for member in self.members.clone() {
let root = self.find_root(&member)?; let root = self.find_root(&member)?;
if root == self.root_manifest { if root == self.root_manifest {
continue continue;
} }
match root { match root {
Some(root) => { Some(root) => {
bail!("package `{}` is a member of the wrong workspace\n\ bail!(
expected: {}\n\ "package `{}` is a member of the wrong workspace\n\
actual: {}", expected: {}\n\
member.display(), actual: {}",
self.root_manifest.as_ref().unwrap().display(), member.display(),
root.display()); self.root_manifest.as_ref().unwrap().display(),
root.display()
);
} }
None => { None => {
bail!("workspace member `{}` is not hierarchically below \ bail!(
the workspace root `{}`", "workspace member `{}` is not hierarchically below \
member.display(), the workspace root `{}`",
self.root_manifest.as_ref().unwrap().display()); member.display(),
self.root_manifest.as_ref().unwrap().display()
);
} }
} }
} }
@ -558,18 +590,19 @@ impl<'cfg> Workspace<'cfg> {
// FIXME: Make this more generic by using a relative path resolver between member and // FIXME: Make this more generic by using a relative path resolver between member and
// root. // root.
let members_msg = match current_dir.strip_prefix(root_dir) { let members_msg = match current_dir.strip_prefix(root_dir) {
Ok(rel) => { Ok(rel) => format!(
format!("this may be fixable by adding `{}` to the \ "this may be fixable by adding `{}` to the \
`workspace.members` array of the manifest \ `workspace.members` array of the manifest \
located at: {}", located at: {}",
rel.display(), rel.display(),
root.display()) root.display()
} ),
Err(_) => { Err(_) => format!(
format!("this may be fixable by adding a member to \ "this may be fixable by adding a member to \
the `workspace.members` array of the \ the `workspace.members` array of the \
manifest located at: {}", root.display()) manifest located at: {}",
} root.display()
),
}; };
let extra = match *root_pkg { let extra = match *root_pkg {
MaybePackage::Virtual(_) => members_msg, MaybePackage::Virtual(_) => members_msg,
@ -579,20 +612,25 @@ impl<'cfg> Workspace<'cfg> {
WorkspaceConfig::Member { .. } => unreachable!(), WorkspaceConfig::Member { .. } => unreachable!(),
}; };
if !has_members_list { if !has_members_list {
format!("this may be fixable by ensuring that this \ format!(
crate is depended on by the workspace \ "this may be fixable by ensuring that this \
root: {}", root.display()) crate is depended on by the workspace \
root: {}",
root.display()
)
} else { } else {
members_msg members_msg
} }
} }
}; };
bail!("current package believes it's in a workspace when it's not:\n\ bail!(
current: {}\n\ "current package believes it's in a workspace when it's not:\n\
workspace: {}\n\n{}", current: {}\n\
self.current_manifest.display(), workspace: {}\n\n{}",
root.display(), self.current_manifest.display(),
extra); root.display(),
extra
);
} }
if let Some(ref root_manifest) = self.root_manifest { if let Some(ref root_manifest) = self.root_manifest {
@ -610,14 +648,18 @@ impl<'cfg> Workspace<'cfg> {
doctest: Profile::default_doctest(), doctest: Profile::default_doctest(),
}; };
for pkg in self.members().filter(|p| p.manifest_path() != root_manifest) { for pkg in self.members()
.filter(|p| p.manifest_path() != root_manifest)
{
if pkg.manifest().profiles() != &default_profiles { if pkg.manifest().profiles() != &default_profiles {
let message = &format!("profiles for the non root package will be ignored, \ let message = &format!(
specify profiles at the workspace root:\n\ "profiles for the non root package will be ignored, \
package: {}\n\ specify profiles at the workspace root:\n\
workspace: {}", package: {}\n\
pkg.manifest_path().display(), workspace: {}",
root_manifest.display()); pkg.manifest_path().display(),
root_manifest.display()
);
//TODO: remove `Eq` bound from `Profiles` when the warning is removed. //TODO: remove `Eq` bound from `Profiles` when the warning is removed.
self.config.shell().warn(&message)?; self.config.shell().warn(&message)?;
@ -629,7 +671,6 @@ impl<'cfg> Workspace<'cfg> {
} }
} }
impl<'cfg> Packages<'cfg> { impl<'cfg> Packages<'cfg> {
fn get(&self, manifest_path: &Path) -> &MaybePackage { fn get(&self, manifest_path: &Path) -> &MaybePackage {
&self.packages[manifest_path.parent().unwrap()] &self.packages[manifest_path.parent().unwrap()]
@ -647,9 +688,7 @@ impl<'cfg> Packages<'cfg> {
EitherManifest::Real(manifest) => { EitherManifest::Real(manifest) => {
MaybePackage::Package(Package::new(manifest, manifest_path)) MaybePackage::Package(Package::new(manifest, manifest_path))
} }
EitherManifest::Virtual(vm) => { EitherManifest::Virtual(vm) => MaybePackage::Virtual(vm),
MaybePackage::Virtual(vm)
}
})) }))
} }
} }
@ -667,9 +706,7 @@ impl<'a, 'cfg> Iterator for Members<'a, 'cfg> {
fn next(&mut self) -> Option<&'a Package> { fn next(&mut self) -> Option<&'a Package> {
loop { loop {
let next = self.iter.next().map(|path| { let next = self.iter.next().map(|path| self.ws.packages.get(path));
self.ws.packages.get(path)
});
match next { match next {
Some(&MaybePackage::Package(ref p)) => return Some(p), Some(&MaybePackage::Package(ref p)) => return Some(p),
Some(&MaybePackage::Virtual(_)) => {} Some(&MaybePackage::Virtual(_)) => {}
@ -708,16 +745,14 @@ impl WorkspaceRootConfig {
/// ///
/// This method does NOT consider the `members` list. /// This method does NOT consider the `members` list.
fn is_excluded(&self, manifest_path: &Path) -> bool { fn is_excluded(&self, manifest_path: &Path) -> bool {
let excluded = self.exclude.iter().any(|ex| { let excluded = self.exclude
manifest_path.starts_with(self.root_dir.join(ex)) .iter()
}); .any(|ex| manifest_path.starts_with(self.root_dir.join(ex)));
let explicit_member = match self.members { let explicit_member = match self.members {
Some(ref members) => { Some(ref members) => members
members.iter().any(|mem| { .iter()
manifest_path.starts_with(self.root_dir.join(mem)) .any(|mem| manifest_path.starts_with(self.root_dir.join(mem))),
})
}
None => false, None => false,
}; };
@ -752,13 +787,9 @@ impl WorkspaceRootConfig {
Some(p) => p, Some(p) => p,
None => return Ok(Vec::new()), None => return Ok(Vec::new()),
}; };
let res = glob(path).chain_err(|| { let res = glob(path).chain_err(|| format_err!("could not parse pattern `{}`", &path))?;
format_err!("could not parse pattern `{}`", &path)
})?;
let res = res.map(|p| { let res = res.map(|p| {
p.chain_err(|| { p.chain_err(|| format_err!("unable to match path to pattern `{}`", &path))
format_err!("unable to match path to pattern `{}`", &path)
})
}).collect::<Result<Vec<_>, _>>()?; }).collect::<Result<Vec<_>, _>>()?;
Ok(res) Ok(res)
} }


@ -1,20 +1,18 @@
#![cfg_attr(test, deny(warnings))] #![cfg_attr(test, deny(warnings))]
// Currently, Cargo does not use clippy for its source code. // Currently, Cargo does not use clippy for its source code.
// But if someone runs it they should know that // But if someone runs it they should know that
// @alexcrichton disagrees with clippy on some style things // @alexcrichton disagrees with clippy on some style things
#![cfg_attr(feature = "cargo-clippy", allow(explicit_iter_loop))] #![cfg_attr(feature = "cargo-clippy", allow(explicit_iter_loop))]
#[macro_use] extern crate failure;
#[macro_use] extern crate log;
#[macro_use] extern crate scoped_tls;
#[macro_use] extern crate serde_derive;
#[macro_use] extern crate serde_json;
extern crate atty; extern crate atty;
extern crate clap; extern crate clap;
#[cfg(target_os = "macos")]
extern crate core_foundation;
extern crate crates_io as registry; extern crate crates_io as registry;
extern crate crossbeam; extern crate crossbeam;
extern crate curl; extern crate curl;
#[macro_use]
extern crate failure;
extern crate filetime; extern crate filetime;
extern crate flate2; extern crate flate2;
extern crate fs2; extern crate fs2;
@ -24,23 +22,30 @@ extern crate hex;
extern crate home; extern crate home;
extern crate ignore; extern crate ignore;
extern crate jobserver; extern crate jobserver;
#[macro_use]
extern crate lazy_static;
extern crate lazycell; extern crate lazycell;
#[macro_use] extern crate lazy_static;
extern crate libc; extern crate libc;
extern crate libgit2_sys; extern crate libgit2_sys;
#[macro_use]
extern crate log;
extern crate num_cpus; extern crate num_cpus;
extern crate same_file; extern crate same_file;
#[macro_use]
extern crate scoped_tls;
extern crate semver; extern crate semver;
extern crate serde; extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate serde_ignored; extern crate serde_ignored;
#[macro_use]
extern crate serde_json;
extern crate shell_escape; extern crate shell_escape;
extern crate tar; extern crate tar;
extern crate tempdir; extern crate tempdir;
extern crate termcolor; extern crate termcolor;
extern crate toml; extern crate toml;
extern crate url; extern crate url;
#[cfg(target_os = "macos")]
extern crate core_foundation;
use std::fmt; use std::fmt;
@ -85,8 +90,7 @@ pub struct VersionInfo {
impl fmt::Display for VersionInfo { impl fmt::Display for VersionInfo {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "cargo {}.{}.{}", write!(f, "cargo {}.{}.{}", self.major, self.minor, self.patch)?;
self.major, self.minor, self.patch)?;
if let Some(channel) = self.cfg_info.as_ref().map(|ci| &ci.release_channel) { if let Some(channel) = self.cfg_info.as_ref().map(|ci| &ci.release_channel) {
if channel != "stable" { if channel != "stable" {
write!(f, "-{}", channel)?; write!(f, "-{}", channel)?;
@ -97,8 +101,7 @@ impl fmt::Display for VersionInfo {
if let Some(ref cfg) = self.cfg_info { if let Some(ref cfg) = self.cfg_info {
if let Some(ref ci) = cfg.commit_info { if let Some(ref ci) = cfg.commit_info {
write!(f, " ({} {})", write!(f, " ({} {})", ci.short_commit_hash, ci.commit_date)?;
ci.short_commit_hash, ci.commit_date)?;
} }
}; };
Ok(()) Ok(())
@ -118,7 +121,11 @@ pub fn exit_with_error(err: CliError, shell: &mut Shell) -> ! {
} }
} }
let CliError { error, exit_code, unknown } = err; let CliError {
error,
exit_code,
unknown,
} = err;
// exit_code == 0 is non-fatal error, e.g. docopt version info // exit_code == 0 is non-fatal error, e.g. docopt version info
let fatal = exit_code != 0; let fatal = exit_code != 0;
@ -134,8 +141,11 @@ pub fn exit_with_error(err: CliError, shell: &mut Shell) -> ! {
} }
if !handle_cause(&error, shell) || hide { if !handle_cause(&error, shell) || hide {
drop(writeln!(shell.err(), "\nTo learn more, run the command again \ drop(writeln!(
with --verbose.")); shell.err(),
"\nTo learn more, run the command again \
with --verbose."
));
} }
} }
@ -204,14 +214,11 @@ pub fn version() -> VersionInfo {
match option_env!("CFG_RELEASE_CHANNEL") { match option_env!("CFG_RELEASE_CHANNEL") {
// We have environment variables set up from configure/make. // We have environment variables set up from configure/make.
Some(_) => { Some(_) => {
let commit_info = let commit_info = option_env!("CFG_COMMIT_HASH").map(|s| CommitInfo {
option_env!("CFG_COMMIT_HASH").map(|s| { commit_hash: s.to_string(),
CommitInfo { short_commit_hash: option_env_str!("CFG_SHORT_COMMIT_HASH").unwrap(),
commit_hash: s.to_string(), commit_date: option_env_str!("CFG_COMMIT_DATE").unwrap(),
short_commit_hash: option_env_str!("CFG_SHORT_COMMIT_HASH").unwrap(), });
commit_date: option_env_str!("CFG_COMMIT_DATE").unwrap(),
}
});
VersionInfo { VersionInfo {
major, major,
minor, minor,
@ -222,16 +229,14 @@ pub fn version() -> VersionInfo {
commit_info, commit_info,
}), }),
} }
},
// We are being compiled by Cargo itself.
None => {
VersionInfo {
major,
minor,
patch,
pre_release: option_env_str!("CARGO_PKG_VERSION_PRE"),
cfg_info: None,
}
} }
// We are being compiled by Cargo itself.
None => VersionInfo {
major,
minor,
patch,
pre_release: option_env_str!("CARGO_PKG_VERSION_PRE"),
cfg_info: None,
},
} }
} }
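The `version()` rewrite above folds a match arm whose block only produced a value into a plain expression arm with a trailing comma. A toy reduction of that shape (placeholder struct and field values):

struct VersionInfo {
    major: u32,
    minor: u32,
    channel: Option<String>,
}

fn version(channel: Option<&str>) -> VersionInfo {
    match channel {
        Some(c) => VersionInfo {
            major: 0,
            minor: 26,
            channel: Some(c.to_string()),
        },
        // A block whose only statement is a value collapses to an
        // expression arm, as in the hunk above.
        None => VersionInfo {
            major: 0,
            minor: 26,
            channel: None,
        },
    }
}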


@ -6,7 +6,7 @@ use core::{Profiles, Workspace};
use util::Config; use util::Config;
use util::errors::{CargoResult, CargoResultExt}; use util::errors::{CargoResult, CargoResultExt};
use util::paths; use util::paths;
use ops::{self, Context, BuildConfig, Kind, Unit}; use ops::{self, BuildConfig, Context, Kind, Unit};
pub struct CleanOptions<'a> { pub struct CleanOptions<'a> {
pub config: &'a Config, pub config: &'a Config,
@ -34,15 +34,20 @@ pub fn clean(ws: &Workspace, opts: &CleanOptions) -> CargoResult<()> {
let profiles = ws.profiles(); let profiles = ws.profiles();
let host_triple = opts.config.rustc()?.host.clone(); let host_triple = opts.config.rustc()?.host.clone();
let mut cx = Context::new(ws, &resolve, &packages, opts.config, let mut cx = Context::new(
BuildConfig { ws,
host_triple, &resolve,
requested_target: opts.target.clone(), &packages,
release: opts.release, opts.config,
jobs: 1, BuildConfig {
..BuildConfig::default() host_triple,
}, requested_target: opts.target.clone(),
profiles)?; release: opts.release,
jobs: 1,
..BuildConfig::default()
},
profiles,
)?;
let mut units = Vec::new(); let mut units = Vec::new();
for spec in opts.spec.iter() { for spec in opts.spec.iter() {
@ -54,12 +59,31 @@ pub fn clean(ws: &Workspace, opts: &CleanOptions) -> CargoResult<()> {
for target in pkg.targets() { for target in pkg.targets() {
for kind in [Kind::Host, Kind::Target].iter() { for kind in [Kind::Host, Kind::Target].iter() {
let Profiles { let Profiles {
ref release, ref dev, ref test, ref bench, ref doc, ref release,
ref custom_build, ref test_deps, ref bench_deps, ref check, ref dev,
ref check_test, ref doctest, ref test,
ref bench,
ref doc,
ref custom_build,
ref test_deps,
ref bench_deps,
ref check,
ref check_test,
ref doctest,
} = *profiles; } = *profiles;
let profiles = [release, dev, test, bench, doc, custom_build, let profiles = [
test_deps, bench_deps, check, check_test, doctest]; release,
dev,
test,
bench,
doc,
custom_build,
test_deps,
bench_deps,
check,
check_test,
doctest,
];
for profile in profiles.iter() { for profile in profiles.iter() {
units.push(Unit { units.push(Unit {
pkg, pkg,
@ -82,7 +106,7 @@ pub fn clean(ws: &Workspace, opts: &CleanOptions) -> CargoResult<()> {
} else { } else {
rm_rf(&cx.build_script_dir(unit), config)?; rm_rf(&cx.build_script_dir(unit), config)?;
} }
continue continue;
} }
for &(ref src, ref link_dst, _) in cx.target_filenames(unit)?.iter() { for &(ref src, ref link_dst, _) in cx.target_filenames(unit)?.iter() {
@ -99,15 +123,15 @@ pub fn clean(ws: &Workspace, opts: &CleanOptions) -> CargoResult<()> {
fn rm_rf(path: &Path, config: &Config) -> CargoResult<()> { fn rm_rf(path: &Path, config: &Config) -> CargoResult<()> {
let m = fs::metadata(path); let m = fs::metadata(path);
if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) { if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) {
config.shell().verbose(|shell| {shell.status("Removing", path.display())})?; config
paths::remove_dir_all(path).chain_err(|| { .shell()
format_err!("could not remove build directory") .verbose(|shell| shell.status("Removing", path.display()))?;
})?; paths::remove_dir_all(path).chain_err(|| format_err!("could not remove build directory"))?;
} else if m.is_ok() { } else if m.is_ok() {
config.shell().verbose(|shell| {shell.status("Removing", path.display())})?; config
paths::remove_file(path).chain_err(|| { .shell()
format_err!("failed to remove build artifact") .verbose(|shell| shell.status("Removing", path.display()))?;
})?; paths::remove_file(path).chain_err(|| format_err!("failed to remove build artifact"))?;
} }
Ok(()) Ok(())
} }
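The `Profiles` destructuring in the clean hunk gets the same vertical treatment as signatures: one `ref` field per line in the pattern. A compact sketch with a cut-down struct:

struct Profiles {
    release: u32,
    dev: u32,
    test: u32,
    bench: u32,
}

// One `ref` field per line in the pattern, as in the hunk above.
fn collect_profiles(profiles: &Profiles) -> [u32; 4] {
    let Profiles {
        ref release,
        ref dev,
        ref test,
        ref bench,
    } = *profiles;
    [*release, *dev, *test, *bench]
}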


@ -27,12 +27,12 @@ use std::default::Default;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use core::{Source, Package, Target}; use core::{Package, Source, Target};
use core::{Profile, TargetKind, Profiles, Workspace, PackageId, PackageIdSpec}; use core::{PackageId, PackageIdSpec, Profile, Profiles, TargetKind, Workspace};
use core::resolver::{Resolve, Method}; use core::resolver::{Method, Resolve};
use ops::{self, BuildOutput, Executor, DefaultExecutor}; use ops::{self, BuildOutput, DefaultExecutor, Executor};
use util::config::Config; use util::config::Config;
use util::{CargoResult, profile}; use util::{profile, CargoResult};
/// Contains information about how a package should be compiled. /// Contains information about how a package should be compiled.
#[derive(Debug)] #[derive(Debug)]
@ -67,8 +67,7 @@ pub struct CompileOptions<'a> {
} }
impl<'a> CompileOptions<'a> { impl<'a> CompileOptions<'a> {
pub fn default(config: &'a Config, mode: CompileMode) -> CompileOptions<'a> pub fn default(config: &'a Config, mode: CompileMode) -> CompileOptions<'a> {
{
CompileOptions { CompileOptions {
config, config,
jobs: None, jobs: None,
@ -79,7 +78,9 @@ impl<'a> CompileOptions<'a> {
spec: ops::Packages::Packages(Vec::new()), spec: ops::Packages::Packages(Vec::new()),
mode, mode,
release: false, release: false,
filter: CompileFilter::Default { required_features_filterable: false }, filter: CompileFilter::Default {
required_features_filterable: false,
},
message_format: MessageFormat::Human, message_format: MessageFormat::Human,
target_rustdoc_args: None, target_rustdoc_args: None,
target_rustc_args: None, target_rustc_args: None,
@ -100,7 +101,7 @@ pub enum CompileMode {
#[derive(Clone, Copy, Debug, PartialEq, Eq)] #[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum MessageFormat { pub enum MessageFormat {
Human, Human,
Json Json,
} }
#[derive(Clone, PartialEq, Eq, Debug)] #[derive(Clone, PartialEq, Eq, Debug)]
@ -112,9 +113,7 @@ pub enum Packages {
} }
impl Packages { impl Packages {
pub fn from_flags(all: bool, exclude: Vec<String>, package: Vec<String>) pub fn from_flags(all: bool, exclude: Vec<String>, package: Vec<String>) -> CargoResult<Self> {
-> CargoResult<Self>
{
Ok(match (all, exclude.len(), package.len()) { Ok(match (all, exclude.len(), package.len()) {
(false, 0, 0) => Packages::Default, (false, 0, 0) => Packages::Default,
(false, 0, _) => Packages::Packages(package), (false, 0, _) => Packages::Packages(package),
@ -126,39 +125,36 @@ impl Packages {
pub fn into_package_id_specs(&self, ws: &Workspace) -> CargoResult<Vec<PackageIdSpec>> { pub fn into_package_id_specs(&self, ws: &Workspace) -> CargoResult<Vec<PackageIdSpec>> {
let specs = match *self { let specs = match *self {
Packages::All => { Packages::All => ws.members()
ws.members() .map(Package::package_id)
.map(Package::package_id) .map(PackageIdSpec::from_package_id)
.map(PackageIdSpec::from_package_id) .collect(),
.collect() Packages::OptOut(ref opt_out) => ws.members()
} .map(Package::package_id)
Packages::OptOut(ref opt_out) => { .map(PackageIdSpec::from_package_id)
ws.members() .filter(|p| opt_out.iter().position(|x| *x == p.name()).is_none())
.map(Package::package_id) .collect(),
.map(PackageIdSpec::from_package_id) Packages::Packages(ref packages) if packages.is_empty() => ws.current_opt()
.filter(|p| opt_out.iter().position(|x| *x == p.name()).is_none()) .map(Package::package_id)
.collect() .map(PackageIdSpec::from_package_id)
} .into_iter()
Packages::Packages(ref packages) if packages.is_empty() => { .collect(),
ws.current_opt() Packages::Packages(ref packages) => packages
.map(Package::package_id) .iter()
.map(PackageIdSpec::from_package_id) .map(|p| PackageIdSpec::parse(p))
.into_iter().collect() .collect::<CargoResult<Vec<_>>>()?,
} Packages::Default => ws.default_members()
Packages::Packages(ref packages) => { .map(Package::package_id)
packages.iter().map(|p| PackageIdSpec::parse(p)).collect::<CargoResult<Vec<_>>>()? .map(PackageIdSpec::from_package_id)
} .collect(),
Packages::Default => {
ws.default_members()
.map(Package::package_id)
.map(PackageIdSpec::from_package_id)
.collect()
}
}; };
if specs.is_empty() { if specs.is_empty() {
if ws.is_virtual() { if ws.is_virtual() {
bail!("manifest path `{}` contains no package: The manifest is virtual, \ bail!(
and the workspace has no members.", ws.root().display()) "manifest path `{}` contains no package: The manifest is virtual, \
and the workspace has no members.",
ws.root().display()
)
} }
bail!("no packages to compile") bail!("no packages to compile")
} }
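`into_package_id_specs` above illustrates one more convention: a match arm whose body is an iterator pipeline stays a single expression, with the chain broken one adapter per line rather than wrapped in a block. A toy version under simplified types (the real arms build `PackageIdSpec`s):

enum Packages {
    All,
    Named(Vec<String>),
}

// Select member names: everything for All, the given list otherwise.
fn selected(packages: &Packages, members: &[String]) -> Vec<String> {
    match *packages {
        Packages::All => members
            .iter()
            .cloned()
            .collect(),
        Packages::Named(ref names) => names.clone(),
    }
}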
@ -185,25 +181,30 @@ pub enum CompileFilter {
examples: FilterRule, examples: FilterRule,
tests: FilterRule, tests: FilterRule,
benches: FilterRule, benches: FilterRule,
} },
} }
pub fn compile<'a>(ws: &Workspace<'a>, options: &CompileOptions<'a>) pub fn compile<'a>(
-> CargoResult<ops::Compilation<'a>> { ws: &Workspace<'a>,
options: &CompileOptions<'a>,
) -> CargoResult<ops::Compilation<'a>> {
compile_with_exec(ws, options, Arc::new(DefaultExecutor)) compile_with_exec(ws, options, Arc::new(DefaultExecutor))
} }
pub fn compile_with_exec<'a>(ws: &Workspace<'a>, pub fn compile_with_exec<'a>(
options: &CompileOptions<'a>, ws: &Workspace<'a>,
exec: Arc<Executor>) options: &CompileOptions<'a>,
-> CargoResult<ops::Compilation<'a>> { exec: Arc<Executor>,
) -> CargoResult<ops::Compilation<'a>> {
for member in ws.members() { for member in ws.members() {
for warning in member.manifest().warnings().iter() { for warning in member.manifest().warnings().iter() {
if warning.is_critical { if warning.is_critical {
let err = format_err!("{}", warning.message); let err = format_err!("{}", warning.message);
let cx = format_err!("failed to parse manifest at `{}`", let cx = format_err!(
member.manifest_path().display()); "failed to parse manifest at `{}`",
return Err(err.context(cx).into()) member.manifest_path().display()
);
return Err(err.context(cx).into());
} else { } else {
options.config.shell().warn(&warning.message)? options.config.shell().warn(&warning.message)?
} }
@ -212,17 +213,27 @@ pub fn compile_with_exec<'a>(ws: &Workspace<'a>,
compile_ws(ws, None, options, exec) compile_ws(ws, None, options, exec)
} }
pub fn compile_ws<'a>(ws: &Workspace<'a>, pub fn compile_ws<'a>(
source: Option<Box<Source + 'a>>, ws: &Workspace<'a>,
options: &CompileOptions<'a>, source: Option<Box<Source + 'a>>,
exec: Arc<Executor>) options: &CompileOptions<'a>,
-> CargoResult<ops::Compilation<'a>> { exec: Arc<Executor>,
let CompileOptions { config, jobs, ref target, ref spec, ref features, ) -> CargoResult<ops::Compilation<'a>> {
all_features, no_default_features, let CompileOptions {
release, mode, message_format, config,
ref filter, jobs,
ref target_rustdoc_args, ref target,
ref target_rustc_args } = *options; ref spec,
ref features,
all_features,
no_default_features,
release,
mode,
message_format,
ref filter,
ref target_rustdoc_args,
ref target_rustc_args,
} = *options;
let target = target.clone(); let target = target.clone();
@ -240,69 +251,68 @@ pub fn compile_ws<'a>(ws: &Workspace<'a>,
all_features, all_features,
uses_default_features: !no_default_features, uses_default_features: !no_default_features,
}; };
let resolve = ops::resolve_ws_with_method(ws, let resolve = ops::resolve_ws_with_method(ws, source, method, &specs)?;
source,
method,
&specs,
)?;
let (packages, resolve_with_overrides) = resolve; let (packages, resolve_with_overrides) = resolve;
let to_builds = specs.iter().map(|p| { let to_builds = specs
let pkgid = p.query(resolve_with_overrides.iter())?; .iter()
let p = packages.get(pkgid)?; .map(|p| {
p.manifest().print_teapot(ws.config()); let pkgid = p.query(resolve_with_overrides.iter())?;
Ok(p) let p = packages.get(pkgid)?;
}).collect::<CargoResult<Vec<_>>>()?; p.manifest().print_teapot(ws.config());
Ok(p)
})
.collect::<CargoResult<Vec<_>>>()?;
let mut general_targets = Vec::new(); let mut general_targets = Vec::new();
let mut package_targets = Vec::new(); let mut package_targets = Vec::new();
match (target_rustc_args, target_rustdoc_args) { match (target_rustc_args, target_rustdoc_args) {
(&Some(..), _) | (&Some(..), _) | (_, &Some(..)) if to_builds.len() != 1 => {
(_, &Some(..)) if to_builds.len() != 1 => {
panic!("`rustc` and `rustdoc` should not accept multiple `-p` flags") panic!("`rustc` and `rustdoc` should not accept multiple `-p` flags")
} }
(&Some(ref args), _) => { (&Some(ref args), _) => {
let all_features = resolve_all_features(&resolve_with_overrides, let all_features =
to_builds[0].package_id()); resolve_all_features(&resolve_with_overrides, to_builds[0].package_id());
let targets = generate_targets(to_builds[0], profiles, let targets =
mode, filter, &all_features, release)?; generate_targets(to_builds[0], profiles, mode, filter, &all_features, release)?;
if targets.len() == 1 { if targets.len() == 1 {
let (target, profile) = targets[0]; let (target, profile) = targets[0];
let mut profile = profile.clone(); let mut profile = profile.clone();
profile.rustc_args = Some(args.to_vec()); profile.rustc_args = Some(args.to_vec());
general_targets.push((target, profile)); general_targets.push((target, profile));
} else { } else {
bail!("extra arguments to `rustc` can only be passed to one \ bail!(
target, consider filtering\nthe package by passing \ "extra arguments to `rustc` can only be passed to one \
e.g. `--lib` or `--bin NAME` to specify a single target") target, consider filtering\nthe package by passing \
e.g. `--lib` or `--bin NAME` to specify a single target"
)
} }
} }
(&None, &Some(ref args)) => { (&None, &Some(ref args)) => {
let all_features = resolve_all_features(&resolve_with_overrides, let all_features =
to_builds[0].package_id()); resolve_all_features(&resolve_with_overrides, to_builds[0].package_id());
let targets = generate_targets(to_builds[0], profiles, let targets =
mode, filter, &all_features, release)?; generate_targets(to_builds[0], profiles, mode, filter, &all_features, release)?;
if targets.len() == 1 { if targets.len() == 1 {
let (target, profile) = targets[0]; let (target, profile) = targets[0];
let mut profile = profile.clone(); let mut profile = profile.clone();
profile.rustdoc_args = Some(args.to_vec()); profile.rustdoc_args = Some(args.to_vec());
general_targets.push((target, profile)); general_targets.push((target, profile));
} else { } else {
bail!("extra arguments to `rustdoc` can only be passed to one \ bail!(
target, consider filtering\nthe package by passing e.g. \ "extra arguments to `rustdoc` can only be passed to one \
`--lib` or `--bin NAME` to specify a single target") target, consider filtering\nthe package by passing e.g. \
} `--lib` or `--bin NAME` to specify a single target"
} )
(&None, &None) => {
for &to_build in to_builds.iter() {
let all_features = resolve_all_features(&resolve_with_overrides,
to_build.package_id());
let targets = generate_targets(to_build, profiles, mode,
filter, &all_features, release)?;
package_targets.push((to_build, targets));
} }
} }
(&None, &None) => for &to_build in to_builds.iter() {
let all_features = resolve_all_features(&resolve_with_overrides, to_build.package_id());
let targets =
generate_targets(to_build, profiles, mode, filter, &all_features, release)?;
package_targets.push((to_build, targets));
},
}; };
for &(target, ref profile) in &general_targets { for &(target, ref profile) in &general_targets {
@ -321,23 +331,26 @@ pub fn compile_ws<'a>(ws: &Workspace<'a>,
build_config.doc_all = deps; build_config.doc_all = deps;
} }
ops::compile_targets(ws, ops::compile_targets(
&package_targets, ws,
&packages, &package_targets,
&resolve_with_overrides, &packages,
config, &resolve_with_overrides,
build_config, config,
profiles, build_config,
&exec)? profiles,
&exec,
)?
}; };
ret.to_doc_test = to_builds.into_iter().cloned().collect(); ret.to_doc_test = to_builds.into_iter().cloned().collect();
return Ok(ret); return Ok(ret);
fn resolve_all_features(resolve_with_overrides: &Resolve, fn resolve_all_features(
package_id: &PackageId) resolve_with_overrides: &Resolve,
-> HashSet<String> { package_id: &PackageId,
) -> HashSet<String> {
let mut features = resolve_with_overrides.features(package_id).clone(); let mut features = resolve_with_overrides.features(package_id).clone();
// Include features enabled for use by dependencies so targets can also use them with the // Include features enabled for use by dependencies so targets can also use them with the
@ -365,9 +378,7 @@ impl FilterRule {
fn matches(&self, target: &Target) -> bool { fn matches(&self, target: &Target) -> bool {
match *self { match *self {
FilterRule::All => true, FilterRule::All => true,
FilterRule::Just(ref targets) => { FilterRule::Just(ref targets) => targets.iter().any(|x| *x == target.name()),
targets.iter().any(|x| *x == target.name())
},
} }
} }
@ -387,12 +398,18 @@ impl FilterRule {
} }
impl CompileFilter { impl CompileFilter {
pub fn new(lib_only: bool, pub fn new(
bins: Vec<String>, all_bins: bool, lib_only: bool,
tsts: Vec<String>, all_tsts: bool, bins: Vec<String>,
exms: Vec<String>, all_exms: bool, all_bins: bool,
bens: Vec<String>, all_bens: bool, tsts: Vec<String>,
all_targets: bool) -> CompileFilter { all_tsts: bool,
exms: Vec<String>,
all_exms: bool,
bens: Vec<String>,
all_bens: bool,
all_targets: bool,
) -> CompileFilter {
let rule_bins = FilterRule::new(bins, all_bins); let rule_bins = FilterRule::new(bins, all_bins);
let rule_tsts = FilterRule::new(tsts, all_tsts); let rule_tsts = FilterRule::new(tsts, all_tsts);
let rule_exms = FilterRule::new(exms, all_exms); let rule_exms = FilterRule::new(exms, all_exms);
@ -401,16 +418,21 @@ impl CompileFilter {
if all_targets { if all_targets {
CompileFilter::Only { CompileFilter::Only {
all_targets: true, all_targets: true,
lib: true, bins: FilterRule::All, lib: true,
examples: FilterRule::All, benches: FilterRule::All, bins: FilterRule::All,
examples: FilterRule::All,
benches: FilterRule::All,
tests: FilterRule::All, tests: FilterRule::All,
} }
} else if lib_only || rule_bins.is_specific() || rule_tsts.is_specific() } else if lib_only || rule_bins.is_specific() || rule_tsts.is_specific()
|| rule_exms.is_specific() || rule_bens.is_specific() { || rule_exms.is_specific() || rule_bens.is_specific()
{
CompileFilter::Only { CompileFilter::Only {
all_targets: false, all_targets: false,
lib: lib_only, bins: rule_bins, lib: lib_only,
examples: rule_exms, benches: rule_bens, bins: rule_bins,
examples: rule_exms,
benches: rule_bens,
tests: rule_tsts, tests: rule_tsts,
} }
} else { } else {
@ -423,21 +445,31 @@ impl CompileFilter {
pub fn need_dev_deps(&self) -> bool { pub fn need_dev_deps(&self) -> bool {
match *self { match *self {
CompileFilter::Default { .. } => true, CompileFilter::Default { .. } => true,
CompileFilter::Only { ref examples, ref tests, ref benches, .. } => CompileFilter::Only {
examples.is_specific() || tests.is_specific() || benches.is_specific() ref examples,
ref tests,
ref benches,
..
} => examples.is_specific() || tests.is_specific() || benches.is_specific(),
} }
} }
pub fn matches(&self, target: &Target) -> bool { pub fn matches(&self, target: &Target) -> bool {
match *self { match *self {
CompileFilter::Default { .. } => true, CompileFilter::Default { .. } => true,
CompileFilter::Only { lib, ref bins, ref examples, ref tests, ref benches, .. } => { CompileFilter::Only {
lib,
ref bins,
ref examples,
ref tests,
ref benches,
..
} => {
let rule = match *target.kind() { let rule = match *target.kind() {
TargetKind::Bin => bins, TargetKind::Bin => bins,
TargetKind::Test => tests, TargetKind::Test => tests,
TargetKind::Bench => benches, TargetKind::Bench => benches,
TargetKind::ExampleBin | TargetKind::ExampleBin | TargetKind::ExampleLib(..) => examples,
TargetKind::ExampleLib(..) => examples,
TargetKind::Lib(..) => return lib, TargetKind::Lib(..) => return lib,
TargetKind::CustomBuild => return false, TargetKind::CustomBuild => return false,
}; };
@ -461,30 +493,33 @@ struct BuildProposal<'a> {
required: bool, required: bool,
} }
fn generate_auto_targets<'a>(mode: CompileMode, targets: &'a [Target], fn generate_auto_targets<'a>(
profile: &'a Profile, mode: CompileMode,
dep: &'a Profile, targets: &'a [Target],
required_features_filterable: bool) -> Vec<BuildProposal<'a>> { profile: &'a Profile,
dep: &'a Profile,
required_features_filterable: bool,
) -> Vec<BuildProposal<'a>> {
match mode { match mode {
CompileMode::Bench => { CompileMode::Bench => targets
targets.iter().filter(|t| t.benched()).map(|t| { .iter()
BuildProposal { .filter(|t| t.benched())
target: t, .map(|t| BuildProposal {
profile, target: t,
required: !required_features_filterable, profile,
} required: !required_features_filterable,
}).collect::<Vec<_>>() })
} .collect::<Vec<_>>(),
CompileMode::Test => { CompileMode::Test => {
let mut base = targets.iter().filter(|t| { let mut base = targets
t.tested() .iter()
}).map(|t| { .filter(|t| t.tested())
BuildProposal { .map(|t| BuildProposal {
target: t, target: t,
profile: if t.is_example() {dep} else {profile}, profile: if t.is_example() { dep } else { profile },
required: !required_features_filterable, required: !required_features_filterable,
} })
}).collect::<Vec<_>>(); .collect::<Vec<_>>();
// Always compile the library if we're testing everything as // Always compile the library if we're testing everything as
// it'll be needed for doctests // it'll be needed for doctests
@ -499,35 +534,37 @@ fn generate_auto_targets<'a>(mode: CompileMode, targets: &'a [Target],
            }
            base
        }
        CompileMode::Build | CompileMode::Check { .. } => targets
            .iter()
            .filter(|t| t.is_bin() || t.is_lib())
            .map(|t| BuildProposal {
                target: t,
                profile,
                required: !required_features_filterable,
            })
            .collect(),
        CompileMode::Doc { .. } => targets
            .iter()
            .filter(|t| {
                t.documented()
                    && (!t.is_bin() || !targets.iter().any(|l| l.is_lib() && l.name() == t.name()))
            })
            .map(|t| BuildProposal {
                target: t,
                profile,
                required: !required_features_filterable,
            })
            .collect(),
        CompileMode::Doctest => {
            if let Some(t) = targets.iter().find(|t| t.is_lib()) {
                if t.doctested() {
                    return vec![
                        BuildProposal {
                            target: t,
                            profile,
                            required: !required_features_filterable,
                        },
                    ];
                }
            }
@ -537,28 +574,31 @@ fn generate_auto_targets<'a>(mode: CompileMode, targets: &'a [Target],
}

/// Given a filter rule and some context, propose a list of targets
fn propose_indicated_targets<'a>(
    pkg: &'a Package,
    rule: &FilterRule,
    desc: &'static str,
    is_expected_kind: fn(&Target) -> bool,
    profile: &'a Profile,
) -> CargoResult<Vec<BuildProposal<'a>>> {
    match *rule {
        FilterRule::All => {
            let result = pkg.targets()
                .iter()
                .filter(|t| is_expected_kind(t))
                .map(|t| BuildProposal {
                    target: t,
                    profile,
                    required: false,
                });
            Ok(result.collect())
        }
        FilterRule::Just(ref names) => {
            let mut targets = Vec::new();
            for name in names {
                let target = pkg.targets()
                    .iter()
                    .find(|t| t.name() == *name && is_expected_kind(t));
                let t = match target {
                    Some(t) => t,
                    None => {
@ -566,8 +606,12 @@ fn propose_indicated_targets<'a>(pkg: &'a Package,
                        match suggestion {
                            Some(s) => {
                                let suggested_name = s.name();
                                bail!(
                                    "no {} target named `{}`\n\nDid you mean `{}`?",
                                    desc,
                                    name,
                                    suggested_name
                                )
                            }
                            None => bail!("no {} target named `{}`", desc, name),
                        }
@ -586,9 +630,10 @@ fn propose_indicated_targets<'a>(pkg: &'a Package,
}

/// Collect the targets that are libraries or have all required features available.
fn filter_compatible_targets<'a>(
    mut proposals: Vec<BuildProposal<'a>>,
    features: &HashSet<String>,
) -> CargoResult<Vec<(&'a Target, &'a Profile)>> {
    let mut compatible = Vec::with_capacity(proposals.len());
    for proposal in proposals.drain(..) {
        let unavailable_features = match proposal.target.required_features() {
@ -599,14 +644,17 @@ fn filter_compatible_targets<'a>(mut proposals: Vec<BuildProposal<'a>>,
            compatible.push((proposal.target, proposal.profile));
        } else if proposal.required {
            let required_features = proposal.target.required_features().unwrap();
            let quoted_required_features: Vec<String> = required_features
                .iter()
                .map(|s| format!("`{}`", s))
                .collect();
            bail!(
                "target `{}` requires the features: {}\n\
                 Consider enabling them by passing e.g. `--features=\"{}\"`",
                proposal.target.name(),
                quoted_required_features.join(", "),
                required_features.join(" ")
            );
        }
    }
    Ok(compatible)
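The compatibility check above is a subset test: a target is kept when every feature it requires is enabled. A minimal sketch of the same idea with plain strings (is_compatible is a hypothetical helper, not Cargo's API):

use std::collections::HashSet;

// A target is compatible when every feature it requires is enabled.
fn is_compatible(required: &[String], enabled: &HashSet<String>) -> bool {
    required.iter().all(|f| enabled.contains(f))
}

fn main() {
    let enabled: HashSet<String> = ["ssl".to_string()].iter().cloned().collect();
    assert!(is_compatible(&["ssl".to_string()], &enabled));
    assert!(!is_compatible(&["gzip".to_string()], &enabled));
}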
@ -614,21 +662,30 @@ fn filter_compatible_targets<'a>(mut proposals: Vec<BuildProposal<'a>>,
/// Given the configuration for a build, this function will generate all
/// target/profile combinations needed to be built.
fn generate_targets<'a>(
    pkg: &'a Package,
    profiles: &'a Profiles,
    mode: CompileMode,
    filter: &CompileFilter,
    features: &HashSet<String>,
    release: bool,
) -> CargoResult<Vec<(&'a Target, &'a Profile)>> {
    let build = if release {
        &profiles.release
    } else {
        &profiles.dev
    };
    let test = if release {
        &profiles.bench
    } else {
        &profiles.test
    };
    let profile = match mode {
        CompileMode::Test => test,
        CompileMode::Bench => &profiles.bench,
        CompileMode::Build => build,
        CompileMode::Check { test: false } => &profiles.check,
        CompileMode::Check { test: true } => &profiles.check_test,
        CompileMode::Doc { .. } => &profiles.doc,
        CompileMode::Doctest => &profiles.doctest,
    };
@ -650,15 +707,30 @@ fn generate_targets<'a>(pkg: &'a Package,
    };

    let targets = match *filter {
        CompileFilter::Default {
            required_features_filterable,
        } => {
            let deps = if release {
                &profiles.bench_deps
            } else {
                &profiles.test_deps
            };
            generate_auto_targets(
                mode,
                pkg.targets(),
                profile,
                deps,
                required_features_filterable,
            )
        }
        CompileFilter::Only {
            all_targets,
            lib,
            ref bins,
            ref examples,
            ref tests,
            ref benches,
        } => {
            let mut targets = Vec::new();
            if lib {
@ -673,25 +745,45 @@ fn generate_targets<'a>(pkg: &'a Package,
                }
            }
            targets.append(&mut propose_indicated_targets(
                pkg,
                bins,
                "bin",
                Target::is_bin,
                profile,
            )?);
            targets.append(&mut propose_indicated_targets(
                pkg,
                examples,
                "example",
                Target::is_example,
                profile,
            )?);
            // If --tests was specified, add all targets that would be
            // generated by `cargo test`.
            let test_filter = match *tests {
                FilterRule::All => Target::tested,
                FilterRule::Just(_) => Target::is_test,
            };
            targets.append(&mut propose_indicated_targets(
                pkg,
                tests,
                "test",
                test_filter,
                test_profile,
            )?);
            // If --benches was specified, add all targets that would be
            // generated by `cargo bench`.
            let bench_filter = match *benches {
                FilterRule::All => Target::benched,
                FilterRule::Just(_) => Target::is_bench,
            };
            targets.append(&mut propose_indicated_targets(
                pkg,
                benches,
                "bench",
                bench_filter,
                bench_profile,
            )?);
            targets
        }
    };
@ -707,23 +799,32 @@ fn generate_targets<'a>(pkg: &'a Package,
/// * target.$target.ar
/// * target.$target.linker
/// * target.$target.libfoo.metadata
fn scrape_build_config(
    config: &Config,
    jobs: Option<u32>,
    target: Option<String>,
) -> CargoResult<ops::BuildConfig> {
    if jobs.is_some() && config.jobserver_from_env().is_some() {
        config.shell().warn(
            "a `-j` argument was passed to Cargo but Cargo is \
             also configured with an external jobserver in \
             its environment, ignoring the `-j` parameter",
        )?;
    }
    let cfg_jobs = match config.get_i64("build.jobs")? {
        Some(v) => {
            if v.val <= 0 {
                bail!(
                    "build.jobs must be positive, but found {} in {}",
                    v.val,
                    v.definition
                )
            } else if v.val >= i64::from(u32::max_value()) {
                bail!(
                    "build.jobs is too large: found {} in {}",
                    v.val,
                    v.definition
                )
            } else {
                Some(v.val as u32)
            }
@ -747,9 +848,7 @@ fn scrape_build_config(config: &Config,
    Ok(base)
}

fn scrape_target_config(config: &Config, triple: &str) -> CargoResult<ops::TargetConfig> {
    let key = format!("target.{}", triple);
    let mut ret = ops::TargetConfig {
        ar: config.get_path(&format!("{}.ar", key))?.map(|v| v.val),
@ -762,9 +861,7 @@ fn scrape_target_config(config: &Config, triple: &str)
    };
    for (lib_name, value) in table {
        match lib_name.as_str() {
            "ar" | "linker" | "runner" | "rustflags" => continue,
            _ => {}
        }
@ -781,46 +878,40 @@ fn scrape_target_config(config: &Config, triple: &str)
            // We require deterministic order of evaluation, so we must sort the pairs by key first.
            let mut pairs = Vec::new();
            for (k, value) in value.table(&lib_name)?.0 {
                pairs.push((k, value));
            }
            pairs.sort_by_key(|p| p.0);
            for (k, value) in pairs {
                let key = format!("{}.{}", key, k);
                match &k[..] {
                    "rustc-flags" => {
                        let (flags, definition) = value.string(k)?;
                        let whence = format!("in `{}` (in {})", key, definition.display());
                        let (paths, links) = BuildOutput::parse_rustc_flags(flags, &whence)?;
                        output.library_paths.extend(paths);
                        output.library_links.extend(links);
                    }
                    "rustc-link-lib" => {
                        let list = value.list(k)?;
                        output
                            .library_links
                            .extend(list.iter().map(|v| v.0.clone()));
                    }
                    "rustc-link-search" => {
                        let list = value.list(k)?;
                        output
                            .library_paths
                            .extend(list.iter().map(|v| PathBuf::from(&v.0)));
                    }
                    "rustc-cfg" => {
                        let list = value.list(k)?;
                        output.cfgs.extend(list.iter().map(|v| v.0.clone()));
                    }
                    "rustc-env" => for (name, val) in value.table(k)?.0 {
                        let val = val.string(name)?.0;
                        output.env.push((name.clone(), val.to_string()));
                    },
                    "warning" | "rerun-if-changed" | "rerun-if-env-changed" => {
                        bail!("`{}` is not supported in build script overrides", k);
                    }
                    _ => {
View file
@ -14,18 +14,23 @@ pub struct DocOptions<'a> {
pub fn doc(ws: &Workspace, options: &DocOptions) -> CargoResult<()> {
    let specs = options.compile_opts.spec.into_package_id_specs(ws)?;
    let resolve = ops::resolve_ws_precisely(
        ws,
        None,
        &options.compile_opts.features,
        options.compile_opts.all_features,
        options.compile_opts.no_default_features,
        &specs,
    )?;
    let (packages, resolve_with_overrides) = resolve;

    let pkgs = specs
        .iter()
        .map(|p| {
            let pkgid = p.query(resolve_with_overrides.iter())?;
            packages.get(pkgid)
        })
        .collect::<CargoResult<Vec<_>>>()?;

    let mut lib_names = HashMap::new();
    let mut bin_names = HashMap::new();
@ -33,16 +38,24 @@ pub fn doc(ws: &Workspace, options: &DocOptions) -> CargoResult<()> {
        for target in package.targets().iter().filter(|t| t.documented()) {
            if target.is_lib() {
                if let Some(prev) = lib_names.insert(target.crate_name(), package) {
                    bail!(
                        "The library `{}` is specified by packages `{}` and \
                         `{}` but can only be documented once. Consider renaming \
                         or marking one of the targets as `doc = false`.",
                        target.crate_name(),
                        prev,
                        package
                    );
                }
            } else if let Some(prev) = bin_names.insert(target.crate_name(), package) {
                bail!(
                    "The binary `{}` is specified by packages `{}` and \
                     `{}` but can be documented only once. Consider renaming \
                     or marking one of the targets as `doc = false`.",
                    target.crate_name(),
                    prev,
                    package
                );
            }
        }
    }
@ -51,10 +64,15 @@ pub fn doc(ws: &Workspace, options: &DocOptions) -> CargoResult<()> {
    if options.open_result {
        let name = if pkgs.len() > 1 {
            bail!(
                "Passing multiple packages and `open` is not supported.\n\
                 Please re-run this command with `-p <spec>` where `<spec>` \
                 is one of the following:\n  {}",
                pkgs.iter()
                    .map(|p| p.name().to_inner())
                    .collect::<Vec<_>>()
                    .join("\n  ")
            );
        } else if pkgs.len() == 1 {
            pkgs[0].name().replace("-", "_")
        } else {
@ -79,8 +97,7 @@ pub fn doc(ws: &Workspace, options: &DocOptions) -> CargoResult<()> {
            match open_docs(&path) {
                Ok(m) => shell.status("Launching", m)?,
                Err(e) => {
                    shell.warn("warning: could not determine a browser to open docs with, tried:")?;
                    for method in e {
                        shell.warn(format!("\t{}", method))?;
                    }
View file
@ -1,4 +1,4 @@
use core::{PackageSet, Resolve, Workspace};
use ops;
use util::CargoResult;
View file
@ -19,21 +19,21 @@ pub struct UpdateOptions<'a> {
pub fn generate_lockfile(ws: &Workspace) -> CargoResult<()> {
    let mut registry = PackageRegistry::new(ws.config())?;
    let resolve = ops::resolve_with_previous(
        &mut registry,
        ws,
        Method::Everything,
        None,
        None,
        &[],
        true,
        true,
    )?;
    ops::write_pkg_lockfile(ws, &resolve)?;
    Ok(())
}

pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions) -> CargoResult<()> {
    if opts.aggressive && opts.precise.is_some() {
        bail!("cannot specify both aggressive and precise simultaneously")
    }
@ -60,8 +60,7 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions)
        for name in opts.to_update.iter() {
            let dep = previous_resolve.query(name)?;
            if opts.aggressive {
                fill_with_deps(&previous_resolve, dep, &mut to_avoid, &mut HashSet::new());
            } else {
                to_avoid.insert(dep);
                sources.push(match opts.precise {
@ -76,23 +75,23 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions)
                        };
                        dep.source_id().clone().with_precise(Some(precise))
                    }
                    None => dep.source_id().clone().with_precise(None),
                });
            }
        }
        registry.add_sources(&sources)?;
    }

    let resolve = ops::resolve_with_previous(
        &mut registry,
        ws,
        Method::Everything,
        Some(&previous_resolve),
        Some(&to_avoid),
        &[],
        true,
        true,
    )?;

    // Summarize what is changing for the user.
    let print_change = |status: &str, msg: String, color: Color| {
@ -101,8 +100,11 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions)
    for (removed, added) in compare_dependency_graphs(&previous_resolve, &resolve) {
        if removed.len() == 1 && added.len() == 1 {
            let msg = if removed[0].source_id().is_git() {
                format!(
                    "{} -> #{}",
                    removed[0],
                    &added[0].source_id().precise().unwrap()[..8]
                )
            } else {
                format!("{} -> v{}", removed[0], added[0].version())
            };
@ -120,11 +122,14 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions)
    ops::write_pkg_lockfile(ws, &resolve)?;
    return Ok(());

    fn fill_with_deps<'a>(
        resolve: &'a Resolve,
        dep: &'a PackageId,
        set: &mut HashSet<&'a PackageId>,
        visited: &mut HashSet<&'a PackageId>,
    ) {
        if !visited.insert(dep) {
            return;
        }
        set.insert(dep);
        for dep in resolve.deps(dep) {
@ -132,9 +137,10 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions)
        }
    }

    fn compare_dependency_graphs<'a>(
        previous_resolve: &'a Resolve,
        resolve: &'a Resolve,
    ) -> Vec<(Vec<&'a PackageId>, Vec<&'a PackageId>)> {
        fn key(dep: &PackageId) -> (&str, &SourceId) {
            (dep.name().to_inner(), dep.source_id())
        }
@ -143,41 +149,52 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions)
        // more complicated because the equality for source ids does not take
        // precise versions into account (e.g. git shas), but we want to take
        // that into account here.
        fn vec_subtract<'a>(a: &[&'a PackageId], b: &[&'a PackageId]) -> Vec<&'a PackageId> {
            a.iter()
                .filter(|a| {
                    // If this package id is not found in `b`, then it's definitely
                    // in the subtracted set
                    let i = match b.binary_search(a) {
                        Ok(i) => i,
                        Err(..) => return true,
                    };

                    // If we've found `a` in `b`, then we iterate over all instances
                    // (we know `b` is sorted) and see if they all have different
                    // precise versions. If so, then `a` isn't actually in `b` so
                    // we'll let it through.
                    //
                    // Note that we only check this for non-registry sources,
                    // however, as registries contain enough version information in
                    // the package id to disambiguate
                    if a.source_id().is_registry() {
                        return false;
                    }
                    b[i..]
                        .iter()
                        .take_while(|b| a == b)
                        .all(|b| a.source_id().precise() != b.source_id().precise())
                })
                .cloned()
                .collect()
        }
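The subtraction above relies on `b` being sorted so membership can be tested with a binary search. A self-contained sketch of the same pattern over integers, without the precise-version refinement (vec_subtract here is a simplified stand-in):

// Keep every element of `a` that does not occur in the sorted slice `b`.
fn vec_subtract(a: &[u32], b: &[u32]) -> Vec<u32> {
    a.iter()
        .filter(|&&x| b.binary_search(&x).is_err())
        .cloned()
        .collect()
}

fn main() {
    assert_eq!(vec_subtract(&[1, 2, 3, 4], &[2, 4]), vec![1, 3]);
}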
        // Map (package name, package source) to (removed versions, added versions).
        let mut changes = BTreeMap::new();
        let empty = (Vec::new(), Vec::new());
        for dep in previous_resolve.iter() {
            changes
                .entry(key(dep))
                .or_insert_with(|| empty.clone())
                .0
                .push(dep);
        }
        for dep in resolve.iter() {
            changes
                .entry(key(dep))
                .or_insert_with(|| empty.clone())
                .1
                .push(dep);
        }

        for v in changes.values_mut() {
View file
@ -10,12 +10,12 @@ use semver::{Version, VersionReq};
use tempdir::TempDir;
use toml;

use core::{Dependency, Package, PackageIdSpec, Source, SourceId};
use core::{PackageId, Workspace};
use ops::{self, CompileFilter, DefaultExecutor};
use sources::{GitSource, PathSource, SourceConfigMap};
use util::{internal, Config};
use util::{FileLock, Filesystem};
use util::errors::{CargoResult, CargoResultExt};
use util::paths;
@ -53,18 +53,28 @@ impl Drop for Transaction {
    }
}

pub fn install(
    root: Option<&str>,
    krates: Vec<&str>,
    source_id: &SourceId,
    vers: Option<&str>,
    opts: &ops::CompileOptions,
    force: bool,
) -> CargoResult<()> {
    let root = resolve_root(root, opts.config)?;
    let map = SourceConfigMap::new(opts.config)?;

    let (installed_anything, scheduled_error) = if krates.len() <= 1 {
        install_one(
            &root,
            &map,
            krates.into_iter().next(),
            source_id,
            vers,
            opts,
            force,
            true,
        )?;
        (true, false)
    } else {
        let mut succeeded = vec![];
@ -73,8 +83,16 @@ pub fn install(root: Option<&str>,
        for krate in krates {
            let root = root.clone();
            let map = map.clone();
            match install_one(
                &root,
                &map,
                Some(krate),
                source_id,
                vers,
                opts,
                force,
                first,
            ) {
                Ok(()) => succeeded.push(krate),
                Err(e) => {
                    ::handle_error(e, &mut opts.config.shell());
@ -89,7 +107,10 @@ pub fn install(root: Option<&str>,
summary.push(format!("Successfully installed {}!", succeeded.join(", "))); summary.push(format!("Successfully installed {}!", succeeded.join(", ")));
} }
if !failed.is_empty() { if !failed.is_empty() {
summary.push(format!("Failed to install {} (see error(s) above).", failed.join(", "))); summary.push(format!(
"Failed to install {} (see error(s) above).",
failed.join(", ")
));
} }
if !succeeded.is_empty() || !failed.is_empty() { if !succeeded.is_empty() || !failed.is_empty() {
opts.config.shell().status("Summary", summary.join(" "))?; opts.config.shell().status("Summary", summary.join(" "))?;
@ -105,13 +126,15 @@ pub fn install(root: Option<&str>,
        let path = env::var_os("PATH").unwrap_or_default();
        for path in env::split_paths(&path) {
            if path == dst {
                return Ok(());
            }
        }

        opts.config.shell().warn(&format!(
            "be sure to add `{}` to your PATH to be \
             able to run the installed binaries",
            dst.display()
        ))?;
    }

    if scheduled_error {
@ -121,42 +144,64 @@ pub fn install(root: Option<&str>,
    Ok(())
}

fn install_one(
    root: &Filesystem,
    map: &SourceConfigMap,
    krate: Option<&str>,
    source_id: &SourceId,
    vers: Option<&str>,
    opts: &ops::CompileOptions,
    force: bool,
    is_first_install: bool,
) -> CargoResult<()> {
    let config = opts.config;

    let (pkg, source) = if source_id.is_git() {
        select_pkg(
            GitSource::new(source_id, config)?,
            krate,
            vers,
            config,
            is_first_install,
            &mut |git| git.read_packages(),
        )?
    } else if source_id.is_path() {
        let path = source_id
            .url()
            .to_file_path()
            .map_err(|()| format_err!("path sources must have a valid path"))?;
        let mut src = PathSource::new(&path, source_id, config);
        src.update().chain_err(|| {
            format_err!(
                "`{}` is not a crate root; specify a crate to \
                 install from crates.io, or use --path or --git to \
                 specify an alternate source",
                path.display()
            )
        })?;
        select_pkg(
            PathSource::new(&path, source_id, config),
            krate,
            vers,
            config,
            is_first_install,
            &mut |path| path.read_packages(),
        )?
    } else {
        select_pkg(
            map.load(source_id)?,
            krate,
            vers,
            config,
            is_first_install,
            &mut |_| {
                bail!(
                    "must specify a crate to install from \
                     crates.io, or use --path or --git to \
                     specify alternate source"
                )
            },
        )?
    };

    let mut td_opt = None;
@ -196,36 +241,43 @@ fn install_one(root: &Filesystem,
        check_overwrites(&dst, pkg, &opts.filter, &list, force)?;
    }

    let compile =
        ops::compile_ws(&ws, Some(source), opts, Arc::new(DefaultExecutor)).chain_err(|| {
            if let Some(td) = td_opt.take() {
                // preserve the temporary directory, so the user can inspect it
                td.into_path();
            }

            format_err!(
                "failed to compile `{}`, intermediate artifacts can be \
                 found at `{}`",
                pkg,
                ws.target_dir().display()
            )
        })?;
    let binaries: Vec<(&str, &Path)> = compile
        .binaries
        .iter()
        .map(|bin| {
            let name = bin.file_name().unwrap();
            if let Some(s) = name.to_str() {
                Ok((s, bin.as_ref()))
            } else {
                bail!("Binary `{:?}` name can't be serialized into string", name)
            }
        })
        .collect::<CargoResult<_>>()?;
    if binaries.is_empty() {
        bail!(
            "no binaries are available for install using the selected \
             features"
        );
    }

    let metadata = metadata(config, root)?;
    let mut list = read_crate_list(&metadata)?;
    let dst = metadata.parent().join("bin");
    let duplicates = check_overwrites(&dst, pkg, &opts.filter, &list, force)?;

    fs::create_dir_all(&dst)?;
@ -237,17 +289,17 @@ fn install_one(root: &Filesystem,
        let dst = staging_dir.path().join(bin);
        // Try to move if `target_dir` is transient.
        if !source_id.is_path() && fs::rename(src, &dst).is_ok() {
            continue;
        }
        fs::copy(src, &dst).chain_err(|| {
            format_err!("failed to copy `{}` to `{}`", src.display(), dst.display())
        })?;
    }

    let (to_replace, to_install): (Vec<&str>, Vec<&str>) = binaries
        .iter()
        .map(|&(bin, _)| bin)
        .partition(|&bin| duplicates.contains_key(bin));

    let mut installed = Transaction { bins: Vec::new() };
@ -257,8 +309,7 @@ fn install_one(root: &Filesystem,
        let dst = dst.join(bin);
        config.shell().status("Installing", dst.display())?;
        fs::rename(&src, &dst).chain_err(|| {
            format_err!("failed to move `{}` to `{}`", src.display(), dst.display())
        })?;
        installed.bins.push(dst);
    }
@ -273,8 +324,7 @@ fn install_one(root: &Filesystem,
            let dst = dst.join(bin);
            config.shell().status("Replacing", dst.display())?;
            fs::rename(&src, &dst).chain_err(|| {
                format_err!("failed to move `{}` to `{}`", src.display(), dst.display())
            })?;
            replaced_names.push(bin);
        }
@ -290,24 +340,33 @@ fn install_one(root: &Filesystem,
                set.remove(bin);
            }
        }
        list.v1
            .entry(pkg.package_id().clone())
            .or_insert_with(BTreeSet::new)
            .insert(bin.to_string());
    }

    // Remove empty metadata lines.
    let pkgs = list.v1
        .iter()
        .filter_map(|(p, set)| {
            if set.is_empty() {
                Some(p.clone())
            } else {
                None
            }
        })
        .collect::<Vec<_>>();
    for p in pkgs.iter() {
        list.v1.remove(p);
    }

    // If installation was successful record newly installed binaries.
    if result.is_ok() {
        list.v1
            .entry(pkg.package_id().clone())
            .or_insert_with(BTreeSet::new)
            .extend(to_install.iter().map(|s| s.to_string()));
    }

    let write_result = write_crate_list(&metadata, list);
@ -330,14 +389,16 @@ fn install_one(root: &Filesystem,
    Ok(())
}

fn select_pkg<'a, T>(
    mut source: T,
    name: Option<&str>,
    vers: Option<&str>,
    config: &Config,
    needs_update: bool,
    list_all: &mut FnMut(&mut T) -> CargoResult<Vec<Package>>,
) -> CargoResult<(Package, Box<Source + 'a>)>
where
    T: Source + 'a,
{
    if needs_update {
        source.update()?;
@ -347,47 +408,52 @@ fn select_pkg<'a, T>(mut source: T,
        Some(name) => {
            let vers = match vers {
                Some(v) => {
                    // If the version begins with character <, >, =, ^, ~ parse it as a
                    // version range, otherwise parse it as a specific version
                    let first = v.chars()
                        .nth(0)
                        .ok_or_else(|| format_err!("no version provided for the `--vers` flag"))?;

                    match first {
                        '<' | '>' | '=' | '^' | '~' => match v.parse::<VersionReq>() {
                            Ok(v) => Some(v.to_string()),
                            Err(_) => bail!(
                                "the `--vers` provided, `{}`, is \
                                 not a valid semver version requirement\n\n
                                 Please have a look at \
                                 http://doc.crates.io/specifying-dependencies.html \
                                 for the correct format",
                                v
                            ),
                        },
                        _ => match v.parse::<Version>() {
                            Ok(v) => Some(format!("={}", v)),
                            Err(_) => {
                                let mut msg = format!(
                                    "\
                                     the `--vers` provided, `{}`, is \
                                     not a valid semver version\n\n\
                                     historically Cargo treated this \
                                     as a semver version requirement \
                                     accidentally\nand will continue \
                                     to do so, but this behavior \
                                     will be removed eventually",
                                    v
                                );

                                // If it is not a valid version but it is a valid version
                                // requirement, add a note to the warning
                                if v.parse::<VersionReq>().is_ok() {
                                    msg.push_str(&format!(
                                        "\nif you want to specify semver range, \
                                         add an explicit qualifier, like ^{}",
                                        v
                                    ));
                                }
                                config.shell().warn(&msg)?;
                                Some(v.to_string())
                            }
                        },
                    }
                }
                None => None,
@ -402,45 +468,55 @@ fn select_pkg<'a, T>(mut source: T,
                }
                None => {
                    let vers_info = vers.map(|v| format!(" with version `{}`", v))
                        .unwrap_or_default();
                    Err(format_err!(
                        "could not find `{}` in {}{}",
                        name,
                        source.source_id(),
                        vers_info
                    ))
                }
            }
        }
        None => {
            let candidates = list_all(&mut source)?;
            let binaries = candidates
                .iter()
                .filter(|cand| cand.targets().iter().filter(|t| t.is_bin()).count() > 0);
            let examples = candidates
                .iter()
                .filter(|cand| cand.targets().iter().filter(|t| t.is_example()).count() > 0);
            let pkg = match one(binaries, |v| multi_err("binaries", v))? {
                Some(p) => p,
                None => match one(examples, |v| multi_err("examples", v))? {
                    Some(p) => p,
                    None => bail!(
                        "no packages found with binaries or \
                         examples"
                    ),
                },
            };
            return Ok((pkg.clone(), Box::new(source)));

            fn multi_err(kind: &str, mut pkgs: Vec<&Package>) -> String {
                pkgs.sort_by(|a, b| a.name().cmp(&b.name()));
                format!(
                    "multiple packages with {} found: {}",
                    kind,
                    pkgs.iter()
                        .map(|p| p.name().to_inner())
                        .collect::<Vec<_>>()
                        .join(", ")
                )
            }
        }
    }
}
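The `--vers` handling above dispatches on the first character: a comparison operator (<, >, =, ^, ~) means the argument is parsed as a version requirement, anything else as an exact version. A standalone sketch of that dispatch using the semver crate (normalize_vers is a hypothetical helper; error handling simplified):

extern crate semver;

use semver::{Version, VersionReq};

// Returns a normalized requirement string, mirroring the dispatch above:
// ranges are kept as ranges, bare versions become exact `=` requirements.
fn normalize_vers(v: &str) -> Option<String> {
    let first = match v.chars().nth(0) {
        Some(c) => c,
        None => return None,
    };
    match first {
        '<' | '>' | '=' | '^' | '~' => v.parse::<VersionReq>().ok().map(|r| r.to_string()),
        _ => v.parse::<Version>().ok().map(|ver| format!("={}", ver)),
    }
}

fn main() {
    assert_eq!(normalize_vers("1.2.3"), Some("=1.2.3".to_string()));
    assert!(normalize_vers("^1.0").is_some());
}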
fn one<I, F>(mut i: I, f: F) -> CargoResult<Option<I::Item>>
where
    I: Iterator,
    F: FnOnce(Vec<I::Item>) -> String,
{
    match (i.next(), i.next()) {
        (Some(i1), Some(i2)) => {
@ -449,15 +525,17 @@ fn one<I, F>(mut i: I, f: F) -> CargoResult<Option<I::Item>>
Err(format_err!("{}", f(v))) Err(format_err!("{}", f(v)))
} }
(Some(i), None) => Ok(Some(i)), (Some(i), None) => Ok(Some(i)),
(None, _) => Ok(None) (None, _) => Ok(None),
} }
} }
fn check_overwrites(dst: &Path, fn check_overwrites(
pkg: &Package, dst: &Path,
filter: &ops::CompileFilter, pkg: &Package,
prev: &CrateListingV1, filter: &ops::CompileFilter,
force: bool) -> CargoResult<BTreeMap<String, Option<PackageId>>> { prev: &CrateListingV1,
force: bool,
) -> CargoResult<BTreeMap<String, Option<PackageId>>> {
// If explicit --bin or --example flags were passed then those'll // If explicit --bin or --example flags were passed then those'll
// get checked during cargo_compile, we only care about the "build // get checked during cargo_compile, we only care about the "build
// everything" case here // everything" case here
@ -466,7 +544,7 @@ fn check_overwrites(dst: &Path,
    }
    let duplicates = find_duplicates(dst, pkg, filter, prev);
    if force || duplicates.is_empty() {
        return Ok(duplicates);
    }
    // Format the error message.
    let mut msg = String::new();
@ -482,10 +560,12 @@ fn check_overwrites(dst: &Path,
Err(format_err!("{}", msg)) Err(format_err!("{}", msg))
} }
fn find_duplicates(dst: &Path, fn find_duplicates(
pkg: &Package, dst: &Path,
filter: &ops::CompileFilter, pkg: &Package,
prev: &CrateListingV1) -> BTreeMap<String, Option<PackageId>> { filter: &ops::CompileFilter,
prev: &CrateListingV1,
) -> BTreeMap<String, Option<PackageId>> {
let check = |name: String| { let check = |name: String| {
// Need to provide type, works around Rust Issue #93349 // Need to provide type, works around Rust Issue #93349
let name = format!("{}{}", name, env::consts::EXE_SUFFIX); let name = format!("{}{}", name, env::consts::EXE_SUFFIX);
@ -498,27 +578,36 @@ fn find_duplicates(dst: &Path,
        }
    };
    match *filter {
        CompileFilter::Default { .. } => pkg.targets()
            .iter()
            .filter(|t| t.is_bin())
            .filter_map(|t| check(t.name().to_string()))
            .collect(),
        CompileFilter::Only {
            ref bins,
            ref examples,
            ..
        } => {
            let all_bins: Vec<String> = bins.try_collect().unwrap_or_else(|| {
                pkg.targets()
                    .iter()
                    .filter(|t| t.is_bin())
                    .map(|t| t.name().to_string())
                    .collect()
            });
            let all_examples: Vec<String> = examples.try_collect().unwrap_or_else(|| {
                pkg.targets()
                    .iter()
                    .filter(|t| t.is_bin_example())
                    .map(|t| t.name().to_string())
                    .collect()
            });

            all_bins
                .iter()
                .chain(all_examples.iter())
                .filter_map(|t| check(t.clone()))
                .collect::<BTreeMap<String, Option<PackageId>>>()
        }
    }
}
@ -527,18 +616,20 @@ fn read_crate_list(file: &FileLock) -> CargoResult<CrateListingV1> {
    let listing = (|| -> CargoResult<_> {
        let mut contents = String::new();
        file.file().read_to_string(&mut contents)?;
        let listing =
            toml::from_str(&contents).chain_err(|| internal("invalid TOML found for metadata"))?;
        match listing {
            CrateListing::V1(v1) => Ok(v1),
            CrateListing::Empty(_) => Ok(CrateListingV1 {
                v1: BTreeMap::new(),
            }),
        }
    })()
        .chain_err(|| {
            format_err!(
                "failed to parse crate metadata at `{}`",
                file.path().to_string_lossy()
            )
        })?;
    Ok(listing)
}
@ -551,9 +642,12 @@ fn write_crate_list(file: &FileLock, listing: CrateListingV1) -> CargoResult<()>
        let data = toml::to_string(&CrateListing::V1(listing))?;
        file.write_all(data.as_bytes())?;
        Ok(())
    })()
        .chain_err(|| {
            format_err!(
                "failed to write crate metadata at `{}`",
                file.path().to_string_lossy()
            )
        })?;
    Ok(())
}
@ -571,10 +665,12 @@ pub fn install_list(dst: Option<&str>, config: &Config) -> CargoResult<()> {
    Ok(())
}

pub fn uninstall(
    root: Option<&str>,
    specs: Vec<&str>,
    bins: &[String],
    config: &Config,
) -> CargoResult<()> {
    if specs.len() > 1 && !bins.is_empty() {
        bail!("A binary can only be associated with a single installed package, specifying multiple specs with --bin is redundant.");
    }
@ -599,10 +695,16 @@ pub fn uninstall(root: Option<&str>,
    let mut summary = vec![];
    if !succeeded.is_empty() {
        summary.push(format!(
            "Successfully uninstalled {}!",
            succeeded.join(", ")
        ));
    }
    if !failed.is_empty() {
        summary.push(format!(
            "Failed to uninstall {} (see error(s) above).",
            failed.join(", ")
        ));
    }

    if !succeeded.is_empty() || !failed.is_empty() {
@ -619,16 +721,17 @@ pub fn uninstall(root: Option<&str>,
    Ok(())
}

pub fn uninstall_one(
    root: &Filesystem,
    spec: &str,
    bins: &[String],
    config: &Config,
) -> CargoResult<()> {
    let crate_metadata = metadata(config, root)?;
    let mut metadata = read_crate_list(&crate_metadata)?;
    let mut to_remove = Vec::new();
    {
        let result = PackageIdSpec::query_str(spec, metadata.v1.keys())?.clone();
        let mut installed = match metadata.v1.entry(result.clone()) {
            Entry::Occupied(e) => e,
            Entry::Vacant(..) => panic!("entry not found: {}", result),
@ -637,18 +740,22 @@ pub fn uninstall_one(root: &Filesystem,
        for bin in installed.get() {
            let bin = dst.join(bin);
            if fs::metadata(&bin).is_err() {
                bail!(
                    "corrupt metadata, `{}` does not exist when it should",
                    bin.display()
                )
            }
        }

        let bins = bins.iter()
            .map(|s| {
                if s.ends_with(env::consts::EXE_SUFFIX) {
                    s.to_string()
                } else {
                    format!("{}{}", s, env::consts::EXE_SUFFIX)
                }
            })
            .collect::<Vec<_>>();

        for bin in bins.iter() {
            if !installed.get().contains(bin) {
@ -682,14 +789,11 @@ fn metadata(config: &Config, root: &Filesystem) -> CargoResult<FileLock> {
    root.open_rw(Path::new(".crates.toml"), config, "crate metadata")
}

fn resolve_root(flag: Option<&str>, config: &Config) -> CargoResult<Filesystem> {
    let config_root = config.get_path("install.root")?;
    Ok(flag.map(PathBuf::from)
        .or_else(|| env::var_os("CARGO_INSTALL_ROOT").map(PathBuf::from))
        .or_else(move || config_root.map(|v| v.val))
        .map(Filesystem::new)
        .unwrap_or_else(|| config.home().clone()))
}
View file
@ -9,14 +9,20 @@ use git2::Repository as GitRepository;
use core::Workspace;
use ops::is_bad_artifact_name;
use util::{internal, FossilRepo, GitRepo, HgRepo, PijulRepo};
use util::{paths, Config};
use util::errors::{CargoResult, CargoResultExt};

use toml;

#[derive(Clone, Copy, Debug, PartialEq)]
pub enum VersionControl {
    Git,
    Hg,
    Pijul,
    Fossil,
    NoVcs,
}

#[derive(Debug)]
pub struct NewOptions {
@ -62,12 +68,13 @@ struct MkOptions<'a> {
}

impl NewOptions {
    pub fn new(
        version_control: Option<VersionControl>,
        bin: bool,
        lib: bool,
        path: String,
        name: Option<String>,
    ) -> CargoResult<NewOptions> {
        let kind = match (bin, lib) {
            (true, true) => bail!("can't specify both lib and binary outputs"),
            (false, true) => NewProjectKind::Lib,
@ -75,7 +82,12 @@ impl NewOptions {
            (_, false) => NewProjectKind::Bin,
        };

        let opts = NewOptions {
            version_control,
            kind,
            path,
            name,
        };
        Ok(opts)
    }
}
@ -92,16 +104,21 @@ fn get_name<'a>(path: &'a Path, opts: &'a NewOptions) -> CargoResult<&'a str> {
    }

    let file_name = path.file_name().ok_or_else(|| {
        format_err!(
            "cannot auto-detect project name from path {:?} ; use --name to override",
            path.as_os_str()
        )
    })?;

    file_name.to_str().ok_or_else(|| {
        format_err!(
            "cannot create project with a non-unicode name: {:?}",
            file_name
        )
    })
}

fn check_name(name: &str, opts: &NewOptions) -> CargoResult<()> {
    // If --name is already used to override, no point in suggesting it
    // again as a fix.
    let name_help = match opts.name {
@ -111,45 +128,52 @@ fn check_name(name: &str, opts: &NewOptions) -> CargoResult<()> {
    // Ban keywords + test list found at
    // https://doc.rust-lang.org/grammar.html#keywords
    let blacklist = [
        "abstract", "alignof", "as", "become", "box", "break", "const", "continue", "crate", "do",
        "else", "enum", "extern", "false", "final", "fn", "for", "if", "impl", "in", "let", "loop",
        "macro", "match", "mod", "move", "mut", "offsetof", "override", "priv", "proc", "pub",
        "pure", "ref", "return", "self", "sizeof", "static", "struct", "super", "test", "trait",
        "true", "type", "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", "yield",
    ];
    if blacklist.contains(&name) || (opts.kind.is_bin() && is_bad_artifact_name(name)) {
        bail!(
            "The name `{}` cannot be used as a crate name{}",
            name,
            name_help
        )
    }

    if let Some(ref c) = name.chars().nth(0) {
        if c.is_digit(10) {
            bail!(
                "Package names starting with a digit cannot be used as a crate name{}",
                name_help
            )
        }
    }

    for c in name.chars() {
        if c.is_alphanumeric() {
            continue;
        }
        if c == '_' || c == '-' {
            continue;
        }
        bail!(
            "Invalid character `{}` in crate name: `{}`{}",
            c,
            name,
            name_help
        )
    }
    Ok(())
}

fn detect_source_paths_and_types(
    project_path: &Path,
    project_name: &str,
    detected_files: &mut Vec<SourceFileInformation>,
) -> CargoResult<()> {
    let path = project_path;
    let name = project_name;
@ -165,44 +189,61 @@ fn detect_source_paths_and_types(project_path : &Path,
} }
let tests = vec![ let tests = vec![
Test { proposed_path: format!("src/main.rs"), handling: H::Bin }, Test {
Test { proposed_path: format!("main.rs"), handling: H::Bin }, proposed_path: format!("src/main.rs"),
Test { proposed_path: format!("src/{}.rs", name), handling: H::Detect }, handling: H::Bin,
Test { proposed_path: format!("{}.rs", name), handling: H::Detect }, },
Test { proposed_path: format!("src/lib.rs"), handling: H::Lib }, Test {
Test { proposed_path: format!("lib.rs"), handling: H::Lib }, proposed_path: format!("main.rs"),
handling: H::Bin,
},
Test {
proposed_path: format!("src/{}.rs", name),
handling: H::Detect,
},
Test {
proposed_path: format!("{}.rs", name),
handling: H::Detect,
},
Test {
proposed_path: format!("src/lib.rs"),
handling: H::Lib,
},
Test {
proposed_path: format!("lib.rs"),
handling: H::Lib,
},
]; ];
for i in tests { for i in tests {
let pp = i.proposed_path; let pp = i.proposed_path;
// path/pp does not exist or is not a file // path/pp does not exist or is not a file
if !fs::metadata(&path.join(&pp)).map(|x| x.is_file()).unwrap_or(false) { if !fs::metadata(&path.join(&pp))
.map(|x| x.is_file())
.unwrap_or(false)
{
continue; continue;
} }
let sfi = match i.handling { let sfi = match i.handling {
H::Bin => { H::Bin => SourceFileInformation {
SourceFileInformation { relative_path: pp,
relative_path: pp, target_name: project_name.to_string(),
target_name: project_name.to_string(), bin: true,
bin: true },
} H::Lib => SourceFileInformation {
} relative_path: pp,
H::Lib => { target_name: project_name.to_string(),
SourceFileInformation { bin: false,
relative_path: pp, },
target_name: project_name.to_string(),
bin: false
}
}
H::Detect => { H::Detect => {
let content = paths::read(&path.join(pp.clone()))?; let content = paths::read(&path.join(pp.clone()))?;
let isbin = content.contains("fn main"); let isbin = content.contains("fn main");
SourceFileInformation { SourceFileInformation {
relative_path: pp, relative_path: pp,
target_name: project_name.to_string(), target_name: project_name.to_string(),
bin: isbin bin: isbin,
} }
} }
}; };
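
Aside: the `H::Detect` arm above decides between a binary and a library purely by scanning the file for the substring `fn main`. A standalone sketch of that heuristic (the probe file name is illustrative):

    use std::fs;
    use std::path::Path;

    // Mirrors the H::Detect heuristic above: a file that mentions `fn main`
    // is treated as a binary target, otherwise as a library.
    fn looks_like_bin(path: &Path) -> std::io::Result<bool> {
        let content = fs::read_to_string(path)?;
        Ok(content.contains("fn main"))
    }

    fn main() -> std::io::Result<()> {
        fs::write("probe.rs", "fn main() { println!(\"hi\"); }")?;
        assert!(looks_like_bin(Path::new("probe.rs"))?);
        fs::remove_file("probe.rs")
    }
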
@ -211,26 +252,32 @@ fn detect_source_paths_and_types(project_path : &Path,
// Check for duplicate lib attempt // Check for duplicate lib attempt
let mut previous_lib_relpath : Option<&str> = None; let mut previous_lib_relpath: Option<&str> = None;
let mut duplicates_checker : BTreeMap<&str, &SourceFileInformation> = BTreeMap::new(); let mut duplicates_checker: BTreeMap<&str, &SourceFileInformation> = BTreeMap::new();
for i in detected_files { for i in detected_files {
if i.bin { if i.bin {
if let Some(x) = BTreeMap::get::<str>(&duplicates_checker, i.target_name.as_ref()) { if let Some(x) = BTreeMap::get::<str>(&duplicates_checker, i.target_name.as_ref()) {
bail!("\ bail!(
"\
multiple possible binary sources found: multiple possible binary sources found:
{} {}
{} {}
cannot automatically generate Cargo.toml as the main target would be ambiguous", cannot automatically generate Cargo.toml as the main target would be ambiguous",
&x.relative_path, &i.relative_path); &x.relative_path,
&i.relative_path
);
} }
duplicates_checker.insert(i.target_name.as_ref(), i); duplicates_checker.insert(i.target_name.as_ref(), i);
} else { } else {
if let Some(plp) = previous_lib_relpath { if let Some(plp) = previous_lib_relpath {
bail!("cannot have a project with \ bail!(
multiple libraries, \ "cannot have a project with \
found both `{}` and `{}`", multiple libraries, \
plp, i.relative_path) found both `{}` and `{}`",
plp,
i.relative_path
)
} }
previous_lib_relpath = Some(&i.relative_path); previous_lib_relpath = Some(&i.relative_path);
} }
@ -242,15 +289,15 @@ cannot automatically generate Cargo.toml as the main target would be ambiguous",
fn plan_new_source_file(bin: bool, project_name: String) -> SourceFileInformation { fn plan_new_source_file(bin: bool, project_name: String) -> SourceFileInformation {
if bin { if bin {
SourceFileInformation { SourceFileInformation {
relative_path: "src/main.rs".to_string(), relative_path: "src/main.rs".to_string(),
target_name: project_name, target_name: project_name,
bin: true, bin: true,
} }
} else { } else {
SourceFileInformation { SourceFileInformation {
relative_path: "src/lib.rs".to_string(), relative_path: "src/lib.rs".to_string(),
target_name: project_name, target_name: project_name,
bin: false, bin: false,
} }
} }
} }
@ -258,9 +305,11 @@ fn plan_new_source_file(bin: bool, project_name: String) -> SourceFileInformatio
pub fn new(opts: &NewOptions, config: &Config) -> CargoResult<()> { pub fn new(opts: &NewOptions, config: &Config) -> CargoResult<()> {
let path = config.cwd().join(&opts.path); let path = config.cwd().join(&opts.path);
if fs::metadata(&path).is_ok() { if fs::metadata(&path).is_ok() {
bail!("destination `{}` already exists\n\n\ bail!(
Use `cargo init` to initialize the directory\ "destination `{}` already exists\n\n\
", path.display() Use `cargo init` to initialize the directory\
",
path.display()
) )
} }
@ -276,8 +325,11 @@ pub fn new(opts: &NewOptions, config: &Config) -> CargoResult<()> {
}; };
mk(config, &mkopts).chain_err(|| { mk(config, &mkopts).chain_err(|| {
format_err!("Failed to create project `{}` at `{}`", format_err!(
name, path.display()) "Failed to create project `{}` at `{}`",
name,
path.display()
)
})?; })?;
Ok(()) Ok(())
} }
@ -333,9 +385,11 @@ pub fn init(opts: &NewOptions, config: &Config) -> CargoResult<()> {
// if none exists, maybe create git, like in `cargo new` // if none exists, maybe create git, like in `cargo new`
if num_detected_vsces > 1 { if num_detected_vsces > 1 {
bail!("more than one of .hg, .git, .pijul, .fossil configurations \ bail!(
found and the ignore file can't be filled in as \ "more than one of .hg, .git, .pijul, .fossil configurations \
a result. specify --vcs to override detection"); found and the ignore file can't be filled in as \
a result. specify --vcs to override detection"
);
} }
} }
@ -343,13 +397,16 @@ pub fn init(opts: &NewOptions, config: &Config) -> CargoResult<()> {
version_control, version_control,
path: &path, path: &path,
name, name,
bin: src_paths_types.iter().any(|x|x.bin), bin: src_paths_types.iter().any(|x| x.bin),
source_files: src_paths_types, source_files: src_paths_types,
}; };
mk(config, &mkopts).chain_err(|| { mk(config, &mkopts).chain_err(|| {
format_err!("Failed to create project `{}` at `{}`", format_err!(
name, path.display()) "Failed to create project `{}` at `{}`",
name,
path.display()
)
})?; })?;
Ok(()) Ok(())
} }
@ -363,26 +420,30 @@ fn mk(config: &Config, opts: &MkOptions) -> CargoResult<()> {
let name = opts.name; let name = opts.name;
let cfg = global_config(config)?; let cfg = global_config(config)?;
// Please ensure that ignore and hgignore are in sync. // Please ensure that ignore and hgignore are in sync.
let ignore = ["\n", "/target\n", "**/*.rs.bk\n", let ignore = [
if !opts.bin { "Cargo.lock\n" } else { "" }] "\n",
.concat(); "/target\n",
"**/*.rs.bk\n",
if !opts.bin { "Cargo.lock\n" } else { "" },
].concat();
// Mercurial glob ignores can't be rooted, so just sticking a 'syntax: glob' at the top of the // Mercurial glob ignores can't be rooted, so just sticking a 'syntax: glob' at the top of the
// file will exclude too much. Instead, use regexp-based ignores. See 'hg help ignore' for // file will exclude too much. Instead, use regexp-based ignores. See 'hg help ignore' for
// more. // more.
let hgignore = ["\n", "^target/\n", "glob:*.rs.bk\n", let hgignore = [
if !opts.bin { "glob:Cargo.lock\n" } else { "" }] "\n",
.concat(); "^target/\n",
"glob:*.rs.bk\n",
if !opts.bin { "glob:Cargo.lock\n" } else { "" },
].concat();
let vcs = opts.version_control let vcs = opts.version_control.unwrap_or_else(|| {
.unwrap_or_else(|| { let in_existing_vcs = existing_vcs_repo(path.parent().unwrap_or(path), config.cwd());
let in_existing_vcs = existing_vcs_repo(path.parent().unwrap_or(path), match (cfg.version_control, in_existing_vcs) {
config.cwd()); (None, false) => VersionControl::Git,
match (cfg.version_control, in_existing_vcs) { (Some(opt), false) => opt,
(None, false) => VersionControl::Git, (_, true) => VersionControl::NoVcs,
(Some(opt), false) => opt, }
(_, true) => VersionControl::NoVcs, });
}
});
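
Aside: the fallback above is a small decision table. A sketch of the same logic with a reduced enum (cargo's real `VersionControl` has more variants):

    #[derive(Debug, Clone, Copy, PartialEq)]
    enum VersionControl { Git, Hg, NoVcs }

    // Same decision table as the match above: an explicit config wins,
    // but being inside an existing repo always suppresses re-initialization.
    fn pick_vcs(configured: Option<VersionControl>, in_existing_vcs: bool) -> VersionControl {
        match (configured, in_existing_vcs) {
            (None, false) => VersionControl::Git,
            (Some(opt), false) => opt,
            (_, true) => VersionControl::NoVcs,
        }
    }

    fn main() {
        assert_eq!(pick_vcs(None, false), VersionControl::Git);
        assert_eq!(pick_vcs(Some(VersionControl::Hg), false), VersionControl::Hg);
        assert_eq!(pick_vcs(Some(VersionControl::Hg), true), VersionControl::NoVcs);
    }
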
match vcs { match vcs {
VersionControl::Git => { VersionControl::Git => {
@ -390,38 +451,37 @@ fn mk(config: &Config, opts: &MkOptions) -> CargoResult<()> {
GitRepo::init(path, config.cwd())?; GitRepo::init(path, config.cwd())?;
} }
paths::append(&path.join(".gitignore"), ignore.as_bytes())?; paths::append(&path.join(".gitignore"), ignore.as_bytes())?;
}, }
VersionControl::Hg => { VersionControl::Hg => {
if !fs::metadata(&path.join(".hg")).is_ok() { if !fs::metadata(&path.join(".hg")).is_ok() {
HgRepo::init(path, config.cwd())?; HgRepo::init(path, config.cwd())?;
} }
paths::append(&path.join(".hgignore"), hgignore.as_bytes())?; paths::append(&path.join(".hgignore"), hgignore.as_bytes())?;
}, }
VersionControl::Pijul => { VersionControl::Pijul => {
if !fs::metadata(&path.join(".pijul")).is_ok() { if !fs::metadata(&path.join(".pijul")).is_ok() {
PijulRepo::init(path, config.cwd())?; PijulRepo::init(path, config.cwd())?;
} }
paths::append(&path.join(".ignore"), ignore.as_bytes())?; paths::append(&path.join(".ignore"), ignore.as_bytes())?;
}, }
VersionControl::Fossil => { VersionControl::Fossil => {
if !fs::metadata(&path.join(".fossil")).is_ok() { if !fs::metadata(&path.join(".fossil")).is_ok() {
FossilRepo::init(path, config.cwd())?; FossilRepo::init(path, config.cwd())?;
} }
}, }
VersionControl::NoVcs => { VersionControl::NoVcs => {
fs::create_dir_all(path)?; fs::create_dir_all(path)?;
}, }
}; };
let (author_name, email) = discover_author()?; let (author_name, email) = discover_author()?;
// Hoo boy, sure glad we've got exhaustiveness checking behind us. // Hoo boy, sure glad we've got exhaustiveness checking behind us.
let author = match (cfg.name, cfg.email, author_name, email) { let author = match (cfg.name, cfg.email, author_name, email) {
(Some(name), Some(email), _, _) | (Some(name), Some(email), _, _)
(Some(name), None, _, Some(email)) | | (Some(name), None, _, Some(email))
(None, Some(email), name, _) | | (None, Some(email), name, _)
(None, None, name, Some(email)) => format!("{} <{}>", name, email), | (None, None, name, Some(email)) => format!("{} <{}>", name, email),
(Some(name), None, _, None) | (Some(name), None, _, None) | (None, None, name, None) => name,
(None, None, name, None) => name,
}; };
let mut cargotoml_path_specifier = String::new(); let mut cargotoml_path_specifier = String::new();
@ -431,32 +491,46 @@ fn mk(config: &Config, opts: &MkOptions) -> CargoResult<()> {
for i in &opts.source_files { for i in &opts.source_files {
if i.bin { if i.bin {
if i.relative_path != "src/main.rs" { if i.relative_path != "src/main.rs" {
cargotoml_path_specifier.push_str(&format!(r#" cargotoml_path_specifier.push_str(&format!(
r#"
[[bin]] [[bin]]
name = "{}" name = "{}"
path = {} path = {}
"#, i.target_name, toml::Value::String(i.relative_path.clone()))); "#,
i.target_name,
toml::Value::String(i.relative_path.clone())
));
} }
} else if i.relative_path != "src/lib.rs" { } else if i.relative_path != "src/lib.rs" {
cargotoml_path_specifier.push_str(&format!(r#" cargotoml_path_specifier.push_str(&format!(
r#"
[lib] [lib]
name = "{}" name = "{}"
path = {} path = {}
"#, i.target_name, toml::Value::String(i.relative_path.clone()))); "#,
i.target_name,
toml::Value::String(i.relative_path.clone())
));
} }
} }
// Create Cargo.toml file with necessary [lib] and [[bin]] sections, if needed // Create Cargo.toml file with necessary [lib] and [[bin]] sections, if needed
paths::write(&path.join("Cargo.toml"), format!( paths::write(
r#"[package] &path.join("Cargo.toml"),
format!(
r#"[package]
name = "{}" name = "{}"
version = "0.1.0" version = "0.1.0"
authors = [{}] authors = [{}]
[dependencies] [dependencies]
{}"#, name, toml::Value::String(author), cargotoml_path_specifier).as_bytes())?; {}"#,
name,
toml::Value::String(author),
cargotoml_path_specifier
).as_bytes(),
)?;
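
Aside: the manifest text above routes paths through `toml::Value::String` so they come out quoted and escaped as valid TOML. A minimal demonstration (the paths are made up):

    // Display on a toml::Value emits a correctly quoted and escaped TOML
    // literal, so Windows-style paths with backslashes stay valid.
    fn main() {
        let unix = toml::Value::String("src/bin/tool.rs".to_string());
        let windows = toml::Value::String(r"src\bin\tool.rs".to_string());
        println!("path = {}", unix);    // path = "src/bin/tool.rs"
        println!("path = {}", windows); // path = "src\\bin\\tool.rs"
    }
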
// Create all specified source files // Create all specified source files
// (with respective parent directories) // (with respective parent directories)
@ -469,7 +543,7 @@ authors = [{}]
fs::create_dir_all(src_dir)?; fs::create_dir_all(src_dir)?;
} }
let default_file_content : &[u8] = if i.bin { let default_file_content: &[u8] = if i.bin {
b"\ b"\
fn main() { fn main() {
println!(\"Hello, world!\"); println!(\"Hello, world!\");
@ -487,53 +561,71 @@ mod tests {
" "
}; };
if !fs::metadata(&path_of_source_file).map(|x| x.is_file()).unwrap_or(false) { if !fs::metadata(&path_of_source_file)
.map(|x| x.is_file())
.unwrap_or(false)
{
paths::write(&path_of_source_file, default_file_content)?; paths::write(&path_of_source_file, default_file_content)?;
} }
} }
if let Err(e) = Workspace::new(&path.join("Cargo.toml"), config) { if let Err(e) = Workspace::new(&path.join("Cargo.toml"), config) {
let msg = format!("compiling this new crate may not work due to invalid \ let msg = format!(
workspace configuration\n\n{}", e); "compiling this new crate may not work due to invalid \
workspace configuration\n\n{}",
e
);
config.shell().warn(msg)?; config.shell().warn(msg)?;
} }
Ok(()) Ok(())
} }
fn get_environment_variable(variables: &[&str] ) -> Option<String>{ fn get_environment_variable(variables: &[&str]) -> Option<String> {
variables.iter() variables.iter().filter_map(|var| env::var(var).ok()).next()
.filter_map(|var| env::var(var).ok())
.next()
} }
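
Aside: `get_environment_variable` is a first-match-wins scan over candidate variables. A standalone sketch with arbitrary example names:

    use std::env;

    // Same shape as get_environment_variable above: the first variable in
    // the list that is actually set wins.
    fn first_set(vars: &[&str]) -> Option<String> {
        vars.iter().filter_map(|v| env::var(v).ok()).next()
    }

    fn main() {
        // PATH is set in virtually every environment, so it wins over the dummy.
        println!("{:?}", first_set(&["SURELY_UNSET_DUMMY", "PATH"]));
    }
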
fn discover_author() -> CargoResult<(String, Option<String>)> { fn discover_author() -> CargoResult<(String, Option<String>)> {
let cwd = env::current_dir()?; let cwd = env::current_dir()?;
let git_config = if let Ok(repo) = GitRepository::discover(&cwd) { let git_config = if let Ok(repo) = GitRepository::discover(&cwd) {
repo.config().ok().or_else(|| GitConfig::open_default().ok()) repo.config()
.ok()
.or_else(|| GitConfig::open_default().ok())
} else { } else {
GitConfig::open_default().ok() GitConfig::open_default().ok()
}; };
let git_config = git_config.as_ref(); let git_config = git_config.as_ref();
let name_variables = ["CARGO_NAME", "GIT_AUTHOR_NAME", "GIT_COMMITTER_NAME", let name_variables = [
"USER", "USERNAME", "NAME"]; "CARGO_NAME",
"GIT_AUTHOR_NAME",
"GIT_COMMITTER_NAME",
"USER",
"USERNAME",
"NAME",
];
let name = get_environment_variable(&name_variables[0..3]) let name = get_environment_variable(&name_variables[0..3])
.or_else(|| git_config.and_then(|g| g.get_string("user.name").ok())) .or_else(|| git_config.and_then(|g| g.get_string("user.name").ok()))
.or_else(|| get_environment_variable(&name_variables[3..])); .or_else(|| get_environment_variable(&name_variables[3..]));
let name = match name { let name = match name {
Some(name) => name, Some(name) => name,
None => { None => {
let username_var = if cfg!(windows) {"USERNAME"} else {"USER"}; let username_var = if cfg!(windows) { "USERNAME" } else { "USER" };
bail!("could not determine the current user, please set ${}", bail!(
username_var) "could not determine the current user, please set ${}",
username_var
)
} }
}; };
let email_variables = ["CARGO_EMAIL", "GIT_AUTHOR_EMAIL", "GIT_COMMITTER_EMAIL", let email_variables = [
"EMAIL"]; "CARGO_EMAIL",
"GIT_AUTHOR_EMAIL",
"GIT_COMMITTER_EMAIL",
"EMAIL",
];
let email = get_environment_variable(&email_variables[0..3]) let email = get_environment_variable(&email_variables[0..3])
.or_else(|| git_config.and_then(|g| g.get_string("user.email").ok())) .or_else(|| git_config.and_then(|g| g.get_string("user.email").ok()))
.or_else(|| get_environment_variable(&email_variables[3..])); .or_else(|| get_environment_variable(&email_variables[3..]));
let name = name.trim().to_string(); let name = name.trim().to_string();
let email = email.map(|s| s.trim().to_string()); let email = email.map(|s| s.trim().to_string());
@ -552,11 +644,14 @@ fn global_config(config: &Config) -> CargoResult<CargoNewConfig> {
Some(("pijul", _)) => Some(VersionControl::Pijul), Some(("pijul", _)) => Some(VersionControl::Pijul),
Some(("none", _)) => Some(VersionControl::NoVcs), Some(("none", _)) => Some(VersionControl::NoVcs),
Some((s, p)) => { Some((s, p)) => {
return Err(internal(format!("invalid configuration for key \ return Err(internal(format!(
`cargo-new.vcs`, unknown vcs `{}` \ "invalid configuration for key \
(found in {})", s, p))) `cargo-new.vcs`, unknown vcs `{}` \
(found in {})",
s, p
)))
} }
None => None None => None,
}; };
Ok(CargoNewConfig { Ok(CargoNewConfig {
name, name,
View file
@ -18,11 +18,13 @@ pub struct OutputMetadataOptions {
/// Loads the manifest, resolves the dependencies of the project to the concrete /// Loads the manifest, resolves the dependencies of the project to the concrete
/// used versions - considering overrides - and writes all dependencies in a JSON /// used versions - considering overrides - and writes all dependencies in a JSON
/// format to stdout. /// format to stdout.
pub fn output_metadata(ws: &Workspace, pub fn output_metadata(ws: &Workspace, opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
if opt.version != VERSION { if opt.version != VERSION {
bail!("metadata version {} not supported, only {} is currently supported", bail!(
opt.version, VERSION); "metadata version {} not supported, only {} is currently supported",
opt.version,
VERSION
);
} }
if opt.no_deps { if opt.no_deps {
metadata_no_deps(ws, opt) metadata_no_deps(ws, opt)
@ -31,8 +33,7 @@ pub fn output_metadata(ws: &Workspace,
} }
} }
fn metadata_no_deps(ws: &Workspace, fn metadata_no_deps(ws: &Workspace, _opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
_opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
Ok(ExportInfo { Ok(ExportInfo {
packages: ws.members().cloned().collect(), packages: ws.members().cloned().collect(),
workspace_members: ws.members().map(|pkg| pkg.package_id().clone()).collect(), workspace_members: ws.members().map(|pkg| pkg.package_id().clone()).collect(),
@ -43,25 +44,27 @@ fn metadata_no_deps(ws: &Workspace,
}) })
} }
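
Aside: `ExportInfo` above is serialized straight to JSON on stdout. A hedged sketch of roughly that shape using `serde_json` (all field values here are invented placeholders, not real cargo output):

    // Approximates the top-level shape `cargo metadata --no-deps` emits.
    fn main() {
        let info = serde_json::json!({
            "packages": [{ "name": "demo", "version": "0.1.0" }],
            "workspace_members": ["demo 0.1.0 (path+file:///demo)"],
            "resolve": null,
            "version": 1
        });
        println!("{}", serde_json::to_string_pretty(&info).unwrap());
    }
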
fn metadata_full(ws: &Workspace, fn metadata_full(ws: &Workspace, opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
let specs = Packages::All.into_package_id_specs(ws)?; let specs = Packages::All.into_package_id_specs(ws)?;
let deps = ops::resolve_ws_precisely(ws, let deps = ops::resolve_ws_precisely(
None, ws,
&opt.features, None,
opt.all_features, &opt.features,
opt.no_default_features, opt.all_features,
&specs)?; opt.no_default_features,
&specs,
)?;
let (packages, resolve) = deps; let (packages, resolve) = deps;
let packages = packages.package_ids() let packages = packages
.map(|i| packages.get(i).map(|p| p.clone())) .package_ids()
.collect::<CargoResult<Vec<_>>>()?; .map(|i| packages.get(i).map(|p| p.clone()))
.collect::<CargoResult<Vec<_>>>()?;
Ok(ExportInfo { Ok(ExportInfo {
packages, packages,
workspace_members: ws.members().map(|pkg| pkg.package_id().clone()).collect(), workspace_members: ws.members().map(|pkg| pkg.package_id().clone()).collect(),
resolve: Some(MetadataResolve{ resolve: Some(MetadataResolve {
resolve, resolve,
root: ws.current_opt().map(|pkg| pkg.package_id().clone()), root: ws.current_opt().map(|pkg| pkg.package_id().clone()),
}), }),
@ -86,13 +89,13 @@ pub struct ExportInfo {
/// format for `PackageId`s /// format for `PackageId`s
#[derive(Serialize)] #[derive(Serialize)]
struct MetadataResolve { struct MetadataResolve {
#[serde(rename = "nodes", serialize_with = "serialize_resolve")] #[serde(rename = "nodes", serialize_with = "serialize_resolve")] resolve: Resolve,
resolve: Resolve,
root: Option<PackageId>, root: Option<PackageId>,
} }
fn serialize_resolve<S>(resolve: &Resolve, s: S) -> Result<S::Ok, S::Error> fn serialize_resolve<S>(resolve: &Resolve, s: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer, where
S: ser::Serializer,
{ {
#[derive(Serialize)] #[derive(Serialize)]
struct Node<'a> { struct Node<'a> {
@ -101,11 +104,13 @@ fn serialize_resolve<S>(resolve: &Resolve, s: S) -> Result<S::Ok, S::Error>
features: Vec<&'a str>, features: Vec<&'a str>,
} }
resolve.iter().map(|id| { resolve
Node { .iter()
.map(|id| Node {
id, id,
dependencies: resolve.deps(id).collect(), dependencies: resolve.deps(id).collect(),
features: resolve.features_sorted(id), features: resolve.features_sorted(id),
} })
}).collect::<Vec<_>>().serialize(s) .collect::<Vec<_>>()
.serialize(s)
} }
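
Aside: `serialize_resolve` is a plain function handed to serde via `#[serde(serialize_with = ...)]`. A minimal self-contained example of that mechanism (the hex conversion is an arbitrary stand-in for the `Node` construction above):

    use serde::ser::{Serialize, Serializer};

    #[derive(serde::Serialize)]
    struct Wrapper {
        #[serde(serialize_with = "as_hex")]
        id: u32,
    }

    // A free function named in #[serde(serialize_with = ...)] converts the
    // field into whatever intermediate form should actually be serialized.
    fn as_hex<S: Serializer>(id: &u32, s: S) -> Result<S::Ok, S::Error> {
        format!("{:#x}", id).serialize(s)
    }

    fn main() {
        let w = Wrapper { id: 255 };
        println!("{}", serde_json::to_string(&w).unwrap()); // {"id":"0xff"}
    }
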
View file
@ -5,11 +5,11 @@ use std::path::{self, Path};
use std::sync::Arc; use std::sync::Arc;
use flate2::read::GzDecoder; use flate2::read::GzDecoder;
use flate2::{GzBuilder, Compression}; use flate2::{Compression, GzBuilder};
use git2; use git2;
use tar::{Archive, Builder, Header, EntryType}; use tar::{Archive, Builder, EntryType, Header};
use core::{Package, Workspace, Source, SourceId}; use core::{Package, Source, SourceId, Workspace};
use sources::PathSource; use sources::PathSource;
use util::{self, internal, Config, FileLock}; use util::{self, internal, Config, FileLock};
use util::paths; use util::paths;
@ -27,15 +27,12 @@ pub struct PackageOpts<'cfg> {
pub registry: Option<String>, pub registry: Option<String>,
} }
pub fn package(ws: &Workspace, pub fn package(ws: &Workspace, opts: &PackageOpts) -> CargoResult<Option<FileLock>> {
opts: &PackageOpts) -> CargoResult<Option<FileLock>> {
ops::resolve_ws(ws)?; ops::resolve_ws(ws)?;
let pkg = ws.current()?; let pkg = ws.current()?;
let config = ws.config(); let config = ws.config();
let mut src = PathSource::new(pkg.root(), let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), config);
pkg.package_id().source_id(),
config);
src.update()?; src.update()?;
if opts.check_metadata { if opts.check_metadata {
@ -46,9 +43,10 @@ pub fn package(ws: &Workspace,
if opts.list { if opts.list {
let root = pkg.root(); let root = pkg.root();
let mut list: Vec<_> = src.list_files(pkg)?.iter().map(|file| { let mut list: Vec<_> = src.list_files(pkg)?
util::without_prefix(file, root).unwrap().to_path_buf() .iter()
}).collect(); .map(|file| util::without_prefix(file, root).unwrap().to_path_buf())
.collect();
if include_lockfile(&pkg) { if include_lockfile(&pkg) {
list.push("Cargo.lock".into()); list.push("Cargo.lock".into());
} }
@ -56,7 +54,7 @@ pub fn package(ws: &Workspace,
for file in list.iter() { for file in list.iter() {
println!("{}", file.display()); println!("{}", file.display());
} }
return Ok(None) return Ok(None);
} }
if !opts.allow_dirty { if !opts.allow_dirty {
@ -74,31 +72,28 @@ pub fn package(ws: &Workspace,
// location if it actually passes all our tests. Any previously existing // location if it actually passes all our tests. Any previously existing
// tarball can be assumed as corrupt or invalid, so we just blow it away if // tarball can be assumed as corrupt or invalid, so we just blow it away if
// it exists. // it exists.
config.shell().status("Packaging", pkg.package_id().to_string())?; config
.shell()
.status("Packaging", pkg.package_id().to_string())?;
dst.file().set_len(0)?; dst.file().set_len(0)?;
tar(ws, &src, dst.file(), &filename).chain_err(|| { tar(ws, &src, dst.file(), &filename)
format_err!("failed to prepare local package for uploading") .chain_err(|| format_err!("failed to prepare local package for uploading"))?;
})?;
if opts.verify { if opts.verify {
dst.seek(SeekFrom::Start(0))?; dst.seek(SeekFrom::Start(0))?;
run_verify(ws, &dst, opts).chain_err(|| { run_verify(ws, &dst, opts).chain_err(|| "failed to verify package tarball")?
"failed to verify package tarball"
})?
} }
dst.seek(SeekFrom::Start(0))?; dst.seek(SeekFrom::Start(0))?;
{ {
let src_path = dst.path(); let src_path = dst.path();
let dst_path = dst.parent().join(&filename); let dst_path = dst.parent().join(&filename);
fs::rename(&src_path, &dst_path).chain_err(|| { fs::rename(&src_path, &dst_path)
"failed to move temporary tarball into final location" .chain_err(|| "failed to move temporary tarball into final location")?;
})?;
} }
Ok(Some(dst)) Ok(Some(dst))
} }
fn include_lockfile(pkg: &Package) -> bool { fn include_lockfile(pkg: &Package) -> bool {
pkg.manifest().publish_lockfile() && pkg.manifest().publish_lockfile() && pkg.targets().iter().any(|t| t.is_example() || t.is_bin())
pkg.targets().iter().any(|t| t.is_example() || t.is_bin())
} }
// check that the package has some piece of metadata that a human can // check that the package has some piece of metadata that a human can
@ -117,7 +112,11 @@ fn check_metadata(pkg: &Package, config: &Config) -> CargoResult<()> {
)* )*
}} }}
} }
lacking!(description, license || license_file, documentation || homepage || repository); lacking!(
description,
license || license_file,
documentation || homepage || repository
);
if !missing.is_empty() { if !missing.is_empty() {
let mut things = missing[..missing.len() - 1].join(", "); let mut things = missing[..missing.len() - 1].join(", ");
@ -128,10 +127,11 @@ fn check_metadata(pkg: &Package, config: &Config) -> CargoResult<()> {
} }
things.push_str(missing.last().unwrap()); things.push_str(missing.last().unwrap());
config.shell().warn( config.shell().warn(&format!(
&format!("manifest has no {things}.\n\ "manifest has no {things}.\n\
See http://doc.crates.io/manifest.html#package-metadata for more info.", See http://doc.crates.io/manifest.html#package-metadata for more info.",
things = things))? things = things
))?
} }
Ok(()) Ok(())
} }
@ -140,9 +140,12 @@ fn check_metadata(pkg: &Package, config: &Config) -> CargoResult<()> {
fn verify_dependencies(pkg: &Package) -> CargoResult<()> { fn verify_dependencies(pkg: &Package) -> CargoResult<()> {
for dep in pkg.dependencies() { for dep in pkg.dependencies() {
if dep.source_id().is_path() && !dep.specified_req() { if dep.source_id().is_path() && !dep.specified_req() {
bail!("all path dependencies must have a version specified \ bail!(
when packaging.\ndependency `{}` does not specify \ "all path dependencies must have a version specified \
a version.", dep.name()) when packaging.\ndependency `{}` does not specify \
a version.",
dep.name()
)
} }
} }
Ok(()) Ok(())
@ -151,14 +154,16 @@ fn verify_dependencies(pkg: &Package) -> CargoResult<()> {
fn check_not_dirty(p: &Package, src: &PathSource) -> CargoResult<()> { fn check_not_dirty(p: &Package, src: &PathSource) -> CargoResult<()> {
if let Ok(repo) = git2::Repository::discover(p.root()) { if let Ok(repo) = git2::Repository::discover(p.root()) {
if let Some(workdir) = repo.workdir() { if let Some(workdir) = repo.workdir() {
debug!("found a git repo at {:?}, checking if index present", debug!(
workdir); "found a git repo at {:?}, checking if index present",
workdir
);
let path = p.manifest_path(); let path = p.manifest_path();
let path = path.strip_prefix(workdir).unwrap_or(path); let path = path.strip_prefix(workdir).unwrap_or(path);
if let Ok(status) = repo.status_file(path) { if let Ok(status) = repo.status_file(path) {
if (status & git2::Status::IGNORED).is_empty() { if (status & git2::Status::IGNORED).is_empty() {
debug!("Cargo.toml found in repo, checking if dirty"); debug!("Cargo.toml found in repo, checking if dirty");
return git(p, src, &repo) return git(p, src, &repo);
} }
} }
} }
@ -168,39 +173,45 @@ fn check_not_dirty(p: &Package, src: &PathSource) -> CargoResult<()> {
// have to assume that it's clean. // have to assume that it's clean.
return Ok(()); return Ok(());
fn git(p: &Package, fn git(p: &Package, src: &PathSource, repo: &git2::Repository) -> CargoResult<()> {
src: &PathSource,
repo: &git2::Repository) -> CargoResult<()> {
let workdir = repo.workdir().unwrap(); let workdir = repo.workdir().unwrap();
let dirty = src.list_files(p)?.iter().filter(|file| { let dirty = src.list_files(p)?
let relative = file.strip_prefix(workdir).unwrap(); .iter()
if let Ok(status) = repo.status_file(relative) { .filter(|file| {
status != git2::Status::CURRENT let relative = file.strip_prefix(workdir).unwrap();
} else { if let Ok(status) = repo.status_file(relative) {
false status != git2::Status::CURRENT
} } else {
}).map(|path| { false
path.strip_prefix(p.root()).unwrap_or(path).display().to_string() }
}).collect::<Vec<_>>(); })
.map(|path| {
path.strip_prefix(p.root())
.unwrap_or(path)
.display()
.to_string()
})
.collect::<Vec<_>>();
if dirty.is_empty() { if dirty.is_empty() {
Ok(()) Ok(())
} else { } else {
bail!("{} files in the working directory contain changes that were \ bail!(
not yet committed into git:\n\n{}\n\n\ "{} files in the working directory contain changes that were \
to proceed despite this, pass the `--allow-dirty` flag", not yet committed into git:\n\n{}\n\n\
dirty.len(), dirty.join("\n")) to proceed despite this, pass the `--allow-dirty` flag",
dirty.len(),
dirty.join("\n")
)
} }
} }
} }
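
Aside: the dirtiness test above treats any status other than `git2::Status::CURRENT` as an uncommitted change. A minimal sketch against the current directory's repository:

    use git2::{Repository, Status};
    use std::path::Path;

    // A file whose status differs from Status::CURRENT has uncommitted
    // changes (or is untracked); errors are treated as "not dirty" here,
    // matching the filter above.
    fn is_dirty(repo: &Repository, relative: &Path) -> bool {
        match repo.status_file(relative) {
            Ok(status) => status != Status::CURRENT,
            Err(_) => false,
        }
    }

    fn main() -> Result<(), git2::Error> {
        let repo = Repository::discover(".")?;
        println!("Cargo.toml dirty: {}", is_dirty(&repo, Path::new("Cargo.toml")));
        Ok(())
    }
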
fn tar(ws: &Workspace, fn tar(ws: &Workspace, src: &PathSource, dst: &File, filename: &str) -> CargoResult<()> {
src: &PathSource,
dst: &File,
filename: &str) -> CargoResult<()> {
// Prepare the encoder and its header // Prepare the encoder and its header
let filename = Path::new(filename); let filename = Path::new(filename);
let encoder = GzBuilder::new().filename(util::path2bytes(filename)?) let encoder = GzBuilder::new()
.write(dst, Compression::best()); .filename(util::path2bytes(filename)?)
.write(dst, Compression::best());
// Put all package files into a compressed archive // Put all package files into a compressed archive
let mut ar = Builder::new(encoder); let mut ar = Builder::new(encoder);
@ -211,14 +222,18 @@ fn tar(ws: &Workspace,
let relative = util::without_prefix(file, root).unwrap(); let relative = util::without_prefix(file, root).unwrap();
check_filename(relative)?; check_filename(relative)?;
let relative = relative.to_str().ok_or_else(|| { let relative = relative.to_str().ok_or_else(|| {
format_err!("non-utf8 path in source directory: {}", format_err!("non-utf8 path in source directory: {}", relative.display())
relative.display())
})?; })?;
config.shell().verbose(|shell| { config
shell.status("Archiving", &relative) .shell()
})?; .verbose(|shell| shell.status("Archiving", &relative))?;
let path = format!("{}-{}{}{}", pkg.name(), pkg.version(), let path = format!(
path::MAIN_SEPARATOR, relative); "{}-{}{}{}",
pkg.name(),
pkg.version(),
path::MAIN_SEPARATOR,
relative
);
// The tar::Builder type by default will build GNU archives, but // The tar::Builder type by default will build GNU archives, but
// unfortunately we force it here to use UStar archives instead. The // unfortunately we force it here to use UStar archives instead. The
@ -239,24 +254,21 @@ fn tar(ws: &Workspace,
// unpack the selectors 0.4.0 crate on crates.io. Either that or take a // unpack the selectors 0.4.0 crate on crates.io. Either that or take a
// look at rust-lang/cargo#2326 // look at rust-lang/cargo#2326
let mut header = Header::new_ustar(); let mut header = Header::new_ustar();
header.set_path(&path).chain_err(|| { header
format!("failed to add to archive: `{}`", relative) .set_path(&path)
})?; .chain_err(|| format!("failed to add to archive: `{}`", relative))?;
let mut file = File::open(file).chain_err(|| { let mut file = File::open(file)
format!("failed to open for archiving: `{}`", file.display()) .chain_err(|| format!("failed to open for archiving: `{}`", file.display()))?;
})?; let metadata = file.metadata()
let metadata = file.metadata().chain_err(|| { .chain_err(|| format!("could not learn metadata for: `{}`", relative))?;
format!("could not learn metadata for: `{}`", relative)
})?;
header.set_metadata(&metadata); header.set_metadata(&metadata);
if relative == "Cargo.toml" { if relative == "Cargo.toml" {
let orig = Path::new(&path).with_file_name("Cargo.toml.orig"); let orig = Path::new(&path).with_file_name("Cargo.toml.orig");
header.set_path(&orig)?; header.set_path(&orig)?;
header.set_cksum(); header.set_cksum();
ar.append(&header, &mut file).chain_err(|| { ar.append(&header, &mut file)
internal(format!("could not archive source file `{}`", relative)) .chain_err(|| internal(format!("could not archive source file `{}`", relative)))?;
})?;
let mut header = Header::new_ustar(); let mut header = Header::new_ustar();
let toml = pkg.to_registry_toml(ws.config())?; let toml = pkg.to_registry_toml(ws.config())?;
@ -265,30 +277,31 @@ fn tar(ws: &Workspace,
header.set_mode(0o644); header.set_mode(0o644);
header.set_size(toml.len() as u64); header.set_size(toml.len() as u64);
header.set_cksum(); header.set_cksum();
ar.append(&header, toml.as_bytes()).chain_err(|| { ar.append(&header, toml.as_bytes())
internal(format!("could not archive source file `{}`", relative)) .chain_err(|| internal(format!("could not archive source file `{}`", relative)))?;
})?;
} else { } else {
header.set_cksum(); header.set_cksum();
ar.append(&header, &mut file).chain_err(|| { ar.append(&header, &mut file)
internal(format!("could not archive source file `{}`", relative)) .chain_err(|| internal(format!("could not archive source file `{}`", relative)))?;
})?;
} }
} }
if include_lockfile(pkg) { if include_lockfile(pkg) {
let toml = paths::read(&ws.root().join("Cargo.lock"))?; let toml = paths::read(&ws.root().join("Cargo.lock"))?;
let path = format!("{}-{}{}Cargo.lock", pkg.name(), pkg.version(), let path = format!(
path::MAIN_SEPARATOR); "{}-{}{}Cargo.lock",
pkg.name(),
pkg.version(),
path::MAIN_SEPARATOR
);
let mut header = Header::new_ustar(); let mut header = Header::new_ustar();
header.set_path(&path)?; header.set_path(&path)?;
header.set_entry_type(EntryType::file()); header.set_entry_type(EntryType::file());
header.set_mode(0o644); header.set_mode(0o644);
header.set_size(toml.len() as u64); header.set_size(toml.len() as u64);
header.set_cksum(); header.set_cksum();
ar.append(&header, toml.as_bytes()).chain_err(|| { ar.append(&header, toml.as_bytes())
internal("could not archive source file `Cargo.lock`") .chain_err(|| internal("could not archive source file `Cargo.lock`"))?;
})?;
} }
let encoder = ar.into_inner()?; let encoder = ar.into_inner()?;
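
Aside: a condensed sketch of the archive construction above, writing one in-memory entry through a UStar header into a gzip stream with the same `flate2` and `tar` APIs (file names and contents are placeholders):

    use flate2::{Compression, GzBuilder};
    use tar::{Builder, Header};

    fn main() -> std::io::Result<()> {
        let dst = std::fs::File::create("demo-0.1.0.crate")?;
        // Gzip encoder carrying the archive's file name in its header.
        let encoder = GzBuilder::new()
            .filename("demo-0.1.0.crate")
            .write(dst, Compression::best());
        let mut ar = Builder::new(encoder);

        let body = b"[package]\nname = \"demo\"\nversion = \"0.1.0\"\n";
        // UStar header, as the comment above explains, for maximum
        // compatibility when unpacking.
        let mut header = Header::new_ustar();
        header.set_path("demo-0.1.0/Cargo.toml")?;
        header.set_mode(0o644);
        header.set_size(body.len() as u64);
        header.set_cksum();
        ar.append(&header, &body[..])?;

        // Flush the tar stream, then finish the gzip stream.
        ar.into_inner()?.finish()?;
        Ok(())
    }
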
@ -303,7 +316,8 @@ fn run_verify(ws: &Workspace, tar: &FileLock, opts: &PackageOpts) -> CargoResult
config.shell().status("Verifying", pkg)?; config.shell().status("Verifying", pkg)?;
let f = GzDecoder::new(tar.file()); let f = GzDecoder::new(tar.file());
let dst = tar.parent().join(&format!("{}-{}", pkg.name(), pkg.version())); let dst = tar.parent()
.join(&format!("{}-{}", pkg.name(), pkg.version()));
if dst.exists() { if dst.exists() {
paths::remove_dir_all(&dst)?; paths::remove_dir_all(&dst)?;
} }
@ -317,21 +331,28 @@ fn run_verify(ws: &Workspace, tar: &FileLock, opts: &PackageOpts) -> CargoResult
let new_pkg = src.root_package()?; let new_pkg = src.root_package()?;
let ws = Workspace::ephemeral(new_pkg, config, None, true)?; let ws = Workspace::ephemeral(new_pkg, config, None, true)?;
ops::compile_ws(&ws, None, &ops::CompileOptions { ops::compile_ws(
config, &ws,
jobs: opts.jobs, None,
target: opts.target.clone(), &ops::CompileOptions {
features: Vec::new(), config,
no_default_features: false, jobs: opts.jobs,
all_features: false, target: opts.target.clone(),
spec: ops::Packages::Packages(Vec::new()), features: Vec::new(),
filter: ops::CompileFilter::Default { required_features_filterable: true }, no_default_features: false,
release: false, all_features: false,
message_format: ops::MessageFormat::Human, spec: ops::Packages::Packages(Vec::new()),
mode: ops::CompileMode::Build, filter: ops::CompileFilter::Default {
target_rustdoc_args: None, required_features_filterable: true,
target_rustc_args: None, },
}, Arc::new(DefaultExecutor))?; release: false,
message_format: ops::MessageFormat::Human,
mode: ops::CompileMode::Build,
target_rustdoc_args: None,
target_rustc_args: None,
},
Arc::new(DefaultExecutor),
)?;
Ok(()) Ok(())
} }
@ -349,15 +370,19 @@ fn check_filename(file: &Path) -> CargoResult<()> {
}; };
let name = match name.to_str() { let name = match name.to_str() {
Some(name) => name, Some(name) => name,
None => { None => bail!(
bail!("path does not have a unicode filename which may not unpack \ "path does not have a unicode filename which may not unpack \
on all platforms: {}", file.display()) on all platforms: {}",
} file.display()
),
}; };
let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*']; let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*'];
if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) { if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) {
bail!("cannot package a filename with a special character `{}`: {}", bail!(
c, file.display()) "cannot package a filename with a special character `{}`: {}",
c,
file.display()
)
} }
Ok(()) Ok(())
} }
View file
@ -3,34 +3,49 @@ use std::fs;
use std::io; use std::io;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use core::{Package, SourceId, PackageId, EitherManifest}; use core::{EitherManifest, Package, PackageId, SourceId};
use util::{self, Config}; use util::{self, Config};
use util::errors::{CargoResult, CargoError}; use util::errors::{CargoError, CargoResult};
use util::important_paths::find_project_manifest_exact; use util::important_paths::find_project_manifest_exact;
use util::toml::read_manifest; use util::toml::read_manifest;
pub fn read_package(path: &Path, source_id: &SourceId, config: &Config) pub fn read_package(
-> CargoResult<(Package, Vec<PathBuf>)> { path: &Path,
trace!("read_package; path={}; source-id={}", path.display(), source_id); source_id: &SourceId,
config: &Config,
) -> CargoResult<(Package, Vec<PathBuf>)> {
trace!(
"read_package; path={}; source-id={}",
path.display(),
source_id
);
let (manifest, nested) = read_manifest(path, source_id, config)?; let (manifest, nested) = read_manifest(path, source_id, config)?;
let manifest = match manifest { let manifest = match manifest {
EitherManifest::Real(manifest) => manifest, EitherManifest::Real(manifest) => manifest,
EitherManifest::Virtual(..) => { EitherManifest::Virtual(..) => bail!(
bail!("found a virtual manifest at `{}` instead of a package \ "found a virtual manifest at `{}` instead of a package \
manifest", path.display()) manifest",
} path.display()
),
}; };
Ok((Package::new(manifest, path), nested)) Ok((Package::new(manifest, path), nested))
} }
pub fn read_packages(path: &Path, source_id: &SourceId, config: &Config) pub fn read_packages(
-> CargoResult<Vec<Package>> { path: &Path,
source_id: &SourceId,
config: &Config,
) -> CargoResult<Vec<Package>> {
let mut all_packages = HashMap::new(); let mut all_packages = HashMap::new();
let mut visited = HashSet::<PathBuf>::new(); let mut visited = HashSet::<PathBuf>::new();
let mut errors = Vec::<CargoError>::new(); let mut errors = Vec::<CargoError>::new();
trace!("looking for root package: {}, source_id={}", path.display(), source_id); trace!(
"looking for root package: {}, source_id={}",
path.display(),
source_id
);
walk(path, &mut |dir| { walk(path, &mut |dir| {
trace!("looking for child package: {}", dir.display()); trace!("looking for child package: {}", dir.display());
@ -39,24 +54,31 @@ pub fn read_packages(path: &Path, source_id: &SourceId, config: &Config)
if dir != path { if dir != path {
let name = dir.file_name().and_then(|s| s.to_str()); let name = dir.file_name().and_then(|s| s.to_str());
if name.map(|s| s.starts_with('.')) == Some(true) { if name.map(|s| s.starts_with('.')) == Some(true) {
return Ok(false) return Ok(false);
} }
// Don't automatically discover packages across git submodules // Don't automatically discover packages across git submodules
if fs::metadata(&dir.join(".git")).is_ok() { if fs::metadata(&dir.join(".git")).is_ok() {
return Ok(false) return Ok(false);
} }
} }
// Don't ever look at target directories // Don't ever look at target directories
if dir.file_name().and_then(|s| s.to_str()) == Some("target") && if dir.file_name().and_then(|s| s.to_str()) == Some("target")
has_manifest(dir.parent().unwrap()) { && has_manifest(dir.parent().unwrap())
return Ok(false) {
return Ok(false);
} }
if has_manifest(dir) { if has_manifest(dir) {
read_nested_packages(dir, &mut all_packages, source_id, config, read_nested_packages(
&mut visited, &mut errors)?; dir,
&mut all_packages,
source_id,
config,
&mut visited,
&mut errors,
)?;
} }
Ok(true) Ok(true)
})?; })?;
@ -64,31 +86,31 @@ pub fn read_packages(path: &Path, source_id: &SourceId, config: &Config)
if all_packages.is_empty() { if all_packages.is_empty() {
match errors.pop() { match errors.pop() {
Some(err) => Err(err), Some(err) => Err(err),
None => Err(format_err!("Could not find Cargo.toml in `{}`", path.display())), None => Err(format_err!(
"Could not find Cargo.toml in `{}`",
path.display()
)),
} }
} else { } else {
Ok(all_packages.into_iter().map(|(_, v)| v).collect()) Ok(all_packages.into_iter().map(|(_, v)| v).collect())
} }
} }
fn walk(path: &Path, callback: &mut FnMut(&Path) -> CargoResult<bool>) fn walk(path: &Path, callback: &mut FnMut(&Path) -> CargoResult<bool>) -> CargoResult<()> {
-> CargoResult<()> {
if !callback(path)? { if !callback(path)? {
trace!("not processing {}", path.display()); trace!("not processing {}", path.display());
return Ok(()) return Ok(());
} }
// Ignore any permission denied errors because temporary directories // Ignore any permission denied errors because temporary directories
// can often have some weird permissions on them. // can often have some weird permissions on them.
let dirs = match fs::read_dir(path) { let dirs = match fs::read_dir(path) {
Ok(dirs) => dirs, Ok(dirs) => dirs,
Err(ref e) if e.kind() == io::ErrorKind::PermissionDenied => { Err(ref e) if e.kind() == io::ErrorKind::PermissionDenied => return Ok(()),
return Ok(())
}
Err(e) => { Err(e) => {
let cx = format!("failed to read directory `{}`", path.display()); let cx = format!("failed to read directory `{}`", path.display());
let e = CargoError::from(e); let e = CargoError::from(e);
return Err(e.context(cx).into()) return Err(e.context(cx).into());
} }
}; };
for dir in dirs { for dir in dirs {
@ -104,13 +126,17 @@ fn has_manifest(path: &Path) -> bool {
find_project_manifest_exact(path, "Cargo.toml").is_ok() find_project_manifest_exact(path, "Cargo.toml").is_ok()
} }
fn read_nested_packages(path: &Path, fn read_nested_packages(
all_packages: &mut HashMap<PackageId, Package>, path: &Path,
source_id: &SourceId, all_packages: &mut HashMap<PackageId, Package>,
config: &Config, source_id: &SourceId,
visited: &mut HashSet<PathBuf>, config: &Config,
errors: &mut Vec<CargoError>) -> CargoResult<()> { visited: &mut HashSet<PathBuf>,
if !visited.insert(path.to_path_buf()) { return Ok(()) } errors: &mut Vec<CargoError>,
) -> CargoResult<()> {
if !visited.insert(path.to_path_buf()) {
return Ok(());
}
let manifest_path = find_project_manifest_exact(path, "Cargo.toml")?; let manifest_path = find_project_manifest_exact(path, "Cargo.toml")?;
@ -123,12 +149,14 @@ fn read_nested_packages(path: &Path,
// it's safer to ignore malformed manifests to avoid // it's safer to ignore malformed manifests to avoid
// //
// TODO: Add a way to exclude folders? // TODO: Add a way to exclude folders?
info!("skipping malformed package found at `{}`", info!(
path.to_string_lossy()); "skipping malformed package found at `{}`",
path.to_string_lossy()
);
errors.push(err); errors.push(err);
return Ok(()); return Ok(());
} }
Ok(tuple) => tuple Ok(tuple) => tuple,
}; };
let manifest = match manifest { let manifest = match manifest {
@ -138,12 +166,17 @@ fn read_nested_packages(path: &Path,
let pkg = Package::new(manifest, &manifest_path); let pkg = Package::new(manifest, &manifest_path);
let pkg_id = pkg.package_id().clone(); let pkg_id = pkg.package_id().clone();
use ::std::collections::hash_map::Entry; use std::collections::hash_map::Entry;
match all_packages.entry(pkg_id) { match all_packages.entry(pkg_id) {
Entry::Vacant(v) => { v.insert(pkg); }, Entry::Vacant(v) => {
v.insert(pkg);
}
Entry::Occupied(_) => { Entry::Occupied(_) => {
info!("skipping nested package `{}` found at `{}`", info!(
pkg.name(), path.to_string_lossy()); "skipping nested package `{}` found at `{}`",
pkg.name(),
path.to_string_lossy()
);
} }
} }
@ -158,8 +191,7 @@ fn read_nested_packages(path: &Path,
if !source_id.is_registry() { if !source_id.is_registry() {
for p in nested.iter() { for p in nested.iter() {
let path = util::normalize_path(&path.join(p)); let path = util::normalize_path(&path.join(p));
read_nested_packages(&path, all_packages, source_id, read_nested_packages(&path, all_packages, source_id, config, visited, errors)?;
config, visited, errors)?;
} }
} }
View file
@ -4,35 +4,40 @@ use ops::{self, Packages};
use util::{self, CargoResult, ProcessError}; use util::{self, CargoResult, ProcessError};
use core::Workspace; use core::Workspace;
pub fn run(ws: &Workspace, pub fn run(
options: &ops::CompileOptions, ws: &Workspace,
args: &[String]) -> CargoResult<Option<ProcessError>> { options: &ops::CompileOptions,
args: &[String],
) -> CargoResult<Option<ProcessError>> {
let config = ws.config(); let config = ws.config();
let pkg = match options.spec { let pkg = match options.spec {
Packages::All | Packages::All | Packages::Default | Packages::OptOut(_) => {
Packages::Default | unreachable!("cargo run supports single package only")
Packages::OptOut(_) => unreachable!("cargo run supports single package only"), }
Packages::Packages(ref xs) => match xs.len() { Packages::Packages(ref xs) => match xs.len() {
0 => ws.current()?, 0 => ws.current()?,
1 => ws.members() 1 => ws.members()
.find(|pkg| &*pkg.name() == xs[0]) .find(|pkg| &*pkg.name() == xs[0])
.ok_or_else(|| .ok_or_else(|| {
format_err!("package `{}` is not a member of the workspace", xs[0]) format_err!("package `{}` is not a member of the workspace", xs[0])
)?, })?,
_ => unreachable!("cargo run supports single package only"), _ => unreachable!("cargo run supports single package only"),
} },
}; };
let bins: Vec<_> = pkg.manifest().targets().iter().filter(|a| { let bins: Vec<_> = pkg.manifest()
!a.is_lib() && !a.is_custom_build() && if !options.filter.is_specific() { .targets()
a.is_bin() .iter()
} else { .filter(|a| {
options.filter.matches(a) !a.is_lib() && !a.is_custom_build() && if !options.filter.is_specific() {
} a.is_bin()
}) } else {
.map(|bin| bin.name()) options.filter.matches(a)
.collect(); }
})
.map(|bin| bin.name())
.collect();
if bins.is_empty() { if bins.is_empty() {
if !options.filter.is_specific() { if !options.filter.is_specific() {
@ -43,12 +48,17 @@ pub fn run(ws: &Workspace,
} }
if bins.len() > 1 { if bins.len() > 1 {
if !options.filter.is_specific() { if !options.filter.is_specific() {
bail!("`cargo run` requires that a project only have one \ bail!(
executable; use the `--bin` option to specify which one \ "`cargo run` requires that a project only have one \
to run\navailable binaries: {}", bins.join(", ")) executable; use the `--bin` option to specify which one \
to run\navailable binaries: {}",
bins.join(", ")
)
} else { } else {
bail!("`cargo run` can run at most one executable, but \ bail!(
multiple were specified") "`cargo run` can run at most one executable, but \
multiple were specified"
)
} }
} }
@ -56,8 +66,9 @@ pub fn run(ws: &Workspace,
assert_eq!(compile.binaries.len(), 1); assert_eq!(compile.binaries.len(), 1);
let exe = &compile.binaries[0]; let exe = &compile.binaries[0];
let exe = match util::without_prefix(exe, config.cwd()) { let exe = match util::without_prefix(exe, config.cwd()) {
Some(path) if path.file_name() == Some(path.as_os_str()) Some(path) if path.file_name() == Some(path.as_os_str()) => {
=> Path::new(".").join(path).to_path_buf(), Path::new(".").join(path).to_path_buf()
}
Some(path) => path.to_path_buf(), Some(path) => path.to_path_buf(),
None => exe.to_path_buf(), None => exe.to_path_buf(),
}; };
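
Aside: the match above rewrites a bare executable name as `./name` for display. A sketch of just that rule:

    use std::path::Path;

    // A path that is nothing but a file name gets a `./` prefix so the
    // printed invocation is unambiguous; anything longer passes through.
    fn display_path(exe: &Path) -> std::path::PathBuf {
        if exe.file_name() == Some(exe.as_os_str()) {
            Path::new(".").join(exe)
        } else {
            exe.to_path_buf()
        }
    }

    fn main() {
        assert_eq!(display_path(Path::new("demo")), Path::new("./demo"));
        assert_eq!(display_path(Path::new("target/debug/demo")), Path::new("target/debug/demo"));
    }
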
View file
@ -1,12 +1,12 @@
use std::collections::{HashMap, HashSet, BTreeSet}; use std::collections::{BTreeSet, HashMap, HashSet};
use std::ffi::OsStr; use std::ffi::OsStr;
use std::path::PathBuf; use std::path::PathBuf;
use semver::Version; use semver::Version;
use lazycell::LazyCell; use lazycell::LazyCell;
use core::{PackageId, Package, Target, TargetKind}; use core::{Package, PackageId, Target, TargetKind};
use util::{self, CargoResult, Config, ProcessBuilder, process, join_paths}; use util::{self, join_paths, process, CargoResult, Config, ProcessBuilder};
/// A structure returning the result of a compilation. /// A structure returning the result of a compilation.
pub struct Compilation<'cfg> { pub struct Compilation<'cfg> {
@ -68,7 +68,7 @@ impl<'cfg> Compilation<'cfg> {
pub fn new(config: &'cfg Config) -> Compilation<'cfg> { pub fn new(config: &'cfg Config) -> Compilation<'cfg> {
Compilation { Compilation {
libraries: HashMap::new(), libraries: HashMap::new(),
native_dirs: BTreeSet::new(), // TODO: deprecated, remove native_dirs: BTreeSet::new(), // TODO: deprecated, remove
root_output: PathBuf::from("/"), root_output: PathBuf::from("/"),
deps_output: PathBuf::from("/"), deps_output: PathBuf::from("/"),
host_deps_output: PathBuf::from("/"), host_deps_output: PathBuf::from("/"),
@ -97,8 +97,11 @@ impl<'cfg> Compilation<'cfg> {
} }
/// See `process`. /// See `process`.
pub fn host_process<T: AsRef<OsStr>>(&self, cmd: T, pkg: &Package) pub fn host_process<T: AsRef<OsStr>>(
-> CargoResult<ProcessBuilder> { &self,
cmd: T,
pkg: &Package,
) -> CargoResult<ProcessBuilder> {
self.fill_env(process(cmd), pkg, true) self.fill_env(process(cmd), pkg, true)
} }
@ -110,8 +113,11 @@ impl<'cfg> Compilation<'cfg> {
} }
/// See `process`. /// See `process`.
pub fn target_process<T: AsRef<OsStr>>(&self, cmd: T, pkg: &Package) pub fn target_process<T: AsRef<OsStr>>(
-> CargoResult<ProcessBuilder> { &self,
cmd: T,
pkg: &Package,
) -> CargoResult<ProcessBuilder> {
let builder = if let Some((ref runner, ref args)) = *self.target_runner()? { let builder = if let Some((ref runner, ref args)) = *self.target_runner()? {
let mut builder = process(runner); let mut builder = process(runner);
builder.args(args); builder.args(args);
@ -128,17 +134,19 @@ impl<'cfg> Compilation<'cfg> {
/// ///
/// The package argument is also used to configure environment variables as /// The package argument is also used to configure environment variables as
/// well as the working directory of the child process. /// well as the working directory of the child process.
fn fill_env(&self, mut cmd: ProcessBuilder, pkg: &Package, is_host: bool) fn fill_env(
-> CargoResult<ProcessBuilder> { &self,
mut cmd: ProcessBuilder,
pkg: &Package,
is_host: bool,
) -> CargoResult<ProcessBuilder> {
let mut search_path = if is_host { let mut search_path = if is_host {
let mut search_path = vec![self.host_deps_output.clone()]; let mut search_path = vec![self.host_deps_output.clone()];
search_path.extend(self.host_dylib_path.clone()); search_path.extend(self.host_dylib_path.clone());
search_path search_path
} else { } else {
let mut search_path = let mut search_path =
super::filter_dynamic_search_path(self.native_dirs.iter(), super::filter_dynamic_search_path(self.native_dirs.iter(), &self.root_output);
&self.root_output);
search_path.push(self.root_output.clone()); search_path.push(self.root_output.clone());
search_path.push(self.deps_output.clone()); search_path.push(self.deps_output.clone());
search_path.extend(self.target_dylib_path.clone()); search_path.extend(self.target_dylib_path.clone());
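
Aside: this `search_path` is presumably joined into the dynamic-library search variable later in `fill_env` via cargo's internal `util::join_paths`, which behaves like the standard library's `env::join_paths`, shown here as an approximation:

    use std::env;
    use std::path::PathBuf;

    // Collected directories are joined into one PATH-style value suitable
    // for handing to a child process's environment.
    fn main() {
        let search_path = vec![
            PathBuf::from("target/debug/deps"),
            PathBuf::from("target/debug"),
        ];
        let joined = env::join_paths(search_path).expect("paths contained a separator");
        println!("dylib search path: {:?}", joined);
    }
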
@ -165,16 +173,25 @@ impl<'cfg> Compilation<'cfg> {
// consider adding the corresponding properties to the hash // consider adding the corresponding properties to the hash
// in Context::target_metadata() // in Context::target_metadata()
cmd.env("CARGO_MANIFEST_DIR", pkg.root()) cmd.env("CARGO_MANIFEST_DIR", pkg.root())
.env("CARGO_PKG_VERSION_MAJOR", &pkg.version().major.to_string()) .env("CARGO_PKG_VERSION_MAJOR", &pkg.version().major.to_string())
.env("CARGO_PKG_VERSION_MINOR", &pkg.version().minor.to_string()) .env("CARGO_PKG_VERSION_MINOR", &pkg.version().minor.to_string())
.env("CARGO_PKG_VERSION_PATCH", &pkg.version().patch.to_string()) .env("CARGO_PKG_VERSION_PATCH", &pkg.version().patch.to_string())
.env("CARGO_PKG_VERSION_PRE", &pre_version_component(pkg.version())) .env(
.env("CARGO_PKG_VERSION", &pkg.version().to_string()) "CARGO_PKG_VERSION_PRE",
.env("CARGO_PKG_NAME", &*pkg.name()) &pre_version_component(pkg.version()),
.env("CARGO_PKG_DESCRIPTION", metadata.description.as_ref().unwrap_or(&String::new())) )
.env("CARGO_PKG_HOMEPAGE", metadata.homepage.as_ref().unwrap_or(&String::new())) .env("CARGO_PKG_VERSION", &pkg.version().to_string())
.env("CARGO_PKG_AUTHORS", &pkg.authors().join(":")) .env("CARGO_PKG_NAME", &*pkg.name())
.cwd(pkg.root()); .env(
"CARGO_PKG_DESCRIPTION",
metadata.description.as_ref().unwrap_or(&String::new()),
)
.env(
"CARGO_PKG_HOMEPAGE",
metadata.homepage.as_ref().unwrap_or(&String::new()),
)
.env("CARGO_PKG_AUTHORS", &pkg.authors().join(":"))
.cwd(pkg.root());
Ok(cmd) Ok(cmd)
} }
} }
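
Aside: the `CARGO_PKG_*` variables set in `fill_env` above are plain process environment variables, visible to anything cargo launches. A sketch that inspects them at run time:

    use std::env;

    // Binaries launched through `cargo run`/`cargo test` see the variables
    // fill_env injects; outside cargo the lookup simply fails.
    fn main() {
        for key in ["CARGO_PKG_NAME", "CARGO_PKG_VERSION", "CARGO_MANIFEST_DIR"] {
            match env::var(key) {
                Ok(val) => println!("{} = {}", key, val),
                Err(_) => println!("{} is not set (not running under cargo)", key),
            }
        }
    }
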
@ -187,7 +204,9 @@ fn pre_version_component(v: &Version) -> String {
let mut ret = String::new(); let mut ret = String::new();
for (i, x) in v.pre.iter().enumerate() { for (i, x) in v.pre.iter().enumerate() {
if i != 0 { ret.push('.') }; if i != 0 {
ret.push('.')
};
ret.push_str(&x.to_string()); ret.push_str(&x.to_string());
} }
View file
@ -1,10 +1,10 @@
#![allow(deprecated)] #![allow(deprecated)]
use std::collections::{HashSet, HashMap, BTreeSet}; use std::collections::{BTreeSet, HashMap, HashSet};
use std::collections::hash_map::Entry; use std::collections::hash_map::Entry;
use std::env; use std::env;
use std::fmt; use std::fmt;
use std::hash::{Hasher, Hash, SipHasher}; use std::hash::{Hash, Hasher, SipHasher};
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::str::{self, FromStr}; use std::str::{self, FromStr};
use std::sync::Arc; use std::sync::Arc;
@ -12,18 +12,18 @@ use std::cell::RefCell;
use jobserver::Client; use jobserver::Client;
use core::{Package, PackageId, PackageSet, Resolve, Target, Profile}; use core::{Package, PackageId, PackageSet, Profile, Resolve, Target};
use core::{TargetKind, Profiles, Dependency, Workspace}; use core::{Dependency, Profiles, TargetKind, Workspace};
use core::dependency::Kind as DepKind; use core::dependency::Kind as DepKind;
use util::{self, ProcessBuilder, internal, Config, profile, Cfg, CfgExpr}; use util::{self, internal, profile, Cfg, CfgExpr, Config, ProcessBuilder};
use util::errors::{CargoResult, CargoResultExt}; use util::errors::{CargoResult, CargoResultExt};
use super::TargetConfig; use super::TargetConfig;
use super::custom_build::{BuildState, BuildScripts, BuildDeps}; use super::custom_build::{BuildDeps, BuildScripts, BuildState};
use super::fingerprint::Fingerprint; use super::fingerprint::Fingerprint;
use super::layout::Layout; use super::layout::Layout;
use super::links::Links; use super::links::Links;
use super::{Kind, Compilation, BuildConfig}; use super::{BuildConfig, Compilation, Kind};
/// All information needed to define a Unit. /// All information needed to define a Unit.
/// ///
@ -126,8 +126,11 @@ impl TargetInfo {
process.arg("--crate-type").arg(crate_type); process.arg("--crate-type").arg(crate_type);
let output = process.exec_with_output().chain_err(|| { let output = process.exec_with_output().chain_err(|| {
format!("failed to run `rustc` to learn about \ format!(
crate-type {} information", crate_type) "failed to run `rustc` to learn about \
crate-type {} information",
crate_type
)
})?; })?;
let error = str::from_utf8(&output.stderr).unwrap(); let error = str::from_utf8(&output.stderr).unwrap();
@ -140,14 +143,19 @@ impl TargetInfo {
pub struct Metadata(u64); pub struct Metadata(u64);
impl<'a, 'cfg> Context<'a, 'cfg> { impl<'a, 'cfg> Context<'a, 'cfg> {
pub fn new(ws: &'a Workspace<'cfg>, pub fn new(
resolve: &'a Resolve, ws: &'a Workspace<'cfg>,
packages: &'a PackageSet<'cfg>, resolve: &'a Resolve,
config: &'cfg Config, packages: &'a PackageSet<'cfg>,
build_config: BuildConfig, config: &'cfg Config,
profiles: &'a Profiles) -> CargoResult<Context<'a, 'cfg>> { build_config: BuildConfig,
profiles: &'a Profiles,
let dest = if build_config.release { "release" } else { "debug" }; ) -> CargoResult<Context<'a, 'cfg>> {
let dest = if build_config.release {
"release"
} else {
"debug"
};
let host_layout = Layout::new(ws, None, dest)?; let host_layout = Layout::new(ws, None, dest)?;
let target_layout = match build_config.requested_target.as_ref() { let target_layout = match build_config.requested_target.as_ref() {
Some(target) => Some(Layout::new(ws, Some(target), dest)?), Some(target) => Some(Layout::new(ws, Some(target), dest)?),
@ -169,9 +177,8 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
// is ourself, a running process. // is ourself, a running process.
let jobserver = match config.jobserver_from_env() { let jobserver = match config.jobserver_from_env() {
Some(c) => c.clone(), Some(c) => c.clone(),
None => Client::new(build_config.jobs as usize - 1).chain_err(|| { None => Client::new(build_config.jobs as usize - 1)
"failed to create jobserver" .chain_err(|| "failed to create jobserver")?,
})?,
}; };
Ok(Context { Ok(Context {
@ -208,13 +215,13 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
pub fn prepare(&mut self) -> CargoResult<()> { pub fn prepare(&mut self) -> CargoResult<()> {
let _p = profile::start("preparing layout"); let _p = profile::start("preparing layout");
self.host.prepare().chain_err(|| { self.host
internal("couldn't prepare build directories") .prepare()
})?; .chain_err(|| internal("couldn't prepare build directories"))?;
if let Some(ref mut target) = self.target { if let Some(ref mut target) = self.target {
target.prepare().chain_err(|| { target
internal("couldn't prepare build directories") .prepare()
})?; .chain_err(|| internal("couldn't prepare build directories"))?;
} }
self.compilation.host_deps_output = self.host.deps().to_path_buf(); self.compilation.host_deps_output = self.host.deps().to_path_buf();
@ -252,11 +259,12 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
/// for this unit and its dependencies. /// for this unit and its dependencies.
/// ///
/// Tracks visited units to avoid unnecessary work. /// Tracks visited units to avoid unnecessary work.
fn visit_crate_type(&self, fn visit_crate_type(
unit: &Unit<'a>, &self,
crate_types: &mut BTreeSet<String>, unit: &Unit<'a>,
visited_units: &mut HashSet<Unit<'a>>) crate_types: &mut BTreeSet<String>,
-> CargoResult<()> { visited_units: &mut HashSet<Unit<'a>>,
) -> CargoResult<()> {
if !visited_units.insert(*unit) { if !visited_units.insert(*unit) {
return Ok(()); return Ok(());
} }
@ -275,21 +283,26 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
Ok(()) Ok(())
} }
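The `visited_units` early return above is what keeps this recursive walk linear: a unit reachable along many dependency paths is expanded at most once. A minimal standalone sketch of the same pattern, using toy string nodes and a plain `deps` map instead of Cargo's `Unit` type:

    use std::collections::{HashMap, HashSet};

    // Recursively visit `node` and everything reachable from it, once each.
    fn visit(
        node: &str,
        deps: &HashMap<&str, Vec<&str>>,
        visited: &mut HashSet<String>,
        order: &mut Vec<String>,
    ) {
        // `insert` returns false when the node was already seen; bail out
        // early, exactly like `visited_units.insert(*unit)` above.
        if !visited.insert(node.to_string()) {
            return;
        }
        order.push(node.to_string());
        for &dep in deps.get(node).into_iter().flatten() {
            visit(dep, deps, visited, order);
        }
    }

    fn main() {
        let mut deps = HashMap::new();
        deps.insert("app", vec!["liba", "libb"]);
        deps.insert("liba", vec!["libb"]); // shared dep: still visited once
        let (mut visited, mut order) = (HashSet::new(), Vec::new());
        visit("app", &deps, &mut visited, &mut order);
        assert_eq!(order, ["app", "liba", "libb"]);
    }

`walk_used_in_plugin_map` further down uses the same guard, except its set is keyed on `(Unit, bool)`, so a unit can be expanded a second time if it is later reached in plugin position.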
fn probe_target_info_kind(&mut self, fn probe_target_info_kind(
crate_types: &BTreeSet<String>, &mut self,
kind: Kind) crate_types: &BTreeSet<String>,
-> CargoResult<()> { kind: Kind,
let rustflags = env_args(self.config, ) -> CargoResult<()> {
&self.build_config, let rustflags = env_args(
self.info(&kind), self.config,
kind, &self.build_config,
"RUSTFLAGS")?; self.info(&kind),
kind,
"RUSTFLAGS",
)?;
let mut process = self.config.rustc()?.process(); let mut process = self.config.rustc()?.process();
process.arg("-") process
.arg("--crate-name").arg("___") .arg("-")
.arg("--print=file-names") .arg("--crate-name")
.args(&rustflags) .arg("___")
.env_remove("RUST_LOG"); .arg("--print=file-names")
.args(&rustflags)
.env_remove("RUST_LOG");
if kind == Kind::Target { if kind == Kind::Target {
process.arg("--target").arg(&self.target_triple()); process.arg("--target").arg(&self.target_triple());
@ -306,12 +319,13 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
with_cfg.arg("--print=cfg"); with_cfg.arg("--print=cfg");
let mut has_cfg_and_sysroot = true; let mut has_cfg_and_sysroot = true;
let output = with_cfg.exec_with_output().or_else(|_| { let output = with_cfg
has_cfg_and_sysroot = false; .exec_with_output()
process.exec_with_output() .or_else(|_| {
}).chain_err(|| { has_cfg_and_sysroot = false;
"failed to run `rustc` to learn about target-specific information" process.exec_with_output()
})?; })
.chain_err(|| "failed to run `rustc` to learn about target-specific information")?;
let error = str::from_utf8(&output.stderr).unwrap(); let error = str::from_utf8(&output.stderr).unwrap();
let output = str::from_utf8(&output.stdout).unwrap(); let output = str::from_utf8(&output.stdout).unwrap();
@ -325,8 +339,10 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
if has_cfg_and_sysroot { if has_cfg_and_sysroot {
let line = match lines.next() { let line = match lines.next() {
Some(line) => line, Some(line) => line,
None => bail!("output of --print=sysroot missing when learning about \ None => bail!(
target-specific information from rustc"), "output of --print=sysroot missing when learning about \
target-specific information from rustc"
),
}; };
let mut rustlib = PathBuf::from(line); let mut rustlib = PathBuf::from(line);
if kind == Kind::Host { if kind == Kind::Host {
@ -366,32 +382,28 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
/// ///
/// This will recursively walk `units` and all of their dependencies to /// This will recursively walk `units` and all of their dependencies to
/// determine which crates are going to be used in plugins or not. /// determine which crates are going to be used in plugins or not.
pub fn build_used_in_plugin_map(&mut self, units: &[Unit<'a>]) pub fn build_used_in_plugin_map(&mut self, units: &[Unit<'a>]) -> CargoResult<()> {
-> CargoResult<()> {
let mut visited = HashSet::new(); let mut visited = HashSet::new();
for unit in units { for unit in units {
self.walk_used_in_plugin_map(unit, self.walk_used_in_plugin_map(unit, unit.target.for_host(), &mut visited)?;
unit.target.for_host(),
&mut visited)?;
} }
Ok(()) Ok(())
} }
fn walk_used_in_plugin_map(&mut self, fn walk_used_in_plugin_map(
unit: &Unit<'a>, &mut self,
is_plugin: bool, unit: &Unit<'a>,
visited: &mut HashSet<(Unit<'a>, bool)>) is_plugin: bool,
-> CargoResult<()> { visited: &mut HashSet<(Unit<'a>, bool)>,
) -> CargoResult<()> {
if !visited.insert((*unit, is_plugin)) { if !visited.insert((*unit, is_plugin)) {
return Ok(()) return Ok(());
} }
if is_plugin { if is_plugin {
self.used_in_plugin.insert(*unit); self.used_in_plugin.insert(*unit);
} }
for unit in self.dep_targets(unit)? { for unit in self.dep_targets(unit)? {
self.walk_used_in_plugin_map(&unit, self.walk_used_in_plugin_map(&unit, is_plugin || unit.target.for_host(), visited)?;
is_plugin || unit.target.for_host(),
visited)?;
} }
Ok(()) Ok(())
} }
@ -400,7 +412,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
fn layout(&self, kind: Kind) -> &Layout { fn layout(&self, kind: Kind) -> &Layout {
match kind { match kind {
Kind::Host => &self.host, Kind::Host => &self.host,
Kind::Target => self.target.as_ref().unwrap_or(&self.host) Kind::Target => self.target.as_ref().unwrap_or(&self.host),
} }
} }
@ -471,7 +483,8 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
/// Return the target triple which this context is targeting. /// Return the target triple which this context is targeting.
pub fn target_triple(&self) -> &str { pub fn target_triple(&self) -> &str {
self.requested_target().unwrap_or_else(|| self.host_triple()) self.requested_target()
.unwrap_or_else(|| self.host_triple())
} }
/// Requested (not actual) target for the build /// Requested (not actual) target for the build
@ -492,7 +505,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
/// like `target/debug/libfoo.{a,so,rlib}` and such. /// like `target/debug/libfoo.{a,so,rlib}` and such.
pub fn target_metadata(&mut self, unit: &Unit<'a>) -> Option<Metadata> { pub fn target_metadata(&mut self, unit: &Unit<'a>) -> Option<Metadata> {
if let Some(cache) = self.target_metadatas.get(unit) { if let Some(cache) = self.target_metadatas.get(unit) {
return cache.clone() return cache.clone();
} }
let metadata = self.calc_target_metadata(unit); let metadata = self.calc_target_metadata(unit);
@ -522,11 +535,11 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
// just here for rustbuild. We need a more principled method // just here for rustbuild. We need a more principled method
// of doing this eventually. // of doing this eventually.
let __cargo_default_lib_metadata = env::var("__CARGO_DEFAULT_LIB_METADATA"); let __cargo_default_lib_metadata = env::var("__CARGO_DEFAULT_LIB_METADATA");
if !(unit.profile.test || unit.profile.check) && if !(unit.profile.test || unit.profile.check)
(unit.target.is_dylib() || unit.target.is_cdylib() || && (unit.target.is_dylib() || unit.target.is_cdylib()
(unit.target.is_bin() && self.target_triple().starts_with("wasm32-"))) && || (unit.target.is_bin() && self.target_triple().starts_with("wasm32-")))
unit.pkg.package_id().source_id().is_path() && && unit.pkg.package_id().source_id().is_path()
!__cargo_default_lib_metadata.is_ok() && !__cargo_default_lib_metadata.is_ok()
{ {
return None; return None;
} }
@ -535,7 +548,10 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
// Unique metadata per (name, source, version) triple. This'll allow us // Unique metadata per (name, source, version) triple. This'll allow us
// to pull crates from anywhere w/o worrying about conflicts // to pull crates from anywhere w/o worrying about conflicts
unit.pkg.package_id().stable_hash(self.ws.root()).hash(&mut hasher); unit.pkg
.package_id()
.stable_hash(self.ws.root())
.hash(&mut hasher);
// Add package properties which map to environment variables // Add package properties which map to environment variables
// exposed by Cargo // exposed by Cargo
@ -546,13 +562,15 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
// Also mix in enabled features to our metadata. This'll ensure that // Also mix in enabled features to our metadata. This'll ensure that
// when changing feature sets each lib is separately cached. // when changing feature sets each lib is separately cached.
self.resolve.features_sorted(unit.pkg.package_id()).hash(&mut hasher); self.resolve
.features_sorted(unit.pkg.package_id())
.hash(&mut hasher);
// Mix in the target-metadata of all the dependencies of this target // Mix in the target-metadata of all the dependencies of this target
if let Ok(deps) = self.dep_targets(unit) { if let Ok(deps) = self.dep_targets(unit) {
let mut deps_metadata = deps.into_iter().map(|dep_unit| { let mut deps_metadata = deps.into_iter()
self.target_metadata(&dep_unit) .map(|dep_unit| self.target_metadata(&dep_unit))
}).collect::<Vec<_>>(); .collect::<Vec<_>>();
deps_metadata.sort(); deps_metadata.sort();
deps_metadata.hash(&mut hasher); deps_metadata.hash(&mut hasher);
} }
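The hashing above folds the package's stable id, its name/source/version properties, the sorted feature set, and each dependency's metadata into a single value. A self-contained sketch of the idea with std's `DefaultHasher`; the field list here is illustrative, not Cargo's exact set:

    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    // Mix everything that distinguishes two builds of the "same" crate
    // into one u64, in the spirit of `calc_target_metadata` above.
    fn metadata_hash(
        name: &str,
        version: &str,
        features: &[&str],        // assumed pre-sorted, for a stable hash
        dep_metadata: &mut [u64], // metadata values of the dependencies
    ) -> u64 {
        let mut hasher = DefaultHasher::new();
        name.hash(&mut hasher);
        version.hash(&mut hasher);
        features.hash(&mut hasher);
        dep_metadata.sort(); // order-independent, as in the code above
        dep_metadata.hash(&mut hasher);
        hasher.finish()
    }

    fn main() {
        let m1 = metadata_hash("foo", "1.0.0", &["default"], &mut [1, 2]);
        let m2 = metadata_hash("foo", "1.0.0", &["serde"], &mut [1, 2]);
        assert_ne!(m1, m2); // different feature sets, different metadata
        println!("foo-{:016x}", m1); // cf. `file_stem`: `{name}-{metadata}`
    }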
@ -588,8 +606,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
/// Returns the file stem for a given target/profile combo (with metadata) /// Returns the file stem for a given target/profile combo (with metadata)
pub fn file_stem(&mut self, unit: &Unit<'a>) -> String { pub fn file_stem(&mut self, unit: &Unit<'a>) -> String {
match self.target_metadata(unit) { match self.target_metadata(unit) {
Some(ref metadata) => format!("{}-{}", unit.target.crate_name(), Some(ref metadata) => format!("{}-{}", unit.target.crate_name(), metadata),
metadata),
None => self.bin_stem(unit), None => self.bin_stem(unit),
} }
} }
@ -621,19 +638,21 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
// we don't want to link it up. // we don't want to link it up.
if src_dir.ends_with("deps") { if src_dir.ends_with("deps") {
// Don't lift up library dependencies // Don't lift up library dependencies
if self.ws.members().find(|&p| p == unit.pkg).is_none() && if self.ws.members().find(|&p| p == unit.pkg).is_none() && !unit.target.is_bin() {
!unit.target.is_bin() {
None None
} else { } else {
Some(( Some((
src_dir.parent().unwrap().to_owned(), src_dir.parent().unwrap().to_owned(),
if unit.profile.test {file_stem} else {bin_stem}, if unit.profile.test {
file_stem
} else {
bin_stem
},
)) ))
} }
} else if bin_stem == file_stem { } else if bin_stem == file_stem {
None None
} else if src_dir.ends_with("examples") } else if src_dir.ends_with("examples") || src_dir.parent().unwrap().ends_with("build") {
|| src_dir.parent().unwrap().ends_with("build") {
Some((src_dir, bin_stem)) Some((src_dir, bin_stem))
} else { } else {
None None
@ -646,10 +665,12 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
/// - filename: filename rustc compiles to. (Often has metadata suffix). /// - filename: filename rustc compiles to. (Often has metadata suffix).
/// - link_dst: Optional file to link/copy the result to (without metadata suffix) /// - link_dst: Optional file to link/copy the result to (without metadata suffix)
/// - linkable: Whether it's possible to link against the file (e.g. it's a library) /// - linkable: Whether it's possible to link against the file (e.g. it's a library)
pub fn target_filenames(&mut self, unit: &Unit<'a>) pub fn target_filenames(
-> CargoResult<Arc<Vec<(PathBuf, Option<PathBuf>, TargetFileType)>>> { &mut self,
unit: &Unit<'a>,
) -> CargoResult<Arc<Vec<(PathBuf, Option<PathBuf>, TargetFileType)>>> {
if let Some(cache) = self.target_filenames.get(unit) { if let Some(cache) = self.target_filenames.get(unit) {
return Ok(Arc::clone(cache)) return Ok(Arc::clone(cache));
} }
let result = self.calc_target_filenames(unit); let result = self.calc_target_filenames(unit);
@ -659,8 +680,10 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
result result
} }
fn calc_target_filenames(&mut self, unit: &Unit<'a>) fn calc_target_filenames(
-> CargoResult<Arc<Vec<(PathBuf, Option<PathBuf>, TargetFileType)>>> { &mut self,
unit: &Unit<'a>,
) -> CargoResult<Arc<Vec<(PathBuf, Option<PathBuf>, TargetFileType)>>> {
let out_dir = self.out_dir(unit); let out_dir = self.out_dir(unit);
let stem = self.file_stem(unit); let stem = self.file_stem(unit);
let link_stem = self.link_stem(unit); let link_stem = self.link_stem(unit);
@ -675,13 +698,17 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
{ {
if unit.profile.check { if unit.profile.check {
let filename = out_dir.join(format!("lib{}.rmeta", stem)); let filename = out_dir.join(format!("lib{}.rmeta", stem));
let link_dst = link_stem.clone().map(|(ld, ls)| { let link_dst = link_stem
ld.join(format!("lib{}.rmeta", ls)) .clone()
}); .map(|(ld, ls)| ld.join(format!("lib{}.rmeta", ls)));
ret.push((filename, link_dst, TargetFileType::Linkable)); ret.push((filename, link_dst, TargetFileType::Linkable));
} else { } else {
let mut add = |crate_type: &str, file_type: TargetFileType| -> CargoResult<()> { let mut add = |crate_type: &str, file_type: TargetFileType| -> CargoResult<()> {
let crate_type = if crate_type == "lib" {"rlib"} else {crate_type}; let crate_type = if crate_type == "lib" {
"rlib"
} else {
crate_type
};
let mut crate_types = info.crate_types.borrow_mut(); let mut crate_types = info.crate_types.borrow_mut();
let entry = crate_types.entry(crate_type.to_string()); let entry = crate_types.entry(crate_type.to_string());
let crate_type_info = match entry { let crate_type_info = match entry {
@ -706,13 +733,19 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
// "-" and "_". should_replace_hyphens is a flag to indicate that // "-" and "_". should_replace_hyphens is a flag to indicate that
// we need to convert the stem "web-stuff" to "web_stuff", so we // we need to convert the stem "web-stuff" to "web_stuff", so we
// won't miss "web_stuff.wasm". // won't miss "web_stuff.wasm".
let conv = |s: String| if should_replace_hyphens { let conv = |s: String| {
s.replace("-", "_") if should_replace_hyphens {
} else { s.replace("-", "_")
s } else {
s
}
}; };
let filename = let filename = out_dir.join(format!(
out_dir.join(format!("{}{}{}", prefix, conv(stem.clone()), suffix)); "{}{}{}",
prefix,
conv(stem.clone()),
suffix
));
let link_dst = link_stem.clone().map(|(ld, ls)| { let link_dst = link_stem.clone().map(|(ld, ls)| {
ld.join(format!("{}{}{}", prefix, conv(ls), suffix)) ld.join(format!("{}{}{}", prefix, conv(ls), suffix))
}); });
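The `should_replace_hyphens` dance exists because rustc normalizes a crate name like `web-stuff` to `web_stuff` in some emitted filenames (the wasm case noted above, and in the rust-lang/cargo#4535 hunk further down). A standalone sketch; `output_filename` is an invented helper name:

    // Build `{prefix}{stem}{suffix}`, converting `-` to `_` first when the
    // target-specific suffix metadata says rustc will have done the same.
    fn output_filename(prefix: &str, stem: &str, suffix: &str, replace_hyphens: bool) -> String {
        let stem = if replace_hyphens {
            stem.replace("-", "_")
        } else {
            stem.to_string()
        };
        format!("{}{}{}", prefix, stem, suffix)
    }

    fn main() {
        // wasm32 binaries: hyphens become underscores
        assert_eq!(output_filename("", "web-stuff", ".wasm", true), "web_stuff.wasm");
        // native libraries keep the stem as-is
        assert_eq!(output_filename("lib", "web-stuff", ".rlib", false), "libweb-stuff.rlib");
    }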
@ -729,26 +762,26 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
}; };
//info!("{:?}", unit); //info!("{:?}", unit);
match *unit.target.kind() { match *unit.target.kind() {
TargetKind::Bin | TargetKind::Bin
TargetKind::CustomBuild | | TargetKind::CustomBuild
TargetKind::ExampleBin | | TargetKind::ExampleBin
TargetKind::Bench | | TargetKind::Bench
TargetKind::Test => { | TargetKind::Test => {
add("bin", TargetFileType::Normal)?; add("bin", TargetFileType::Normal)?;
} }
TargetKind::Lib(..) | TargetKind::Lib(..) | TargetKind::ExampleLib(..) if unit.profile.test => {
TargetKind::ExampleLib(..)
if unit.profile.test => {
add("bin", TargetFileType::Normal)?; add("bin", TargetFileType::Normal)?;
} }
TargetKind::ExampleLib(ref kinds) | TargetKind::ExampleLib(ref kinds) | TargetKind::Lib(ref kinds) => {
TargetKind::Lib(ref kinds) => {
for kind in kinds { for kind in kinds {
add(kind.crate_type(), if kind.linkable() { add(
TargetFileType::Linkable kind.crate_type(),
} else { if kind.linkable() {
TargetFileType::Normal TargetFileType::Linkable
})?; } else {
TargetFileType::Normal
},
)?;
} }
} }
} }
@ -756,13 +789,20 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
} }
if ret.is_empty() { if ret.is_empty() {
if !unsupported.is_empty() { if !unsupported.is_empty() {
bail!("cannot produce {} for `{}` as the target `{}` \ bail!(
does not support these crate types", "cannot produce {} for `{}` as the target `{}` \
unsupported.join(", "), unit.pkg, self.target_triple()) does not support these crate types",
unsupported.join(", "),
unit.pkg,
self.target_triple()
)
} }
bail!("cannot compile `{}` as the target `{}` does not \ bail!(
support any of the output crate types", "cannot compile `{}` as the target `{}` does not \
unit.pkg, self.target_triple()); support any of the output crate types",
unit.pkg,
self.target_triple()
);
} }
info!("Target filenames: {:?}", ret); info!("Target filenames: {:?}", ret);
@ -773,7 +813,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
/// for that package. /// for that package.
pub fn dep_targets(&self, unit: &Unit<'a>) -> CargoResult<Vec<Unit<'a>>> { pub fn dep_targets(&self, unit: &Unit<'a>) -> CargoResult<Vec<Unit<'a>>> {
if unit.profile.run_custom_build { if unit.profile.run_custom_build {
return self.dep_run_custom_build(unit) return self.dep_run_custom_build(unit);
} else if unit.profile.doc && !unit.profile.test { } else if unit.profile.doc && !unit.profile.test {
return self.doc_deps(unit); return self.doc_deps(unit);
} }
@ -781,61 +821,61 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
let id = unit.pkg.package_id(); let id = unit.pkg.package_id();
let deps = self.resolve.deps(id); let deps = self.resolve.deps(id);
let mut ret = deps.filter(|dep| { let mut ret = deps.filter(|dep| {
unit.pkg.dependencies().iter().filter(|d| { unit.pkg
d.name() == dep.name() && d.version_req().matches(dep.version()) .dependencies()
}).any(|d| { .iter()
// If this target is a build command, then we only want build .filter(|d| d.name() == dep.name() && d.version_req().matches(dep.version()))
// dependencies, otherwise we want everything *other than* build .any(|d| {
// dependencies. // If this target is a build command, then we only want build
if unit.target.is_custom_build() != d.is_build() { // dependencies, otherwise we want everything *other than* build
return false // dependencies.
} if unit.target.is_custom_build() != d.is_build() {
return false;
}
// If this dependency is *not* a transitive dependency, then it // If this dependency is *not* a transitive dependency, then it
// only applies to test/example targets // only applies to test/example targets
if !d.is_transitive() && !unit.target.is_test() && if !d.is_transitive() && !unit.target.is_test() && !unit.target.is_example()
!unit.target.is_example() && !unit.profile.test { && !unit.profile.test
return false {
} return false;
}
// If this dependency is only available for certain platforms, // If this dependency is only available for certain platforms,
// make sure we're only enabling it for that platform. // make sure we're only enabling it for that platform.
if !self.dep_platform_activated(d, unit.kind) { if !self.dep_platform_activated(d, unit.kind) {
return false return false;
} }
// If the dependency is optional, then we're only activating it // If the dependency is optional, then we're only activating it
// if the corresponding feature was activated // if the corresponding feature was activated
if d.is_optional() && !self.resolve.features(id).contains(&*d.name()) { if d.is_optional() && !self.resolve.features(id).contains(&*d.name()) {
return false; return false;
} }
// If we've gotten past all that, then this dependency is // If we've gotten past all that, then this dependency is
// actually used! // actually used!
true true
})
}).filter_map(|id| match self.get_package(id) {
Ok(pkg) => pkg.targets().iter().find(|t| t.is_lib()).map(|t| {
let unit = Unit {
pkg,
target: t,
profile: self.lib_or_check_profile(unit, t),
kind: unit.kind.for_target(t),
};
Ok(unit)
}),
Err(e) => Some(Err(e)),
}) })
}).filter_map(|id| { .collect::<CargoResult<Vec<_>>>()?;
match self.get_package(id) {
Ok(pkg) => {
pkg.targets().iter().find(|t| t.is_lib()).map(|t| {
let unit = Unit {
pkg,
target: t,
profile: self.lib_or_check_profile(unit, t),
kind: unit.kind.for_target(t),
};
Ok(unit)
})
}
Err(e) => Some(Err(e))
}
}).collect::<CargoResult<Vec<_>>>()?;
// If this target is a build script, then what we've collected so far is // If this target is a build script, then what we've collected so far is
// all we need. If this isn't a build script, then it depends on the // all we need. If this isn't a build script, then it depends on the
// build script if there is one. // build script if there is one.
if unit.target.is_custom_build() { if unit.target.is_custom_build() {
return Ok(ret) return Ok(ret);
} }
ret.extend(self.dep_build_script(unit)); ret.extend(self.dep_build_script(unit));
@ -844,29 +884,32 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
// didn't include `pkg` in the return values, so we need to special case // didn't include `pkg` in the return values, so we need to special case
// it here and see if we need to push `(pkg, pkg_lib_target)`. // it here and see if we need to push `(pkg, pkg_lib_target)`.
if unit.target.is_lib() && !unit.profile.doc { if unit.target.is_lib() && !unit.profile.doc {
return Ok(ret) return Ok(ret);
} }
ret.extend(self.maybe_lib(unit)); ret.extend(self.maybe_lib(unit));
// Integration tests/benchmarks require binaries to be built // Integration tests/benchmarks require binaries to be built
if unit.profile.test && if unit.profile.test && (unit.target.is_test() || unit.target.is_bench()) {
(unit.target.is_test() || unit.target.is_bench()) { ret.extend(
ret.extend(unit.pkg.targets().iter().filter(|t| { unit.pkg
let no_required_features = Vec::new(); .targets()
.iter()
.filter(|t| {
let no_required_features = Vec::new();
t.is_bin() && t.is_bin() &&
// Skip binaries with required features that have not been selected. // Skip binaries with required features that have not been selected.
t.required_features().unwrap_or(&no_required_features).iter().all(|f| { t.required_features().unwrap_or(&no_required_features).iter().all(|f| {
self.resolve.features(id).contains(f) self.resolve.features(id).contains(f)
}) })
}).map(|t| { })
Unit { .map(|t| Unit {
pkg: unit.pkg, pkg: unit.pkg,
target: t, target: t,
profile: self.lib_or_check_profile(unit, t), profile: self.lib_or_check_profile(unit, t),
kind: unit.kind.for_target(t), kind: unit.kind.for_target(t),
} }),
})); );
} }
Ok(ret) Ok(ret)
} }
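The comments in the filter above spell out four rules for whether a resolved dependency applies to a unit. A toy model of just that decision; the `Dep` struct and its flags are invented stand-ins for Cargo's real types:

    struct Dep {
        is_build: bool,      // a [build-dependencies] entry
        is_transitive: bool, // false for dev-dependencies
        platform_ok: bool,   // stand-in for `dep_platform_activated`
        optional: bool,
        name: &'static str,
    }

    fn wanted(d: &Dep, for_build_script: bool, is_test_like: bool, features: &[&str]) -> bool {
        // Build scripts see only build-dependencies; other targets see the rest.
        if for_build_script != d.is_build {
            return false;
        }
        // Non-transitive (dev) dependencies only apply to test/example targets.
        if !d.is_transitive && !is_test_like {
            return false;
        }
        // Platform-specific dependencies must be active for this platform.
        if !d.platform_ok {
            return false;
        }
        // Optional dependencies require their feature to be enabled.
        if d.optional && !features.contains(&d.name) {
            return false;
        }
        true
    }

    fn main() {
        let serde = Dep {
            is_build: false,
            is_transitive: true,
            platform_ok: true,
            optional: true,
            name: "serde",
        };
        assert!(!wanted(&serde, false, false, &[]));       // feature off
        assert!(wanted(&serde, false, false, &["serde"])); // feature on
    }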
@ -875,14 +918,13 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
/// ///
/// The `unit` provided must represent an execution of a build script, and /// The `unit` provided must represent an execution of a build script, and
/// the returned set of units must all be run before `unit` is run. /// the returned set of units must all be run before `unit` is run.
pub fn dep_run_custom_build(&self, unit: &Unit<'a>) pub fn dep_run_custom_build(&self, unit: &Unit<'a>) -> CargoResult<Vec<Unit<'a>>> {
-> CargoResult<Vec<Unit<'a>>> {
// If this build script's execution has been overridden then we don't // If this build script's execution has been overridden then we don't
// actually depend on anything, we've reached the end of the dependency // actually depend on anything, we've reached the end of the dependency
// chain as we've got all the info we're gonna get. // chain as we've got all the info we're gonna get.
let key = (unit.pkg.package_id().clone(), unit.kind); let key = (unit.pkg.package_id().clone(), unit.kind);
if self.build_script_overridden.contains(&key) { if self.build_script_overridden.contains(&key) {
return Ok(Vec::new()) return Ok(Vec::new());
} }
// When not overridden, then the dependencies to run a build script are: // When not overridden, then the dependencies to run a build script are:
@ -890,42 +932,47 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
// 1. Compiling the build script itself // 1. Compiling the build script itself
// 2. For each immediate dependency of our package which has a `links` // 2. For each immediate dependency of our package which has a `links`
// key, the execution of that build script. // key, the execution of that build script.
let not_custom_build = unit.pkg.targets().iter().find(|t| { let not_custom_build = unit.pkg
!t.is_custom_build() .targets()
}).unwrap(); .iter()
.find(|t| !t.is_custom_build())
.unwrap();
let tmp = Unit { let tmp = Unit {
target: not_custom_build, target: not_custom_build,
profile: &self.profiles.dev, profile: &self.profiles.dev,
..*unit ..*unit
}; };
let deps = self.dep_targets(&tmp)?; let deps = self.dep_targets(&tmp)?;
Ok(deps.iter().filter_map(|unit| { Ok(deps.iter()
if !unit.target.linkable() || unit.pkg.manifest().links().is_none() { .filter_map(|unit| {
return None if !unit.target.linkable() || unit.pkg.manifest().links().is_none() {
} return None;
self.dep_build_script(unit) }
}).chain(Some(Unit { self.dep_build_script(unit)
profile: self.build_script_profile(unit.pkg.package_id()), })
kind: Kind::Host, // build scripts always compiled for the host .chain(Some(Unit {
..*unit profile: self.build_script_profile(unit.pkg.package_id()),
})).collect()) kind: Kind::Host, // build scripts always compiled for the host
..*unit
}))
.collect())
} }
/// Returns the dependencies necessary to document a package /// Returns the dependencies necessary to document a package
fn doc_deps(&self, unit: &Unit<'a>) -> CargoResult<Vec<Unit<'a>>> { fn doc_deps(&self, unit: &Unit<'a>) -> CargoResult<Vec<Unit<'a>>> {
let deps = self.resolve.deps(unit.pkg.package_id()).filter(|dep| { let deps = self.resolve
unit.pkg.dependencies().iter().filter(|d| { .deps(unit.pkg.package_id())
d.name() == dep.name() .filter(|dep| {
}).any(|dep| { unit.pkg
match dep.kind() { .dependencies()
DepKind::Normal => self.dep_platform_activated(dep, .iter()
unit.kind), .filter(|d| d.name() == dep.name())
_ => false, .any(|dep| match dep.kind() {
} DepKind::Normal => self.dep_platform_activated(dep, unit.kind),
_ => false,
})
}) })
}).map(|dep| { .map(|dep| self.get_package(dep));
self.get_package(dep)
});
// To document a library, we depend on dependencies actually being // To document a library, we depend on dependencies actually being
// built. If we're documenting *all* libraries, then we also depend on // built. If we're documenting *all* libraries, then we also depend on
@ -971,25 +1018,29 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
/// of work is still returned. `None` is only returned if the package has no /// of work is still returned. `None` is only returned if the package has no
/// build script. /// build script.
fn dep_build_script(&self, unit: &Unit<'a>) -> Option<Unit<'a>> { fn dep_build_script(&self, unit: &Unit<'a>) -> Option<Unit<'a>> {
unit.pkg.targets().iter().find(|t| t.is_custom_build()).map(|t| { unit.pkg
Unit { .targets()
.iter()
.find(|t| t.is_custom_build())
.map(|t| Unit {
pkg: unit.pkg, pkg: unit.pkg,
target: t, target: t,
profile: &self.profiles.custom_build, profile: &self.profiles.custom_build,
kind: unit.kind, kind: unit.kind,
} })
})
} }
fn maybe_lib(&self, unit: &Unit<'a>) -> Option<Unit<'a>> { fn maybe_lib(&self, unit: &Unit<'a>) -> Option<Unit<'a>> {
unit.pkg.targets().iter().find(|t| t.linkable()).map(|t| { unit.pkg
Unit { .targets()
.iter()
.find(|t| t.linkable())
.map(|t| Unit {
pkg: unit.pkg, pkg: unit.pkg,
target: t, target: t,
profile: self.lib_or_check_profile(unit, t), profile: self.lib_or_check_profile(unit, t),
kind: unit.kind.for_target(t), kind: unit.kind.for_target(t),
} })
})
} }
fn dep_platform_activated(&self, dep: &Dependency, kind: Kind) -> bool { fn dep_platform_activated(&self, dep: &Dependency, kind: Kind) -> bool {
@ -1039,7 +1090,9 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
} }
/// Number of jobs specified for this build /// Number of jobs specified for this build
pub fn jobs(&self) -> u32 { self.build_config.jobs } pub fn jobs(&self) -> u32 {
self.build_config.jobs
}
pub fn lib_profile(&self) -> &'a Profile { pub fn lib_profile(&self) -> &'a Profile {
let (normal, test) = if self.build_config.release { let (normal, test) = if self.build_config.release {
@ -1056,8 +1109,9 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
pub fn lib_or_check_profile(&self, unit: &Unit, target: &Target) -> &'a Profile { pub fn lib_or_check_profile(&self, unit: &Unit, target: &Target) -> &'a Profile {
if !target.is_custom_build() && !target.for_host() if !target.is_custom_build() && !target.for_host()
&& (unit.profile.check || (unit.profile.doc && !unit.profile.test)) { && (unit.profile.check || (unit.profile.doc && !unit.profile.test))
return &self.profiles.check {
return &self.profiles.check;
} }
self.lib_profile() self.lib_profile()
} }
@ -1098,7 +1152,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
}; };
if !incremental { if !incremental {
return Ok(Vec::new()) return Ok(Vec::new());
} }
// Only enable incremental compilation for sources the user can // Only enable incremental compilation for sources the user can
@ -1108,22 +1162,31 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
// //
// (see also https://github.com/rust-lang/cargo/issues/3972) // (see also https://github.com/rust-lang/cargo/issues/3972)
if !unit.pkg.package_id().source_id().is_path() { if !unit.pkg.package_id().source_id().is_path() {
return Ok(Vec::new()) return Ok(Vec::new());
} }
let dir = self.layout(unit.kind).incremental().display(); let dir = self.layout(unit.kind).incremental().display();
Ok(vec![ Ok(vec!["-C".to_string(), format!("incremental={}", dir)])
"-C".to_string(),
format!("incremental={}", dir),
])
} }
pub fn rustflags_args(&self, unit: &Unit) -> CargoResult<Vec<String>> { pub fn rustflags_args(&self, unit: &Unit) -> CargoResult<Vec<String>> {
env_args(self.config, &self.build_config, self.info(&unit.kind), unit.kind, "RUSTFLAGS") env_args(
self.config,
&self.build_config,
self.info(&unit.kind),
unit.kind,
"RUSTFLAGS",
)
} }
pub fn rustdocflags_args(&self, unit: &Unit) -> CargoResult<Vec<String>> { pub fn rustdocflags_args(&self, unit: &Unit) -> CargoResult<Vec<String>> {
env_args(self.config, &self.build_config, self.info(&unit.kind), unit.kind, "RUSTDOCFLAGS") env_args(
self.config,
&self.build_config,
self.info(&unit.kind),
unit.kind,
"RUSTDOCFLAGS",
)
} }
pub fn show_warnings(&self, pkg: &PackageId) -> bool { pub fn show_warnings(&self, pkg: &PackageId) -> bool {
@ -1155,11 +1218,13 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
/// ///
/// Note that if a `target` is specified, no args will be passed to host code (plugins, build /// Note that if a `target` is specified, no args will be passed to host code (plugins, build
/// scripts, ...), even if it is the same as the target. /// scripts, ...), even if it is the same as the target.
fn env_args(config: &Config, fn env_args(
build_config: &BuildConfig, config: &Config,
target_info: &TargetInfo, build_config: &BuildConfig,
kind: Kind, target_info: &TargetInfo,
name: &str) -> CargoResult<Vec<String>> { kind: Kind,
name: &str,
) -> CargoResult<Vec<String>> {
// We *want* to apply RUSTFLAGS only to builds for the // We *want* to apply RUSTFLAGS only to builds for the
// requested target architecture, and not to things like build // requested target architecture, and not to things like build
// scripts and plugins, which may be for an entirely different // scripts and plugins, which may be for an entirely different
@ -1200,9 +1265,14 @@ fn env_args(config: &Config,
let mut rustflags = Vec::new(); let mut rustflags = Vec::new();
let name = name.chars().flat_map(|c| c.to_lowercase()).collect::<String>(); let name = name.chars()
.flat_map(|c| c.to_lowercase())
.collect::<String>();
// Then the target.*.rustflags value... // Then the target.*.rustflags value...
let target = build_config.requested_target.as_ref().unwrap_or(&build_config.host_triple); let target = build_config
.requested_target
.as_ref()
.unwrap_or(&build_config.host_triple);
let key = format!("target.{}.{}", target, name); let key = format!("target.{}.{}", target, name);
if let Some(args) = config.get_list_or_split_string(&key)? { if let Some(args) = config.get_list_or_split_string(&key)? {
let args = args.val.into_iter(); let args = args.val.into_iter();
@ -1214,9 +1284,13 @@ fn env_args(config: &Config,
let cfgs = table.val.keys().filter_map(|t| { let cfgs = table.val.keys().filter_map(|t| {
if t.starts_with("cfg(") && t.ends_with(')') { if t.starts_with("cfg(") && t.ends_with(')') {
let cfg = &t[4..t.len() - 1]; let cfg = &t[4..t.len() - 1];
CfgExpr::from_str(cfg) CfgExpr::from_str(cfg).ok().and_then(|c| {
.ok() if c.matches(target_cfg) {
.and_then(|c| if c.matches(target_cfg) { Some(t) } else { None }) Some(t)
} else {
None
}
})
} else { } else {
None None
} }
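Taken together, `env_args` resolves flags in a fixed order: the `RUSTFLAGS`-style environment variable wins outright, then `target.<triple>.rustflags` (plus any matching `target.'cfg(...)'.rustflags` tables), then the global `build.rustflags` fallback, and host units get nothing at all when a `--target` is set. A simplified standalone sketch of that precedence; `flags`, `config_target`, and `config_build` are invented stand-ins for Cargo's parsed config, and `DEMO_RUSTFLAGS` is assumed unset:

    use std::env;

    // Environment variable first, then target-specific config, then global.
    fn flags(env_name: &str, config_target: Option<&str>, config_build: Option<&str>) -> Vec<String> {
        let split = |s: &str| s.split_whitespace().map(str::to_string).collect::<Vec<_>>();
        if let Ok(v) = env::var(env_name) {
            return split(&v); // e.g. RUSTFLAGS="-C target-cpu=native"
        }
        if let Some(v) = config_target {
            return split(v); // e.g. target.x86_64-unknown-linux-gnu.rustflags
        }
        config_build.map(split).unwrap_or_default() // build.rustflags
    }

    fn main() {
        // With no env override, the target-specific value wins:
        assert_eq!(
            flags("DEMO_RUSTFLAGS", Some("-C lto"), Some("-C debuginfo=1")),
            ["-C", "lto"]
        );
        // The global value is the last resort:
        assert_eq!(
            flags("DEMO_RUSTFLAGS", None, Some("-C debuginfo=1")),
            ["-C", "debuginfo=1"]
        );
    }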
@ -1275,24 +1349,28 @@ fn parse_crate_type(
lines: &mut str::Lines, lines: &mut str::Lines,
) -> CargoResult<Option<(String, String)>> { ) -> CargoResult<Option<(String, String)>> {
let not_supported = error.lines().any(|line| { let not_supported = error.lines().any(|line| {
(line.contains("unsupported crate type") || (line.contains("unsupported crate type") || line.contains("unknown crate type"))
line.contains("unknown crate type")) && && line.contains(crate_type)
line.contains(crate_type)
}); });
if not_supported { if not_supported {
return Ok(None); return Ok(None);
} }
let line = match lines.next() { let line = match lines.next() {
Some(line) => line, Some(line) => line,
None => bail!("malformed output when learning about \ None => bail!(
crate-type {} information", crate_type), "malformed output when learning about \
crate-type {} information",
crate_type
),
}; };
let mut parts = line.trim().split("___"); let mut parts = line.trim().split("___");
let prefix = parts.next().unwrap(); let prefix = parts.next().unwrap();
let suffix = match parts.next() { let suffix = match parts.next() {
Some(part) => part, Some(part) => part,
None => bail!("output of --print=file-names has changed in \ None => bail!(
the compiler, cannot parse"), "output of --print=file-names has changed in \
the compiler, cannot parse"
),
}; };
Ok(Some((prefix.to_string(), suffix.to_string()))) Ok(Some((prefix.to_string(), suffix.to_string())))
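The `___` placeholder crate name makes this parse possible: for each `--crate-type`, rustc's `--print=file-names` output contains a line like `lib___.rlib`, and splitting on the placeholder recovers the platform's prefix/suffix pair. A standalone sketch:

    // Split one `--print=file-names` line on the `___` placeholder, as
    // `parse_crate_type` does above; `None` means the format changed.
    fn split_file_name(line: &str) -> Option<(String, String)> {
        let mut parts = line.trim().split("___");
        let prefix = parts.next()?;
        let suffix = parts.next()?;
        Some((prefix.to_string(), suffix.to_string()))
    }

    fn main() {
        assert_eq!(
            split_file_name("lib___.rlib"),
            Some(("lib".to_string(), ".rlib".to_string()))
        );
        assert_eq!(
            split_file_name("___.wasm"),
            Some(("".to_string(), ".wasm".to_string()))
        );
        assert_eq!(split_file_name("garbage"), None);
    }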
@ -1313,16 +1391,14 @@ fn add_target_specific_suffixes(
let mut ret = vec![(suffix.to_string(), file_type, false)]; let mut ret = vec![(suffix.to_string(), file_type, false)];
// rust-lang/cargo#4500 // rust-lang/cargo#4500
if target_triple.ends_with("pc-windows-msvc") && crate_type.ends_with("dylib") && if target_triple.ends_with("pc-windows-msvc") && crate_type.ends_with("dylib")
suffix == ".dll" && suffix == ".dll"
{ {
ret.push((".dll.lib".to_string(), TargetFileType::Normal, false)); ret.push((".dll.lib".to_string(), TargetFileType::Normal, false));
} }
// rust-lang/cargo#4535 // rust-lang/cargo#4535
if target_triple.starts_with("wasm32-") && crate_type == "bin" && if target_triple.starts_with("wasm32-") && crate_type == "bin" && suffix == ".js" {
suffix == ".js"
{
ret.push((".wasm".to_string(), TargetFileType::Normal, true)); ret.push((".wasm".to_string(), TargetFileType::Normal, true));
} }

View file

@ -1,17 +1,17 @@
use std::collections::{HashMap, BTreeSet, HashSet}; use std::collections::{BTreeSet, HashMap, HashSet};
use std::fs; use std::fs;
use std::path::{PathBuf, Path}; use std::path::{Path, PathBuf};
use std::str; use std::str;
use std::sync::{Mutex, Arc}; use std::sync::{Arc, Mutex};
use core::PackageId; use core::PackageId;
use util::{Freshness, Cfg}; use util::{Cfg, Freshness};
use util::errors::{CargoResult, CargoResultExt}; use util::errors::{CargoResult, CargoResultExt};
use util::{self, internal, profile, paths}; use util::{self, internal, paths, profile};
use util::machine_message; use util::machine_message;
use super::job::Work; use super::job::Work;
use super::{fingerprint, Kind, Context, Unit}; use super::{fingerprint, Context, Kind, Unit};
/// Contains the parsed output of a custom build script. /// Contains the parsed output of a custom build script.
#[derive(Clone, Debug, Hash)] #[derive(Clone, Debug, Hash)]
@ -75,10 +75,15 @@ pub struct BuildDeps {
/// prepare work for. If the requirement is specified as both the target and the /// prepare work for. If the requirement is specified as both the target and the
/// host platforms, it is assumed that the two are equal and the build script is /// host platforms, it is assumed that the two are equal and the build script is
/// only run once (not twice). /// only run once (not twice).
pub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) pub fn prepare<'a, 'cfg>(
-> CargoResult<(Work, Work, Freshness)> { cx: &mut Context<'a, 'cfg>,
let _p = profile::start(format!("build script prepare: {}/{}", unit: &Unit<'a>,
unit.pkg, unit.target.name())); ) -> CargoResult<(Work, Work, Freshness)> {
let _p = profile::start(format!(
"build script prepare: {}/{}",
unit.pkg,
unit.target.name()
));
let key = (unit.pkg.package_id().clone(), unit.kind); let key = (unit.pkg.package_id().clone(), unit.kind);
let overridden = cx.build_script_overridden.contains(&key); let overridden = cx.build_script_overridden.contains(&key);
@ -90,18 +95,17 @@ pub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
// Now that we've prep'd our work, build the work needed to manage the // Now that we've prep'd our work, build the work needed to manage the
// fingerprint and then start returning that upwards. // fingerprint and then start returning that upwards.
let (freshness, dirty, fresh) = let (freshness, dirty, fresh) = fingerprint::prepare_build_cmd(cx, unit)?;
fingerprint::prepare_build_cmd(cx, unit)?;
Ok((work_dirty.then(dirty), work_fresh.then(fresh), freshness)) Ok((work_dirty.then(dirty), work_fresh.then(fresh), freshness))
} }
fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<(Work, Work)> {
-> CargoResult<(Work, Work)> {
let dependencies = cx.dep_run_custom_build(unit)?; let dependencies = cx.dep_run_custom_build(unit)?;
let build_script_unit = dependencies.iter().find(|d| { let build_script_unit = dependencies
!d.profile.run_custom_build && d.target.is_custom_build() .iter()
}).expect("running a script not depending on an actual script"); .find(|d| !d.profile.run_custom_build && d.target.is_custom_build())
.expect("running a script not depending on an actual script");
let script_output = cx.build_script_dir(build_script_unit); let script_output = cx.build_script_dir(build_script_unit);
let build_output = cx.build_script_out_dir(unit); let build_output = cx.build_script_out_dir(unit);
@ -116,19 +120,29 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
let to_exec = to_exec.into_os_string(); let to_exec = to_exec.into_os_string();
let mut cmd = cx.compilation.host_process(to_exec, unit.pkg)?; let mut cmd = cx.compilation.host_process(to_exec, unit.pkg)?;
cmd.env("OUT_DIR", &build_output) cmd.env("OUT_DIR", &build_output)
.env("CARGO_MANIFEST_DIR", unit.pkg.root()) .env("CARGO_MANIFEST_DIR", unit.pkg.root())
.env("NUM_JOBS", &cx.jobs().to_string()) .env("NUM_JOBS", &cx.jobs().to_string())
.env("TARGET", &match unit.kind { .env(
Kind::Host => cx.host_triple(), "TARGET",
Kind::Target => cx.target_triple(), &match unit.kind {
}) Kind::Host => cx.host_triple(),
.env("DEBUG", &profile.debuginfo.is_some().to_string()) Kind::Target => cx.target_triple(),
.env("OPT_LEVEL", &profile.opt_level) },
.env("PROFILE", if cx.build_config.release { "release" } else { "debug" }) )
.env("HOST", cx.host_triple()) .env("DEBUG", &profile.debuginfo.is_some().to_string())
.env("RUSTC", &cx.config.rustc()?.path) .env("OPT_LEVEL", &profile.opt_level)
.env("RUSTDOC", &*cx.config.rustdoc()?) .env(
.inherit_jobserver(&cx.jobserver); "PROFILE",
if cx.build_config.release {
"release"
} else {
"debug"
},
)
.env("HOST", cx.host_triple())
.env("RUSTC", &cx.config.rustc()?.path)
.env("RUSTDOC", &*cx.config.rustdoc()?)
.inherit_jobserver(&cx.jobserver);
if let Some(links) = unit.pkg.manifest().links() { if let Some(links) = unit.pkg.manifest().links() {
cmd.env("CARGO_MANIFEST_LINKS", links); cmd.env("CARGO_MANIFEST_LINKS", links);
@ -143,9 +157,13 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
let mut cfg_map = HashMap::new(); let mut cfg_map = HashMap::new();
for cfg in cx.cfg(unit.kind) { for cfg in cx.cfg(unit.kind) {
match *cfg { match *cfg {
Cfg::Name(ref n) => { cfg_map.insert(n.clone(), None); } Cfg::Name(ref n) => {
cfg_map.insert(n.clone(), None);
}
Cfg::KeyPair(ref k, ref v) => { Cfg::KeyPair(ref k, ref v) => {
if let Some(ref mut values) = *cfg_map.entry(k.clone()).or_insert_with(||Some(Vec::new())) { if let Some(ref mut values) =
*cfg_map.entry(k.clone()).or_insert_with(|| Some(Vec::new()))
{
values.push(v.clone()) values.push(v.clone())
} }
} }
@ -154,8 +172,12 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
for (k, v) in cfg_map { for (k, v) in cfg_map {
let k = format!("CARGO_CFG_{}", super::envify(&k)); let k = format!("CARGO_CFG_{}", super::envify(&k));
match v { match v {
Some(list) => { cmd.env(&k, list.join(",")); } Some(list) => {
None => { cmd.env(&k, ""); } cmd.env(&k, list.join(","));
}
None => {
cmd.env(&k, "");
}
} }
} }
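For context, this environment is what a package's `build.rs` runs under. A minimal build script consuming it; `OUT_DIR`, `TARGET`, and `PROFILE` are documented build-script variables set above, and the `cargo:` lines on stdout are exactly what `BuildOutput::parse` below picks apart:

    // build.rs
    use std::env;
    use std::fs;
    use std::path::PathBuf;

    fn main() {
        let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
        let target = env::var("TARGET").unwrap();
        let profile = env::var("PROFILE").unwrap(); // "release" or "debug"

        // Generate a file into OUT_DIR for the crate to `include!`.
        fs::write(
            out_dir.join("generated.rs"),
            format!("pub const TARGET: &str = {:?}; // built as {}\n", target, profile),
        )
        .unwrap();

        // Directives Cargo parses from stdout:
        println!("cargo:rustc-cfg=built_with_script");
        println!("cargo:rerun-if-changed=build.rs");
    }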
@ -165,14 +187,19 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
// This information will be used at build-time later on to figure out which // This information will be used at build-time later on to figure out which
// sorts of variables need to be discovered at that time. // sorts of variables need to be discovered at that time.
let lib_deps = { let lib_deps = {
dependencies.iter().filter_map(|unit| { dependencies
if unit.profile.run_custom_build { .iter()
Some((unit.pkg.manifest().links().unwrap().to_string(), .filter_map(|unit| {
unit.pkg.package_id().clone())) if unit.profile.run_custom_build {
} else { Some((
None unit.pkg.manifest().links().unwrap().to_string(),
} unit.pkg.package_id().clone(),
}).collect::<Vec<_>>() ))
} else {
None
}
})
.collect::<Vec<_>>()
}; };
let pkg_name = unit.pkg.to_string(); let pkg_name = unit.pkg.to_string();
let build_state = Arc::clone(&cx.build_state); let build_state = Arc::clone(&cx.build_state);
@ -185,8 +212,13 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
(output_file, err_file, root_output_file) (output_file, err_file, root_output_file)
}; };
let root_output = cx.target_root().to_path_buf(); let root_output = cx.target_root().to_path_buf();
let all = (id.clone(), pkg_name.clone(), Arc::clone(&build_state), let all = (
output_file.clone(), root_output.clone()); id.clone(),
pkg_name.clone(),
Arc::clone(&build_state),
output_file.clone(),
root_output.clone(),
);
let build_scripts = super::load_build_deps(cx, unit); let build_scripts = super::load_build_deps(cx, unit);
let kind = unit.kind; let kind = unit.kind;
let json_messages = cx.build_config.json_messages; let json_messages = cx.build_config.json_messages;
@ -196,12 +228,8 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
let prev_root_output = paths::read_bytes(&root_output_file) let prev_root_output = paths::read_bytes(&root_output_file)
.and_then(|bytes| util::bytes2path(&bytes)) .and_then(|bytes| util::bytes2path(&bytes))
.unwrap_or_else(|_| cmd.get_cwd().unwrap().to_path_buf()); .unwrap_or_else(|_| cmd.get_cwd().unwrap().to_path_buf());
let prev_output = BuildOutput::parse_file( let prev_output =
&output_file, BuildOutput::parse_file(&output_file, &pkg_name, &prev_root_output, &root_output).ok();
&pkg_name,
&prev_root_output,
&root_output,
).ok();
let deps = BuildDeps::new(&output_file, prev_output.as_ref()); let deps = BuildDeps::new(&output_file, prev_output.as_ref());
cx.build_explicit_deps.insert(*unit, deps); cx.build_explicit_deps.insert(*unit, deps);
@ -220,8 +248,10 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
// otherwise create it! // otherwise create it!
if fs::metadata(&build_output).is_err() { if fs::metadata(&build_output).is_err() {
fs::create_dir(&build_output).chain_err(|| { fs::create_dir(&build_output).chain_err(|| {
internal("failed to create script output directory for \ internal(
build command") "failed to create script output directory for \
build command",
)
})?; })?;
} }
@ -234,35 +264,45 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
for (name, id) in lib_deps { for (name, id) in lib_deps {
let key = (id.clone(), kind); let key = (id.clone(), kind);
let state = build_state.get(&key).ok_or_else(|| { let state = build_state.get(&key).ok_or_else(|| {
internal(format!("failed to locate build state for env \ internal(format!(
vars: {}/{:?}", id, kind)) "failed to locate build state for env \
vars: {}/{:?}",
id, kind
))
})?; })?;
let data = &state.metadata; let data = &state.metadata;
for &(ref key, ref value) in data.iter() { for &(ref key, ref value) in data.iter() {
cmd.env(&format!("DEP_{}_{}", super::envify(&name), cmd.env(
super::envify(key)), value); &format!("DEP_{}_{}", super::envify(&name), super::envify(key)),
value,
);
} }
} }
if let Some(build_scripts) = build_scripts { if let Some(build_scripts) = build_scripts {
super::add_plugin_deps(&mut cmd, &build_state, super::add_plugin_deps(&mut cmd, &build_state, &build_scripts, &root_output)?;
&build_scripts,
&root_output)?;
} }
} }
// And now finally, run the build command itself! // And now finally, run the build command itself!
state.running(&cmd); state.running(&cmd);
let output = cmd.exec_with_streaming( let output = cmd.exec_with_streaming(
&mut |out_line| { state.stdout(out_line); Ok(()) }, &mut |out_line| {
&mut |err_line| { state.stderr(err_line); Ok(()) }, state.stdout(out_line);
Ok(())
},
&mut |err_line| {
state.stderr(err_line);
Ok(())
},
true, true,
).map_err(|e| { ).map_err(|e| {
format_err!("failed to run custom build command for `{}`\n{}", format_err!(
pkg_name, e) "failed to run custom build command for `{}`\n{}",
pkg_name,
e
)
})?; })?;
// After the build command has finished running, we need to be sure to // After the build command has finished running, we need to be sure to
// remember all of its output so we can later discover precisely what it // remember all of its output so we can later discover precisely what it
// was, even if we don't run the build command again (due to freshness). // was, even if we don't run the build command again (due to freshness).
@ -273,17 +313,15 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
paths::write(&output_file, &output.stdout)?; paths::write(&output_file, &output.stdout)?;
paths::write(&err_file, &output.stderr)?; paths::write(&err_file, &output.stderr)?;
paths::write(&root_output_file, util::path2bytes(&root_output)?)?; paths::write(&root_output_file, util::path2bytes(&root_output)?)?;
let parsed_output = BuildOutput::parse( let parsed_output =
&output.stdout, BuildOutput::parse(&output.stdout, &pkg_name, &root_output, &root_output)?;
&pkg_name,
&root_output,
&root_output,
)?;
if json_messages { if json_messages {
let library_paths = parsed_output.library_paths.iter().map(|l| { let library_paths = parsed_output
l.display().to_string() .library_paths
}).collect::<Vec<_>>(); .iter()
.map(|l| l.display().to_string())
.collect::<Vec<_>>();
machine_message::emit(&machine_message::BuildScript { machine_message::emit(&machine_message::BuildScript {
package_id: &id, package_id: &id,
linked_libs: &parsed_output.library_links, linked_libs: &parsed_output.library_links,
@ -305,12 +343,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
let output = match prev_output { let output = match prev_output {
Some(output) => output, Some(output) => output,
None => { None => {
BuildOutput::parse_file( BuildOutput::parse_file(&output_file, &pkg_name, &prev_root_output, &root_output)?
&output_file,
&pkg_name,
&prev_root_output,
&root_output,
)?
} }
}; };
build_state.insert(id, kind, output); build_state.insert(id, kind, output);
@ -340,20 +373,24 @@ impl BuildState {
} }
impl BuildOutput { impl BuildOutput {
pub fn parse_file(path: &Path, pub fn parse_file(
pkg_name: &str, path: &Path,
root_output_when_generated: &Path, pkg_name: &str,
root_output: &Path) -> CargoResult<BuildOutput> { root_output_when_generated: &Path,
root_output: &Path,
) -> CargoResult<BuildOutput> {
let contents = paths::read_bytes(path)?; let contents = paths::read_bytes(path)?;
BuildOutput::parse(&contents, pkg_name, root_output_when_generated, root_output) BuildOutput::parse(&contents, pkg_name, root_output_when_generated, root_output)
} }
// Parses the output of a script. // Parses the output of a script.
// The `pkg_name` is used for error messages. // The `pkg_name` is used for error messages.
pub fn parse(input: &[u8], pub fn parse(
pkg_name: &str, input: &[u8],
root_output_when_generated: &Path, pkg_name: &str,
root_output: &Path) -> CargoResult<BuildOutput> { root_output_when_generated: &Path,
root_output: &Path,
) -> CargoResult<BuildOutput> {
let mut library_paths = Vec::new(); let mut library_paths = Vec::new();
let mut library_links = Vec::new(); let mut library_links = Vec::new();
let mut cfgs = Vec::new(); let mut cfgs = Vec::new();
@ -376,7 +413,7 @@ impl BuildOutput {
} }
let data = match iter.next() { let data = match iter.next() {
Some(val) => val, Some(val) => val,
None => continue None => continue,
}; };
// getting the `key=value` part of the line // getting the `key=value` part of the line
@ -389,17 +426,14 @@ impl BuildOutput {
_ => bail!("Wrong output in {}: `{}`", whence, line), _ => bail!("Wrong output in {}: `{}`", whence, line),
}; };
let path = |val: &str| { let path = |val: &str| match Path::new(val).strip_prefix(root_output_when_generated) {
match Path::new(val).strip_prefix(root_output_when_generated) { Ok(path) => root_output.join(path),
Ok(path) => root_output.join(path), Err(_) => PathBuf::from(val),
Err(_) => PathBuf::from(val),
}
}; };
match key { match key {
"rustc-flags" => { "rustc-flags" => {
let (paths, links) = let (paths, links) = BuildOutput::parse_rustc_flags(value, &whence)?;
BuildOutput::parse_rustc_flags(value, &whence)?;
library_links.extend(links.into_iter()); library_links.extend(links.into_iter());
library_paths.extend(paths.into_iter()); library_paths.extend(paths.into_iter());
} }
@ -426,35 +460,43 @@ impl BuildOutput {
}) })
} }
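A pared-down sketch of that `cargo:KEY=VALUE` scan, handling three of the keys the real parser knows (it supports more, and bails on malformed lines rather than ignoring them):

    fn parse_script_output(stdout: &str) -> (Vec<String>, Vec<String>, Vec<String>) {
        let (mut links, mut paths, mut cfgs) = (Vec::new(), Vec::new(), Vec::new());
        for line in stdout.lines() {
            // Only lines starting with `cargo:` are significant.
            let data = match line.trim().strip_prefix("cargo:") {
                Some(rest) => rest,
                None => continue,
            };
            let mut kv = data.splitn(2, '=');
            match (kv.next(), kv.next()) {
                (Some("rustc-link-lib"), Some(v)) => links.push(v.to_string()),
                (Some("rustc-link-search"), Some(v)) => paths.push(v.to_string()),
                (Some("rustc-cfg"), Some(v)) => cfgs.push(v.to_string()),
                _ => {}
            }
        }
        (links, paths, cfgs)
    }

    fn main() {
        let out = "warning: hi\ncargo:rustc-link-lib=z\ncargo:rustc-link-search=/opt/lib\n";
        let (links, paths, cfgs) = parse_script_output(out);
        assert_eq!(links, ["z"]);
        assert_eq!(paths, ["/opt/lib"]);
        assert!(cfgs.is_empty());
    }

(The real parser additionally rewrites any path under `root_output_when_generated` to live under the current `root_output`, which is what the `path` closure above handles.)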
pub fn parse_rustc_flags(value: &str, whence: &str) pub fn parse_rustc_flags(
-> CargoResult<(Vec<PathBuf>, Vec<String>)> { value: &str,
whence: &str,
) -> CargoResult<(Vec<PathBuf>, Vec<String>)> {
let value = value.trim(); let value = value.trim();
let mut flags_iter = value.split(|c: char| c.is_whitespace()) let mut flags_iter = value
.filter(|w| w.chars().any(|c| !c.is_whitespace())); .split(|c: char| c.is_whitespace())
.filter(|w| w.chars().any(|c| !c.is_whitespace()));
let (mut library_paths, mut library_links) = (Vec::new(), Vec::new()); let (mut library_paths, mut library_links) = (Vec::new(), Vec::new());
while let Some(flag) = flags_iter.next() { while let Some(flag) = flags_iter.next() {
if flag != "-l" && flag != "-L" { if flag != "-l" && flag != "-L" {
bail!("Only `-l` and `-L` flags are allowed in {}: `{}`", bail!(
whence, value) "Only `-l` and `-L` flags are allowed in {}: `{}`",
whence,
value
)
} }
let value = match flags_iter.next() { let value = match flags_iter.next() {
Some(v) => v, Some(v) => v,
None => bail!("Flag in rustc-flags has no value in {}: `{}`", None => bail!(
whence, value) "Flag in rustc-flags has no value in {}: `{}`",
whence,
value
),
}; };
match flag { match flag {
"-l" => library_links.push(value.to_string()), "-l" => library_links.push(value.to_string()),
"-L" => library_paths.push(PathBuf::from(value)), "-L" => library_paths.push(PathBuf::from(value)),
// was already checked above // was already checked above
_ => bail!("only -l and -L flags are allowed") _ => bail!("only -l and -L flags are allowed"),
}; };
} }
Ok((library_paths, library_links)) Ok((library_paths, library_links))
} }
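Usage-wise, the whitelist means a script's `cargo:rustc-flags` value can only ever contribute link libraries and search paths. The same pairing logic as a free-standing function:

    fn parse_rustc_flags(value: &str) -> Result<(Vec<String>, Vec<String>), String> {
        let (mut paths, mut links) = (Vec::new(), Vec::new());
        let mut it = value.split_whitespace();
        while let Some(flag) = it.next() {
            // Every flag must be followed by its value.
            let val = it.next().ok_or_else(|| format!("flag `{}` has no value", flag))?;
            match flag {
                "-L" => paths.push(val.to_string()),
                "-l" => links.push(val.to_string()),
                other => return Err(format!("only -l and -L are allowed, got `{}`", other)),
            }
        }
        Ok((paths, links))
    }

    fn main() {
        let (paths, links) = parse_rustc_flags("-L /opt/lib -l z").unwrap();
        assert_eq!(paths, ["/opt/lib"]);
        assert_eq!(links, ["z"]);
        assert!(parse_rustc_flags("-O").is_err());
    }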
pub fn parse_rustc_env(value: &str, whence: &str) pub fn parse_rustc_env(value: &str, whence: &str) -> CargoResult<(String, String)> {
-> CargoResult<(String, String)> {
let mut iter = value.splitn(2, '='); let mut iter = value.splitn(2, '=');
let name = iter.next(); let name = iter.next();
let val = iter.next(); let val = iter.next();
@ -469,12 +511,14 @@ impl BuildDeps {
pub fn new(output_file: &Path, output: Option<&BuildOutput>) -> BuildDeps { pub fn new(output_file: &Path, output: Option<&BuildOutput>) -> BuildDeps {
BuildDeps { BuildDeps {
build_script_output: output_file.to_path_buf(), build_script_output: output_file.to_path_buf(),
rerun_if_changed: output.map(|p| &p.rerun_if_changed) rerun_if_changed: output
.cloned() .map(|p| &p.rerun_if_changed)
.unwrap_or_default(), .cloned()
rerun_if_env_changed: output.map(|p| &p.rerun_if_env_changed) .unwrap_or_default(),
.cloned() rerun_if_env_changed: output
.unwrap_or_default(), .map(|p| &p.rerun_if_env_changed)
.cloned()
.unwrap_or_default(),
} }
} }
} }
@ -488,32 +532,33 @@ impl BuildDeps {
/// ///
/// The given set of targets to this function is the initial set of /// The given set of targets to this function is the initial set of
/// targets/profiles which are being built. /// targets/profiles which are being built.
pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> CargoResult<()> {
units: &[Unit<'b>])
-> CargoResult<()> {
let mut ret = HashMap::new(); let mut ret = HashMap::new();
for unit in units { for unit in units {
build(&mut ret, cx, unit)?; build(&mut ret, cx, unit)?;
} }
cx.build_scripts.extend(ret.into_iter().map(|(k, v)| { cx.build_scripts
(k, Arc::new(v)) .extend(ret.into_iter().map(|(k, v)| (k, Arc::new(v))));
}));
return Ok(()); return Ok(());
// Recursive function to build up the map we're constructing. This function // Recursive function to build up the map we're constructing. This function
// memoizes all of its return values as it goes along. // memoizes all of its return values as it goes along.
fn build<'a, 'b, 'cfg>(out: &'a mut HashMap<Unit<'b>, BuildScripts>, fn build<'a, 'b, 'cfg>(
cx: &mut Context<'b, 'cfg>, out: &'a mut HashMap<Unit<'b>, BuildScripts>,
unit: &Unit<'b>) cx: &mut Context<'b, 'cfg>,
-> CargoResult<&'a BuildScripts> { unit: &Unit<'b>,
) -> CargoResult<&'a BuildScripts> {
// Do a quick pre-flight check to see if we've already calculated the // Do a quick pre-flight check to see if we've already calculated the
// set of dependencies. // set of dependencies.
if out.contains_key(unit) { if out.contains_key(unit) {
return Ok(&out[unit]) return Ok(&out[unit]);
} }
{ {
let key = unit.pkg.manifest().links().map(|l| (l.to_string(), unit.kind)); let key = unit.pkg
.manifest()
.links()
.map(|l| (l.to_string(), unit.kind));
let build_state = &cx.build_state; let build_state = &cx.build_state;
if let Some(output) = key.and_then(|k| build_state.overrides.get(&k)) { if let Some(output) = key.and_then(|k| build_state.overrides.get(&k)) {
let key = (unit.pkg.package_id().clone(), unit.kind); let key = (unit.pkg.package_id().clone(), unit.kind);
@ -543,8 +588,8 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,
let dep_scripts = build(out, cx, unit)?; let dep_scripts = build(out, cx, unit)?;
if unit.target.for_host() { if unit.target.for_host() {
ret.plugins.extend(dep_scripts.to_link.iter() ret.plugins
.map(|p| &p.0).cloned()); .extend(dep_scripts.to_link.iter().map(|p| &p.0).cloned());
} else if unit.target.linkable() { } else if unit.target.linkable() {
for &(ref pkg, kind) in dep_scripts.to_link.iter() { for &(ref pkg, kind) in dep_scripts.to_link.iter() {
add_to_link(&mut ret, pkg, kind); add_to_link(&mut ret, pkg, kind);

View file

@ -11,12 +11,12 @@ use serde_json;
use core::{Epoch, Package, TargetKind}; use core::{Epoch, Package, TargetKind};
use util; use util;
use util::{Fresh, Dirty, Freshness, internal, profile}; use util::{internal, profile, Dirty, Fresh, Freshness};
use util::errors::{CargoResult, CargoResultExt}; use util::errors::{CargoResult, CargoResultExt};
use util::paths; use util::paths;
use super::job::Work; use super::job::Work;
use super::context::{Context, Unit, TargetFileType}; use super::context::{Context, TargetFileType, Unit};
use super::custom_build::BuildDeps; use super::custom_build::BuildDeps;
/// A tuple result of the `prepare_foo` functions in this module. /// A tuple result of the `prepare_foo` functions in this module.
@ -47,10 +47,15 @@ pub type Preparation = (Freshness, Work, Work);
/// This function will calculate the fingerprint for a target and prepare the /// This function will calculate the fingerprint for a target and prepare the
/// work necessary to either write the fingerprint or copy over all fresh files /// work necessary to either write the fingerprint or copy over all fresh files
/// from the old directories to their new locations. /// from the old directories to their new locations.
pub fn prepare_target<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, pub fn prepare_target<'a, 'cfg>(
unit: &Unit<'a>) -> CargoResult<Preparation> { cx: &mut Context<'a, 'cfg>,
let _p = profile::start(format!("fingerprint: {} / {}", unit: &Unit<'a>,
unit.pkg.package_id(), unit.target.name())); ) -> CargoResult<Preparation> {
let _p = profile::start(format!(
"fingerprint: {} / {}",
unit.pkg.package_id(),
unit.target.name()
));
let new = cx.fingerprint_dir(unit); let new = cx.fingerprint_dir(unit);
let loc = new.join(&filename(cx, unit)); let loc = new.join(&filename(cx, unit));
@ -73,9 +78,9 @@ pub fn prepare_target<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
if compare.is_err() { if compare.is_err() {
let source_id = unit.pkg.package_id().source_id(); let source_id = unit.pkg.package_id().source_id();
let sources = cx.packages.sources(); let sources = cx.packages.sources();
let source = sources.get(source_id).ok_or_else(|| { let source = sources
internal("missing package source") .get(source_id)
})?; .ok_or_else(|| internal("missing package source"))?;
source.verify(unit.pkg.package_id())?; source.verify(unit.pkg.package_id())?;
} }
@ -83,7 +88,8 @@ pub fn prepare_target<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
let mut missing_outputs = false; let mut missing_outputs = false;
if unit.profile.doc { if unit.profile.doc {
missing_outputs = !root.join(unit.target.crate_name()) missing_outputs = !root.join(unit.target.crate_name())
.join("index.html").exists(); .join("index.html")
.exists();
} else { } else {
for &(ref src, ref link_dst, file_type) in cx.target_filenames(unit)?.iter() { for &(ref src, ref link_dst, file_type) in cx.target_filenames(unit)?.iter() {
if file_type == TargetFileType::DebugInfo { if file_type == TargetFileType::DebugInfo {
@@ -102,13 +108,17 @@ pub fn prepare_target<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
match fingerprint.update_local(&target_root) { match fingerprint.update_local(&target_root) {
Ok(()) => {} Ok(()) => {}
Err(..) if allow_failure => return Ok(()), Err(..) if allow_failure => return Ok(()),
Err(e) => return Err(e) Err(e) => return Err(e),
} }
write_fingerprint(&loc, &*fingerprint) write_fingerprint(&loc, &*fingerprint)
}); });
let fresh = compare.is_ok() && !missing_outputs; let fresh = compare.is_ok() && !missing_outputs;
Ok((if fresh {Fresh} else {Dirty}, write_fingerprint, Work::noop())) Ok((
if fresh { Fresh } else { Dirty },
write_fingerprint,
Work::noop(),
))
} }
/// A fingerprint can be considered to be a "short string" representing the /// A fingerprint can be considered to be a "short string" representing the
@@ -142,39 +152,46 @@ pub struct Fingerprint {
#[serde(serialize_with = "serialize_deps", deserialize_with = "deserialize_deps")] #[serde(serialize_with = "serialize_deps", deserialize_with = "deserialize_deps")]
deps: Vec<(String, Arc<Fingerprint>)>, deps: Vec<(String, Arc<Fingerprint>)>,
local: Vec<LocalFingerprint>, local: Vec<LocalFingerprint>,
#[serde(skip_serializing, skip_deserializing)] #[serde(skip_serializing, skip_deserializing)] memoized_hash: Mutex<Option<u64>>,
memoized_hash: Mutex<Option<u64>>,
rustflags: Vec<String>, rustflags: Vec<String>,
epoch: Epoch, epoch: Epoch,
} }
fn serialize_deps<S>(deps: &[(String, Arc<Fingerprint>)], ser: S) fn serialize_deps<S>(deps: &[(String, Arc<Fingerprint>)], ser: S) -> Result<S::Ok, S::Error>
-> Result<S::Ok, S::Error> where
where S: ser::Serializer, S: ser::Serializer,
{ {
deps.iter().map(|&(ref a, ref b)| { deps.iter()
(a, b.hash()) .map(|&(ref a, ref b)| (a, b.hash()))
}).collect::<Vec<_>>().serialize(ser) .collect::<Vec<_>>()
.serialize(ser)
} }
fn deserialize_deps<'de, D>(d: D) -> Result<Vec<(String, Arc<Fingerprint>)>, D::Error> fn deserialize_deps<'de, D>(d: D) -> Result<Vec<(String, Arc<Fingerprint>)>, D::Error>
where D: de::Deserializer<'de>, where
D: de::Deserializer<'de>,
{ {
let decoded = <Vec<(String, u64)>>::deserialize(d)?; let decoded = <Vec<(String, u64)>>::deserialize(d)?;
Ok(decoded.into_iter().map(|(name, hash)| { Ok(decoded
(name, Arc::new(Fingerprint { .into_iter()
rustc: 0, .map(|(name, hash)| {
target: 0, (
profile: 0, name,
path: 0, Arc::new(Fingerprint {
local: vec![LocalFingerprint::Precalculated(String::new())], rustc: 0,
features: String::new(), target: 0,
deps: Vec::new(), profile: 0,
memoized_hash: Mutex::new(Some(hash)), path: 0,
epoch: Epoch::Epoch2015, local: vec![LocalFingerprint::Precalculated(String::new())],
rustflags: Vec::new(), features: String::new(),
})) deps: Vec::new(),
}).collect()) memoized_hash: Mutex::new(Some(hash)),
epoch: Epoch::Epoch2015,
rustflags: Vec::new(),
}),
)
})
.collect())
} }
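As an aside on the `memoized_hash` field above, here is a self-contained sketch of the caching pattern that `Fingerprint::hash` (shown further below) relies on; the field set is reduced and `DefaultHasher` stands in for Cargo's own hashing helper:

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::sync::Mutex;

struct Fingerprint {
    rustc: u64,
    features: String,
    // Interior mutability lets `hash` cache its result behind `&self`.
    memoized_hash: Mutex<Option<u64>>,
}

impl Fingerprint {
    fn hash(&self) -> u64 {
        // Fast path: return the previously computed value.
        if let Some(s) = *self.memoized_hash.lock().unwrap() {
            return s;
        }
        // Slow path: hash the fields once and remember the result.
        let mut h = DefaultHasher::new();
        self.rustc.hash(&mut h);
        self.features.hash(&mut h);
        let ret = h.finish();
        *self.memoized_hash.lock().unwrap() = Some(ret);
        ret
    }
}

fn main() {
    let f = Fingerprint {
        rustc: 1,
        features: "default".to_string(),
        memoized_hash: Mutex::new(None),
    };
    assert_eq!(f.hash(), f.hash()); // the second call hits the cache
}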
#[derive(Serialize, Deserialize, Hash)] #[derive(Serialize, Deserialize, Hash)]
@@ -185,9 +202,7 @@ enum LocalFingerprint {
} }
impl LocalFingerprint { impl LocalFingerprint {
fn mtime(root: &Path, mtime: Option<FileTime>, path: &Path) fn mtime(root: &Path, mtime: Option<FileTime>, path: &Path) -> LocalFingerprint {
-> LocalFingerprint
{
let mtime = MtimeSlot(Mutex::new(mtime)); let mtime = MtimeSlot(Mutex::new(mtime));
assert!(path.is_absolute()); assert!(path.is_absolute());
let path = path.strip_prefix(root).unwrap_or(path); let path = path.strip_prefix(root).unwrap_or(path);
@@ -205,14 +220,11 @@ impl Fingerprint {
LocalFingerprint::MtimeBased(ref slot, ref path) => { LocalFingerprint::MtimeBased(ref slot, ref path) => {
let path = root.join(path); let path = root.join(path);
let meta = fs::metadata(&path) let meta = fs::metadata(&path)
.chain_err(|| { .chain_err(|| internal(format!("failed to stat `{}`", path.display())))?;
internal(format!("failed to stat `{}`", path.display()))
})?;
let mtime = FileTime::from_last_modification_time(&meta); let mtime = FileTime::from_last_modification_time(&meta);
*slot.0.lock().unwrap() = Some(mtime); *slot.0.lock().unwrap() = Some(mtime);
} }
LocalFingerprint::EnvBased(..) | LocalFingerprint::EnvBased(..) | LocalFingerprint::Precalculated(..) => continue,
LocalFingerprint::Precalculated(..) => continue,
} }
hash_busted = true; hash_busted = true;
} }
@@ -225,7 +237,7 @@ impl Fingerprint {
fn hash(&self) -> u64 { fn hash(&self) -> u64 {
if let Some(s) = *self.memoized_hash.lock().unwrap() { if let Some(s) = *self.memoized_hash.lock().unwrap() {
return s return s;
} }
let ret = util::hash_u64(self); let ret = util::hash_u64(self);
*self.memoized_hash.lock().unwrap() = Some(ret); *self.memoized_hash.lock().unwrap() = Some(ret);
@@ -237,7 +249,11 @@ impl Fingerprint {
bail!("rust compiler has changed") bail!("rust compiler has changed")
} }
if self.features != old.features { if self.features != old.features {
bail!("features have changed: {} != {}", self.features, old.features) bail!(
"features have changed: {} != {}",
self.features,
old.features
)
} }
if self.target != old.target { if self.target != old.target {
bail!("target configuration has changed") bail!("target configuration has changed")
@@ -259,15 +275,18 @@ impl Fingerprint {
} }
for (new, old) in self.local.iter().zip(&old.local) { for (new, old) in self.local.iter().zip(&old.local) {
match (new, old) { match (new, old) {
(&LocalFingerprint::Precalculated(ref a), (
&LocalFingerprint::Precalculated(ref b)) => { &LocalFingerprint::Precalculated(ref a),
&LocalFingerprint::Precalculated(ref b),
) => {
if a != b { if a != b {
bail!("precalculated components have changed: {} != {}", bail!("precalculated components have changed: {} != {}", a, b)
a, b)
} }
} }
(&LocalFingerprint::MtimeBased(ref on_disk_mtime, ref ap), (
&LocalFingerprint::MtimeBased(ref previously_built_mtime, ref bp)) => { &LocalFingerprint::MtimeBased(ref on_disk_mtime, ref ap),
&LocalFingerprint::MtimeBased(ref previously_built_mtime, ref bp),
) => {
let on_disk_mtime = on_disk_mtime.0.lock().unwrap(); let on_disk_mtime = on_disk_mtime.0.lock().unwrap();
let previously_built_mtime = previously_built_mtime.0.lock().unwrap(); let previously_built_mtime = previously_built_mtime.0.lock().unwrap();
@@ -278,19 +297,30 @@ impl Fingerprint {
}; };
if should_rebuild { if should_rebuild {
bail!("mtime based components have changed: previously {:?} now {:?}, \ bail!(
paths are {:?} and {:?}", "mtime based components have changed: previously {:?} now {:?}, \
*previously_built_mtime, *on_disk_mtime, ap, bp) paths are {:?} and {:?}",
*previously_built_mtime,
*on_disk_mtime,
ap,
bp
)
} }
} }
(&LocalFingerprint::EnvBased(ref akey, ref avalue), (
&LocalFingerprint::EnvBased(ref bkey, ref bvalue)) => { &LocalFingerprint::EnvBased(ref akey, ref avalue),
&LocalFingerprint::EnvBased(ref bkey, ref bvalue),
) => {
if *akey != *bkey { if *akey != *bkey {
bail!("env vars changed: {} != {}", akey, bkey); bail!("env vars changed: {} != {}", akey, bkey);
} }
if *avalue != *bvalue { if *avalue != *bvalue {
bail!("env var `{}` changed: previously {:?} now {:?}", bail!(
akey, bvalue, avalue) "env var `{}` changed: previously {:?} now {:?}",
akey,
bvalue,
avalue
)
} }
} }
_ => bail!("local fingerprint type has changed"), _ => bail!("local fingerprint type has changed"),
@@ -323,7 +353,16 @@ impl hash::Hash for Fingerprint {
ref rustflags, ref rustflags,
.. ..
} = *self; } = *self;
(rustc, features, target, path, profile, local, epoch, rustflags).hash(h); (
rustc,
features,
target,
path,
profile,
local,
epoch,
rustflags,
).hash(h);
h.write_usize(deps.len()); h.write_usize(deps.len());
for &(ref name, ref fingerprint) in deps { for &(ref name, ref fingerprint) in deps {
@@ -342,17 +381,21 @@ impl hash::Hash for MtimeSlot {
impl ser::Serialize for MtimeSlot { impl ser::Serialize for MtimeSlot {
fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
where S: ser::Serializer, where
S: ser::Serializer,
{ {
self.0.lock().unwrap().map(|ft| { self.0
(ft.seconds_relative_to_1970(), ft.nanoseconds()) .lock()
}).serialize(s) .unwrap()
.map(|ft| (ft.seconds_relative_to_1970(), ft.nanoseconds()))
.serialize(s)
} }
} }
impl<'de> de::Deserialize<'de> for MtimeSlot { impl<'de> de::Deserialize<'de> for MtimeSlot {
fn deserialize<D>(d: D) -> Result<MtimeSlot, D::Error> fn deserialize<D>(d: D) -> Result<MtimeSlot, D::Error>
where D: de::Deserializer<'de>, where
D: de::Deserializer<'de>,
{ {
let kind: Option<(u64, u32)> = de::Deserialize::deserialize(d)?; let kind: Option<(u64, u32)> = de::Deserialize::deserialize(d)?;
Ok(MtimeSlot(Mutex::new(kind.map(|(s, n)| { Ok(MtimeSlot(Mutex::new(kind.map(|(s, n)| {
@@ -373,10 +416,12 @@ impl<'de> de::Deserialize<'de> for MtimeSlot {
/// ///
/// Information like file modification time is only calculated for path /// Information like file modification time is only calculated for path
/// dependencies and is calculated in `calculate_target_fresh`. /// dependencies and is calculated in `calculate_target_fresh`.
fn calculate<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) fn calculate<'a, 'cfg>(
-> CargoResult<Arc<Fingerprint>> { cx: &mut Context<'a, 'cfg>,
unit: &Unit<'a>,
) -> CargoResult<Arc<Fingerprint>> {
if let Some(s) = cx.fingerprints.get(unit) { if let Some(s) = cx.fingerprints.get(unit) {
return Ok(Arc::clone(s)) return Ok(Arc::clone(s));
} }
// Next, recursively calculate the fingerprint for all of our dependencies. // Next, recursively calculate the fingerprint for all of our dependencies.
@@ -387,13 +432,12 @@ fn calculate<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
// induce a recompile, they're just dependencies in the sense that they need // induce a recompile, they're just dependencies in the sense that they need
// to be built. // to be built.
let deps = cx.dep_targets(unit)?; let deps = cx.dep_targets(unit)?;
let deps = deps.iter().filter(|u| { let deps = deps.iter()
!u.target.is_custom_build() && !u.target.is_bin() .filter(|u| !u.target.is_custom_build() && !u.target.is_bin())
}).map(|unit| { .map(|unit| {
calculate(cx, unit).map(|fingerprint| { calculate(cx, unit).map(|fingerprint| (unit.pkg.package_id().to_string(), fingerprint))
(unit.pkg.package_id().to_string(), fingerprint)
}) })
}).collect::<CargoResult<Vec<_>>>()?; .collect::<CargoResult<Vec<_>>>()?;
// And finally, calculate what our own local fingerprint is // And finally, calculate what our own local fingerprint is
let local = if use_dep_info(unit) { let local = if use_dep_info(unit) {
@@ -429,7 +473,6 @@ fn calculate<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
Ok(fingerprint) Ok(fingerprint)
} }
// We want to use the mtime for files if we're a path source, but if we're a // We want to use the mtime for files if we're a path source, but if we're a
// git/registry source, then the mtime of files may fluctuate, but they won't // git/registry source, then the mtime of files may fluctuate, but they won't
// change so long as the source itself remains constant (which is the // change so long as the source itself remains constant (which is the
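A standalone sketch of the policy described in the comment above (hypothetical `SourceKind` enum; Cargo keys this decision off the package's source instead):

enum SourceKind { Path, Git, Registry }

fn mtime_based(kind: &SourceKind) -> bool {
    match kind {
        // Local files: modification times are meaningful.
        SourceKind::Path => true,
        // Checked-out or downloaded sources: mtimes fluctuate but contents
        // don't, so a content fingerprint is used instead.
        SourceKind::Git | SourceKind::Registry => false,
    }
}

fn main() {
    assert!(mtime_based(&SourceKind::Path));
    assert!(!mtime_based(&SourceKind::Registry));
}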
@@ -456,10 +499,11 @@ fn use_dep_info(unit: &Unit) -> bool {
/// ///
/// The currently implemented solution is option (1), although it is planned to /// The currently implemented solution is option (1), although it is planned to
/// migrate to option (2) in the near future. /// migrate to option (2) in the near future.
pub fn prepare_build_cmd<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) pub fn prepare_build_cmd<'a, 'cfg>(
-> CargoResult<Preparation> { cx: &mut Context<'a, 'cfg>,
let _p = profile::start(format!("fingerprint build cmd: {}", unit: &Unit<'a>,
unit.pkg.package_id())); ) -> CargoResult<Preparation> {
let _p = profile::start(format!("fingerprint build cmd: {}", unit.pkg.package_id()));
let new = cx.fingerprint_dir(unit); let new = cx.fingerprint_dir(unit);
let loc = new.join("build"); let loc = new.join("build");
@@ -499,8 +543,7 @@ pub fn prepare_build_cmd<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
if let Some(output_path) = output_path { if let Some(output_path) = output_path {
let outputs = state.outputs.lock().unwrap(); let outputs = state.outputs.lock().unwrap();
let outputs = &outputs[&key]; let outputs = &outputs[&key];
if !outputs.rerun_if_changed.is_empty() || if !outputs.rerun_if_changed.is_empty() || !outputs.rerun_if_env_changed.is_empty() {
!outputs.rerun_if_env_changed.is_empty() {
let deps = BuildDeps::new(&output_path, Some(outputs)); let deps = BuildDeps::new(&output_path, Some(outputs));
fingerprint.local = local_fingerprints_deps(&deps, &target_root, &pkg_root); fingerprint.local = local_fingerprints_deps(&deps, &target_root, &pkg_root);
fingerprint.update_local(&target_root)?; fingerprint.update_local(&target_root)?;
@@ -509,13 +552,17 @@ pub fn prepare_build_cmd<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
write_fingerprint(&loc, &fingerprint) write_fingerprint(&loc, &fingerprint)
}); });
Ok((if compare.is_ok() {Fresh} else {Dirty}, write_fingerprint, Work::noop())) Ok((
if compare.is_ok() { Fresh } else { Dirty },
write_fingerprint,
Work::noop(),
))
} }
fn build_script_local_fingerprints<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, fn build_script_local_fingerprints<'a, 'cfg>(
unit: &Unit<'a>) cx: &mut Context<'a, 'cfg>,
-> CargoResult<(Vec<LocalFingerprint>, Option<PathBuf>)> unit: &Unit<'a>,
{ ) -> CargoResult<(Vec<LocalFingerprint>, Option<PathBuf>)> {
let state = cx.build_state.outputs.lock().unwrap(); let state = cx.build_state.outputs.lock().unwrap();
// First up, if this build script is entirely overridden, then we just // First up, if this build script is entirely overridden, then we just
// return the hash of what we overrode it with. // return the hash of what we overrode it with.
@@ -524,9 +571,11 @@ fn build_script_local_fingerprints<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
// fingerprint afterwards because this is all just overridden. // fingerprint afterwards because this is all just overridden.
if let Some(output) = state.get(&(unit.pkg.package_id().clone(), unit.kind)) { if let Some(output) = state.get(&(unit.pkg.package_id().clone(), unit.kind)) {
debug!("override local fingerprints deps"); debug!("override local fingerprints deps");
let s = format!("overridden build state with hash: {}", let s = format!(
util::hash_u64(output)); "overridden build state with hash: {}",
return Ok((vec![LocalFingerprint::Precalculated(s)], None)) util::hash_u64(output)
);
return Ok((vec![LocalFingerprint::Precalculated(s)], None));
} }
// Next up we look at the previously listed dependencies for the build // Next up we look at the previously listed dependencies for the build
@@ -540,18 +589,23 @@ fn build_script_local_fingerprints<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
if deps.rerun_if_changed.is_empty() && deps.rerun_if_env_changed.is_empty() { if deps.rerun_if_changed.is_empty() && deps.rerun_if_env_changed.is_empty() {
debug!("old local fingerprints deps"); debug!("old local fingerprints deps");
let s = pkg_fingerprint(cx, unit.pkg)?; let s = pkg_fingerprint(cx, unit.pkg)?;
return Ok((vec![LocalFingerprint::Precalculated(s)], Some(output))) return Ok((vec![LocalFingerprint::Precalculated(s)], Some(output)));
} }
// Ok so now we're in "new mode" where we can have files listed as // Ok so now we're in "new mode" where we can have files listed as
// dependencies as well as env vars listed as dependencies. Process them all // dependencies as well as env vars listed as dependencies. Process them all
// here. // here.
Ok((local_fingerprints_deps(deps, cx.target_root(), unit.pkg.root()), Some(output))) Ok((
local_fingerprints_deps(deps, cx.target_root(), unit.pkg.root()),
Some(output),
))
} }
fn local_fingerprints_deps(deps: &BuildDeps, target_root: &Path, pkg_root: &Path) fn local_fingerprints_deps(
-> Vec<LocalFingerprint> deps: &BuildDeps,
{ target_root: &Path,
pkg_root: &Path,
) -> Vec<LocalFingerprint> {
debug!("new local fingerprints deps"); debug!("new local fingerprints deps");
let mut local = Vec::new(); let mut local = Vec::new();
if !deps.rerun_if_changed.is_empty() { if !deps.rerun_if_changed.is_empty() {
@@ -573,8 +627,10 @@ fn write_fingerprint(loc: &Path, fingerprint: &Fingerprint) -> CargoResult<()> {
let hash = fingerprint.hash(); let hash = fingerprint.hash();
debug!("write fingerprint: {}", loc.display()); debug!("write fingerprint: {}", loc.display());
paths::write(loc, util::to_hex(hash).as_bytes())?; paths::write(loc, util::to_hex(hash).as_bytes())?;
paths::write(&loc.with_extension("json"), paths::write(
&serde_json::to_vec(&fingerprint).unwrap())?; &loc.with_extension("json"),
&serde_json::to_vec(&fingerprint).unwrap(),
)?;
Ok(()) Ok(())
} }
@@ -590,16 +646,16 @@ pub fn prepare_init<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> Ca
} }
pub fn dep_info_loc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> PathBuf { pub fn dep_info_loc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> PathBuf {
cx.fingerprint_dir(unit).join(&format!("dep-{}", filename(cx, unit))) cx.fingerprint_dir(unit)
.join(&format!("dep-{}", filename(cx, unit)))
} }
fn compare_old_fingerprint(loc: &Path, new_fingerprint: &Fingerprint) fn compare_old_fingerprint(loc: &Path, new_fingerprint: &Fingerprint) -> CargoResult<()> {
-> CargoResult<()> {
let old_fingerprint_short = paths::read(loc)?; let old_fingerprint_short = paths::read(loc)?;
let new_hash = new_fingerprint.hash(); let new_hash = new_fingerprint.hash();
if util::to_hex(new_hash) == old_fingerprint_short { if util::to_hex(new_hash) == old_fingerprint_short {
return Ok(()) return Ok(());
} }
let old_fingerprint_json = paths::read(&loc.with_extension("json"))?; let old_fingerprint_json = paths::read(&loc.with_extension("json"))?;
@@ -621,9 +677,7 @@ fn log_compare(unit: &Unit, compare: &CargoResult<()>) {
} }
// Parse the dep-info into a list of paths // Parse the dep-info into a list of paths
pub fn parse_dep_info(pkg: &Package, dep_info: &Path) pub fn parse_dep_info(pkg: &Package, dep_info: &Path) -> CargoResult<Option<Vec<PathBuf>>> {
-> CargoResult<Option<Vec<PathBuf>>>
{
let data = match paths::read_bytes(dep_info) { let data = match paths::read_bytes(dep_info) {
Ok(data) => data, Ok(data) => data,
Err(_) => return Ok(None), Err(_) => return Ok(None),
@@ -639,9 +693,7 @@ pub fn parse_dep_info(pkg: &Package, dep_info: &Path)
} }
} }
fn dep_info_mtime_if_fresh(pkg: &Package, dep_info: &Path) fn dep_info_mtime_if_fresh(pkg: &Package, dep_info: &Path) -> CargoResult<Option<FileTime>> {
-> CargoResult<Option<FileTime>>
{
if let Some(paths) = parse_dep_info(pkg, dep_info)? { if let Some(paths) = parse_dep_info(pkg, dep_info)? {
Ok(mtime_if_fresh(dep_info, paths.iter())) Ok(mtime_if_fresh(dep_info, paths.iter()))
} else { } else {
@@ -653,15 +705,16 @@ fn pkg_fingerprint(cx: &Context, pkg: &Package) -> CargoResult<String> {
let source_id = pkg.package_id().source_id(); let source_id = pkg.package_id().source_id();
let sources = cx.packages.sources(); let sources = cx.packages.sources();
let source = sources.get(source_id).ok_or_else(|| { let source = sources
internal("missing package source") .get(source_id)
})?; .ok_or_else(|| internal("missing package source"))?;
source.fingerprint(pkg) source.fingerprint(pkg)
} }
fn mtime_if_fresh<I>(output: &Path, paths: I) -> Option<FileTime> fn mtime_if_fresh<I>(output: &Path, paths: I) -> Option<FileTime>
where I: IntoIterator, where
I::Item: AsRef<Path>, I: IntoIterator,
I::Item: AsRef<Path>,
{ {
let meta = match fs::metadata(output) { let meta = match fs::metadata(output) {
Ok(meta) => meta, Ok(meta) => meta,
@@ -675,7 +728,7 @@ fn mtime_if_fresh<I>(output: &Path, paths: I) -> Option<FileTime>
Ok(meta) => meta, Ok(meta) => meta,
Err(..) => { Err(..) => {
info!("stale: {} -- missing", path.display()); info!("stale: {} -- missing", path.display());
return true return true;
} }
}; };
let mtime2 = FileTime::from_last_modification_time(&meta); let mtime2 = FileTime::from_last_modification_time(&meta);
@@ -703,8 +756,7 @@ fn filename<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> String {
TargetKind::Lib(..) => "lib", TargetKind::Lib(..) => "lib",
TargetKind::Bin => "bin", TargetKind::Bin => "bin",
TargetKind::Test => "integration-test", TargetKind::Test => "integration-test",
TargetKind::ExampleBin | TargetKind::ExampleBin | TargetKind::ExampleLib(..) => "example",
TargetKind::ExampleLib(..) => "example",
TargetKind::Bench => "bench", TargetKind::Bench => "bench",
TargetKind::CustomBuild => "build-script", TargetKind::CustomBuild => "build-script",
}; };
@@ -734,14 +786,17 @@ fn filename<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> String {
/// ///
/// The serialized Cargo format will contain a list of files, all of which are /// The serialized Cargo format will contain a list of files, all of which are
/// relative if they're under `root`, or absolute if they're elsewhere. /// relative if they're under `root`, or absolute if they're elsewhere.
pub fn translate_dep_info(rustc_dep_info: &Path, pub fn translate_dep_info(
cargo_dep_info: &Path, rustc_dep_info: &Path,
pkg_root: &Path, cargo_dep_info: &Path,
rustc_cwd: &Path) -> CargoResult<()> { pkg_root: &Path,
rustc_cwd: &Path,
) -> CargoResult<()> {
let target = parse_rustc_dep_info(rustc_dep_info)?; let target = parse_rustc_dep_info(rustc_dep_info)?;
let deps = &target.get(0).ok_or_else(|| { let deps = &target
internal("malformed dep-info format, no targets".to_string()) .get(0)
})?.1; .ok_or_else(|| internal("malformed dep-info format, no targets".to_string()))?
.1;
let mut new_contents = Vec::new(); let mut new_contents = Vec::new();
for file in deps { for file in deps {
@@ -754,11 +809,10 @@ pub fn translate_dep_info(rustc_dep_info: &Path,
Ok(()) Ok(())
} }
pub fn parse_rustc_dep_info(rustc_dep_info: &Path) pub fn parse_rustc_dep_info(rustc_dep_info: &Path) -> CargoResult<Vec<(String, Vec<String>)>> {
-> CargoResult<Vec<(String, Vec<String>)>>
{
let contents = paths::read(rustc_dep_info)?; let contents = paths::read(rustc_dep_info)?;
contents.lines() contents
.lines()
.filter_map(|l| l.find(": ").map(|i| (l, i))) .filter_map(|l| l.find(": ").map(|i| (l, i)))
.map(|(line, pos)| { .map(|(line, pos)| {
let target = &line[..pos]; let target = &line[..pos];
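To illustrate the format these two functions handle, a simplified standalone parser for one rustc dep-info line (real dep-info files also escape spaces in paths, which this sketch ignores):

fn parse_line(line: &str) -> Option<(String, Vec<String>)> {
    // Each line looks like "<target>: <space-separated dependency paths>".
    let pos = line.find(": ")?;
    let target = line[..pos].to_string();
    let deps = line[pos + 2..]
        .split_whitespace()
        .map(str::to_string)
        .collect();
    Some((target, deps))
}

fn main() {
    let (target, deps) = parse_line("foo.rmeta: src/lib.rs src/util.rs").unwrap();
    assert_eq!(target, "foo.rmeta");
    assert_eq!(deps, vec!["src/lib.rs", "src/util.rs"]);
}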
View file
@@ -1,14 +1,17 @@
use std::fmt; use std::fmt;
use util::{CargoResult, Fresh, Dirty, Freshness}; use util::{CargoResult, Dirty, Fresh, Freshness};
use super::job_queue::JobState; use super::job_queue::JobState;
pub struct Job { dirty: Work, fresh: Work } pub struct Job {
dirty: Work,
fresh: Work,
}
/// Each proc should send its description before starting. /// Each proc should send its description before starting.
/// It should either send once or close immediately. /// It should either send once or close immediately.
pub struct Work { pub struct Work {
inner: Box<for <'a, 'b> FnBox<&'a JobState<'b>, CargoResult<()>> + Send>, inner: Box<for<'a, 'b> FnBox<&'a JobState<'b>, CargoResult<()>> + Send>,
} }
trait FnBox<A, R> { trait FnBox<A, R> {
@@ -23,7 +26,8 @@ impl<A, R, F: FnOnce(A) -> R> FnBox<A, R> for F {
impl Work { impl Work {
pub fn new<F>(f: F) -> Work pub fn new<F>(f: F) -> Work
where F: FnOnce(&JobState) -> CargoResult<()> + Send + 'static where
F: FnOnce(&JobState) -> CargoResult<()> + Send + 'static,
{ {
Work { inner: Box::new(f) } Work { inner: Box::new(f) }
} }
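Since the `FnBox` device above is easy to miss, here it is in isolation: at the time, a `Box<FnOnce>` could not be called directly, so a helper trait taking `self: Box<Self>` was the standard workaround (self-contained sketch):

trait FnBox<A, R> {
    fn call_box(self: Box<Self>, a: A) -> R;
}

impl<A, R, F: FnOnce(A) -> R> FnBox<A, R> for F {
    fn call_box(self: Box<Self>, a: A) -> R {
        // Moving out of the Box consumes the closure exactly once.
        (*self)(a)
    }
}

fn main() {
    let job: Box<dyn FnBox<u32, u32> + Send> = Box::new(|x| x + 1);
    assert_eq!(job.call_box(41), 42);
}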
View file
@@ -3,15 +3,15 @@ use std::collections::hash_map::HashMap;
use std::fmt; use std::fmt;
use std::io; use std::io;
use std::mem; use std::mem;
use std::sync::mpsc::{channel, Sender, Receiver}; use std::sync::mpsc::{channel, Receiver, Sender};
use crossbeam::{self, Scope}; use crossbeam::{self, Scope};
use jobserver::{Acquired, HelperThread}; use jobserver::{Acquired, HelperThread};
use core::{PackageId, Target, Profile}; use core::{PackageId, Profile, Target};
use util::{Config, DependencyQueue, Fresh, Dirty, Freshness}; use util::{Config, DependencyQueue, Dirty, Fresh, Freshness};
use util::{CargoResult, ProcessBuilder, profile, internal, CargoResultExt}; use util::{internal, profile, CargoResult, CargoResultExt, ProcessBuilder};
use {handle_error}; use handle_error;
use super::{Context, Kind, Unit}; use super::{Context, Kind, Unit};
use super::job::Job; use super::job::Job;
@@ -92,14 +92,18 @@ impl<'a> JobQueue<'a> {
} }
} }
pub fn enqueue<'cfg>(&mut self, pub fn enqueue<'cfg>(
cx: &Context<'a, 'cfg>, &mut self,
unit: &Unit<'a>, cx: &Context<'a, 'cfg>,
job: Job, unit: &Unit<'a>,
fresh: Freshness) -> CargoResult<()> { job: Job,
fresh: Freshness,
) -> CargoResult<()> {
let key = Key::new(unit); let key = Key::new(unit);
let deps = key.dependencies(cx)?; let deps = key.dependencies(cx)?;
self.queue.queue(Fresh, key, Vec::new(), &deps).push((job, fresh)); self.queue
.queue(Fresh, key, Vec::new(), &deps)
.push((job, fresh));
*self.counts.entry(key.pkg).or_insert(0) += 1; *self.counts.entry(key.pkg).or_insert(0) += 1;
Ok(()) Ok(())
} }
@@ -127,25 +131,23 @@ impl<'a> JobQueue<'a> {
// As a result, this `transmute` to a longer lifetime should be safe in // As a result, this `transmute` to a longer lifetime should be safe in
// practice. // practice.
let tx = self.tx.clone(); let tx = self.tx.clone();
let tx = unsafe { let tx = unsafe { mem::transmute::<Sender<Message<'a>>, Sender<Message<'static>>>(tx) };
mem::transmute::<Sender<Message<'a>>, Sender<Message<'static>>>(tx) let helper = cx.jobserver
}; .clone()
let helper = cx.jobserver.clone().into_helper_thread(move |token| { .into_helper_thread(move |token| {
drop(tx.send(Message::Token(token))); drop(tx.send(Message::Token(token)));
}).chain_err(|| { })
"failed to create helper thread for jobserver management" .chain_err(|| "failed to create helper thread for jobserver management")?;
})?;
crossbeam::scope(|scope| { crossbeam::scope(|scope| self.drain_the_queue(cx, scope, &helper))
self.drain_the_queue(cx, scope, &helper)
})
} }
fn drain_the_queue(&mut self, fn drain_the_queue(
cx: &mut Context, &mut self,
scope: &Scope<'a>, cx: &mut Context,
jobserver_helper: &HelperThread) scope: &Scope<'a>,
-> CargoResult<()> { jobserver_helper: &HelperThread,
) -> CargoResult<()> {
use std::time::Instant; use std::time::Instant;
let mut tokens = Vec::new(); let mut tokens = Vec::new();
@@ -170,13 +172,14 @@ impl<'a> JobQueue<'a> {
// start requesting job tokens. Each job after the first needs to // start requesting job tokens. Each job after the first needs to
// request a token. // request a token.
while let Some((fresh, key, jobs)) = self.queue.dequeue() { while let Some((fresh, key, jobs)) = self.queue.dequeue() {
let total_fresh = jobs.iter().fold(fresh, |fresh, &(_, f)| { let total_fresh = jobs.iter().fold(fresh, |fresh, &(_, f)| f.combine(fresh));
f.combine(fresh) self.pending.insert(
}); key,
self.pending.insert(key, PendingBuild { PendingBuild {
amt: jobs.len(), amt: jobs.len(),
fresh: total_fresh, fresh: total_fresh,
}); },
);
for (job, f) in jobs { for (job, f) in jobs {
queue.push((key, job, f.combine(fresh))); queue.push((key, job, f.combine(fresh)));
if self.active + queue.len() > 0 { if self.active + queue.len() > 0 {
@@ -196,7 +199,7 @@ impl<'a> JobQueue<'a> {
// If after all that we're not actually running anything then we're // If after all that we're not actually running anything then we're
// done! // done!
if self.active == 0 { if self.active == 0 {
break break;
} }
// And finally, before we block waiting for the next event, drop any // And finally, before we block waiting for the next event, drop any
@@ -237,8 +240,9 @@ impl<'a> JobQueue<'a> {
error = Some(format_err!("build failed")); error = Some(format_err!("build failed"));
handle_error(e, &mut *cx.config.shell()); handle_error(e, &mut *cx.config.shell());
cx.config.shell().warn( cx.config.shell().warn(
"build failed, waiting for other \ "build failed, waiting for other \
jobs to finish...")?; jobs to finish...",
)?;
} else { } else {
error = Some(e); error = Some(e);
} }
@@ -246,29 +250,32 @@ impl<'a> JobQueue<'a> {
} }
} }
Message::Token(acquired_token) => { Message::Token(acquired_token) => {
tokens.push(acquired_token.chain_err(|| { tokens.push(acquired_token.chain_err(|| "failed to acquire jobserver token")?);
"failed to acquire jobserver token"
})?);
} }
} }
} }
let build_type = if self.is_release { "release" } else { "dev" }; let build_type = if self.is_release { "release" } else { "dev" };
let profile = cx.lib_profile(); let profile = cx.lib_profile();
let mut opt_type = String::from(if profile.opt_level == "0" { "unoptimized" } let mut opt_type = String::from(if profile.opt_level == "0" {
else { "optimized" }); "unoptimized"
} else {
"optimized"
});
if profile.debuginfo.is_some() { if profile.debuginfo.is_some() {
opt_type += " + debuginfo"; opt_type += " + debuginfo";
} }
let duration = start_time.elapsed(); let duration = start_time.elapsed();
let time_elapsed = format!("{}.{1:.2} secs", let time_elapsed = format!(
duration.as_secs(), "{}.{1:.2} secs",
duration.subsec_nanos() / 10_000_000); duration.as_secs(),
duration.subsec_nanos() / 10_000_000
);
if self.queue.is_empty() { if self.queue.is_empty() {
let message = format!("{} [{}] target(s) in {}", let message = format!(
build_type, "{} [{}] target(s) in {}",
opt_type, build_type, opt_type, time_elapsed
time_elapsed); );
cx.config.shell().status("Finished", message)?; cx.config.shell().status("Finished", message)?;
Ok(()) Ok(())
} else if let Some(e) = error { } else if let Some(e) = error {
@@ -281,12 +288,14 @@ impl<'a> JobQueue<'a> {
/// Executes a job in the `scope` given, pushing the spawned thread's /// Executes a job in the `scope` given, pushing the spawned thread's
/// handle onto `threads`. /// handle onto `threads`.
fn run(&mut self, fn run(
key: Key<'a>, &mut self,
fresh: Freshness, key: Key<'a>,
job: Job, fresh: Freshness,
config: &Config, job: Job,
scope: &Scope<'a>) -> CargoResult<()> { config: &Config,
scope: &Scope<'a>,
) -> CargoResult<()> {
info!("start: {:?}", key); info!("start: {:?}", key);
self.active += 1; self.active += 1;
@@ -294,14 +303,14 @@ impl<'a> JobQueue<'a> {
let my_tx = self.tx.clone(); let my_tx = self.tx.clone();
let doit = move || { let doit = move || {
let res = job.run(fresh, &JobState { let res = job.run(fresh, &JobState { tx: my_tx.clone() });
tx: my_tx.clone(),
});
my_tx.send(Message::Finish(key, res)).unwrap(); my_tx.send(Message::Finish(key, res)).unwrap();
}; };
match fresh { match fresh {
Freshness::Fresh => doit(), Freshness::Fresh => doit(),
Freshness::Dirty => { scope.spawn(doit); } Freshness::Dirty => {
scope.spawn(doit);
}
} }
// Print out some nice progress information // Print out some nice progress information
@@ -354,13 +363,16 @@ impl<'a> JobQueue<'a> {
// In general, we try to print "Compiling" for the first nontrivial task // In general, we try to print "Compiling" for the first nontrivial task
// run for a package, regardless of when that is. We then don't print // run for a package, regardless of when that is. We then don't print
// out any more information for a package after we've printed it once. // out any more information for a package after we've printed it once.
fn note_working_on(&mut self, fn note_working_on(
config: &Config, &mut self,
key: &Key<'a>, config: &Config,
fresh: Freshness) -> CargoResult<()> { key: &Key<'a>,
if (self.compiled.contains(key.pkg) && !key.profile.doc) || fresh: Freshness,
(self.documented.contains(key.pkg) && key.profile.doc) { ) -> CargoResult<()> {
return Ok(()) if (self.compiled.contains(key.pkg) && !key.profile.doc)
|| (self.documented.contains(key.pkg) && key.profile.doc)
{
return Ok(());
} }
match fresh { match fresh {
@@ -397,8 +409,7 @@ impl<'a> Key<'a> {
} }
} }
fn dependencies<'cfg>(&self, cx: &Context<'a, 'cfg>) fn dependencies<'cfg>(&self, cx: &Context<'a, 'cfg>) -> CargoResult<Vec<Key<'a>>> {
-> CargoResult<Vec<Key<'a>>> {
let unit = Unit { let unit = Unit {
pkg: cx.get_package(self.pkg)?, pkg: cx.get_package(self.pkg)?,
target: self.target, target: self.target,
@@ -406,21 +417,27 @@ impl<'a> Key<'a> {
kind: self.kind, kind: self.kind,
}; };
let targets = cx.dep_targets(&unit)?; let targets = cx.dep_targets(&unit)?;
Ok(targets.iter().filter_map(|unit| { Ok(targets
// Binaries aren't actually needed to *compile* tests, just to run .iter()
// them, so we don't include this dependency edge in the job graph. .filter_map(|unit| {
if self.target.is_test() && unit.target.is_bin() { // Binaries aren't actually needed to *compile* tests, just to run
None // them, so we don't include this dependency edge in the job graph.
} else { if self.target.is_test() && unit.target.is_bin() {
Some(Key::new(unit)) None
} } else {
}).collect()) Some(Key::new(unit))
}
})
.collect())
} }
} }
impl<'a> fmt::Debug for Key<'a> { impl<'a> fmt::Debug for Key<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} => {}/{} => {:?}", self.pkg, self.target, self.profile, write!(
self.kind) f,
"{} => {}/{} => {:?}",
self.pkg, self.target, self.profile, self.kind
)
} }
} }
View file
@@ -51,10 +51,10 @@
use std::fs; use std::fs;
use std::io; use std::io;
use std::path::{PathBuf, Path}; use std::path::{Path, PathBuf};
use core::Workspace; use core::Workspace;
use util::{Config, FileLock, CargoResult, Filesystem}; use util::{CargoResult, Config, FileLock, Filesystem};
/// Contains the paths of all target output locations. /// Contains the paths of all target output locations.
/// ///
@@ -84,17 +84,15 @@ impl Layout {
/// ///
/// Differs from `at` in that this calculates the root path from the workspace target directory, /// Differs from `at` in that this calculates the root path from the workspace target directory,
/// adding the target triple and the profile (debug, release, ...). /// adding the target triple and the profile (debug, release, ...).
pub fn new(ws: &Workspace, pub fn new(ws: &Workspace, triple: Option<&str>, dest: &str) -> CargoResult<Layout> {
triple: Option<&str>,
dest: &str) -> CargoResult<Layout> {
let mut path = ws.target_dir(); let mut path = ws.target_dir();
// Flexible target specifications often point at filenames, so interpret // Flexible target specifications often point at filenames, so interpret
// the target triple as a Path and then just use the file stem as the // the target triple as a Path and then just use the file stem as the
// component for the directory name. // component for the directory name.
if let Some(triple) = triple { if let Some(triple) = triple {
path.push(Path::new(triple).file_stem().ok_or_else(|| { path.push(Path::new(triple)
format_err!("target was empty") .file_stem()
})?); .ok_or_else(|| format_err!("target was empty"))?);
} }
path.push(dest); path.push(dest);
Layout::at(ws.config(), path) Layout::at(ws.config(), path)
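The path construction this function performs, as a standalone sketch (hypothetical inputs; the real code also errors when the triple has no file stem):

use std::path::{Path, PathBuf};

fn layout_root(target_dir: PathBuf, triple: Option<&str>, dest: &str) -> PathBuf {
    let mut path = target_dir;
    if let Some(triple) = triple {
        // Triples may be given as paths to .json target specs, so only the
        // file stem becomes a directory component.
        if let Some(stem) = Path::new(triple).file_stem() {
            path.push(stem);
        }
    }
    path.push(dest);
    path
}

fn main() {
    let p = layout_root(PathBuf::from("target"), Some("x86_64-unknown-linux-gnu"), "debug");
    assert_eq!(p, PathBuf::from("target/x86_64-unknown-linux-gnu/debug"));
}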
@@ -131,7 +129,7 @@ impl Layout {
/// This is recommended to prevent derived/temporary files from bloating backups. /// This is recommended to prevent derived/temporary files from bloating backups.
fn exclude_from_backups(&self, path: &Path) { fn exclude_from_backups(&self, path: &Path) {
use std::ptr; use std::ptr;
use core_foundation::{url, number, string}; use core_foundation::{number, string, url};
use core_foundation::base::TCFType; use core_foundation::base::TCFType;
// For compatibility with 10.7 a string is used instead of global kCFURLIsExcludedFromBackupKey // For compatibility with 10.7 a string is used instead of global kCFURLIsExcludedFromBackupKey
@@ -177,17 +175,31 @@ impl Layout {
} }
/// Fetch the root path. /// Fetch the root path.
pub fn dest(&self) -> &Path { &self.root } pub fn dest(&self) -> &Path {
&self.root
}
/// Fetch the deps path. /// Fetch the deps path.
pub fn deps(&self) -> &Path { &self.deps } pub fn deps(&self) -> &Path {
&self.deps
}
/// Fetch the examples path. /// Fetch the examples path.
pub fn examples(&self) -> &Path { &self.examples } pub fn examples(&self) -> &Path {
&self.examples
}
/// Fetch the root path. /// Fetch the root path.
pub fn root(&self) -> &Path { &self.root } pub fn root(&self) -> &Path {
&self.root
}
/// Fetch the incremental path. /// Fetch the incremental path.
pub fn incremental(&self) -> &Path { &self.incremental } pub fn incremental(&self) -> &Path {
&self.incremental
}
/// Fetch the fingerprint path. /// Fetch the fingerprint path.
pub fn fingerprint(&self) -> &Path { &self.fingerprint } pub fn fingerprint(&self) -> &Path {
&self.fingerprint
}
/// Fetch the build path. /// Fetch the build path.
pub fn build(&self) -> &Path { &self.build } pub fn build(&self) -> &Path {
&self.build
}
} }
View file
@@ -1,7 +1,7 @@
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use std::fmt::Write; use std::fmt::Write;
use core::{Resolve, PackageId}; use core::{PackageId, Resolve};
use util::CargoResult; use util::CargoResult;
use super::Unit; use super::Unit;
@@ -21,7 +21,7 @@ impl<'a> Links<'a> {
pub fn validate(&mut self, resolve: &Resolve, unit: &Unit<'a>) -> CargoResult<()> { pub fn validate(&mut self, resolve: &Resolve, unit: &Unit<'a>) -> CargoResult<()> {
if !self.validated.insert(unit.pkg.package_id()) { if !self.validated.insert(unit.pkg.package_id()) {
return Ok(()) return Ok(());
} }
let lib = match unit.pkg.manifest().links() { let lib = match unit.pkg.manifest().links() {
Some(lib) => lib, Some(lib) => lib,
@@ -34,26 +34,37 @@ impl<'a> Links<'a> {
let dep_path = resolve.path_to_top(pkgid); let dep_path = resolve.path_to_top(pkgid);
let mut dep_path_desc = format!("package `{}`", dep_path[0]); let mut dep_path_desc = format!("package `{}`", dep_path[0]);
for dep in dep_path.iter().skip(1) { for dep in dep_path.iter().skip(1) {
write!(dep_path_desc, write!(dep_path_desc, "\n ... which is depended on by `{}`", dep).unwrap();
"\n ... which is depended on by `{}`",
dep).unwrap();
} }
dep_path_desc dep_path_desc
}; };
bail!("multiple packages link to native library `{}`, \ bail!(
but a native library can be linked only once\n\ "multiple packages link to native library `{}`, \
\n\ but a native library can be linked only once\n\
{}\nlinks to native library `{}`\n\ \n\
\n\ {}\nlinks to native library `{}`\n\
{}\nalso links to native library `{}`", \n\
lib, {}\nalso links to native library `{}`",
describe_path(prev), lib, lib,
describe_path(pkg), lib) describe_path(prev),
lib,
describe_path(pkg),
lib
)
} }
if !unit.pkg.manifest().targets().iter().any(|t| t.is_custom_build()) { if !unit.pkg
bail!("package `{}` specifies that it links to `{}` but does not \ .manifest()
have a custom build script", unit.pkg.package_id(), lib) .targets()
.iter()
.any(|t| t.is_custom_build())
{
bail!(
"package `{}` specifies that it links to `{}` but does not \
have a custom build script",
unit.pkg.package_id(),
lib
)
} }
self.links.insert(lib.to_string(), unit.pkg.package_id()); self.links.insert(lib.to_string(), unit.pkg.package_id());
Ok(()) Ok(())
View file
@@ -9,12 +9,12 @@ use std::sync::Arc;
use same_file::is_same_file; use same_file::is_same_file;
use serde_json; use serde_json;
use core::{Feature, Package, PackageId, PackageSet, Target, Resolve}; use core::{Feature, Package, PackageId, PackageSet, Resolve, Target};
use core::{Profile, Profiles, Workspace}; use core::{Profile, Profiles, Workspace};
use core::manifest::Lto; use core::manifest::Lto;
use core::shell::ColorChoice; use core::shell::ColorChoice;
use util::{self, ProcessBuilder, machine_message}; use util::{self, machine_message, ProcessBuilder};
use util::{Config, internal, profile, join_paths}; use util::{internal, join_paths, profile, Config};
use util::paths; use util::paths;
use util::errors::{CargoResult, CargoResultExt, Internal}; use util::errors::{CargoResult, CargoResultExt, Internal};
use util::Freshness; use util::Freshness;
@@ -25,8 +25,8 @@ use self::job_queue::JobQueue;
use self::output_depinfo::output_depinfo; use self::output_depinfo::output_depinfo;
pub use self::compilation::Compilation; pub use self::compilation::Compilation;
pub use self::context::{Context, Unit, TargetFileType}; pub use self::context::{Context, TargetFileType, Unit};
pub use self::custom_build::{BuildOutput, BuildMap, BuildScripts}; pub use self::custom_build::{BuildMap, BuildOutput, BuildScripts};
pub use self::layout::is_bad_artifact_name; pub use self::layout::is_bad_artifact_name;
mod compilation; mod compilation;
@@ -43,7 +43,10 @@ mod output_depinfo;
/// ///
/// These will be the same unless cross-compiling. /// These will be the same unless cross-compiling.
#[derive(PartialEq, Eq, Hash, Debug, Clone, Copy, PartialOrd, Ord)] #[derive(PartialEq, Eq, Hash, Debug, Clone, Copy, PartialOrd, Ord)]
pub enum Kind { Host, Target } pub enum Kind {
Host,
Target,
}
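A small sketch of how this enum is typically chosen per unit (it mirrors the `default_kind` logic in `compile_targets` below; standalone simplification, not Cargo's code):

#[derive(Clone, Copy, PartialEq, Debug)]
enum Kind { Host, Target }

fn kind_for(for_host: bool, cross: bool) -> Kind {
    // Build scripts and plugins always run on the host; everything else
    // follows the requested target when cross-compiling.
    if for_host {
        Kind::Host
    } else if cross {
        Kind::Target
    } else {
        Kind::Host
    }
}

fn main() {
    assert_eq!(kind_for(true, true), Kind::Host);
    assert_eq!(kind_for(false, true), Kind::Target);
    assert_eq!(kind_for(false, false), Kind::Host);
}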
/// Configuration information for a rustc build. /// Configuration information for a rustc build.
#[derive(Default, Clone)] #[derive(Default, Clone)]
@@ -97,22 +100,19 @@ pub trait Executor: Send + Sync + 'static {
/// In case of an `Err`, Cargo will not continue with the build process for /// In case of an `Err`, Cargo will not continue with the build process for
/// this package. /// this package.
fn exec(&self, fn exec(&self, cmd: ProcessBuilder, _id: &PackageId, _target: &Target) -> CargoResult<()> {
cmd: ProcessBuilder,
_id: &PackageId,
_target: &Target)
-> CargoResult<()> {
cmd.exec()?; cmd.exec()?;
Ok(()) Ok(())
} }
fn exec_json(&self, fn exec_json(
cmd: ProcessBuilder, &self,
_id: &PackageId, cmd: ProcessBuilder,
_target: &Target, _id: &PackageId,
handle_stdout: &mut FnMut(&str) -> CargoResult<()>, _target: &Target,
handle_stderr: &mut FnMut(&str) -> CargoResult<()>) handle_stdout: &mut FnMut(&str) -> CargoResult<()>,
-> CargoResult<()> { handle_stderr: &mut FnMut(&str) -> CargoResult<()>,
) -> CargoResult<()> {
cmd.exec_with_streaming(handle_stdout, handle_stderr, false)?; cmd.exec_with_streaming(handle_stdout, handle_stderr, false)?;
Ok(()) Ok(())
} }
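The hook shape used here, reduced to a standalone sketch: default trait methods run the command, and an implementor can observe or replace that behavior (simplified signatures, not Cargo's):

trait Executor: Send + Sync {
    fn exec(&self, cmd: &str) -> Result<(), String> {
        println!("running: {}", cmd);
        Ok(())
    }
}

// An alternative executor that intercepts instead of running.
struct DryRun;
impl Executor for DryRun {
    fn exec(&self, cmd: &str) -> Result<(), String> {
        println!("would run: {}", cmd);
        Ok(())
    }
}

fn main() {
    let exec: Box<dyn Executor> = Box::new(DryRun);
    exec.exec("rustc src/lib.rs --crate-name foo").unwrap();
}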
@@ -133,33 +133,38 @@ impl Executor for DefaultExecutor {}
// Returns a mapping of the root package plus its immediate dependencies to // Returns a mapping of the root package plus its immediate dependencies to
// where the compiled libraries are all located. // where the compiled libraries are all located.
pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>, pub fn compile_targets<'a, 'cfg: 'a>(
pkg_targets: &'a PackagesToBuild<'a>, ws: &Workspace<'cfg>,
packages: &'a PackageSet<'cfg>, pkg_targets: &'a PackagesToBuild<'a>,
resolve: &'a Resolve, packages: &'a PackageSet<'cfg>,
config: &'cfg Config, resolve: &'a Resolve,
build_config: BuildConfig, config: &'cfg Config,
profiles: &'a Profiles, build_config: BuildConfig,
exec: &Arc<Executor>) profiles: &'a Profiles,
-> CargoResult<Compilation<'cfg>> { exec: &Arc<Executor>,
let units = pkg_targets.iter().flat_map(|&(pkg, ref targets)| { ) -> CargoResult<Compilation<'cfg>> {
let default_kind = if build_config.requested_target.is_some() { let units = pkg_targets
Kind::Target .iter()
} else { .flat_map(|&(pkg, ref targets)| {
Kind::Host let default_kind = if build_config.requested_target.is_some() {
}; Kind::Target
targets.iter().map(move |&(target, profile)| { } else {
Unit { Kind::Host
};
targets.iter().map(move |&(target, profile)| Unit {
pkg, pkg,
target, target,
profile, profile,
kind: if target.for_host() {Kind::Host} else {default_kind}, kind: if target.for_host() {
} Kind::Host
} else {
default_kind
},
})
}) })
}).collect::<Vec<_>>(); .collect::<Vec<_>>();
let mut cx = Context::new(ws, resolve, packages, config, let mut cx = Context::new(ws, resolve, packages, config, build_config, profiles)?;
build_config, profiles)?;
let mut queue = JobQueue::new(&cx); let mut queue = JobQueue::new(&cx);
@@ -192,50 +197,73 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>,
}; };
if unit.profile.test { if unit.profile.test {
cx.compilation.tests.push((unit.pkg.clone(), cx.compilation.tests.push((
unit.target.kind().clone(), unit.pkg.clone(),
unit.target.name().to_string(), unit.target.kind().clone(),
dst.clone())); unit.target.name().to_string(),
dst.clone(),
));
} else if unit.target.is_bin() || unit.target.is_example() { } else if unit.target.is_bin() || unit.target.is_example() {
cx.compilation.binaries.push(bindst.clone()); cx.compilation.binaries.push(bindst.clone());
} else if unit.target.is_lib() { } else if unit.target.is_lib() {
let pkgid = unit.pkg.package_id().clone(); let pkgid = unit.pkg.package_id().clone();
cx.compilation.libraries.entry(pkgid).or_insert_with(HashSet::new) cx.compilation
.insert((unit.target.clone(), dst.clone())); .libraries
.entry(pkgid)
.or_insert_with(HashSet::new)
.insert((unit.target.clone(), dst.clone()));
} }
} }
for dep in cx.dep_targets(unit)?.iter() { for dep in cx.dep_targets(unit)?.iter() {
if !unit.target.is_lib() { continue } if !unit.target.is_lib() {
continue;
}
if dep.profile.run_custom_build { if dep.profile.run_custom_build {
let out_dir = cx.build_script_out_dir(dep).display().to_string(); let out_dir = cx.build_script_out_dir(dep).display().to_string();
cx.compilation.extra_env.entry(dep.pkg.package_id().clone()) cx.compilation
.or_insert_with(Vec::new) .extra_env
.push(("OUT_DIR".to_string(), out_dir)); .entry(dep.pkg.package_id().clone())
.or_insert_with(Vec::new)
.push(("OUT_DIR".to_string(), out_dir));
} }
if !dep.target.is_lib() { continue } if !dep.target.is_lib() {
if dep.profile.doc { continue } continue;
}
if dep.profile.doc {
continue;
}
let v = cx.target_filenames(dep)?; let v = cx.target_filenames(dep)?;
cx.compilation.libraries cx.compilation
.libraries
.entry(unit.pkg.package_id().clone()) .entry(unit.pkg.package_id().clone())
.or_insert_with(HashSet::new) .or_insert_with(HashSet::new)
.extend(v.iter().map(|&(ref f, _, _)| { .extend(
(dep.target.clone(), f.clone()) v.iter()
})); .map(|&(ref f, _, _)| (dep.target.clone(), f.clone())),
);
} }
let feats = cx.resolve.features(unit.pkg.package_id()); let feats = cx.resolve.features(unit.pkg.package_id());
if !feats.is_empty() { if !feats.is_empty() {
cx.compilation.cfgs.entry(unit.pkg.package_id().clone()).or_insert_with(|| { cx.compilation
feats.iter().map(|feat| format!("feature=\"{}\"", feat)).collect() .cfgs
}); .entry(unit.pkg.package_id().clone())
.or_insert_with(|| {
feats
.iter()
.map(|feat| format!("feature=\"{}\"", feat))
.collect()
});
} }
let rustdocflags = cx.rustdocflags_args(unit)?; let rustdocflags = cx.rustdocflags_args(unit)?;
if !rustdocflags.is_empty() { if !rustdocflags.is_empty() {
cx.compilation.rustdocflags.entry(unit.pkg.package_id().clone()) cx.compilation
.rustdocflags
.entry(unit.pkg.package_id().clone())
.or_insert(rustdocflags); .or_insert(rustdocflags);
} }
@@ -243,11 +271,15 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>,
} }
for (&(ref pkg, _), output) in cx.build_state.outputs.lock().unwrap().iter() { for (&(ref pkg, _), output) in cx.build_state.outputs.lock().unwrap().iter() {
cx.compilation.cfgs.entry(pkg.clone()) cx.compilation
.cfgs
.entry(pkg.clone())
.or_insert_with(HashSet::new) .or_insert_with(HashSet::new)
.extend(output.cfgs.iter().cloned()); .extend(output.cfgs.iter().cloned());
cx.compilation.extra_env.entry(pkg.clone()) cx.compilation
.extra_env
.entry(pkg.clone())
.or_insert_with(Vec::new) .or_insert_with(Vec::new)
.extend(output.env.iter().cloned()); .extend(output.env.iter().cloned());
@@ -259,18 +291,19 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>,
Ok(cx.compilation) Ok(cx.compilation)
} }
fn compile<'a, 'cfg: 'a>(cx: &mut Context<'a, 'cfg>, fn compile<'a, 'cfg: 'a>(
jobs: &mut JobQueue<'a>, cx: &mut Context<'a, 'cfg>,
unit: &Unit<'a>, jobs: &mut JobQueue<'a>,
exec: &Arc<Executor>) -> CargoResult<()> { unit: &Unit<'a>,
exec: &Arc<Executor>,
) -> CargoResult<()> {
if !cx.compiled.insert(*unit) { if !cx.compiled.insert(*unit) {
return Ok(()) return Ok(());
} }
// Build up the work to be done to compile this unit, enqueuing it once // Build up the work to be done to compile this unit, enqueuing it once
// we've got everything constructed. // we've got everything constructed.
let p = profile::start(format!("preparing: {}/{}", unit.pkg, let p = profile::start(format!("preparing: {}/{}", unit.pkg, unit.target.name()));
unit.target.name()));
fingerprint::prepare_init(cx, unit)?; fingerprint::prepare_init(cx, unit)?;
cx.links.validate(cx.resolve, unit)?; cx.links.validate(cx.resolve, unit)?;
@@ -307,9 +340,11 @@ fn compile<'a, 'cfg: 'a>(cx: &mut Context<'a, 'cfg>,
Ok(()) Ok(())
} }
fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, fn rustc<'a, 'cfg>(
unit: &Unit<'a>, cx: &mut Context<'a, 'cfg>,
exec: &Arc<Executor>) -> CargoResult<Work> { unit: &Unit<'a>,
exec: &Arc<Executor>,
) -> CargoResult<Work> {
let mut rustc = prepare_rustc(cx, &unit.target.rustc_crate_types(), unit)?; let mut rustc = prepare_rustc(cx, &unit.target.rustc_crate_types(), unit)?;
let name = unit.pkg.name().to_string(); let name = unit.pkg.name().to_string();
@@ -336,8 +371,7 @@ fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
// If we are a binary and the package also contains a library, then we // If we are a binary and the package also contains a library, then we
// don't pass the `-l` flags. // don't pass the `-l` flags.
let pass_l_flag = unit.target.is_lib() || let pass_l_flag = unit.target.is_lib() || !unit.pkg.targets().iter().any(|t| t.is_lib());
!unit.pkg.targets().iter().any(|t| t.is_lib());
let do_rename = unit.target.allows_underscores() && !unit.profile.test; let do_rename = unit.target.allows_underscores() && !unit.profile.test;
let real_name = unit.target.name().to_string(); let real_name = unit.target.name().to_string();
let crate_name = unit.target.crate_name(); let crate_name = unit.target.crate_name();
@@ -360,7 +394,10 @@ fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
let root_output = cx.target_root().to_path_buf(); let root_output = cx.target_root().to_path_buf();
let pkg_root = unit.pkg.root().to_path_buf(); let pkg_root = unit.pkg.root().to_path_buf();
let cwd = rustc.get_cwd().unwrap_or_else(|| cx.config.cwd()).to_path_buf(); let cwd = rustc
.get_cwd()
.unwrap_or_else(|| cx.config.cwd())
.to_path_buf();
return Ok(Work::new(move |state| { return Ok(Work::new(move |state| {
// Only at runtime have we discovered what the extra -L and -l // Only at runtime have we discovered what the extra -L and -l
@@ -372,10 +409,14 @@ fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
// previous build scripts, we include them in the rustc invocation. // previous build scripts, we include them in the rustc invocation.
if let Some(build_deps) = build_deps { if let Some(build_deps) = build_deps {
let build_state = build_state.outputs.lock().unwrap(); let build_state = build_state.outputs.lock().unwrap();
add_native_deps(&mut rustc, &build_state, &build_deps, add_native_deps(
pass_l_flag, &current_id)?; &mut rustc,
add_plugin_deps(&mut rustc, &build_state, &build_deps, &build_state,
&root_output)?; &build_deps,
pass_l_flag,
&current_id,
)?;
add_plugin_deps(&mut rustc, &build_state, &build_deps, &root_output)?;
add_custom_env(&mut rustc, &build_state, &current_id, kind)?; add_custom_env(&mut rustc, &build_state, &current_id, kind)?;
} }
@@ -393,11 +434,19 @@ fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
state.running(&rustc); state.running(&rustc);
if json_messages { if json_messages {
exec.exec_json(rustc, &package_id, &target, exec.exec_json(
&mut |line| if !line.is_empty() { rustc,
Err(internal(&format!("compiler stdout is not empty: `{}`", line))) &package_id,
} else { &target,
Ok(()) &mut |line| {
if !line.is_empty() {
Err(internal(&format!(
"compiler stdout is not empty: `{}`",
line
)))
} else {
Ok(())
}
}, },
&mut |line| { &mut |line| {
// stderr from rustc can have a mix of JSON and non-JSON output // stderr from rustc can have a mix of JSON and non-JSON output
@@ -417,38 +466,36 @@ fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
writeln!(io::stderr(), "{}", line)?; writeln!(io::stderr(), "{}", line)?;
} }
Ok(()) Ok(())
} },
).chain_err(|| { ).chain_err(|| format!("Could not compile `{}`.", name))?;
format!("Could not compile `{}`.", name)
})?;
} else { } else {
exec.exec(rustc, &package_id, &target) exec.exec(rustc, &package_id, &target)
.map_err(Internal::new) .map_err(Internal::new)
.chain_err(|| { .chain_err(|| format!("Could not compile `{}`.", name))?;
format!("Could not compile `{}`.", name)
})?;
} }
if do_rename && real_name != crate_name { if do_rename && real_name != crate_name {
let dst = &filenames[0].0; let dst = &filenames[0].0;
let src = dst.with_file_name(dst.file_name().unwrap() let src = dst.with_file_name(
.to_str().unwrap() dst.file_name()
.replace(&real_name, &crate_name)); .unwrap()
.to_str()
.unwrap()
.replace(&real_name, &crate_name),
);
if src.exists() && src.file_name() != dst.file_name() { if src.exists() && src.file_name() != dst.file_name() {
fs::rename(&src, &dst).chain_err(|| { fs::rename(&src, &dst)
internal(format!("could not rename crate {:?}", src)) .chain_err(|| internal(format!("could not rename crate {:?}", src)))?;
})?;
} }
} }
if rustc_dep_info_loc.exists() { if rustc_dep_info_loc.exists() {
fingerprint::translate_dep_info(&rustc_dep_info_loc, fingerprint::translate_dep_info(&rustc_dep_info_loc, &dep_info_loc, &pkg_root, &cwd)
&dep_info_loc,
&pkg_root,
&cwd)
.chain_err(|| { .chain_err(|| {
internal(format!("could not parse/generate dep info at: {}", internal(format!(
rustc_dep_info_loc.display())) "could not parse/generate dep info at: {}",
rustc_dep_info_loc.display()
))
})?; })?;
} }
@@ -457,15 +504,19 @@ fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
// Add all relevant -L and -l flags from dependencies (now calculated and // Add all relevant -L and -l flags from dependencies (now calculated and
// present in `state`) to the command provided // present in `state`) to the command provided
fn add_native_deps(rustc: &mut ProcessBuilder, fn add_native_deps(
build_state: &BuildMap, rustc: &mut ProcessBuilder,
build_scripts: &BuildScripts, build_state: &BuildMap,
pass_l_flag: bool, build_scripts: &BuildScripts,
current_id: &PackageId) -> CargoResult<()> { pass_l_flag: bool,
current_id: &PackageId,
) -> CargoResult<()> {
for key in build_scripts.to_link.iter() { for key in build_scripts.to_link.iter() {
let output = build_state.get(key).ok_or_else(|| { let output = build_state.get(key).ok_or_else(|| {
internal(format!("couldn't find build state for {}/{:?}", internal(format!(
key.0, key.1)) "couldn't find build state for {}/{:?}",
key.0, key.1
))
})?; })?;
for path in output.library_paths.iter() { for path in output.library_paths.iter() {
rustc.arg("-L").arg(path); rustc.arg("-L").arg(path);
@@ -486,10 +537,12 @@ fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
// Add all custom environment variables present in `state` (after they've // Add all custom environment variables present in `state` (after they've
// been put there by one of the `build_scripts`) to the command provided. // been put there by one of the `build_scripts`) to the command provided.
fn add_custom_env(rustc: &mut ProcessBuilder, fn add_custom_env(
build_state: &BuildMap, rustc: &mut ProcessBuilder,
current_id: &PackageId, build_state: &BuildMap,
kind: Kind) -> CargoResult<()> { current_id: &PackageId,
kind: Kind,
) -> CargoResult<()> {
let key = (current_id.clone(), kind); let key = (current_id.clone(), kind);
if let Some(output) = build_state.get(&key) { if let Some(output) = build_state.get(&key) {
for &(ref name, ref value) in output.env.iter() { for &(ref name, ref value) in output.env.iter() {
@@ -502,14 +555,18 @@ fn rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
/// Link the compiled target (often of form `foo-{metadata_hash}`) to the /// Link the compiled target (often of form `foo-{metadata_hash}`) to the
/// final target. This must happen during both "Fresh" and "Compile" /// final target. This must happen during both "Fresh" and "Compile"
fn link_targets<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, fn link_targets<'a, 'cfg>(
unit: &Unit<'a>, cx: &mut Context<'a, 'cfg>,
fresh: bool) -> CargoResult<Work> { unit: &Unit<'a>,
fresh: bool,
) -> CargoResult<Work> {
let filenames = cx.target_filenames(unit)?; let filenames = cx.target_filenames(unit)?;
let package_id = unit.pkg.package_id().clone(); let package_id = unit.pkg.package_id().clone();
let target = unit.target.clone(); let target = unit.target.clone();
let profile = unit.profile.clone(); let profile = unit.profile.clone();
let features = cx.resolve.features_sorted(&package_id).into_iter() let features = cx.resolve
.features_sorted(&package_id)
.into_iter()
.map(|s| s.to_owned()) .map(|s| s.to_owned())
.collect(); .collect();
let json_messages = cx.build_config.json_messages; let json_messages = cx.build_config.json_messages;
@@ -524,7 +581,7 @@ fn link_targets<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
// This may have been a `cargo rustc` command which changes the // This may have been a `cargo rustc` command which changes the
// output, so the source may not actually exist. // output, so the source may not actually exist.
if !src.exists() { if !src.exists() {
continue continue;
} }
let dst = match link_dst.as_ref() { let dst = match link_dst.as_ref() {
Some(dst) => dst, Some(dst) => dst,
@ -537,7 +594,7 @@ fn link_targets<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
debug!("linking {} to {}", src.display(), dst.display()); debug!("linking {} to {}", src.display(), dst.display());
if is_same_file(src, dst).unwrap_or(false) { if is_same_file(src, dst).unwrap_or(false) {
continue continue;
} }
if dst.exists() { if dst.exists() {
paths::remove_file(&dst)?; paths::remove_file(&dst)?;
@@ -563,8 +620,11 @@ fn link_targets<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
                     fs::copy(src, dst).map(|_| ())
                 })
                 .chain_err(|| {
-                    format!("failed to link or copy `{}` to `{}`",
-                            src.display(), dst.display())
+                    format!(
+                        "failed to link or copy `{}` to `{}`",
+                        src.display(),
+                        dst.display()
+                    )
                 })?;
         }
@@ -589,21 +649,24 @@ fn load_build_deps(cx: &Context, unit: &Unit) -> Option<Arc<BuildScripts>> {
 // For all plugin dependencies, add their -L paths (now calculated and
 // present in `state`) to the dynamic library load path for the command to
 // execute.
-fn add_plugin_deps(rustc: &mut ProcessBuilder,
-                   build_state: &BuildMap,
-                   build_scripts: &BuildScripts,
-                   root_output: &PathBuf)
-                   -> CargoResult<()> {
+fn add_plugin_deps(
+    rustc: &mut ProcessBuilder,
+    build_state: &BuildMap,
+    build_scripts: &BuildScripts,
+    root_output: &PathBuf,
+) -> CargoResult<()> {
     let var = util::dylib_path_envvar();
     let search_path = rustc.get_env(var).unwrap_or_default();
     let mut search_path = env::split_paths(&search_path).collect::<Vec<_>>();
     for id in build_scripts.plugins.iter() {
         let key = (id.clone(), Kind::Host);
-        let output = build_state.get(&key).ok_or_else(|| {
-            internal(format!("couldn't find libs for plugin dep {}", id))
-        })?;
-        search_path.append(&mut filter_dynamic_search_path(output.library_paths.iter(),
-                                                           root_output));
+        let output = build_state
+            .get(&key)
+            .ok_or_else(|| internal(format!("couldn't find libs for plugin dep {}", id)))?;
+        search_path.append(&mut filter_dynamic_search_path(
+            output.library_paths.iter(),
+            root_output,
+        ));
     }
     let search_path = join_paths(&search_path, var)?;
     rustc.env(var, &search_path);
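
A standalone sketch of the dynamic-library search-path mechanics used in
add_plugin_deps above; the variable name and directory are assumptions for
illustration (Cargo's util::dylib_path_envvar() picks the right variable per
platform):

use std::env;
use std::path::PathBuf;

fn main() {
    // Assumption: Linux-style variable; macOS would use DYLD_LIBRARY_PATH
    // and Windows uses PATH.
    let var = "LD_LIBRARY_PATH";
    let existing = env::var_os(var).unwrap_or_default();
    let mut search_path: Vec<PathBuf> = env::split_paths(&existing).collect();
    // Hypothetical build-script output directory for a plugin dependency.
    search_path.push(PathBuf::from("/tmp/target/debug/build/plugin-1a2b/out"));
    let joined = env::join_paths(&search_path).expect("path entry contained a separator");
    // A ProcessBuilder would now receive this via .env(var, &joined).
    println!("{}={}", var, joined.to_string_lossy());
}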
@@ -615,19 +678,21 @@ fn add_plugin_deps(rustc: &mut ProcessBuilder,
 // Strip off prefixes like "native=" or "framework=" and filter out directories
 // *not* inside our output directory since they are likely spurious and can cause
 // clashes with system shared libraries (issue #3366).
-fn filter_dynamic_search_path<'a, I>(paths :I, root_output: &PathBuf) -> Vec<PathBuf>
-    where I: Iterator<Item=&'a PathBuf> {
+fn filter_dynamic_search_path<'a, I>(paths: I, root_output: &PathBuf) -> Vec<PathBuf>
+where
+    I: Iterator<Item = &'a PathBuf>,
+{
     let mut search_path = vec![];
     for dir in paths {
         let dir = match dir.to_str() {
             Some(s) => {
                 let mut parts = s.splitn(2, '=');
                 match (parts.next(), parts.next()) {
-                    (Some("native"), Some(path)) |
-                    (Some("crate"), Some(path)) |
-                    (Some("dependency"), Some(path)) |
-                    (Some("framework"), Some(path)) |
-                    (Some("all"), Some(path)) => path.into(),
+                    (Some("native"), Some(path))
+                    | (Some("crate"), Some(path))
+                    | (Some("dependency"), Some(path))
+                    | (Some("framework"), Some(path))
+                    | (Some("all"), Some(path)) => path.into(),
                     _ => dir.clone(),
                 }
             }
@@ -636,16 +701,22 @@ fn filter_dynamic_search_path<'a, I>(paths :I, root_output: &PathBuf) -> Vec<Pat
         if dir.starts_with(&root_output) {
             search_path.push(dir);
         } else {
-            debug!("Not including path {} in runtime library search path because it is \
-                    outside target root {}", dir.display(), root_output.display());
+            debug!(
+                "Not including path {} in runtime library search path because it is \
+                 outside target root {}",
+                dir.display(),
+                root_output.display()
+            );
         }
     }
     search_path
 }
-fn prepare_rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
-                           crate_types: &[&str],
-                           unit: &Unit<'a>) -> CargoResult<ProcessBuilder> {
+fn prepare_rustc<'a, 'cfg>(
+    cx: &mut Context<'a, 'cfg>,
+    crate_types: &[&str],
+    unit: &Unit<'a>,
+) -> CargoResult<ProcessBuilder> {
     let mut base = cx.compilation.rustc_process(unit.pkg)?;
     base.inherit_jobserver(&cx.jobserver);
     build_base_args(cx, &mut base, unit, crate_types)?;
@@ -653,9 +724,7 @@ fn prepare_rustc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
     Ok(base)
 }

-fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
-                     unit: &Unit<'a>) -> CargoResult<Work> {
+fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<Work> {
     let mut rustdoc = cx.compilation.rustdoc_process(unit.pkg)?;
     rustdoc.inherit_jobserver(&cx.jobserver);
     rustdoc.arg("--crate-name").arg(&unit.target.crate_name());
@@ -702,7 +771,9 @@ fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
             }
         }
         state.running(&rustdoc);
-        rustdoc.exec().chain_err(|| format!("Could not document `{}`.", name))?;
+        rustdoc
+            .exec()
+            .chain_err(|| format!("Could not document `{}`.", name))?;
         Ok(())
     }))
 }
@@ -737,14 +808,27 @@ fn add_path_args(cx: &Context, unit: &Unit, cmd: &mut ProcessBuilder) {
     cmd.cwd(cwd);
 }

-fn build_base_args<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
-                             cmd: &mut ProcessBuilder,
-                             unit: &Unit<'a>,
-                             crate_types: &[&str]) -> CargoResult<()> {
+fn build_base_args<'a, 'cfg>(
+    cx: &mut Context<'a, 'cfg>,
+    cmd: &mut ProcessBuilder,
+    unit: &Unit<'a>,
+    crate_types: &[&str],
+) -> CargoResult<()> {
     let Profile {
-        ref opt_level, ref lto, codegen_units, ref rustc_args, debuginfo,
-        debug_assertions, overflow_checks, rpath, test, doc: _doc,
-        run_custom_build, ref panic, check, ..
+        ref opt_level,
+        ref lto,
+        codegen_units,
+        ref rustc_args,
+        debuginfo,
+        debug_assertions,
+        overflow_checks,
+        rpath,
+        test,
+        doc: _doc,
+        run_custom_build,
+        ref panic,
+        check,
+        ..
     } = *unit.profile;
     assert!(!run_custom_build);
@@ -753,8 +837,12 @@ fn build_base_args<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
     add_path_args(cx, unit, cmd);

     match cx.config.shell().color_choice() {
-        ColorChoice::Always => { cmd.arg("--color").arg("always"); }
-        ColorChoice::Never => { cmd.arg("--color").arg("never"); }
+        ColorChoice::Always => {
+            cmd.arg("--color").arg("always");
+        }
+        ColorChoice::Never => {
+            cmd.arg("--color").arg("never");
+        }
         ColorChoice::CargoAuto => {}
     }
@@ -774,10 +862,8 @@ fn build_base_args<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
         cmd.arg("--emit=dep-info,link");
     }

-    let prefer_dynamic = (unit.target.for_host() &&
-                          !unit.target.is_custom_build()) ||
-                         (crate_types.contains(&"dylib") &&
-                          cx.ws.members().any(|p| p != unit.pkg));
+    let prefer_dynamic = (unit.target.for_host() && !unit.target.is_custom_build())
+        || (crate_types.contains(&"dylib") && cx.ws.members().any(|p| p != unit.pkg));
     if prefer_dynamic {
         cmd.arg("-C").arg("prefer-dynamic");
     }
@@ -875,7 +961,8 @@ fn build_base_args<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
             cmd.arg("-C").arg(&format!("extra-filename=-{}", m));
         }
         None => {
-            cmd.arg("-C").arg(&format!("metadata={}", cx.target_short_hash(unit)));
+            cmd.arg("-C")
+                .arg(&format!("metadata={}", cx.target_short_hash(unit)));
         }
     }
@@ -885,8 +972,7 @@ fn build_base_args<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
     cmd.arg("--out-dir").arg(&cx.out_dir(unit));

-    fn opt(cmd: &mut ProcessBuilder, key: &str, prefix: &str,
-           val: Option<&OsStr>) {
+    fn opt(cmd: &mut ProcessBuilder, key: &str, prefix: &str, val: Option<&OsStr>) {
         if let Some(val) = val {
             let mut joined = OsString::from(prefix);
             joined.push(val);
@@ -895,20 +981,31 @@ fn build_base_args<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
     }

     if unit.kind == Kind::Target {
-        opt(cmd, "--target", "", cx.requested_target().map(|s| s.as_ref()));
+        opt(
+            cmd,
+            "--target",
+            "",
+            cx.requested_target().map(|s| s.as_ref()),
+        );
     }

     opt(cmd, "-C", "ar=", cx.ar(unit.kind).map(|s| s.as_ref()));
-    opt(cmd, "-C", "linker=", cx.linker(unit.kind).map(|s| s.as_ref()));
+    opt(
+        cmd,
+        "-C",
+        "linker=",
+        cx.linker(unit.kind).map(|s| s.as_ref()),
+    );
     cmd.args(&cx.incremental_args(unit)?);

     Ok(())
 }

-fn build_deps_args<'a, 'cfg>(cmd: &mut ProcessBuilder,
-                             cx: &mut Context<'a, 'cfg>,
-                             unit: &Unit<'a>) -> CargoResult<()> {
+fn build_deps_args<'a, 'cfg>(
+    cmd: &mut ProcessBuilder,
+    cx: &mut Context<'a, 'cfg>,
+    unit: &Unit<'a>,
+) -> CargoResult<()> {
     cmd.arg("-L").arg(&{
         let mut deps = OsString::from("dependency=");
         deps.push(cx.deps_dir(unit));
@@ -930,17 +1027,24 @@ fn build_deps_args<'a, 'cfg>(cmd: &mut ProcessBuilder,
     // If there is not one linkable target but should, rustc fails later
     // on if there is an `extern crate` for it. This may turn into a hard
     // error in the future, see PR #4797
-    if !dep_targets.iter().any(|u| !u.profile.doc && u.target.linkable()) {
-        if let Some(u) = dep_targets.iter()
-            .find(|u| !u.profile.doc && u.target.is_lib()) {
-            cx.config.shell().warn(format!("The package `{}` \
-provides no linkable target. The compiler might raise an error while compiling \
-`{}`. Consider adding 'dylib' or 'rlib' to key `crate-type` in `{}`'s \
-Cargo.toml. This warning might turn into a hard error in the future.",
-                                           u.target.crate_name(),
-                                           unit.target.crate_name(),
-                                           u.target.crate_name()))?;
-        }
+    if !dep_targets
+        .iter()
+        .any(|u| !u.profile.doc && u.target.linkable())
+    {
+        if let Some(u) = dep_targets
+            .iter()
+            .find(|u| !u.profile.doc && u.target.is_lib())
+        {
+            cx.config.shell().warn(format!(
+                "The package `{}` \
+provides no linkable target. The compiler might raise an error while compiling \
+`{}`. Consider adding 'dylib' or 'rlib' to key `crate-type` in `{}`'s \
+Cargo.toml. This warning might turn into a hard error in the future.",
+                u.target.crate_name(),
+                unit.target.crate_name(),
+                u.target.crate_name()
+            ))?;
+        }
     }

     for dep in dep_targets {
@@ -954,13 +1058,15 @@ Cargo.toml. This warning might turn into a hard error in the future.",
     return Ok(());

-    fn link_to<'a, 'cfg>(cmd: &mut ProcessBuilder,
-                         cx: &mut Context<'a, 'cfg>,
-                         current: &Unit<'a>,
-                         dep: &Unit<'a>) -> CargoResult<()> {
+    fn link_to<'a, 'cfg>(
+        cmd: &mut ProcessBuilder,
+        cx: &mut Context<'a, 'cfg>,
+        current: &Unit<'a>,
+        dep: &Unit<'a>,
+    ) -> CargoResult<()> {
         for &(ref dst, _, file_type) in cx.target_filenames(dep)?.iter() {
             if file_type != TargetFileType::Linkable {
-                continue
+                continue;
             }
             let mut v = OsString::new();
@@ -972,7 +1078,9 @@ Cargo.toml. This warning might turn into a hard error in the future.",
             //
             // This I believe mostly works out for now, but we'll likely want
             // to tighten up this in the future.
-            let name = current.pkg.dependencies()
+            let name = current
+                .pkg
+                .dependencies()
                 .iter()
                 .filter(|d| d.matches_ignoring_source(dep.pkg.summary()))
                 .filter_map(|d| d.rename())
@@ -991,9 +1099,9 @@ Cargo.toml. This warning might turn into a hard error in the future.",
 fn envify(s: &str) -> String {
     s.chars()
         .flat_map(|c| c.to_uppercase())
-        .map(|c| if c == '-' {'_'} else {c})
+        .map(|c| if c == '-' { '_' } else { c })
         .collect()
 }
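
envify is small enough to check directly; the same body, lifted into a
standalone sketch with example inputs:

fn envify(s: &str) -> String {
    s.chars()
        .flat_map(|c| c.to_uppercase())
        .map(|c| if c == '-' { '_' } else { c })
        .collect()
}

fn main() {
    // Crate names become environment-variable friendly identifiers.
    assert_eq!(envify("serde-json"), "SERDE_JSON");
    assert_eq!(envify("log"), "LOG");
}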
 impl Kind {
@@ -1,10 +1,10 @@
-use std::collections::{HashSet, BTreeSet};
-use std::io::{Write, BufWriter};
+use std::collections::{BTreeSet, HashSet};
+use std::io::{BufWriter, Write};
 use std::fs::File;
 use std::path::{Path, PathBuf};

 use ops::{Context, Unit};
-use util::{CargoResult, internal};
+use util::{internal, CargoResult};
 use util::paths;
 use ops::cargo_rustc::fingerprint;
@@ -15,9 +15,12 @@ fn render_filename<P: AsRef<Path>>(path: P, basedir: Option<&str>) -> CargoResul
         Some(base) => match path.strip_prefix(base) {
             Ok(relpath) => relpath,
             _ => path,
-        }
+        },
     };
-    relpath.to_str().ok_or_else(|| internal("path not utf-8")).map(|f| f.replace(" ", "\\ "))
+    relpath
+        .to_str()
+        .ok_or_else(|| internal("path not utf-8"))
+        .map(|f| f.replace(" ", "\\ "))
 }
 fn add_deps_for_unit<'a, 'b>(
@@ -25,9 +28,7 @@ fn add_deps_for_unit<'a, 'b>(
     context: &mut Context<'a, 'b>,
     unit: &Unit<'a>,
     visited: &mut HashSet<Unit<'a>>,
-)
-    -> CargoResult<()>
-{
+) -> CargoResult<()> {
     if !visited.insert(*unit) {
         return Ok(());
     }
@@ -42,8 +43,11 @@ fn add_deps_for_unit<'a, 'b>(
                 deps.insert(path);
             }
         } else {
-            debug!("can't find dep_info for {:?} {:?}",
-                   unit.pkg.package_id(), unit.profile);
+            debug!(
+                "can't find dep_info for {:?} {:?}",
+                unit.pkg.package_id(),
+                unit.profile
+            );
             return Err(internal("dep_info missing"));
         }
     }
@@ -73,8 +77,12 @@ pub fn output_depinfo<'a, 'b>(context: &mut Context<'a, 'b>, unit: &Unit<'a>) ->
     let basedir_string;
     let basedir = match context.config.get_path("build.dep-info-basedir")? {
         Some(value) => {
-            basedir_string = value.val.as_os_str().to_str().
-                ok_or_else(|| internal("build.dep-info-basedir path not utf-8"))?.to_string();
+            basedir_string = value
+                .val
+                .as_os_str()
+                .to_str()
+                .ok_or_else(|| internal("build.dep-info-basedir path not utf-8"))?
+                .to_string();
             Some(basedir_string.as_str())
         }
         None => None,
@@ -92,10 +100,8 @@ pub fn output_depinfo<'a, 'b>(context: &mut Context<'a, 'b>, unit: &Unit<'a>) ->
         // If nothing changed don't recreate the file which could alter
         // its mtime
         if let Ok(previous) = fingerprint::parse_rustc_dep_info(&output_path) {
-            if previous.len() == 1 &&
-               previous[0].0 == target_fn &&
-               previous[0].1 == deps {
-                continue
+            if previous.len() == 1 && previous[0].0 == target_fn && previous[0].1 == deps {
+                continue;
             }
         }
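
The mtime-preserving idea above can be sketched in isolation. Cargo compares
the parsed dep-info rather than raw bytes; this simplified version compares
file contents directly (an assumption for brevity):

use std::fs;
use std::io;
use std::path::Path;

// Skip rewriting when the on-disk contents already match, so the file's
// mtime (which build systems key off) is left untouched.
fn write_if_changed(path: &Path, contents: &str) -> io::Result<()> {
    if let Ok(previous) = fs::read_to_string(path) {
        if previous == contents {
            return Ok(());
        }
    }
    fs::write(path, contents)
}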
@@ -1,7 +1,7 @@
-use std::ffi::{OsString, OsStr};
+use std::ffi::{OsStr, OsString};

 use ops::{self, Compilation};
-use util::{self, CargoTestError, Test, ProcessError};
+use util::{self, CargoTestError, ProcessError, Test};
 use util::errors::CargoResult;
 use core::Workspace;
@@ -12,13 +12,15 @@ pub struct TestOptions<'a> {
     pub only_doc: bool,
 }

-pub fn run_tests(ws: &Workspace,
-                 options: &TestOptions,
-                 test_args: &[String]) -> CargoResult<Option<CargoTestError>> {
+pub fn run_tests(
+    ws: &Workspace,
+    options: &TestOptions,
+    test_args: &[String],
+) -> CargoResult<Option<CargoTestError>> {
     let compilation = compile_tests(ws, options)?;

     if options.no_run {
-        return Ok(None)
+        return Ok(None);
     }
     let (test, mut errors) = if options.only_doc {
         assert!(options.compile_opts.filter.is_specific());
@@ -29,7 +31,7 @@ pub fn run_tests(ws: &Workspace,
     // If we have an error and want to fail fast, return
     if !errors.is_empty() && !options.no_fail_fast {
-        return Ok(Some(CargoTestError::new(test, errors)))
+        return Ok(Some(CargoTestError::new(test, errors)));
     }

     // If a specific test was requested or we're not running any tests at all,
@@ -37,7 +39,7 @@ pub fn run_tests(ws: &Workspace,
     if options.compile_opts.filter.is_specific() {
         match errors.len() {
             0 => return Ok(None),
-            _ => return Ok(Some(CargoTestError::new(test, errors)))
+            _ => return Ok(Some(CargoTestError::new(test, errors))),
         }
     }
@@ -51,15 +53,17 @@ pub fn run_tests(ws: &Workspace,
     }
 }

-pub fn run_benches(ws: &Workspace,
-                   options: &TestOptions,
-                   args: &[String]) -> CargoResult<Option<CargoTestError>> {
+pub fn run_benches(
+    ws: &Workspace,
+    options: &TestOptions,
+    args: &[String],
+) -> CargoResult<Option<CargoTestError>> {
     let mut args = args.to_vec();
     args.push("--bench".to_string());

     let compilation = compile_tests(ws, options)?;
     if options.no_run {
-        return Ok(None)
+        return Ok(None);
     }
     let (test, errors) = run_unit_tests(options, &args, &compilation)?;
     match errors.len() {
@@ -68,21 +72,23 @@ pub fn run_benches(ws: &Workspace,
     }
 }

-fn compile_tests<'a>(ws: &Workspace<'a>,
-                     options: &TestOptions<'a>)
-                     -> CargoResult<Compilation<'a>> {
+fn compile_tests<'a>(
+    ws: &Workspace<'a>,
+    options: &TestOptions<'a>,
+) -> CargoResult<Compilation<'a>> {
     let mut compilation = ops::compile(ws, &options.compile_opts)?;
-    compilation.tests.sort_by(|a, b| {
-        (a.0.package_id(), &a.1, &a.2).cmp(&(b.0.package_id(), &b.1, &b.2))
-    });
+    compilation
+        .tests
+        .sort_by(|a, b| (a.0.package_id(), &a.1, &a.2).cmp(&(b.0.package_id(), &b.1, &b.2)));
     Ok(compilation)
 }

 /// Run the unit and integration tests of a project.
-fn run_unit_tests(options: &TestOptions,
-                  test_args: &[String],
-                  compilation: &Compilation)
-                  -> CargoResult<(Test, Vec<ProcessError>)> {
+fn run_unit_tests(
+    options: &TestOptions,
+    test_args: &[String],
+    compilation: &Compilation,
+) -> CargoResult<(Test, Vec<ProcessError>)> {
     let config = options.compile_opts.config;
     let cwd = options.compile_opts.config.cwd();
@@ -95,12 +101,12 @@ fn run_unit_tests(options: &TestOptions,
     };
     let mut cmd = compilation.target_process(exe, pkg)?;
     cmd.args(test_args);
-    config.shell().concise(|shell| {
-        shell.status("Running", to_display.display().to_string())
-    })?;
-    config.shell().verbose(|shell| {
-        shell.status("Running", cmd.to_string())
-    })?;
+    config
+        .shell()
+        .concise(|shell| shell.status("Running", to_display.display().to_string()))?;
+    config
+        .shell()
+        .verbose(|shell| shell.status("Running", cmd.to_string()))?;

     let result = cmd.exec();
@@ -118,16 +124,27 @@ fn run_unit_tests(options: &TestOptions,

     if errors.len() == 1 {
         let (kind, name, pkg_name, e) = errors.pop().unwrap();
-        Ok((Test::UnitTest{kind, name, pkg_name}, vec![e]))
+        Ok((
+            Test::UnitTest {
+                kind,
+                name,
+                pkg_name,
+            },
+            vec![e],
+        ))
     } else {
-        Ok((Test::Multiple, errors.into_iter().map(|(_, _, _, e)| e).collect()))
+        Ok((
+            Test::Multiple,
+            errors.into_iter().map(|(_, _, _, e)| e).collect(),
+        ))
     }
 }

-fn run_doc_tests(options: &TestOptions,
-                 test_args: &[String],
-                 compilation: &Compilation)
-                 -> CargoResult<(Test, Vec<ProcessError>)> {
+fn run_doc_tests(
+    options: &TestOptions,
+    test_args: &[String],
+    compilation: &Compilation,
+) -> CargoResult<(Test, Vec<ProcessError>)> {
     let mut errors = Vec::new();
     let config = options.compile_opts.config;
@@ -137,16 +154,24 @@ fn run_doc_tests(options: &TestOptions,
     }

     let libs = compilation.to_doc_test.iter().map(|package| {
-        (package, package.targets().iter().filter(|t| t.doctested())
-            .map(|t| (t.src_path(), t.name(), t.crate_name())))
+        (
+            package,
+            package
+                .targets()
+                .iter()
+                .filter(|t| t.doctested())
+                .map(|t| (t.src_path(), t.name(), t.crate_name())),
+        )
     });

     for (package, tests) in libs {
         for (lib, name, crate_name) in tests {
             config.shell().status("Doc-tests", name)?;
             let mut p = compilation.rustdoc_process(package)?;
-            p.arg("--test").arg(lib)
-                .arg("--crate-name").arg(&crate_name);
+            p.arg("--test")
+                .arg(lib)
+                .arg("--crate-name")
+                .arg(&crate_name);

             for &rust_dep in &[&compilation.deps_output] {
                 let mut arg = OsString::from("dependency=");
@@ -186,9 +211,8 @@ fn run_doc_tests(options: &TestOptions,
                     // dynamically as well, causing problems. As a result we only
                     // pass `--extern` for rlib deps and skip out on all other
                     // artifacts.
-                    if lib.extension() != Some(OsStr::new("rlib")) &&
-                       !target.for_host() {
-                        continue
+                    if lib.extension() != Some(OsStr::new("rlib")) && !target.for_host() {
+                        continue;
                     }
                     let mut arg = OsString::from(target.crate_name());
                     arg.push("=");
@@ -200,9 +224,9 @@ fn run_doc_tests(options: &TestOptions,
                 p.args(flags);
             }

-            config.shell().verbose(|shell| {
-                shell.status("Running", p.to_string())
-            })?;
+            config
+                .shell()
+                .verbose(|shell| shell.status("Running", p.to_string()))?;
             if let Err(e) = p.exec() {
                 let e = e.downcast::<ProcessError>()?;
                 errors.push(e);
@@ -2,7 +2,7 @@ use std::io::prelude::*;

 use toml;

-use core::{Resolve, resolver, Workspace};
+use core::{resolver, Resolve, Workspace};
 use core::resolver::WorkspaceResolve;
 use util::Filesystem;
 use util::errors::{CargoResult, CargoResultExt};
@@ -10,24 +10,23 @@ use util::toml as cargo_toml;

 pub fn load_pkg_lockfile(ws: &Workspace) -> CargoResult<Option<Resolve>> {
     if !ws.root().join("Cargo.lock").exists() {
-        return Ok(None)
+        return Ok(None);
     }

     let root = Filesystem::new(ws.root().to_path_buf());
     let mut f = root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file")?;

     let mut s = String::new();
-    f.read_to_string(&mut s).chain_err(|| {
-        format!("failed to read file: {}", f.path().display())
-    })?;
+    f.read_to_string(&mut s)
+        .chain_err(|| format!("failed to read file: {}", f.path().display()))?;

-    let resolve = (|| -> CargoResult<Option<Resolve>> {
-        let resolve : toml::Value = cargo_toml::parse(&s, f.path(), ws.config())?;
-        let v: resolver::EncodableResolve = resolve.try_into()?;
-        Ok(Some(v.into_resolve(ws)?))
-    })().chain_err(|| {
-        format!("failed to parse lock file at: {}", f.path().display())
-    })?;
+    let resolve =
+        (|| -> CargoResult<Option<Resolve>> {
+            let resolve: toml::Value = cargo_toml::parse(&s, f.path(), ws.config())?;
+            let v: resolver::EncodableResolve = resolve.try_into()?;
+            Ok(Some(v.into_resolve(ws)?))
+        })()
+            .chain_err(|| format!("failed to parse lock file at: {}", f.path().display()))?;
     Ok(resolve)
 }
@@ -71,7 +70,7 @@ pub fn write_pkg_lockfile(ws: &Workspace, resolve: &Resolve) -> CargoResult<()>
     // helpful on read-only filesystems.
     if let Ok(orig) = orig {
         if are_equal_lockfiles(orig, &out, ws) {
-            return Ok(())
+            return Ok(());
         }
     }
@@ -80,20 +79,27 @@ pub fn write_pkg_lockfile(ws: &Workspace, resolve: &Resolve) -> CargoResult<()>
             bail!("can't update in the offline mode");
         }

-        let flag = if ws.config().network_allowed() {"--locked"} else {"--frozen"};
-        bail!("the lock file needs to be updated but {} was passed to \
-               prevent this", flag);
+        let flag = if ws.config().network_allowed() {
+            "--locked"
+        } else {
+            "--frozen"
+        };
+        bail!(
+            "the lock file needs to be updated but {} was passed to \
+             prevent this",
+            flag
+        );
     }

     // Ok, if that didn't work just write it out
-    ws_root.open_rw("Cargo.lock", ws.config(), "Cargo.lock file").and_then(|mut f| {
-        f.file().set_len(0)?;
-        f.write_all(out.as_bytes())?;
-        Ok(())
-    }).chain_err(|| {
-        format!("failed to write {}",
-                ws.root().join("Cargo.lock").display())
-    })?;
+    ws_root
+        .open_rw("Cargo.lock", ws.config(), "Cargo.lock file")
+        .and_then(|mut f| {
+            f.file().set_len(0)?;
+            f.write_all(out.as_bytes())?;
+            Ok(())
+        })
+        .chain_err(|| format!("failed to write {}", ws.root().join("Cargo.lock").display()))?;

     Ok(())
 }
@@ -3,27 +3,28 @@ pub use self::cargo_compile::{compile, compile_with_exec, compile_ws, CompileOpt
 pub use self::cargo_compile::{CompileFilter, CompileMode, FilterRule, MessageFormat, Packages};
 pub use self::cargo_read_manifest::{read_package, read_packages};
 pub use self::cargo_rustc::{compile_targets, Compilation, Kind, Unit};
-pub use self::cargo_rustc::{Context, is_bad_artifact_name};
-pub use self::cargo_rustc::{BuildOutput, BuildConfig, TargetConfig};
-pub use self::cargo_rustc::{Executor, DefaultExecutor};
+pub use self::cargo_rustc::{is_bad_artifact_name, Context};
+pub use self::cargo_rustc::{BuildConfig, BuildOutput, TargetConfig};
+pub use self::cargo_rustc::{DefaultExecutor, Executor};
 pub use self::cargo_run::run;
 pub use self::cargo_install::{install, install_list, uninstall};
-pub use self::cargo_new::{new, init, NewOptions, VersionControl};
+pub use self::cargo_new::{init, new, NewOptions, VersionControl};
 pub use self::cargo_doc::{doc, DocOptions};
-pub use self::cargo_generate_lockfile::{generate_lockfile};
-pub use self::cargo_generate_lockfile::{update_lockfile};
+pub use self::cargo_generate_lockfile::generate_lockfile;
+pub use self::cargo_generate_lockfile::update_lockfile;
 pub use self::cargo_generate_lockfile::UpdateOptions;
 pub use self::lockfile::{load_pkg_lockfile, write_pkg_lockfile};
-pub use self::cargo_test::{run_tests, run_benches, TestOptions};
+pub use self::cargo_test::{run_benches, run_tests, TestOptions};
 pub use self::cargo_package::{package, PackageOpts};
 pub use self::registry::{publish, registry_configuration, RegistryConfig};
-pub use self::registry::{registry_login, search, needs_custom_http_transport, http_handle};
+pub use self::registry::{http_handle, needs_custom_http_transport, registry_login, search};
 pub use self::registry::{modify_owners, yank, OwnersOptions, PublishOpts};
 pub use self::registry::configure_http_handle;
 pub use self::cargo_fetch::fetch;
 pub use self::cargo_pkgid::pkgid;
-pub use self::resolve::{resolve_ws, resolve_ws_precisely, resolve_ws_with_method, resolve_with_previous};
-pub use self::cargo_output_metadata::{output_metadata, OutputMetadataOptions, ExportInfo};
+pub use self::resolve::{resolve_with_previous, resolve_ws, resolve_ws_precisely,
+                        resolve_ws_with_method};
+pub use self::cargo_output_metadata::{output_metadata, ExportInfo, OutputMetadataOptions};

 mod cargo_clean;
 mod cargo_compile;
@@ -5,7 +5,7 @@ use std::time::Duration;

 use curl::easy::{Easy, SslOpt};
 use git2;
-use registry::{Registry, NewCrate, NewCrateDependency};
+use registry::{NewCrate, NewCrateDependency, Registry};
 use url::percent_encoding::{percent_encode, QUERY_ENCODE_SET};

@@ -15,7 +15,7 @@ use core::{Package, SourceId, Workspace};
 use core::dependency::Kind;
 use core::manifest::ManifestMetadata;
 use ops;
-use sources::{RegistrySource};
+use sources::RegistrySource;
 use util::config::{self, Config};
 use util::paths;
 use util::ToUrl;
@@ -53,8 +53,11 @@ pub fn publish(ws: &Workspace, opts: &PublishOpts) -> CargoResult<()> {
             Some(ref registry) => allowed_registries.contains(registry),
             None => false,
         } {
-            bail!("some crates cannot be published.\n\
-                   `{}` is marked as unpublishable", pkg.name());
+            bail!(
+                "some crates cannot be published.\n\
+                 `{}` is marked as unpublishable",
+                pkg.name()
+            );
         }
     }
@@ -62,40 +65,56 @@ pub fn publish(ws: &Workspace, opts: &PublishOpts) -> CargoResult<()> {
         bail!("published crates cannot contain [patch] sections");
     }

-    let (mut registry, reg_id) = registry(opts.config,
-                                          opts.token.clone(),
-                                          opts.index.clone(),
-                                          opts.registry.clone())?;
+    let (mut registry, reg_id) = registry(
+        opts.config,
+        opts.token.clone(),
+        opts.index.clone(),
+        opts.registry.clone(),
+    )?;
     verify_dependencies(pkg, &reg_id)?;

     // Prepare a tarball, with a non-surpressable warning if metadata
     // is missing since this is being put online.
-    let tarball = ops::package(ws, &ops::PackageOpts {
-        config: opts.config,
-        verify: opts.verify,
-        list: false,
-        check_metadata: true,
-        allow_dirty: opts.allow_dirty,
-        target: opts.target.clone(),
-        jobs: opts.jobs,
-        registry: opts.registry.clone(),
-    })?.unwrap();
+    let tarball = ops::package(
+        ws,
+        &ops::PackageOpts {
+            config: opts.config,
+            verify: opts.verify,
+            list: false,
+            check_metadata: true,
+            allow_dirty: opts.allow_dirty,
+            target: opts.target.clone(),
+            jobs: opts.jobs,
+            registry: opts.registry.clone(),
+        },
+    )?.unwrap();

     // Upload said tarball to the specified destination
-    opts.config.shell().status("Uploading", pkg.package_id().to_string())?;
-    transmit(opts.config, pkg, tarball.file(), &mut registry, &reg_id, opts.dry_run)?;
+    opts.config
+        .shell()
+        .status("Uploading", pkg.package_id().to_string())?;
+    transmit(
+        opts.config,
+        pkg,
+        tarball.file(),
+        &mut registry,
+        &reg_id,
+        opts.dry_run,
+    )?;

     Ok(())
 }
-fn verify_dependencies(pkg: &Package, registry_src: &SourceId)
-                       -> CargoResult<()> {
+fn verify_dependencies(pkg: &Package, registry_src: &SourceId) -> CargoResult<()> {
     for dep in pkg.dependencies().iter() {
         if dep.source_id().is_path() {
             if !dep.specified_req() {
-                bail!("all path dependencies must have a version specified \
-                       when publishing.\ndependency `{}` does not specify \
-                       a version", dep.name())
+                bail!(
+                    "all path dependencies must have a version specified \
+                     when publishing.\ndependency `{}` does not specify \
+                     a version",
+                    dep.name()
+                )
            }
         } else if dep.source_id() != registry_src {
@@ -108,58 +127,75 @@ fn verify_dependencies(pkg: &Package, registry_src: &SourceId)
                        (crate `{}` is pulled from {})", dep.name(), dep.name(), dep.source_id());
                 }
             } else {
-                bail!("crates cannot be published to crates.io with dependencies sourced from \
-                       a repository\neither publish `{}` as its own crate on crates.io and \
-                       specify a crates.io version as a dependency or pull it into this \
-                       repository and specify it with a path and version\n(crate `{}` has \
-                       repository path `{}`)", dep.name(), dep.name(), dep.source_id());
+                bail!(
+                    "crates cannot be published to crates.io with dependencies sourced from \
+                     a repository\neither publish `{}` as its own crate on crates.io and \
+                     specify a crates.io version as a dependency or pull it into this \
+                     repository and specify it with a path and version\n(crate `{}` has \
+                     repository path `{}`)",
+                    dep.name(),
+                    dep.name(),
+                    dep.source_id()
+                );
             }
         }
     }
     Ok(())
 }
-fn transmit(config: &Config,
-            pkg: &Package,
-            tarball: &File,
-            registry: &mut Registry,
-            registry_id: &SourceId,
-            dry_run: bool) -> CargoResult<()> {
-    let deps = pkg.dependencies().iter().map(|dep| {
-
-        // If the dependency is from a different registry, then include the
-        // registry in the dependency.
-        let dep_registry_id = match dep.registry_id() {
-            Some(id) => id,
-            None => bail!("dependency missing registry ID"),
-        };
-        let dep_registry = if dep_registry_id != registry_id {
-            Some(dep_registry_id.url().to_string())
-        } else {
-            None
-        };
-
-        Ok(NewCrateDependency {
-            optional: dep.is_optional(),
-            default_features: dep.uses_default_features(),
-            name: dep.name().to_string(),
-            features: dep.features().to_vec(),
-            version_req: dep.version_req().to_string(),
-            target: dep.platform().map(|s| s.to_string()),
-            kind: match dep.kind() {
-                Kind::Normal => "normal",
-                Kind::Build => "build",
-                Kind::Development => "dev",
-            }.to_string(),
-            registry: dep_registry,
-        })
-    }).collect::<CargoResult<Vec<NewCrateDependency>>>()?;
+fn transmit(
+    config: &Config,
+    pkg: &Package,
+    tarball: &File,
+    registry: &mut Registry,
+    registry_id: &SourceId,
+    dry_run: bool,
+) -> CargoResult<()> {
+    let deps = pkg.dependencies()
+        .iter()
+        .map(|dep| {
+            // If the dependency is from a different registry, then include the
+            // registry in the dependency.
+            let dep_registry_id = match dep.registry_id() {
+                Some(id) => id,
+                None => bail!("dependency missing registry ID"),
+            };
+            let dep_registry = if dep_registry_id != registry_id {
+                Some(dep_registry_id.url().to_string())
+            } else {
+                None
+            };
+
+            Ok(NewCrateDependency {
+                optional: dep.is_optional(),
+                default_features: dep.uses_default_features(),
+                name: dep.name().to_string(),
+                features: dep.features().to_vec(),
+                version_req: dep.version_req().to_string(),
+                target: dep.platform().map(|s| s.to_string()),
+                kind: match dep.kind() {
+                    Kind::Normal => "normal",
+                    Kind::Build => "build",
+                    Kind::Development => "dev",
+                }.to_string(),
+                registry: dep_registry,
+            })
+        })
+        .collect::<CargoResult<Vec<NewCrateDependency>>>()?;
     let manifest = pkg.manifest();
     let ManifestMetadata {
-        ref authors, ref description, ref homepage, ref documentation,
-        ref keywords, ref readme, ref repository, ref license, ref license_file,
-        ref categories, ref badges, ref links,
+        ref authors,
+        ref description,
+        ref homepage,
+        ref documentation,
+        ref keywords,
+        ref readme,
+        ref repository,
+        ref license,
+        ref license_file,
+        ref categories,
+        ref badges,
+        ref links,
     } = *manifest.metadata();
     let readme_content = match *readme {
         Some(ref readme) => Some(paths::read(&pkg.root().join(readme))?),
@@ -177,79 +213,91 @@ fn transmit(config: &Config,
         return Ok(());
     }

-    let publish = registry.publish(&NewCrate {
-        name: pkg.name().to_string(),
-        vers: pkg.version().to_string(),
-        deps,
-        features: pkg.summary().features().clone(),
-        authors: authors.clone(),
-        description: description.clone(),
-        homepage: homepage.clone(),
-        documentation: documentation.clone(),
-        keywords: keywords.clone(),
-        categories: categories.clone(),
-        readme: readme_content,
-        readme_file: readme.clone(),
-        repository: repository.clone(),
-        license: license.clone(),
-        license_file: license_file.clone(),
-        badges: badges.clone(),
-        links: links.clone(),
-    }, tarball);
+    let publish = registry.publish(
+        &NewCrate {
+            name: pkg.name().to_string(),
+            vers: pkg.version().to_string(),
+            deps,
+            features: pkg.summary().features().clone(),
+            authors: authors.clone(),
+            description: description.clone(),
+            homepage: homepage.clone(),
+            documentation: documentation.clone(),
+            keywords: keywords.clone(),
+            categories: categories.clone(),
+            readme: readme_content,
+            readme_file: readme.clone(),
+            repository: repository.clone(),
+            license: license.clone(),
+            license_file: license_file.clone(),
+            badges: badges.clone(),
+            links: links.clone(),
+        },
+        tarball,
+    );

     match publish {
         Ok(warnings) => {
             if !warnings.invalid_categories.is_empty() {
-                let msg = format!("\
-                    the following are not valid category slugs and were \
-                    ignored: {}. Please see https://crates.io/category_slugs \
-                    for the list of all category slugs. \
-                    ", warnings.invalid_categories.join(", "));
+                let msg = format!(
+                    "\
+                     the following are not valid category slugs and were \
+                     ignored: {}. Please see https://crates.io/category_slugs \
+                     for the list of all category slugs. \
+                     ",
+                    warnings.invalid_categories.join(", ")
+                );
                 config.shell().warn(&msg)?;
             }

             if !warnings.invalid_badges.is_empty() {
-                let msg = format!("\
-                    the following are not valid badges and were ignored: {}. \
-                    Either the badge type specified is unknown or a required \
-                    attribute is missing. Please see \
-                    http://doc.crates.io/manifest.html#package-metadata \
-                    for valid badge types and their required attributes.",
-                    warnings.invalid_badges.join(", "));
+                let msg = format!(
+                    "\
+                     the following are not valid badges and were ignored: {}. \
+                     Either the badge type specified is unknown or a required \
+                     attribute is missing. Please see \
+                     http://doc.crates.io/manifest.html#package-metadata \
+                     for valid badge types and their required attributes.",
+                    warnings.invalid_badges.join(", ")
+                );
                 config.shell().warn(&msg)?;
             }

             Ok(())
-        },
+        }
         Err(e) => Err(e),
     }
 }
-pub fn registry_configuration(config: &Config,
-                              registry: Option<String>) -> CargoResult<RegistryConfig> {
+pub fn registry_configuration(
+    config: &Config,
+    registry: Option<String>,
+) -> CargoResult<RegistryConfig> {
     let (index, token) = match registry {
-        Some(registry) => {
-            (Some(config.get_registry_index(&registry)?.to_string()),
-             config.get_string(&format!("registries.{}.token", registry))?.map(|p| p.val))
-        }
+        Some(registry) => (
+            Some(config.get_registry_index(&registry)?.to_string()),
+            config
+                .get_string(&format!("registries.{}.token", registry))?
+                .map(|p| p.val),
+        ),
         None => {
             // Checking out for default index and token
-            (config.get_string("registry.index")?.map(|p| p.val),
-             config.get_string("registry.token")?.map(|p| p.val))
+            (
+                config.get_string("registry.index")?.map(|p| p.val),
+                config.get_string("registry.token")?.map(|p| p.val),
+            )
         }
     };

-    Ok(RegistryConfig {
-        index,
-        token
-    })
+    Ok(RegistryConfig { index, token })
 }
-pub fn registry(config: &Config,
-                token: Option<String>,
-                index: Option<String>,
-                registry: Option<String>) -> CargoResult<(Registry, SourceId)> {
+pub fn registry(
+    config: &Config,
+    token: Option<String>,
+    index: Option<String>,
+    registry: Option<String>,
+) -> CargoResult<(Registry, SourceId)> {
     // Parse all configuration options
     let RegistryConfig {
         token: token_config,
@@ -263,9 +311,8 @@ pub fn registry(config: &Config,
     };
     let api_host = {
         let mut src = RegistrySource::remote(&sid, config);
-        src.update().chain_err(|| {
-            format!("failed to update {}", sid)
-        })?;
+        src.update()
+            .chain_err(|| format!("failed to update {}", sid))?;
         (src.config()?).unwrap().api.unwrap()
     };
     let handle = http_handle(config)?;
@@ -275,8 +322,10 @@ pub fn registry(config: &Config,
 /// Create a new HTTP handle with appropriate global configuration for cargo.
 pub fn http_handle(config: &Config) -> CargoResult<Easy> {
     if config.frozen() {
-        bail!("attempting to make an HTTP request, but --frozen was \
-               specified")
+        bail!(
+            "attempting to make an HTTP request, but --frozen was \
+             specified"
+        )
     }
     if !config.network_allowed() {
         bail!("can't make HTTP request in the offline mode")
@@ -332,11 +381,11 @@ pub fn configure_http_handle(config: &Config, handle: &mut Easy) -> CargoResult<
 /// via environment variables are picked up by libcurl.
 fn http_proxy(config: &Config) -> CargoResult<Option<String>> {
     if let Some(s) = config.get_string("http.proxy")? {
-        return Ok(Some(s.val))
+        return Ok(Some(s.val));
     }
     if let Ok(cfg) = git2::Config::open_default() {
         if let Ok(s) = cfg.get_str("http.proxy") {
-            return Ok(Some(s.to_string()))
+            return Ok(Some(s.to_string()));
         }
     }
     Ok(None)
@@ -356,24 +405,22 @@ fn http_proxy_exists(config: &Config) -> CargoResult<bool> {
     if http_proxy(config)?.is_some() {
         Ok(true)
     } else {
-        Ok(["http_proxy", "HTTP_PROXY",
-            "https_proxy", "HTTPS_PROXY"].iter().any(|v| env::var(v).is_ok()))
+        Ok(["http_proxy", "HTTP_PROXY", "https_proxy", "HTTPS_PROXY"]
+            .iter()
+            .any(|v| env::var(v).is_ok()))
     }
 }

 fn http_timeout(config: &Config) -> CargoResult<Option<i64>> {
     if let Some(s) = config.get_i64("http.timeout")? {
-        return Ok(Some(s.val))
+        return Ok(Some(s.val));
     }
     Ok(env::var("HTTP_TIMEOUT").ok().and_then(|s| s.parse().ok()))
 }
-pub fn registry_login(config: &Config,
-                      token: String,
-                      registry: Option<String>) -> CargoResult<()> {
+pub fn registry_login(config: &Config, token: String, registry: Option<String>) -> CargoResult<()> {
     let RegistryConfig {
-        token: old_token,
-        ..
+        token: old_token, ..
     } = registry_configuration(config, registry.clone())?;

     if let Some(old_token) = old_token {
@@ -405,39 +452,41 @@ pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> {
         }
     };

-    let (mut registry, _) = registry(config,
-                                     opts.token.clone(),
-                                     opts.index.clone(),
-                                     opts.registry.clone())?;
+    let (mut registry, _) = registry(
+        config,
+        opts.token.clone(),
+        opts.index.clone(),
+        opts.registry.clone(),
+    )?;

     if let Some(ref v) = opts.to_add {
         let v = v.iter().map(|s| &s[..]).collect::<Vec<_>>();
-        let msg = registry.add_owners(&name, &v).map_err(|e| {
-            format_err!("failed to invite owners to crate {}: {}", name, e)
-        })?;
+        let msg = registry
+            .add_owners(&name, &v)
+            .map_err(|e| format_err!("failed to invite owners to crate {}: {}", name, e))?;
         config.shell().status("Owner", msg)?;
     }

     if let Some(ref v) = opts.to_remove {
         let v = v.iter().map(|s| &s[..]).collect::<Vec<_>>();
-        config.shell().status("Owner", format!("removing {:?} from crate {}",
-                                               v, name))?;
-        registry.remove_owners(&name, &v).chain_err(|| {
-            format!("failed to remove owners from crate {}", name)
-        })?;
+        config
+            .shell()
+            .status("Owner", format!("removing {:?} from crate {}", v, name))?;
+        registry
+            .remove_owners(&name, &v)
+            .chain_err(|| format!("failed to remove owners from crate {}", name))?;
     }

     if opts.list {
-        let owners = registry.list_owners(&name).chain_err(|| {
-            format!("failed to list owners of crate {}", name)
-        })?;
+        let owners = registry
+            .list_owners(&name)
+            .chain_err(|| format!("failed to list owners of crate {}", name))?;
         for owner in owners.iter() {
             print!("{}", owner.login);
             match (owner.name.as_ref(), owner.email.as_ref()) {
                 (Some(name), Some(email)) => println!(" ({} <{}>)", name, email),
-                (Some(s), None) |
-                (None, Some(s)) => println!(" ({})", s),
+                (Some(s), None) | (None, Some(s)) => println!(" ({})", s),
                 (None, None) => println!(),
             }
         }
@@ -446,13 +495,15 @@ pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> {
     Ok(())
 }

-pub fn yank(config: &Config,
-            krate: Option<String>,
-            version: Option<String>,
-            token: Option<String>,
-            index: Option<String>,
-            undo: bool,
-            reg: Option<String>) -> CargoResult<()> {
+pub fn yank(
+    config: &Config,
+    krate: Option<String>,
+    version: Option<String>,
+    token: Option<String>,
+    index: Option<String>,
+    undo: bool,
+    reg: Option<String>,
+) -> CargoResult<()> {
     let name = match krate {
         Some(name) => name,
         None => {
@@ -463,31 +514,37 @@ pub fn yank(config: &Config,
     };
     let version = match version {
         Some(v) => v,
-        None => bail!("a version must be specified to yank")
+        None => bail!("a version must be specified to yank"),
     };

     let (mut registry, _) = registry(config, token, index, reg)?;
     if undo {
-        config.shell().status("Unyank", format!("{}:{}", name, version))?;
-        registry.unyank(&name, &version).chain_err(|| {
-            "failed to undo a yank"
-        })?;
+        config
+            .shell()
+            .status("Unyank", format!("{}:{}", name, version))?;
+        registry
+            .unyank(&name, &version)
+            .chain_err(|| "failed to undo a yank")?;
     } else {
-        config.shell().status("Yank", format!("{}:{}", name, version))?;
-        registry.yank(&name, &version).chain_err(|| {
-            "failed to yank"
-        })?;
+        config
+            .shell()
+            .status("Yank", format!("{}:{}", name, version))?;
+        registry
+            .yank(&name, &version)
+            .chain_err(|| "failed to yank")?;
     }

     Ok(())
 }
-pub fn search(query: &str,
-              config: &Config,
-              index: Option<String>,
-              limit: u32,
-              reg: Option<String>) -> CargoResult<()> {
+pub fn search(
+    query: &str,
+    config: &Config,
+    index: Option<String>,
+    limit: u32,
+    reg: Option<String>,
+) -> CargoResult<()> {
     fn truncate_with_ellipsis(s: &str, max_width: usize) -> String {
         // We should truncate at grapheme-boundary and compute character-widths,
         // yet the dependencies on unicode-segmentation and unicode-width are
@@ -501,46 +558,51 @@ pub fn search(query: &str,
     }

     let (mut registry, _) = registry(config, None, index, reg)?;
-    let (crates, total_crates) = registry.search(query, limit).chain_err(|| {
-        "failed to retrieve search results from the registry"
-    })?;
+    let (crates, total_crates) = registry
+        .search(query, limit)
+        .chain_err(|| "failed to retrieve search results from the registry")?;

-    let names = crates.iter()
+    let names = crates
+        .iter()
         .map(|krate| format!("{} = \"{}\"", krate.name, krate.max_version))
         .collect::<Vec<String>>();

-    let description_margin = names.iter()
-        .map(|s| s.len() + 4)
-        .max()
-        .unwrap_or_default();
+    let description_margin = names.iter().map(|s| s.len() + 4).max().unwrap_or_default();

     let description_length = cmp::max(80, 128 - description_margin);

-    let descriptions = crates.iter()
-        .map(|krate|
-            krate.description.as_ref().map(|desc|
-                truncate_with_ellipsis(&desc.replace("\n", " "), description_length)));
+    let descriptions = crates.iter().map(|krate| {
+        krate
+            .description
+            .as_ref()
+            .map(|desc| truncate_with_ellipsis(&desc.replace("\n", " "), description_length))
+    });
     for (name, description) in names.into_iter().zip(descriptions) {
         let line = match description {
             Some(desc) => {
-                let space = repeat(' ').take(description_margin - name.len())
-                    .collect::<String>();
+                let space = repeat(' ')
+                    .take(description_margin - name.len())
+                    .collect::<String>();
                 name + &space + "# " + &desc
             }
-            None => name
+            None => name,
         };
         println!("{}", line);
     }

     let search_max_limit = 100;
     if total_crates > u32::from(limit) && limit < search_max_limit {
-        println!("... and {} crates more (use --limit N to see more)",
-                 total_crates - u32::from(limit));
+        println!(
+            "... and {} crates more (use --limit N to see more)",
+            total_crates - u32::from(limit)
+        );
     } else if total_crates > u32::from(limit) && limit >= search_max_limit {
-        println!("... and {} crates more (go to http://crates.io/search?q={} to see more)",
-                 total_crates - u32::from(limit),
-                 percent_encode(query.as_bytes(), QUERY_ENCODE_SET));
+        println!(
+            "... and {} crates more (go to http://crates.io/search?q={} to see more)",
+            total_crates - u32::from(limit),
+            percent_encode(query.as_bytes(), QUERY_ENCODE_SET)
+        );
     }

     Ok(())
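
The body of truncate_with_ellipsis is elided above; a naive char-based sketch
of what such a helper might do (an assumption — as its comment notes, a
proper version would be grapheme- and width-aware):

fn truncate_with_ellipsis(s: &str, max_width: usize) -> String {
    if s.chars().count() <= max_width {
        s.to_string()
    } else {
        let cut: String = s.chars().take(max_width.saturating_sub(3)).collect();
        format!("{}...", cut)
    }
}

fn main() {
    assert_eq!(truncate_with_ellipsis("short", 10), "short");
    assert_eq!(truncate_with_ellipsis("a rather long description", 10), "a rathe...");
}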
@@ -2,7 +2,7 @@ use std::collections::HashSet;

 use core::{PackageId, PackageIdSpec, PackageSet, Source, SourceId, Workspace};
 use core::registry::PackageRegistry;
-use core::resolver::{self, Resolve, Method};
+use core::resolver::{self, Method, Resolve};
 use sources::PathSource;
 use ops;
 use util::profile;
@@ -22,13 +22,14 @@ pub fn resolve_ws<'a>(ws: &Workspace<'a>) -> CargoResult<(PackageSet<'a>, Resolv

 /// Resolves dependencies for some packages of the workspace,
 /// taking into account `paths` overrides and activated features.
-pub fn resolve_ws_precisely<'a>(ws: &Workspace<'a>,
-                                source: Option<Box<Source + 'a>>,
-                                features: &[String],
-                                all_features: bool,
-                                no_default_features: bool,
-                                specs: &[PackageIdSpec])
-                                -> CargoResult<(PackageSet<'a>, Resolve)> {
+pub fn resolve_ws_precisely<'a>(
+    ws: &Workspace<'a>,
+    source: Option<Box<Source + 'a>>,
+    features: &[String],
+    all_features: bool,
+    no_default_features: bool,
+    specs: &[PackageIdSpec],
+) -> CargoResult<(PackageSet<'a>, Resolve)> {
     let features = Method::split_features(features);
     let method = if all_features {
         Method::Everything
@@ -43,11 +44,12 @@ pub fn resolve_ws_precisely<'a>(ws: &Workspace<'a>,
     resolve_ws_with_method(ws, source, method, specs)
 }

-pub fn resolve_ws_with_method<'a>(ws: &Workspace<'a>,
-                                  source: Option<Box<Source + 'a>>,
-                                  method: Method,
-                                  specs: &[PackageIdSpec])
-                                  -> CargoResult<(PackageSet<'a>, Resolve)> {
+pub fn resolve_ws_with_method<'a>(
+    ws: &Workspace<'a>,
+    source: Option<Box<Source + 'a>>,
+    method: Method,
+    specs: &[PackageIdSpec],
+) -> CargoResult<(PackageSet<'a>, Resolve)> {
     let mut registry = PackageRegistry::new(ws.config())?;
     if let Some(source) = source {
         registry.add_preloaded(source);
@@ -68,10 +70,13 @@ pub fn resolve_ws_with_method<'a>(ws: &Workspace<'a>,
     add_overrides(&mut registry, ws)?;

     for &(ref replace_spec, ref dep) in ws.root_replace() {
-        if !resolve.iter().any(|r| replace_spec.matches(r) && !dep.matches_id(r)) {
-            ws.config().shell().warn(
-                format!("package replacement is not used: {}", replace_spec)
-            )?
+        if !resolve
+            .iter()
+            .any(|r| replace_spec.matches(r) && !dep.matches_id(r))
+        {
+            ws.config()
+                .shell()
+                .warn(format!("package replacement is not used: {}", replace_spec))?
         }
     }
@@ -80,32 +85,38 @@ pub fn resolve_ws_with_method<'a>(ws: &Workspace<'a>,
         ops::load_pkg_lockfile(ws)?
     };

-    let resolved_with_overrides =
-        ops::resolve_with_previous(&mut registry,
-                                   ws,
-                                   method,
-                                   resolve.as_ref(),
-                                   None,
-                                   specs,
-                                   add_patches,
-                                   true)?;
+    let resolved_with_overrides = ops::resolve_with_previous(
+        &mut registry,
+        ws,
+        method,
+        resolve.as_ref(),
+        None,
+        specs,
+        add_patches,
+        true,
+    )?;

     let packages = get_resolved_packages(&resolved_with_overrides, registry);

     Ok((packages, resolved_with_overrides))
 }

-fn resolve_with_registry(ws: &Workspace, registry: &mut PackageRegistry, warn: bool)
-    -> CargoResult<Resolve> {
+fn resolve_with_registry(
+    ws: &Workspace,
+    registry: &mut PackageRegistry,
+    warn: bool,
+) -> CargoResult<Resolve> {
     let prev = ops::load_pkg_lockfile(ws)?;
-    let resolve = resolve_with_previous(registry,
-                                        ws,
-                                        Method::Everything,
-                                        prev.as_ref(),
-                                        None,
-                                        &[],
-                                        true,
-                                        warn)?;
+    let resolve = resolve_with_previous(
+        registry,
+        ws,
+        Method::Everything,
+        prev.as_ref(),
+        None,
+        &[],
+        true,
+        warn,
+    )?;

     if !ws.is_ephemeral() {
         ops::write_pkg_lockfile(ws, &resolve)?;
@@ -113,7 +124,6 @@ fn resolve_with_registry(ws: &Workspace, registry: &mut PackageRegistry, warn: b
     Ok(resolve)
 }

-
 /// Resolve all dependencies for a package using an optional previous instance
 /// of resolve to guide the resolution process.
 ///
@ -123,15 +133,16 @@ fn resolve_with_registry(ws: &Workspace, registry: &mut PackageRegistry, warn: b
/// ///
/// The previous resolve normally comes from a lockfile. This function does not /// The previous resolve normally comes from a lockfile. This function does not
/// read or write lockfiles from the filesystem. /// read or write lockfiles from the filesystem.
pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry, pub fn resolve_with_previous<'a>(
ws: &Workspace, registry: &mut PackageRegistry,
method: Method, ws: &Workspace,
previous: Option<&'a Resolve>, method: Method,
to_avoid: Option<&HashSet<&'a PackageId>>, previous: Option<&'a Resolve>,
specs: &[PackageIdSpec], to_avoid: Option<&HashSet<&'a PackageId>>,
register_patches: bool, specs: &[PackageIdSpec],
warn: bool) register_patches: bool,
-> CargoResult<Resolve> { warn: bool,
) -> CargoResult<Resolve> {
// Here we place an artificial limitation that all non-registry sources // Here we place an artificial limitation that all non-registry sources
// cannot be locked at more than one revision. This means that if a git // cannot be locked at more than one revision. This means that if a git
// repository provides more than one package, they must all be updated in // repository provides more than one package, they must all be updated in
@ -141,9 +152,12 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
// different // different
let mut to_avoid_sources = HashSet::new(); let mut to_avoid_sources = HashSet::new();
if let Some(to_avoid) = to_avoid { if let Some(to_avoid) = to_avoid {
to_avoid_sources.extend(to_avoid.iter() to_avoid_sources.extend(
.map(|p| p.source_id()) to_avoid
.filter(|s| !s.is_registry())); .iter()
.map(|p| p.source_id())
.filter(|s| !s.is_registry()),
);
} }
let ref keep = |p: &&'a PackageId| { let ref keep = |p: &&'a PackageId| {
@ -177,9 +191,7 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
if let Some(r) = previous { if let Some(r) = previous {
trace!("previous: {:?}", r); trace!("previous: {:?}", r);
for node in r.iter().filter(keep) { for node in r.iter().filter(keep) {
let deps = r.deps_not_replaced(node) let deps = r.deps_not_replaced(node).filter(keep).cloned().collect();
.filter(keep)
.cloned().collect();
registry.register_lock(node.clone(), deps); registry.register_lock(node.clone(), deps);
} }
} }
@ -190,21 +202,24 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
Some(r) => r, Some(r) => r,
None => { None => {
registry.patch(url, patches)?; registry.patch(url, patches)?;
continue continue;
} }
}; };
let patches = patches.iter().map(|dep| { let patches = patches
let unused = previous.unused_patches(); .iter()
let candidates = previous.iter().chain(unused); .map(|dep| {
match candidates.filter(keep).find(|id| dep.matches_id(id)) { let unused = previous.unused_patches();
Some(id) => { let candidates = previous.iter().chain(unused);
let mut dep = dep.clone(); match candidates.filter(keep).find(|id| dep.matches_id(id)) {
dep.lock_to(id); Some(id) => {
dep let mut dep = dep.clone();
dep.lock_to(id);
dep
}
None => dep.clone(),
} }
None => dep.clone(), })
} .collect::<Vec<_>>();
}).collect::<Vec<_>>();
registry.patch(url, &patches)?; registry.patch(url, &patches)?;
} }
@ -248,7 +263,7 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
if specs.iter().any(|spec| spec.matches(member_id)) { if specs.iter().any(|spec| spec.matches(member_id)) {
base base
} else { } else {
continue continue;
} }
} }
} }
@ -262,26 +277,23 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
let root_replace = ws.root_replace(); let root_replace = ws.root_replace();
let replace = match previous { let replace = match previous {
Some(r) => { Some(r) => root_replace
root_replace.iter().map(|&(ref spec, ref dep)| { .iter()
.map(|&(ref spec, ref dep)| {
for (key, val) in r.replacements().iter() { for (key, val) in r.replacements().iter() {
if spec.matches(key) && dep.matches_id(val) && keep(&val) { if spec.matches(key) && dep.matches_id(val) && keep(&val) {
let mut dep = dep.clone(); let mut dep = dep.clone();
dep.lock_to(val); dep.lock_to(val);
return (spec.clone(), dep) return (spec.clone(), dep);
} }
} }
(spec.clone(), dep.clone()) (spec.clone(), dep.clone())
}).collect::<Vec<_>>() })
} .collect::<Vec<_>>(),
None => root_replace.to_vec(), None => root_replace.to_vec(),
}; };
let mut resolved = resolver::resolve(&summaries, let mut resolved = resolver::resolve(&summaries, &replace, registry, Some(ws.config()), warn)?;
&replace,
registry,
Some(ws.config()),
warn)?;
resolved.register_used_patches(registry.patches()); resolved.register_used_patches(registry.patches());
if let Some(previous) = previous { if let Some(previous) = previous {
resolved.merge_from(previous)?; resolved.merge_from(previous)?;
@ -291,11 +303,10 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
/// Read the `paths` configuration variable to discover all path overrides that /// Read the `paths` configuration variable to discover all path overrides that
/// have been configured. /// have been configured.
fn add_overrides<'a>(registry: &mut PackageRegistry<'a>, fn add_overrides<'a>(registry: &mut PackageRegistry<'a>, ws: &Workspace<'a>) -> CargoResult<()> {
ws: &Workspace<'a>) -> CargoResult<()> {
let paths = match ws.config().get_list("paths")? { let paths = match ws.config().get_list("paths")? {
Some(list) => list, Some(list) => list,
None => return Ok(()) None => return Ok(()),
}; };
let paths = paths.val.iter().map(|&(ref s, ref p)| { let paths = paths.val.iter().map(|&(ref s, ref p)| {
@ -309,19 +320,19 @@ fn add_overrides<'a>(registry: &mut PackageRegistry<'a>,
let id = SourceId::for_path(&path)?; let id = SourceId::for_path(&path)?;
let mut source = PathSource::new_recursive(&path, &id, ws.config()); let mut source = PathSource::new_recursive(&path, &id, ws.config());
source.update().chain_err(|| { source.update().chain_err(|| {
format!("failed to update path override `{}` \ format!(
(defined in `{}`)", path.display(), "failed to update path override `{}` \
definition.display()) (defined in `{}`)",
path.display(),
definition.display()
)
})?; })?;
registry.add_override(Box::new(source)); registry.add_override(Box::new(source));
} }
Ok(()) Ok(())
} }
fn get_resolved_packages<'a>(resolve: &Resolve, fn get_resolved_packages<'a>(resolve: &Resolve, registry: PackageRegistry<'a>) -> PackageSet<'a> {
registry: PackageRegistry<'a>)
-> PackageSet<'a> {
let ids: Vec<PackageId> = resolve.iter().cloned().collect(); let ids: Vec<PackageId> = resolve.iter().cloned().collect();
registry.get(&ids) registry.get(&ids)
} }
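
The hunks above reshape `resolve_with_previous`, whose core idea is to re-register every previously locked package that the caller has not asked to avoid. A minimal, self-contained sketch of that filter, using plain strings in place of `PackageId` (an illustrative assumption, not cargo's actual types):

use std::collections::HashSet;

// Sketch of the lock-preservation filter: every ID from the previous
// resolve survives unless it appears in `to_avoid`, leaving only the
// avoided packages free to be re-resolved.
fn keep_locked<'a>(previous: &'a [&'a str], to_avoid: &HashSet<&'a str>) -> Vec<&'a str> {
    previous
        .iter()
        .filter(|id| !to_avoid.contains(*id))
        .cloned()
        .collect()
}

fn main() {
    let previous = ["serde 1.0.27", "log 0.4.1", "rand 0.4.2"];
    let to_avoid: HashSet<_> = ["rand 0.4.2"].iter().cloned().collect();
    // `serde` and `log` stay pinned; `rand` is left free to update.
    assert_eq!(keep_locked(&previous, &to_avoid), vec!["serde 1.0.27", "log 0.4.1"]);
}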
View file
@@ -9,7 +9,7 @@ use std::path::{Path, PathBuf};
 use url::Url;

-use core::{Source, SourceId, GitReference};
+use core::{GitReference, Source, SourceId};
 use sources::ReplacedSource;
 use util::{Config, ToUrl};
 use util::config::ConfigValue;
@@ -58,10 +58,13 @@ impl<'cfg> SourceConfigMap<'cfg> {
             id2name: HashMap::new(),
             config,
         };
-        base.add("crates-io", SourceConfig {
-            id: SourceId::crates_io(config)?,
-            replace_with: None,
-        });
+        base.add(
+            "crates-io",
+            SourceConfig {
+                id: SourceId::crates_io(config)?,
+                replace_with: None,
+            },
+        );
         Ok(base)
     }
@@ -81,10 +84,14 @@ impl<'cfg> SourceConfigMap<'cfg> {
         loop {
             let cfg = match self.cfgs.get(name) {
                 Some(cfg) => cfg,
-                None => bail!("could not find a configured source with the \
-                               name `{}` when attempting to lookup `{}` \
-                               (configuration in `{}`)",
-                              name, orig_name, path.display()),
+                None => bail!(
+                    "could not find a configured source with the \
+                     name `{}` when attempting to lookup `{}` \
+                     (configuration in `{}`)",
+                    name,
+                    orig_name,
+                    path.display()
+                ),
             };
             match cfg.replace_with {
                 Some((ref s, ref p)) => {
@@ -93,37 +100,47 @@ impl<'cfg> SourceConfigMap<'cfg> {
                 }
                 None if *id == cfg.id => return Ok(id.load(self.config)?),
                 None => {
-                    new_id = cfg.id.with_precise(id.precise()
-                                                 .map(|s| s.to_string()));
-                    break
+                    new_id = cfg.id.with_precise(id.precise().map(|s| s.to_string()));
+                    break;
                 }
             }
             debug!("following pointer to {}", name);
             if name == orig_name {
-                bail!("detected a cycle of `replace-with` sources, the source \
-                       `{}` is eventually replaced with itself \
-                       (configuration in `{}`)", name, path.display())
+                bail!(
+                    "detected a cycle of `replace-with` sources, the source \
+                     `{}` is eventually replaced with itself \
+                     (configuration in `{}`)",
+                    name,
+                    path.display()
+                )
             }
         }

         let new_src = new_id.load(self.config)?;
         let old_src = id.load(self.config)?;
         if !new_src.supports_checksums() && old_src.supports_checksums() {
-            bail!("\
+            bail!(
+                "\
 cannot replace `{orig}` with `{name}`, the source `{orig}` supports \
 checksums, but `{name}` does not

 a lock file compatible with `{orig}` cannot be generated in this situation
-", orig = orig_name, name = name);
+",
+                orig = orig_name,
+                name = name
+            );
         }

         if old_src.requires_precise() && id.precise().is_none() {
-            bail!("\
+            bail!(
+                "\
 the source {orig} requires a lock file to be present first before it can be
 used against vendored source code

 remove the source replacement configuration, generate a lock file, and then
 restore the source replacement configuration to continue the build
-", orig = orig_name);
+",
+                orig = orig_name
+            );
         }

         Ok(Box::new(ReplacedSource::new(id, &new_id, new_src)))
@@ -142,8 +159,7 @@ restore the source replacement configuration to continue the build
             srcs.push(SourceId::for_registry(&url)?);
         }
         if let Some(val) = table.get("local-registry") {
-            let (s, path) = val.string(&format!("source.{}.local-registry",
-                                                name))?;
+            let (s, path) = val.string(&format!("source.{}.local-registry", name))?;
             let mut path = path.to_path_buf();
             path.pop();
             path.pop();
@@ -151,8 +167,7 @@ restore the source replacement configuration to continue the build
             srcs.push(SourceId::for_local_registry(&path)?);
         }
         if let Some(val) = table.get("directory") {
-            let (s, path) = val.string(&format!("source.{}.directory",
-                                                name))?;
+            let (s, path) = val.string(&format!("source.{}.directory", name))?;
             let mut path = path.to_path_buf();
             path.pop();
             path.pop();
@@ -171,17 +186,13 @@ restore the source replacement configuration to continue the build
         };
         let reference = match try("branch")? {
             Some(b) => GitReference::Branch(b.0.to_string()),
-            None => {
-                match try("tag")? {
-                    Some(b) => GitReference::Tag(b.0.to_string()),
-                    None => {
-                        match try("rev")? {
-                            Some(b) => GitReference::Rev(b.0.to_string()),
-                            None => GitReference::Branch("master".to_string()),
-                        }
-                    }
-                }
-            }
+            None => match try("tag")? {
+                Some(b) => GitReference::Tag(b.0.to_string()),
+                None => match try("rev")? {
+                    Some(b) => GitReference::Rev(b.0.to_string()),
+                    None => GitReference::Branch("master".to_string()),
+                },
+            },
         };
         srcs.push(SourceId::for_git(&url, reference)?);
     }
@@ -191,9 +202,11 @@ restore the source replacement configuration to continue the build
     let mut srcs = srcs.into_iter();
     let src = srcs.next().ok_or_else(|| {
-        format_err!("no source URL specified for `source.{}`, need \
-                     either `registry` or `local-registry` defined",
-                    name)
+        format_err!(
+            "no source URL specified for `source.{}`, need \
+             either `registry` or `local-registry` defined",
+            name
+        )
     })?;
     if srcs.next().is_some() {
         bail!("more than one source URL specified for `source.{}`", name)
@@ -201,24 +214,29 @@ restore the source replacement configuration to continue the build
     let mut replace_with = None;
     if let Some(val) = table.get("replace-with") {
-        let (s, path) = val.string(&format!("source.{}.replace-with",
-                                            name))?;
+        let (s, path) = val.string(&format!("source.{}.replace-with", name))?;
         replace_with = Some((s.to_string(), path.to_path_buf()));
     }

-    self.add(name, SourceConfig {
-        id: src,
-        replace_with,
-    });
+    self.add(
+        name,
+        SourceConfig {
+            id: src,
+            replace_with,
+        },
+    );

     return Ok(());

 fn url(cfg: &ConfigValue, key: &str) -> CargoResult<Url> {
     let (url, path) = cfg.string(key)?;
     let url = url.to_url().chain_err(|| {
-        format!("configuration key `{}` specified an invalid \
-                 URL (in {})", key, path.display())
+        format!(
+            "configuration key `{}` specified an invalid \
+             URL (in {})",
+            key,
+            path.display()
+        )
     })?;
     Ok(url)
 }
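
The cycle check in the hunks above walks `replace-with` pointers from one configured source to the next and bails if the walk ever returns to its starting name. A standalone sketch of that walk, with a plain string map standing in for the parsed configuration (hypothetical types; the real code operates on `ConfigValue`s):

use std::collections::HashMap;

// Follow `replace-with` pointers until a source has no replacement,
// reporting an error if the chain loops back to where it started.
fn follow_replacements<'a>(
    cfgs: &HashMap<&'a str, Option<&'a str>>,
    orig_name: &'a str,
) -> Result<String, String> {
    let mut name = orig_name;
    loop {
        match cfgs.get(name) {
            None => return Err(format!("no configured source named `{}`", name)),
            Some(&None) => return Ok(name.to_string()),
            Some(&Some(next)) => name = next,
        }
        if name == orig_name {
            return Err(format!(
                "cycle of `replace-with` sources: `{}` is eventually replaced with itself",
                name
            ));
        }
    }
}

fn main() {
    let mut cfgs = HashMap::new();
    cfgs.insert("crates-io", Some("mirror"));
    cfgs.insert("mirror", None);
    assert_eq!(follow_replacements(&cfgs, "crates-io").unwrap(), "mirror");
}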
View file
@@ -8,7 +8,7 @@ use hex;
 use serde_json;

-use core::{Package, PackageId, Summary, SourceId, Source, Dependency, Registry};
+use core::{Dependency, Package, PackageId, Registry, Source, SourceId, Summary};
 use sources::PathSource;
 use util::{Config, Sha256};
 use util::errors::{CargoResult, CargoResultExt};
@@ -28,8 +28,7 @@ struct Checksum {
 }

 impl<'cfg> DirectorySource<'cfg> {
-    pub fn new(path: &Path, id: &SourceId, config: &'cfg Config)
-               -> DirectorySource<'cfg> {
+    pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) -> DirectorySource<'cfg> {
         DirectorySource {
             source_id: id.clone(),
             root: path.to_path_buf(),
@@ -46,9 +45,7 @@ impl<'cfg> Debug for DirectorySource<'cfg> {
 }

 impl<'cfg> Registry for DirectorySource<'cfg> {
-    fn query(&mut self,
-             dep: &Dependency,
-             f: &mut FnMut(Summary)) -> CargoResult<()> {
+    fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
         let packages = self.packages.values().map(|p| &p.0);
         let matches = packages.filter(|pkg| dep.matches(pkg.summary()));
         for summary in matches.map(|pkg| pkg.summary().clone()) {
@@ -74,8 +71,10 @@ impl<'cfg> Source for DirectorySource<'cfg> {
     fn update(&mut self) -> CargoResult<()> {
         self.packages.clear();
         let entries = self.root.read_dir().chain_err(|| {
-            format!("failed to read root of directory source: {}",
-                    self.root.display())
+            format!(
+                "failed to read root of directory source: {}",
+                self.root.display()
+            )
         })?;

         for entry in entries {
@@ -87,7 +86,7 @@ impl<'cfg> Source for DirectorySource<'cfg> {
             // (rust-lang/cargo#3414).
             if let Some(s) = path.file_name().and_then(|s| s.to_str()) {
                 if s.starts_with('.') {
-                    continue
+                    continue;
                 }
             }
@@ -107,7 +106,7 @@ impl<'cfg> Source for DirectorySource<'cfg> {
             // downside of accidentally misconfigured vendor directories
             // silently returning less crates.
             if !path.join("Cargo.toml").exists() {
-                continue
+                continue;
             }

             let mut src = PathSource::new(&path, &self.source_id, self.config);
@@ -116,17 +115,20 @@ impl<'cfg> Source for DirectorySource<'cfg> {
             let cksum_file = path.join(".cargo-checksum.json");
             let cksum = paths::read(&path.join(cksum_file)).chain_err(|| {
-                format!("failed to load checksum `.cargo-checksum.json` \
-                         of {} v{}",
-                        pkg.package_id().name(),
-                        pkg.package_id().version())
+                format!(
+                    "failed to load checksum `.cargo-checksum.json` \
+                     of {} v{}",
+                    pkg.package_id().name(),
+                    pkg.package_id().version()
+                )
             })?;

             let cksum: Checksum = serde_json::from_str(&cksum).chain_err(|| {
-                format!("failed to decode `.cargo-checksum.json` of \
-                         {} v{}",
-                        pkg.package_id().name(),
-                        pkg.package_id().version())
+                format!(
+                    "failed to decode `.cargo-checksum.json` of \
+                     {} v{}",
+                    pkg.package_id().name(),
+                    pkg.package_id().version()
+                )
             })?;

             let mut manifest = pkg.manifest().clone();
@@ -143,9 +145,11 @@ impl<'cfg> Source for DirectorySource<'cfg> {
     }

     fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
-        self.packages.get(id).map(|p| &p.0).cloned().ok_or_else(|| {
-            format_err!("failed to find package with id: {}", id)
-        })
+        self.packages
+            .get(id)
+            .map(|p| &p.0)
+            .cloned()
+            .ok_or_else(|| format_err!("failed to find package with id: {}", id))
     }

     fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
@@ -155,8 +159,7 @@ impl<'cfg> Source for DirectorySource<'cfg> {
     fn verify(&self, id: &PackageId) -> CargoResult<()> {
         let (pkg, cksum) = match self.packages.get(id) {
             Some(&(ref pkg, ref cksum)) => (pkg, cksum),
-            None => bail!("failed to find entry for `{}` in directory source",
-                          id),
+            None => bail!("failed to find entry for `{}` in directory source", id),
         };

         let mut buf = [0; 16 * 1024];
@@ -172,23 +175,26 @@ impl<'cfg> Source for DirectorySource<'cfg> {
                     n => h.update(&buf[..n]),
                 }
             }
-        })().chain_err(|| {
-            format!("failed to calculate checksum of: {}",
-                    file.display())
-        })?;
+        })()
+            .chain_err(|| format!("failed to calculate checksum of: {}", file.display()))?;

         let actual = hex::encode(h.finish());
         if &*actual != cksum {
-            bail!("\
-                the listed checksum of `{}` has changed:\n\
-                expected: {}\n\
-                actual: {}\n\
-                \n\
-                directory sources are not intended to be edited, if \
-                modifications are required then it is recommended \
-                that [replace] is used with a forked copy of the \
-                source\
-            ", file.display(), cksum, actual);
+            bail!(
+                "\
+                 the listed checksum of `{}` has changed:\n\
+                 expected: {}\n\
+                 actual: {}\n\
+                 \n\
+                 directory sources are not intended to be edited, if \
+                 modifications are required then it is recommended \
+                 that [replace] is used with a forked copy of the \
+                 source\
+                 ",
+                file.display(),
+                cksum,
+                actual
+            );
         }
     }
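
The `verify` hunks above hash each vendored file in 16 KB chunks and compare the result against the digest recorded in `.cargo-checksum.json`. A sketch of the same loop, assuming the `sha2` and `hex` crates in place of cargo's internal `util::Sha256` (those crate choices are an assumption for the sketch):

use sha2::{Digest, Sha256};
use std::fs::File;
use std::io::Read;

// Hash a file in 16 KB chunks and compare against the recorded hex digest.
fn verify_file(path: &std::path::Path, expected_hex: &str) -> Result<(), String> {
    let mut file = File::open(path).map_err(|e| e.to_string())?;
    let mut hasher = Sha256::new();
    let mut buf = [0; 16 * 1024];
    loop {
        match file.read(&mut buf).map_err(|e| e.to_string())? {
            0 => break,
            n => hasher.update(&buf[..n]),
        }
    }
    let actual = hex::encode(hasher.finalize());
    if actual != expected_hex {
        return Err(format!(
            "checksum of `{}` changed: expected {}, got {}",
            path.display(),
            expected_hex,
            actual
        ));
    }
    Ok(())
}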
View file
@@ -1,4 +1,4 @@
-pub use self::utils::{GitRemote, GitDatabase, GitCheckout, GitRevision, fetch};
-pub use self::source::{GitSource, canonicalize_url};
+pub use self::utils::{fetch, GitCheckout, GitDatabase, GitRemote, GitRevision};
+pub use self::source::{canonicalize_url, GitSource};
 mod utils;
 mod source;
View file
@@ -4,7 +4,7 @@ use url::Url;

 use core::source::{Source, SourceId};
 use core::GitReference;
-use core::{Package, PackageId, Summary, Registry, Dependency};
+use core::{Dependency, Package, PackageId, Registry, Summary};
 use util::Config;
 use util::errors::CargoResult;
 use util::hex::short_hash;
@@ -24,8 +24,7 @@ pub struct GitSource<'cfg> {
 }

 impl<'cfg> GitSource<'cfg> {
-    pub fn new(source_id: &SourceId,
-               config: &'cfg Config) -> CargoResult<GitSource<'cfg>> {
+    pub fn new(source_id: &SourceId, config: &'cfg Config) -> CargoResult<GitSource<'cfg>> {
         assert!(source_id.is_git(), "id is not git, id={}", source_id);

         let remote = GitRemote::new(source_id.url());
@@ -49,7 +48,9 @@ impl<'cfg> GitSource<'cfg> {
         Ok(source)
     }

-    pub fn url(&self) -> &Url { self.remote.url() }
+    pub fn url(&self) -> &Url {
+        self.remote.url()
+    }

     pub fn read_packages(&mut self) -> CargoResult<Vec<Package>> {
         if self.path_source.is_none() {
@@ -61,13 +62,11 @@ impl<'cfg> GitSource<'cfg> {

 fn ident(url: &Url) -> CargoResult<String> {
     let url = canonicalize_url(url)?;
-    let ident = url.path_segments().and_then(|mut s| s.next_back()).unwrap_or("");
+    let ident = url.path_segments()
+        .and_then(|mut s| s.next_back())
+        .unwrap_or("");

-    let ident = if ident == "" {
-        "_empty"
-    } else {
-        ident
-    };
+    let ident = if ident == "" { "_empty" } else { ident };

     Ok(format!("{}-{}", ident, short_hash(&url)))
 }
@@ -79,7 +78,10 @@ pub fn canonicalize_url(url: &Url) -> CargoResult<Url> {
     // cannot-be-a-base-urls are not supported
     // eg. github.com:rust-lang-nursery/rustfmt.git
     if url.cannot_be_a_base() {
-        bail!("invalid url `{}`: cannot-be-a-base-URLs are not supported", url)
+        bail!(
+            "invalid url `{}`: cannot-be-a-base-URLs are not supported",
+            url
+        )
     }

     // Strip a trailing slash
@@ -117,17 +119,16 @@ impl<'cfg> Debug for GitSource<'cfg> {

         match self.reference.pretty_ref() {
             Some(s) => write!(f, " ({})", s),
-            None => Ok(())
+            None => Ok(()),
         }
     }
 }

 impl<'cfg> Registry for GitSource<'cfg> {
-    fn query(&mut self,
-             dep: &Dependency,
-             f: &mut FnMut(Summary)) -> CargoResult<()> {
-        let src = self.path_source.as_mut()
-            .expect("BUG: update() must be called before query()");
+    fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
+        let src = self.path_source
+            .as_mut()
+            .expect("BUG: update() must be called before query()");
         src.query(dep, f)
     }
@@ -146,14 +147,18 @@ impl<'cfg> Source for GitSource<'cfg> {
     }

     fn update(&mut self) -> CargoResult<()> {
-        let lock = self.config.git_path()
-            .open_rw(".cargo-lock-git", self.config, "the git checkouts")?;
+        let lock =
+            self.config
+                .git_path()
+                .open_rw(".cargo-lock-git", self.config, "the git checkouts")?;

         let db_path = lock.parent().join("db").join(&self.ident);

         if self.config.cli_unstable().offline && !db_path.exists() {
-            bail!("can't checkout from '{}': you are in the offline mode (-Z offline)",
-                  self.remote.url());
+            bail!(
+                "can't checkout from '{}': you are in the offline mode (-Z offline)",
+                self.remote.url()
+            );
         }

         // Resolve our reference to an actual revision, and check if the
@@ -161,16 +166,18 @@ impl<'cfg> Source for GitSource<'cfg> {
         // database pinned at that revision, and if we don't we issue an update
         // to try to find the revision.
         let actual_rev = self.remote.rev_for(&db_path, &self.reference);
-        let should_update = actual_rev.is_err() ||
-            self.source_id.precise().is_none();
+        let should_update = actual_rev.is_err() || self.source_id.precise().is_none();

         let (db, actual_rev) = if should_update && !self.config.cli_unstable().offline {
-            self.config.shell().status("Updating",
-                format!("git repository `{}`", self.remote.url()))?;
+            self.config.shell().status(
+                "Updating",
+                format!("git repository `{}`", self.remote.url()),
+            )?;

             trace!("updating git source `{:?}`", self.remote);

-            self.remote.checkout(&db_path, &self.reference, self.config)?
+            self.remote
+                .checkout(&db_path, &self.reference, self.config)?
         } else {
             (self.remote.db_at(&db_path)?, actual_rev.unwrap())
         };
@@ -180,8 +187,10 @@ impl<'cfg> Source for GitSource<'cfg> {
         // https://github.com/servo/servo/pull/14397
         let short_id = db.to_short_id(actual_rev.clone()).unwrap();

-        let checkout_path = lock.parent().join("checkouts")
-            .join(&self.ident).join(short_id.as_str());
+        let checkout_path = lock.parent()
+            .join("checkouts")
+            .join(&self.ident)
+            .join(short_id.as_str());

         // Copy the database to the checkout location. After this we could drop
         // the lock on the database as we no longer needed it, but we leave it
@@ -191,9 +200,7 @@ impl<'cfg> Source for GitSource<'cfg> {
         db.copy_to(actual_rev.clone(), &checkout_path, self.config)?;

         let source_id = self.source_id.with_precise(Some(actual_rev.to_string()));
-        let path_source = PathSource::new_recursive(&checkout_path,
-                                                    &source_id,
-                                                    self.config);
+        let path_source = PathSource::new_recursive(&checkout_path, &source_id, self.config);

         self.path_source = Some(path_source);
         self.rev = Some(actual_rev);
@@ -201,11 +208,15 @@ impl<'cfg> Source for GitSource<'cfg> {
     }

     fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
-        trace!("getting packages for package id `{}` from `{:?}`", id,
-               self.remote);
-        self.path_source.as_mut()
-            .expect("BUG: update() must be called before get()")
-            .download(id)
+        trace!(
+            "getting packages for package id `{}` from `{:?}`",
+            id,
+            self.remote
+        );
+        self.path_source
+            .as_mut()
+            .expect("BUG: update() must be called before get()")
+            .download(id)
     }

     fn fingerprint(&self, _pkg: &Package) -> CargoResult<String> {
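
The `ident` function in the hunk above derives a checkout directory name from the last path segment of the canonicalized URL (or `_empty`) plus a short hash. A rough standalone equivalent, with `std`'s `DefaultHasher` standing in for cargo's `short_hash` (an assumption made for the sketch; the real code hashes the canonicalized `Url`):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Build a directory identifier like `cargo-1a2b3c4d` from a repository URL.
fn ident(url: &str) -> String {
    let name = url.trim_end_matches('/').rsplit('/').next().unwrap_or("");
    let name = if name.is_empty() { "_empty" } else { name };
    let mut hasher = DefaultHasher::new();
    url.hash(&mut hasher);
    format!("{}-{:08x}", name, hasher.finish() as u32)
}

fn main() {
    // e.g. prints something like "cargo-4f1d9a3e"
    println!("{}", ident("https://github.com/rust-lang/cargo"));
}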
View file
@@ -11,9 +11,9 @@ use serde::ser::{self, Serialize};
 use url::Url;

 use core::GitReference;
-use util::{ToUrl, internal, Config, network, Progress};
+use util::{internal, network, Config, Progress, ToUrl};
 use util::paths;
-use util::errors::{CargoResult, CargoResultExt, CargoError};
+use util::errors::{CargoError, CargoResult, CargoResultExt};

 #[derive(PartialEq, Clone, Debug)]
 pub struct GitRevision(git2::Oid);
@@ -25,8 +25,9 @@ impl ser::Serialize for GitRevision {
 }

 fn serialize_str<T, S>(t: &T, s: S) -> Result<S::Ok, S::Error>
-    where T: fmt::Display,
-          S: ser::Serializer,
+where
+    T: fmt::Display,
+    S: ser::Serializer,
 {
     t.to_string().serialize(s)
 }
@@ -49,8 +50,7 @@ impl GitShortID {
 /// `GitDatabase`.
 #[derive(PartialEq, Clone, Debug, Serialize)]
 pub struct GitRemote {
-    #[serde(serialize_with = "serialize_str")]
-    url: Url,
+    #[serde(serialize_with = "serialize_str")] url: Url,
 }

 /// `GitDatabase` is a local clone of a remote repository's database. Multiple
@@ -59,8 +59,7 @@ pub struct GitRemote {
 pub struct GitDatabase {
     remote: GitRemote,
     path: PathBuf,
-    #[serde(skip_serializing)]
-    repo: git2::Repository,
+    #[serde(skip_serializing)] repo: git2::Repository,
 }

 /// `GitCheckout` is a local checkout of a particular revision. Calling
@@ -71,8 +70,7 @@ pub struct GitCheckout<'a> {
     database: &'a GitDatabase,
     location: PathBuf,
     revision: GitRevision,
-    #[serde(skip_serializing)]
-    repo: git2::Repository,
+    #[serde(skip_serializing)] repo: git2::Repository,
 }

 // Implementations
@@ -86,22 +84,20 @@ impl GitRemote {
         &self.url
     }

-    pub fn rev_for(&self, path: &Path, reference: &GitReference)
-                   -> CargoResult<GitRevision> {
+    pub fn rev_for(&self, path: &Path, reference: &GitReference) -> CargoResult<GitRevision> {
         reference.resolve(&self.db_at(path)?.repo)
     }

-    pub fn checkout(&self,
-                    into: &Path,
-                    reference: &GitReference,
-                    cargo_config: &Config)
-                    -> CargoResult<(GitDatabase, GitRevision)>
-    {
+    pub fn checkout(
+        &self,
+        into: &Path,
+        reference: &GitReference,
+        cargo_config: &Config,
+    ) -> CargoResult<(GitDatabase, GitRevision)> {
         let mut repo_and_rev = None;
         if let Ok(mut repo) = git2::Repository::open(into) {
-            self.fetch_into(&mut repo, cargo_config).chain_err(|| {
-                format!("failed to fetch into {}", into.display())
-            })?;
+            self.fetch_into(&mut repo, cargo_config)
+                .chain_err(|| format!("failed to fetch into {}", into.display()))?;
             if let Ok(rev) = reference.resolve(&repo) {
                 repo_and_rev = Some((repo, rev));
             }
@@ -109,19 +105,21 @@ impl GitRemote {
         let (repo, rev) = match repo_and_rev {
             Some(pair) => pair,
             None => {
-                let repo = self.clone_into(into, cargo_config).chain_err(|| {
-                    format!("failed to clone into: {}", into.display())
-                })?;
+                let repo = self.clone_into(into, cargo_config)
+                    .chain_err(|| format!("failed to clone into: {}", into.display()))?;
                 let rev = reference.resolve(&repo)?;
                 (repo, rev)
             }
         };

-        Ok((GitDatabase {
-            remote: self.clone(),
-            path: into.to_path_buf(),
-            repo,
-        }, rev))
+        Ok((
+            GitDatabase {
+                remote: self.clone(),
+                path: into.to_path_buf(),
+                repo,
+            },
+            rev,
+        ))
     }

     pub fn db_at(&self, db_path: &Path) -> CargoResult<GitDatabase> {
@@ -145,14 +143,23 @@ impl GitRemote {
         }
         fs::create_dir_all(dst)?;
         let mut repo = git2::Repository::init_bare(dst)?;
-        fetch(&mut repo, &self.url, "refs/heads/*:refs/heads/*", cargo_config)?;
+        fetch(
+            &mut repo,
+            &self.url,
+            "refs/heads/*:refs/heads/*",
+            cargo_config,
+        )?;
         Ok(repo)
     }
 }

 impl GitDatabase {
-    pub fn copy_to(&self, rev: GitRevision, dest: &Path, cargo_config: &Config)
-                   -> CargoResult<GitCheckout> {
+    pub fn copy_to(
+        &self,
+        rev: GitRevision,
+        dest: &Path,
+        cargo_config: &Config,
+    ) -> CargoResult<GitCheckout> {
         let mut checkout = None;
         if let Ok(repo) = git2::Repository::open(dest) {
             let mut co = GitCheckout::new(dest, self, rev.clone(), repo);
@@ -193,26 +200,22 @@ impl GitDatabase {
 impl GitReference {
     fn resolve(&self, repo: &git2::Repository) -> CargoResult<GitRevision> {
         let id = match *self {
-            GitReference::Tag(ref s) => {
-                (|| -> CargoResult<git2::Oid> {
-                    let refname = format!("refs/tags/{}", s);
-                    let id = repo.refname_to_id(&refname)?;
-                    let obj = repo.find_object(id, None)?;
-                    let obj = obj.peel(ObjectType::Commit)?;
-                    Ok(obj.id())
-                })().chain_err(|| {
-                    format!("failed to find tag `{}`", s)
-                })?
-            }
+            GitReference::Tag(ref s) => (|| -> CargoResult<git2::Oid> {
+                let refname = format!("refs/tags/{}", s);
+                let id = repo.refname_to_id(&refname)?;
+                let obj = repo.find_object(id, None)?;
+                let obj = obj.peel(ObjectType::Commit)?;
+                Ok(obj.id())
+            })()
+                .chain_err(|| format!("failed to find tag `{}`", s))?,
             GitReference::Branch(ref s) => {
                 (|| {
                     let b = repo.find_branch(s, git2::BranchType::Local)?;
-                    b.get().target().ok_or_else(|| {
-                        format_err!("branch `{}` did not have a target", s)
-                    })
-                })().chain_err(|| {
-                    format!("failed to find branch `{}`", s)
-                })?
+                    b.get()
+                        .target()
+                        .ok_or_else(|| format_err!("branch `{}` did not have a target", s))
+                })()
+                    .chain_err(|| format!("failed to find branch `{}`", s))?
             }
             GitReference::Rev(ref s) => {
                 let obj = repo.revparse_single(s)?;
@@ -227,10 +230,12 @@ impl GitReference {
 }

 impl<'a> GitCheckout<'a> {
-    fn new(path: &Path, database: &'a GitDatabase, revision: GitRevision,
-           repo: git2::Repository)
-           -> GitCheckout<'a>
-    {
+    fn new(
+        path: &Path,
+        database: &'a GitDatabase,
+        revision: GitRevision,
+        repo: git2::Repository,
+    ) -> GitCheckout<'a> {
         GitCheckout {
             location: path.to_path_buf(),
             database,
@@ -239,16 +244,14 @@ impl<'a> GitCheckout<'a> {
         }
     }

-    fn clone_into(into: &Path,
-                  database: &'a GitDatabase,
-                  revision: GitRevision,
-                  config: &Config)
-                  -> CargoResult<GitCheckout<'a>>
-    {
+    fn clone_into(
+        into: &Path,
+        database: &'a GitDatabase,
+        revision: GitRevision,
+        config: &Config,
+    ) -> CargoResult<GitCheckout<'a>> {
         let dirname = into.parent().unwrap();
-        fs::create_dir_all(&dirname).chain_err(|| {
-            format!("Couldn't mkdir {}", dirname.display())
-        })?;
+        fs::create_dir_all(&dirname).chain_err(|| format!("Couldn't mkdir {}", dirname.display()))?;
         if into.exists() {
             paths::remove_dir_all(into)?;
         }
@@ -335,22 +338,25 @@ impl<'a> GitCheckout<'a> {
         info!("update submodules for: {:?}", repo.workdir().unwrap());

         for mut child in repo.submodules()? {
-            update_submodule(repo, &mut child, cargo_config)
-                .chain_err(|| {
-                    format!("failed to update submodule `{}`",
-                            child.name().unwrap_or(""))
-                })?;
+            update_submodule(repo, &mut child, cargo_config).chain_err(|| {
+                format!(
+                    "failed to update submodule `{}`",
+                    child.name().unwrap_or("")
+                )
+            })?;
         }
         Ok(())
     }

-    fn update_submodule(parent: &git2::Repository,
-                        child: &mut git2::Submodule,
-                        cargo_config: &Config) -> CargoResult<()> {
+    fn update_submodule(
+        parent: &git2::Repository,
+        child: &mut git2::Submodule,
+        cargo_config: &Config,
+    ) -> CargoResult<()> {
         child.init(false)?;
-        let url = child.url().ok_or_else(|| {
-            internal("non-utf8 url for submodule")
-        })?;
+        let url = child
+            .url()
+            .ok_or_else(|| internal("non-utf8 url for submodule"))?;

         // A submodule which is listed in .gitmodules but not actually
         // checked out will not have a head id, so we should ignore it.
@@ -370,7 +376,7 @@ impl<'a> GitCheckout<'a> {
         let mut repo = match head_and_repo {
             Ok((head, repo)) => {
                 if child.head_id() == head {
-                    return update_submodules(&repo, cargo_config)
+                    return update_submodules(&repo, cargo_config);
                 }
                 repo
             }
@@ -385,8 +391,11 @@ impl<'a> GitCheckout<'a> {
         let refspec = "refs/heads/*:refs/heads/*";
         let url = url.to_url()?;
         fetch(&mut repo, &url, refspec, cargo_config).chain_err(|| {
-            internal(format!("failed to fetch submodule `{}` from {}",
-                             child.name().unwrap_or(""), url))
+            internal(format!(
+                "failed to fetch submodule `{}` from {}",
+                child.name().unwrap_or(""),
+                url
+            ))
         })?;

         let obj = repo.find_object(head, None)?;
@@ -423,9 +432,9 @@ impl<'a> GitCheckout<'a> {
 /// credentials until we give it a reason to not do so. To ensure we don't
 /// just sit here looping forever we keep track of authentications we've
 /// attempted and we don't try the same ones again.
-fn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F)
-                             -> CargoResult<T>
-    where F: FnMut(&mut git2::Credentials) -> CargoResult<T>
+fn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F) -> CargoResult<T>
+where
+    F: FnMut(&mut git2::Credentials) -> CargoResult<T>,
 {
     let mut cred_helper = git2::CredentialHelper::new(url);
     cred_helper.config(cfg);
@@ -459,7 +468,7 @@ fn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F)
         if allowed.contains(git2::CredentialType::USERNAME) {
             debug_assert!(username.is_none());
             ssh_username_requested = true;
-            return Err(git2::Error::from_str("gonna try usernames later"))
+            return Err(git2::Error::from_str("gonna try usernames later"));
         }

         // An "SSH_KEY" authentication indicates that we need some sort of SSH
@@ -479,7 +488,7 @@ fn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F)
             let username = username.unwrap();
             debug_assert!(!ssh_username_requested);
             ssh_agent_attempts.push(username.to_string());
-            return git2::Cred::ssh_key_from_agent(username)
+            return git2::Cred::ssh_key_from_agent(username);
         }

         // Sometimes libgit2 will ask for a username/password in plaintext. This
@@ -490,13 +499,13 @@ fn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F)
         if allowed.contains(git2::CredentialType::USER_PASS_PLAINTEXT) {
             let r = git2::Cred::credential_helper(cfg, url, username);
             cred_helper_bad = Some(r.is_err());
-            return r
+            return r;
         }

         // I'm... not sure what the DEFAULT kind of authentication is, but seems
         // easy to support?
         if allowed.contains(git2::CredentialType::DEFAULT) {
-            return git2::Cred::default()
+            return git2::Cred::default();
         }

         // Whelp, we tried our best
@@ -540,7 +549,7 @@ fn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F)
                 attempts += 1;
                 if attempts == 1 {
                     ssh_agent_attempts.push(s.to_string());
-                    return git2::Cred::ssh_key_from_agent(&s)
+                    return git2::Cred::ssh_key_from_agent(&s);
                 }
             }
             Err(git2::Error::from_str("no authentication available"))
@@ -559,13 +568,13 @@ fn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F)
             // errors happened). Otherwise something else is funny so we bail
             // out.
             if attempts != 2 {
-                break
+                break;
             }
         }
     }

     if res.is_ok() || !any_attempts {
-        return res.map_err(From::from)
+        return res.map_err(From::from);
     }

     // In the case of an authentication failure (where we tried something) then
@@ -573,23 +582,32 @@ fn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F)
     // tried.
     let res = res.map_err(CargoError::from).chain_err(|| {
         let mut msg = "failed to authenticate when downloading \
-                       repository".to_string();
+                       repository"
+            .to_string();
         if !ssh_agent_attempts.is_empty() {
-            let names = ssh_agent_attempts.iter()
-                .map(|s| format!("`{}`", s))
-                .collect::<Vec<_>>()
-                .join(", ");
-            msg.push_str(&format!("\nattempted ssh-agent authentication, but \
-                                   none of the usernames {} succeeded", names));
+            let names = ssh_agent_attempts
+                .iter()
+                .map(|s| format!("`{}`", s))
+                .collect::<Vec<_>>()
+                .join(", ");
+            msg.push_str(&format!(
+                "\nattempted ssh-agent authentication, but \
+                 none of the usernames {} succeeded",
+                names
+            ));
         }
         if let Some(failed_cred_helper) = cred_helper_bad {
             if failed_cred_helper {
-                msg.push_str("\nattempted to find username/password via \
-                              git's `credential.helper` support, but failed");
+                msg.push_str(
+                    "\nattempted to find username/password via \
+                     git's `credential.helper` support, but failed",
+                );
             } else {
-                msg.push_str("\nattempted to find username/password via \
-                              `credential.helper`, but maybe the found \
-                              credentials were incorrect");
+                msg.push_str(
+                    "\nattempted to find username/password via \
+                     `credential.helper`, but maybe the found \
+                     credentials were incorrect",
+                );
             }
         }
         msg
@@ -597,9 +615,7 @@ fn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F)
     Ok(res)
 }

-fn reset(repo: &git2::Repository,
-         obj: &git2::Object,
-         config: &Config) -> CargoResult<()> {
+fn reset(repo: &git2::Repository, obj: &git2::Object, config: &Config) -> CargoResult<()> {
     let mut pb = Progress::new("Checkout", config);
     let mut opts = git2::build::CheckoutBuilder::new();
     opts.progress(|_, cur, max| {
@@ -609,12 +625,12 @@ fn reset(repo: &git2::Repository,
     Ok(())
 }

-pub fn with_fetch_options(git_config: &git2::Config,
-                          url: &Url,
-                          config: &Config,
-                          cb: &mut FnMut(git2::FetchOptions) -> CargoResult<()>)
-                          -> CargoResult<()>
-{
+pub fn with_fetch_options(
+    git_config: &git2::Config,
+    url: &Url,
+    config: &Config,
+    cb: &mut FnMut(git2::FetchOptions) -> CargoResult<()>,
+) -> CargoResult<()> {
     let mut progress = Progress::new("Fetch", config);
     network::with_retry(config, || {
         with_authentication(url.as_str(), git_config, |f| {
@@ -622,7 +638,9 @@ pub fn with_fetch_options(git_config: &git2::Config,
             rcb.credentials(f);

             rcb.transfer_progress(|stats| {
-                progress.tick(stats.indexed_objects(), stats.total_objects()).is_ok()
+                progress
+                    .tick(stats.indexed_objects(), stats.total_objects())
+                    .is_ok()
             });

             // Create a local anonymous remote in the repository to fetch the
@@ -636,13 +654,17 @@ pub fn with_fetch_options(git_config: &git2::Config,
     })
 }

-pub fn fetch(repo: &mut git2::Repository,
-             url: &Url,
-             refspec: &str,
-             config: &Config) -> CargoResult<()> {
+pub fn fetch(
+    repo: &mut git2::Repository,
+    url: &Url,
+    refspec: &str,
+    config: &Config,
+) -> CargoResult<()> {
     if config.frozen() {
-        bail!("attempting to update a git repository, but --frozen \
-               was specified")
+        bail!(
+            "attempting to update a git repository, but --frozen \
+             was specified"
+        )
     }
     if !config.network_allowed() {
         bail!("can't update a git repository in the offline mode")
@@ -655,7 +677,7 @@ pub fn fetch(repo: &mut git2::Repository,
         let mut handle = config.http()?.borrow_mut();
         debug!("attempting github fast path for {}", url);
         if github_up_to_date(&mut handle, url, &oid) {
-            return Ok(())
+            return Ok(());
         } else {
             debug!("fast path failed, falling back to a git fetch");
         }
@@ -694,14 +716,16 @@ pub fn fetch(repo: &mut git2::Repository,
             if !repo_reinitialized && err.class() == git2::ErrorClass::Reference {
                 repo_reinitialized = true;
-                debug!("looks like this is a corrupt repository, reinitializing \
-                        and trying again");
+                debug!(
+                    "looks like this is a corrupt repository, reinitializing \
+                     and trying again"
+                );
                 if reinitialize(repo).is_ok() {
-                    continue
+                    continue;
                 }
             }

-            return Err(err.into())
+            return Err(err.into());
         }
         Ok(())
     })
@@ -727,31 +751,38 @@ fn maybe_gc_repo(repo: &mut git2::Repository) -> CargoResult<()> {
         Ok(e) => e.count(),
         Err(_) => {
             debug!("skipping gc as pack dir appears gone");
-            return Ok(())
+            return Ok(());
         }
     };
-    let max = env::var("__CARGO_PACKFILE_LIMIT").ok()
+    let max = env::var("__CARGO_PACKFILE_LIMIT")
+        .ok()
         .and_then(|s| s.parse::<usize>().ok())
         .unwrap_or(100);
     if entries < max {
         debug!("skipping gc as there's only {} pack files", entries);
-        return Ok(())
+        return Ok(());
     }

     // First up, try a literal `git gc` by shelling out to git. This is pretty
     // likely to fail though as we may not have `git` installed. Note that
     // libgit2 doesn't currently implement the gc operation, so there's no
     // equivalent there.
-    match Command::new("git").arg("gc").current_dir(repo.path()).output() {
+    match Command::new("git")
+        .arg("gc")
+        .current_dir(repo.path())
+        .output()
+    {
         Ok(out) => {
-            debug!("git-gc status: {}\n\nstdout ---\n{}\nstderr ---\n{}",
-                   out.status,
-                   String::from_utf8_lossy(&out.stdout),
-                   String::from_utf8_lossy(&out.stderr));
+            debug!(
+                "git-gc status: {}\n\nstdout ---\n{}\nstderr ---\n{}",
+                out.status,
+                String::from_utf8_lossy(&out.stdout),
+                String::from_utf8_lossy(&out.stderr)
+            );
             if out.status.success() {
                 let new = git2::Repository::open(repo.path())?;
                 mem::replace(repo, new);
-                return Ok(())
+                return Ok(());
             }
         }
         Err(e) => debug!("git-gc failed to spawn: {}", e),
@@ -774,7 +805,7 @@ fn reinitialize(repo: &mut git2::Repository) -> CargoResult<()> {
     for entry in path.read_dir()? {
         let entry = entry?;
         if entry.file_name().to_str() == Some("tmp") {
-            continue
+            continue;
         }
         let path = entry.path();
         drop(paths::remove_file(&path).or_else(|_| paths::remove_dir_all(&path)));
@@ -818,11 +849,13 @@ fn github_up_to_date(handle: &mut Easy, url: &Url, oid: &git2::Oid) -> bool {
     let username = try!(pieces.next());
     let repo = try!(pieces.next());
     if pieces.next().is_some() {
-        return false
+        return false;
     }

-    let url = format!("https://api.github.com/repos/{}/{}/commits/master",
-                      username, repo);
+    let url = format!(
+        "https://api.github.com/repos/{}/{}/commits/master",
+        username, repo
+    );
     try!(handle.get(true).ok());
     try!(handle.url(&url).ok());
     try!(handle.useragent("cargo").ok());
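
`with_authentication` above retries its callback with successive credential kinds, remembering what was attempted so the final error can summarize everything that failed. A much-simplified sketch of that retry shape, with plain strings standing in for `git2` credentials (an illustrative assumption):

// Run a fallible operation once per credential candidate, stopping at the
// first success and otherwise reporting the last failure.
fn try_with_candidates<T>(
    candidates: &[&str],
    mut op: impl FnMut(&str) -> Result<T, String>,
) -> Result<T, String> {
    let mut last_err = "no credential candidates".to_string();
    for cand in candidates {
        match op(cand) {
            Ok(v) => return Ok(v),
            Err(e) => last_err = e,
        }
    }
    Err(format!("failed to authenticate: {}", last_err))
}

fn main() {
    // Simulate an SSH server that only accepts the second username.
    let result = try_with_candidates(&["git", "ssh-user"], |user| {
        if user == "ssh-user" {
            Ok(user.to_string())
        } else {
            Err(format!("`{}` rejected", user))
        }
    });
    assert_eq!(result.unwrap(), "ssh-user");
}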
View file
@ -8,9 +8,9 @@ use glob::Pattern;
use ignore::Match; use ignore::Match;
use ignore::gitignore::GitignoreBuilder; use ignore::gitignore::GitignoreBuilder;
use core::{Package, PackageId, Summary, SourceId, Source, Dependency, Registry}; use core::{Dependency, Package, PackageId, Registry, Source, SourceId, Summary};
use ops; use ops;
use util::{self, CargoResult, internal}; use util::{self, internal, CargoResult};
use util::Config; use util::Config;
pub struct PathSource<'cfg> { pub struct PathSource<'cfg> {
@ -27,8 +27,7 @@ impl<'cfg> PathSource<'cfg> {
/// ///
/// This source will only return the package at precisely the `path` /// This source will only return the package at precisely the `path`
/// specified, and it will be an error if there's not a package at `path`. /// specified, and it will be an error if there's not a package at `path`.
pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) -> PathSource<'cfg> {
-> PathSource<'cfg> {
PathSource { PathSource {
source_id: id.clone(), source_id: id.clone(),
path: path.to_path_buf(), path: path.to_path_buf(),
@ -47,11 +46,10 @@ impl<'cfg> PathSource<'cfg> {
/// ///
/// Note that this should be used with care and likely shouldn't be chosen /// Note that this should be used with care and likely shouldn't be chosen
/// by default! /// by default!
pub fn new_recursive(root: &Path, id: &SourceId, config: &'cfg Config) pub fn new_recursive(root: &Path, id: &SourceId, config: &'cfg Config) -> PathSource<'cfg> {
-> PathSource<'cfg> {
PathSource { PathSource {
recursive: true, recursive: true,
.. PathSource::new(root, id, config) ..PathSource::new(root, id, config)
} }
} }
@ -62,7 +60,7 @@ impl<'cfg> PathSource<'cfg> {
match self.packages.iter().find(|p| p.root() == &*self.path) { match self.packages.iter().find(|p| p.root() == &*self.path) {
Some(pkg) => Ok(pkg.clone()), Some(pkg) => Ok(pkg.clone()),
None => Err(internal("no package found in source")) None => Err(internal("no package found in source")),
} }
} }
@ -115,9 +113,8 @@ impl<'cfg> PathSource<'cfg> {
} else { } else {
p p
}; };
Pattern::new(pattern).map_err(|e| { Pattern::new(pattern)
format_err!("could not parse glob pattern `{}`: {}", p, e) .map_err(|e| format_err!("could not parse glob pattern `{}`: {}", p, e))
})
}; };
let glob_exclude = pkg.manifest() let glob_exclude = pkg.manifest()
@ -134,7 +131,9 @@ impl<'cfg> PathSource<'cfg> {
let glob_should_package = |relative_path: &Path| -> bool { let glob_should_package = |relative_path: &Path| -> bool {
fn glob_match(patterns: &Vec<Pattern>, relative_path: &Path) -> bool { fn glob_match(patterns: &Vec<Pattern>, relative_path: &Path) -> bool {
patterns.iter().any(|pattern| pattern.matches_path(relative_path)) patterns
.iter()
.any(|pattern| pattern.matches_path(relative_path))
} }
// include and exclude options are mutually exclusive. // include and exclude options are mutually exclusive.
@ -162,10 +161,9 @@ impl<'cfg> PathSource<'cfg> {
let ignore_should_package = |relative_path: &Path| -> CargoResult<bool> { let ignore_should_package = |relative_path: &Path| -> CargoResult<bool> {
// include and exclude options are mutually exclusive. // include and exclude options are mutually exclusive.
if no_include_option { if no_include_option {
match ignore_exclude.matched_path_or_any_parents( match ignore_exclude
relative_path, .matched_path_or_any_parents(relative_path, /* is_dir */ false)
/* is_dir */ false, {
) {
Match::None => Ok(true), Match::None => Ok(true),
Match::Ignore(_) => Ok(false), Match::Ignore(_) => Ok(false),
Match::Whitelist(pattern) => Err(format_err!( Match::Whitelist(pattern) => Err(format_err!(
@ -174,10 +172,9 @@ impl<'cfg> PathSource<'cfg> {
)), )),
} }
} else { } else {
match ignore_include.matched_path_or_any_parents( match ignore_include
relative_path, .matched_path_or_any_parents(relative_path, /* is_dir */ false)
/* is_dir */ false, {
) {
Match::None => Ok(false), Match::None => Ok(false),
Match::Ignore(_) => Ok(true), Match::Ignore(_) => Ok(true),
Match::Whitelist(pattern) => Err(format_err!( Match::Whitelist(pattern) => Err(format_err!(
@ -198,42 +195,34 @@ impl<'cfg> PathSource<'cfg> {
if glob_should_package != ignore_should_package { if glob_should_package != ignore_should_package {
if glob_should_package { if glob_should_package {
if no_include_option { if no_include_option {
self.config self.config.shell().warn(format!(
.shell() "Pattern matching for Cargo's include/exclude fields is changing and \
.warn(format!( file `{}` WILL be excluded in a future Cargo version.\n\
"Pattern matching for Cargo's include/exclude fields is changing and \ See https://github.com/rust-lang/cargo/issues/4268 for more info",
file `{}` WILL be excluded in a future Cargo version.\n\ relative_path.display()
See https://github.com/rust-lang/cargo/issues/4268 for more info", ))?;
relative_path.display()
))?;
} else { } else {
self.config self.config.shell().warn(format!(
.shell() "Pattern matching for Cargo's include/exclude fields is changing and \
.warn(format!( file `{}` WILL NOT be included in a future Cargo version.\n\
"Pattern matching for Cargo's include/exclude fields is changing and \ See https://github.com/rust-lang/cargo/issues/4268 for more info",
file `{}` WILL NOT be included in a future Cargo version.\n\ relative_path.display()
See https://github.com/rust-lang/cargo/issues/4268 for more info", ))?;
relative_path.display()
))?;
} }
} else if no_include_option { } else if no_include_option {
self.config self.config.shell().warn(format!(
.shell() "Pattern matching for Cargo's include/exclude fields is changing and \
.warn(format!( file `{}` WILL NOT be excluded in a future Cargo version.\n\
"Pattern matching for Cargo's include/exclude fields is changing and \ See https://github.com/rust-lang/cargo/issues/4268 for more info",
file `{}` WILL NOT be excluded in a future Cargo version.\n\ relative_path.display()
See https://github.com/rust-lang/cargo/issues/4268 for more info", ))?;
relative_path.display()
))?;
} else { } else {
self.config self.config.shell().warn(format!(
.shell() "Pattern matching for Cargo's include/exclude fields is changing and \
.warn(format!( file `{}` WILL be included in a future Cargo version.\n\
"Pattern matching for Cargo's include/exclude fields is changing and \ See https://github.com/rust-lang/cargo/issues/4268 for more info",
file `{}` WILL be included in a future Cargo version.\n\ relative_path.display()
See https://github.com/rust-lang/cargo/issues/4268 for more info", ))?;
relative_path.display()
))?;
} }
} }
@ -252,11 +241,12 @@ impl<'cfg> PathSource<'cfg> {
// Returns Some(_) if found sibling Cargo.toml and .git folder; // Returns Some(_) if found sibling Cargo.toml and .git folder;
// otherwise caller should fall back on full file list. // otherwise caller should fall back on full file list.
fn discover_git_and_list_files(&self, fn discover_git_and_list_files(
pkg: &Package, &self,
root: &Path, pkg: &Package,
filter: &mut FnMut(&Path) -> CargoResult<bool>) root: &Path,
-> Option<CargoResult<Vec<PathBuf>>> { filter: &mut FnMut(&Path) -> CargoResult<bool>,
) -> Option<CargoResult<Vec<PathBuf>>> {
// If this package is in a git repository, then we really do want to // If this package is in a git repository, then we really do want to
// query the git repository as it takes into account items such as // query the git repository as it takes into account items such as
// .gitignore. We're not quite sure where the git repository is, // .gitignore. We're not quite sure where the git repository is,
@ -276,8 +266,7 @@ impl<'cfg> PathSource<'cfg> {
Ok(index) => index, Ok(index) => index,
Err(err) => return Some(Err(err.into())), Err(err) => return Some(Err(err.into())),
}; };
let path = util::without_prefix(root, cur) let path = util::without_prefix(root, cur).unwrap().join("Cargo.toml");
.unwrap().join("Cargo.toml");
if index.get_path(&path, 0).is_some() { if index.get_path(&path, 0).is_some() {
return Some(self.list_files_git(pkg, repo, filter)); return Some(self.list_files_git(pkg, repo, filter));
} }
@ -285,7 +274,7 @@ impl<'cfg> PathSource<'cfg> {
} }
// don't cross submodule boundaries // don't cross submodule boundaries
if cur.join(".git").is_dir() { if cur.join(".git").is_dir() {
break break;
} }
match cur.parent() { match cur.parent() {
Some(parent) => cur = parent, Some(parent) => cur = parent,
@ -295,14 +284,16 @@ impl<'cfg> PathSource<'cfg> {
None None
} }
fn list_files_git(&self, pkg: &Package, repo: git2::Repository, fn list_files_git(
filter: &mut FnMut(&Path) -> CargoResult<bool>) &self,
-> CargoResult<Vec<PathBuf>> { pkg: &Package,
repo: git2::Repository,
filter: &mut FnMut(&Path) -> CargoResult<bool>,
) -> CargoResult<Vec<PathBuf>> {
warn!("list_files_git {}", pkg.package_id()); warn!("list_files_git {}", pkg.package_id());
let index = repo.index()?; let index = repo.index()?;
let root = repo.workdir().ok_or_else(|| { let root = repo.workdir()
internal("Can't list files on a bare repository.") .ok_or_else(|| internal("Can't list files on a bare repository."))?;
})?;
let pkg_path = pkg.root(); let pkg_path = pkg.root();
let mut ret = Vec::<PathBuf>::new(); let mut ret = Vec::<PathBuf>::new();
@ -325,11 +316,9 @@ impl<'cfg> PathSource<'cfg> {
opts.pathspec(suffix); opts.pathspec(suffix);
} }
let statuses = repo.statuses(Some(&mut opts))?; let statuses = repo.statuses(Some(&mut opts))?;
let untracked = statuses.iter().filter_map(|entry| { let untracked = statuses.iter().filter_map(|entry| match entry.status() {
match entry.status() { git2::Status::WT_NEW => Some((join(root, entry.path_bytes()), None)),
git2::Status::WT_NEW => Some((join(root, entry.path_bytes()), None)), _ => None,
_ => None,
}
}); });
let mut subpackages_found = Vec::new(); let mut subpackages_found = Vec::new();
@ -341,15 +330,14 @@ impl<'cfg> PathSource<'cfg> {
// bit above via the `pathspec` function call, but we need to filter // the entries in the index as well.
// the entries in the index as well. // the entries in the index as well.
if !file_path.starts_with(pkg_path) { if !file_path.starts_with(pkg_path) {
continue continue;
} }
match file_path.file_name().and_then(|s| s.to_str()) { match file_path.file_name().and_then(|s| s.to_str()) {
// Filter out Cargo.lock and target always, we don't want to // Filter out Cargo.lock and target always, we don't want to
// package a lock file no one will ever read and we also avoid // package a lock file no one will ever read and we also avoid
// build artifacts // build artifacts
Some("Cargo.lock") | Some("Cargo.lock") | Some("target") => continue,
Some("target") => continue,
// Keep track of all sub-packages found and also strip out all // Keep track of all sub-packages found and also strip out all
// matches we've found so far. Note, though, that if we find // matches we've found so far. Note, though, that if we find
@ -360,7 +348,7 @@ impl<'cfg> PathSource<'cfg> {
warn!("subpackage found: {}", path.display()); warn!("subpackage found: {}", path.display());
ret.retain(|p| !p.starts_with(path)); ret.retain(|p| !p.starts_with(path));
subpackages_found.push(path.to_path_buf()); subpackages_found.push(path.to_path_buf());
continue continue;
} }
} }
@ -370,15 +358,14 @@ impl<'cfg> PathSource<'cfg> {
// If this file is part of any other sub-package we've found so far, // If this file is part of any other sub-package we've found so far,
// skip it. // skip it.
if subpackages_found.iter().any(|p| file_path.starts_with(p)) { if subpackages_found.iter().any(|p| file_path.starts_with(p)) {
continue continue;
} }
if is_dir.unwrap_or_else(|| file_path.is_dir()) { if is_dir.unwrap_or_else(|| file_path.is_dir()) {
warn!(" found submodule {}", file_path.display()); warn!(" found submodule {}", file_path.display());
let rel = util::without_prefix(&file_path, root).unwrap(); let rel = util::without_prefix(&file_path, root).unwrap();
let rel = rel.to_str().ok_or_else(|| { let rel = rel.to_str()
format_err!("invalid utf-8 filename: {}", rel.display()) .ok_or_else(|| format_err!("invalid utf-8 filename: {}", rel.display()))?;
})?;
// Git submodules are currently only named through `/` path // Git submodules are currently only named through `/` path
// separators, explicitly not `\` which windows uses. Who knew? // separators, explicitly not `\` which windows uses. Who knew?
let rel = rel.replace(r"\", "/"); let rel = rel.replace(r"\", "/");
@ -410,32 +397,39 @@ impl<'cfg> PathSource<'cfg> {
use std::str; use std::str;
match str::from_utf8(data) { match str::from_utf8(data) {
Ok(s) => Ok(path.join(s)), Ok(s) => Ok(path.join(s)),
Err(..) => Err(internal("cannot process path in git with a non \ Err(..) => Err(internal(
unicode filename")), "cannot process path in git with a non \
unicode filename",
)),
} }
} }
} }
fn list_files_walk(&self, pkg: &Package, filter: &mut FnMut(&Path) -> CargoResult<bool>) fn list_files_walk(
-> CargoResult<Vec<PathBuf>> { &self,
pkg: &Package,
filter: &mut FnMut(&Path) -> CargoResult<bool>,
) -> CargoResult<Vec<PathBuf>> {
let mut ret = Vec::new(); let mut ret = Vec::new();
PathSource::walk(pkg.root(), &mut ret, true, filter)?; PathSource::walk(pkg.root(), &mut ret, true, filter)?;
Ok(ret) Ok(ret)
} }
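
list_files_walk above defers to the recursive walk below. As a self-contained reference, a simplified std-only sketch of the same traversal, with a plain bool filter in place of CargoResult:

    use std::fs;
    use std::io;
    use std::path::{Path, PathBuf};

    // Collect files that pass `filter`, skipping any subdirectory that
    // carries its own Cargo.toml (a nested package), like the real walk.
    fn walk(path: &Path, out: &mut Vec<PathBuf>, is_root: bool,
            filter: &mut dyn FnMut(&Path) -> bool) -> io::Result<()> {
        if !path.is_dir() {
            if filter(path) {
                out.push(path.to_path_buf());
            }
            return Ok(());
        }
        if !is_root && path.join("Cargo.toml").exists() {
            return Ok(()); // don't cross into a sub-package
        }
        let mut entries: Vec<_> = fs::read_dir(path)?.collect::<Result<_, _>>()?;
        // Sort for the deterministic order the comment below asks for.
        entries.sort_by_key(|e| e.path());
        for entry in entries {
            walk(&entry.path(), out, false, filter)?;
        }
        Ok(())
    }
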
fn walk(path: &Path, ret: &mut Vec<PathBuf>, fn walk(
is_root: bool, filter: &mut FnMut(&Path) -> CargoResult<bool>) path: &Path,
-> CargoResult<()> ret: &mut Vec<PathBuf>,
{ is_root: bool,
filter: &mut FnMut(&Path) -> CargoResult<bool>,
) -> CargoResult<()> {
if !fs::metadata(&path).map(|m| m.is_dir()).unwrap_or(false) { if !fs::metadata(&path).map(|m| m.is_dir()).unwrap_or(false) {
if (*filter)(path)? { if (*filter)(path)? {
ret.push(path.to_path_buf()); ret.push(path.to_path_buf());
} }
return Ok(()) return Ok(());
} }
// Don't recurse into any sub-packages that we have // Don't recurse into any sub-packages that we have
if !is_root && fs::metadata(&path.join("Cargo.toml")).is_ok() { if !is_root && fs::metadata(&path.join("Cargo.toml")).is_ok() {
return Ok(()) return Ok(());
} }
// For package integration tests, we need to sort the paths in a deterministic order to // For package integration tests, we need to sort the paths in a deterministic order to
@ -451,7 +445,7 @@ impl<'cfg> PathSource<'cfg> {
let name = path.file_name().and_then(|s| s.to_str()); let name = path.file_name().and_then(|s| s.to_str());
// Skip dotfile directories // Skip dotfile directories
if name.map(|s| s.starts_with('.')) == Some(true) { if name.map(|s| s.starts_with('.')) == Some(true) {
continue continue;
} }
if is_root { if is_root {
// Skip cargo artifacts // Skip cargo artifacts
@ -473,9 +467,7 @@ impl<'cfg> Debug for PathSource<'cfg> {
} }
impl<'cfg> Registry for PathSource<'cfg> { impl<'cfg> Registry for PathSource<'cfg> {
fn query(&mut self, fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
dep: &Dependency,
f: &mut FnMut(Summary)) -> CargoResult<()> {
for s in self.packages.iter().map(|p| p.summary()) { for s in self.packages.iter().map(|p| p.summary()) {
if dep.matches(s) { if dep.matches(s) {
f(s.clone()) f(s.clone())
@ -512,9 +504,8 @@ impl<'cfg> Source for PathSource<'cfg> {
trace!("getting packages; id={}", id); trace!("getting packages; id={}", id);
let pkg = self.packages.iter().find(|pkg| pkg.package_id() == id); let pkg = self.packages.iter().find(|pkg| pkg.package_id() == id);
pkg.cloned().ok_or_else(|| { pkg.cloned()
internal(format!("failed to find {} in path source", id)) .ok_or_else(|| internal(format!("failed to find {} in path source", id)))
})
} }
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> { fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
@ -530,9 +521,9 @@ impl<'cfg> Source for PathSource<'cfg> {
// condition where this path was rm'ed - either way, // condition where this path was rm'ed - either way,
// we can ignore the error and treat the path's mtime // we can ignore the error and treat the path's mtime
// as 0. // as 0.
let mtime = fs::metadata(&file).map(|meta| { let mtime = fs::metadata(&file)
FileTime::from_last_modification_time(&meta) .map(|meta| FileTime::from_last_modification_time(&meta))
}).unwrap_or(FileTime::zero()); .unwrap_or(FileTime::zero());
warn!("{} {}", mtime, file.display()); warn!("{} {}", mtime, file.display());
if mtime > max { if mtime > max {
max = mtime; max = mtime;
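
The hunk ends mid-loop here, but the fingerprint logic it belongs to reduces to "newest mtime wins". A compact sketch of that reduction, assuming the same filetime crate used above:

    use filetime::FileTime;
    use std::fs;
    use std::path::Path;

    // Newest mtime wins; unreadable paths (e.g. racily removed files)
    // count as mtime 0, matching the comment above.
    fn max_mtime(files: &[&Path]) -> FileTime {
        let mut max = FileTime::zero();
        for file in files {
            let mtime = fs::metadata(file)
                .map(|m| FileTime::from_last_modification_time(&m))
                .unwrap_or(FileTime::zero());
            if mtime > max {
                max = mtime;
            }
        }
        max
    }
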
@ -6,10 +6,10 @@ use serde_json;
use semver::Version; use semver::Version;
use core::dependency::Dependency; use core::dependency::Dependency;
use core::{SourceId, Summary, PackageId}; use core::{PackageId, SourceId, Summary};
use sources::registry::{RegistryPackage, INDEX_LOCK}; use sources::registry::{RegistryPackage, INDEX_LOCK};
use sources::registry::RegistryData; use sources::registry::RegistryData;
use util::{CargoResult, internal, Filesystem, Config}; use util::{internal, CargoResult, Config, Filesystem};
pub struct RegistryIndex<'cfg> { pub struct RegistryIndex<'cfg> {
source_id: SourceId, source_id: SourceId,
@ -21,11 +21,12 @@ pub struct RegistryIndex<'cfg> {
} }
impl<'cfg> RegistryIndex<'cfg> { impl<'cfg> RegistryIndex<'cfg> {
pub fn new(id: &SourceId, pub fn new(
path: &Filesystem, id: &SourceId,
config: &'cfg Config, path: &Filesystem,
locked: bool) config: &'cfg Config,
-> RegistryIndex<'cfg> { locked: bool,
) -> RegistryIndex<'cfg> {
RegistryIndex { RegistryIndex {
source_id: id.clone(), source_id: id.clone(),
path: path.clone(), path: path.clone(),
@ -37,30 +38,30 @@ impl<'cfg> RegistryIndex<'cfg> {
} }
/// Return the hash listed for a specified PackageId. /// Return the hash listed for a specified PackageId.
pub fn hash(&mut self, pub fn hash(&mut self, pkg: &PackageId, load: &mut RegistryData) -> CargoResult<String> {
pkg: &PackageId,
load: &mut RegistryData)
-> CargoResult<String> {
let name = &*pkg.name(); let name = &*pkg.name();
let version = pkg.version(); let version = pkg.version();
if let Some(s) = self.hashes.get(name).and_then(|v| v.get(version)) { if let Some(s) = self.hashes.get(name).and_then(|v| v.get(version)) {
return Ok(s.clone()) return Ok(s.clone());
} }
// Ok, we're missing the key, so parse the index file to load it. // Ok, we're missing the key, so parse the index file to load it.
self.summaries(name, load)?; self.summaries(name, load)?;
self.hashes.get(name).and_then(|v| v.get(version)).ok_or_else(|| { self.hashes
internal(format!("no hash listed for {}", pkg)) .get(name)
}).map(|s| s.clone()) .and_then(|v| v.get(version))
.ok_or_else(|| internal(format!("no hash listed for {}", pkg)))
.map(|s| s.clone())
} }
/// Parse the on-disk metadata for the package provided /// Parse the on-disk metadata for the package provided
/// ///
/// Returns a list of pairs of (summary, yanked) for the package name /// Returns a list of pairs of (summary, yanked) for the package name
/// specified. /// specified.
pub fn summaries(&mut self, pub fn summaries(
name: &str, &mut self,
load: &mut RegistryData) name: &str,
-> CargoResult<&Vec<(Summary, bool)>> { load: &mut RegistryData,
) -> CargoResult<&Vec<(Summary, bool)>> {
if self.cache.contains_key(name) { if self.cache.contains_key(name) {
return Ok(&self.cache[name]); return Ok(&self.cache[name]);
} }
@ -69,27 +70,25 @@ impl<'cfg> RegistryIndex<'cfg> {
Ok(&self.cache[name]) Ok(&self.cache[name])
} }
fn load_summaries(&mut self, fn load_summaries(
name: &str, &mut self,
load: &mut RegistryData) name: &str,
-> CargoResult<Vec<(Summary, bool)>> { load: &mut RegistryData,
) -> CargoResult<Vec<(Summary, bool)>> {
let (root, _lock) = if self.locked { let (root, _lock) = if self.locked {
let lock = self.path.open_ro(Path::new(INDEX_LOCK), let lock = self.path
self.config, .open_ro(Path::new(INDEX_LOCK), self.config, "the registry index");
"the registry index");
match lock { match lock {
Ok(lock) => { Ok(lock) => (lock.path().parent().unwrap().to_path_buf(), Some(lock)),
(lock.path().parent().unwrap().to_path_buf(), Some(lock))
}
Err(_) => return Ok(Vec::new()), Err(_) => return Ok(Vec::new()),
} }
} else { } else {
(self.path.clone().into_path_unlocked(), None) (self.path.clone().into_path_unlocked(), None)
}; };
let fs_name = name.chars().flat_map(|c| { let fs_name = name.chars()
c.to_lowercase() .flat_map(|c| c.to_lowercase())
}).collect::<String>(); .collect::<String>();
// see module comment for why this is structured the way it is // see module comment for why this is structured the way it is
let path = match fs_name.len() { let path = match fs_name.len() {
@ -102,13 +101,10 @@ impl<'cfg> RegistryIndex<'cfg> {
let mut hit_closure = false; let mut hit_closure = false;
let err = load.load(&root, Path::new(&path), &mut |contents| { let err = load.load(&root, Path::new(&path), &mut |contents| {
hit_closure = true; hit_closure = true;
let contents = str::from_utf8(contents).map_err(|_| { let contents = str::from_utf8(contents)
format_err!("registry index file was not valid utf-8") .map_err(|_| format_err!("registry index file was not valid utf-8"))?;
})?;
ret.reserve(contents.lines().count()); ret.reserve(contents.lines().count());
let lines = contents.lines() let lines = contents.lines().map(|s| s.trim()).filter(|l| !l.is_empty());
.map(|s| s.trim())
.filter(|l| !l.is_empty());
let online = !self.config.cli_unstable().offline; let online = !self.config.cli_unstable().offline;
// Attempt forwards-compatibility on the index by ignoring // Attempt forwards-compatibility on the index by ignoring
@ -117,7 +113,7 @@ impl<'cfg> RegistryIndex<'cfg> {
// interpretation of each line here and older cargo will simply // interpretation of each line here and older cargo will simply
// ignore the new lines. // ignore the new lines.
ret.extend(lines.filter_map(|line| { ret.extend(lines.filter_map(|line| {
self.parse_registry_package(line).ok().and_then(|v|{ self.parse_registry_package(line).ok().and_then(|v| {
if online || load.is_crate_downloaded(v.0.package_id()) { if online || load.is_crate_downloaded(v.0.package_id()) {
Some(v) Some(v)
} else { } else {
@ -143,10 +139,15 @@ impl<'cfg> RegistryIndex<'cfg> {
/// package. /// package.
/// ///
/// The returned boolean is whether or not the summary has been yanked. /// The returned boolean is whether or not the summary has been yanked.
fn parse_registry_package(&mut self, line: &str) fn parse_registry_package(&mut self, line: &str) -> CargoResult<(Summary, bool)> {
-> CargoResult<(Summary, bool)> {
let RegistryPackage { let RegistryPackage {
name, vers, cksum, deps, features, yanked, links name,
vers,
cksum,
deps,
features,
yanked,
links,
} = super::DEFAULT_ID.set(&self.source_id, || { } = super::DEFAULT_ID.set(&self.source_id, || {
serde_json::from_str::<RegistryPackage>(line) serde_json::from_str::<RegistryPackage>(line)
})?; })?;
@ -156,37 +157,37 @@ impl<'cfg> RegistryIndex<'cfg> {
if self.hashes.contains_key(&name[..]) { if self.hashes.contains_key(&name[..]) {
self.hashes.get_mut(&name[..]).unwrap().insert(vers, cksum); self.hashes.get_mut(&name[..]).unwrap().insert(vers, cksum);
} else { } else {
self.hashes.entry(name.into_owned()) self.hashes
.entry(name.into_owned())
.or_insert_with(HashMap::new) .or_insert_with(HashMap::new)
.insert(vers, cksum); .insert(vers, cksum);
} }
Ok((summary, yanked.unwrap_or(false))) Ok((summary, yanked.unwrap_or(false)))
} }
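
Each line handed to parse_registry_package above is one JSON object in the registry index format, matching the RegistryPackage fields being destructured. An illustrative line with made-up values (the cksum placeholder stands in for a real SHA-256 hex digest):

    {"name":"foo","vers":"0.1.0","deps":[{"name":"log","req":"^0.4","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"}],"cksum":"<sha256 of foo-0.1.0.crate>","features":{},"yanked":false,"links":null}
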
pub fn query(&mut self, pub fn query(
dep: &Dependency, &mut self,
load: &mut RegistryData, dep: &Dependency,
f: &mut FnMut(Summary)) load: &mut RegistryData,
-> CargoResult<()> { f: &mut FnMut(Summary),
) -> CargoResult<()> {
let source_id = self.source_id.clone(); let source_id = self.source_id.clone();
let summaries = self.summaries(&*dep.name(), load)?; let summaries = self.summaries(&*dep.name(), load)?;
let summaries = summaries.iter().filter(|&&(_, yanked)| { let summaries = summaries
dep.source_id().precise().is_some() || !yanked .iter()
}).map(|s| s.0.clone()); .filter(|&&(_, yanked)| dep.source_id().precise().is_some() || !yanked)
.map(|s| s.0.clone());
// Handle `cargo update --precise` here. If specified, our own source // Handle `cargo update --precise` here. If specified, our own source
// will have a precise version listed of the form `<pkg>=<req>` where // will have a precise version listed of the form `<pkg>=<req>` where
// `<pkg>` is the name of a crate on this source and `<req>` is the // `<pkg>` is the name of a crate on this source and `<req>` is the
// version requested (argument to `--precise`). // version requested (argument to `--precise`).
let summaries = summaries.filter(|s| { let summaries = summaries.filter(|s| match source_id.precise() {
match source_id.precise() { Some(p) if p.starts_with(&*dep.name()) && p[dep.name().len()..].starts_with('=') => {
Some(p) if p.starts_with(&*dep.name()) && let vers = &p[dep.name().len() + 1..];
p[dep.name().len()..].starts_with('=') => { s.version().to_string() == vers
let vers = &p[dep.name().len() + 1..];
s.version().to_string() == vers
}
_ => true,
} }
_ => true,
}); });
for summary in summaries { for summary in summaries {
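
The hunk is cut off here, but the filter above is the whole of the `--precise` handling: running e.g. `cargo update -p foo --precise 1.2.3` stores `foo=1.2.3` as the source's precise string, and only the summary whose version equals the part after `=` survives. A sketch of that parse:

    // precise_version("foo=1.2.3", "foo") == Some("1.2.3")
    fn precise_version<'a>(precise: &'a str, name: &str) -> Option<&'a str> {
        if precise.starts_with(name) && precise[name.len()..].starts_with('=') {
            Some(&precise[name.len() + 1..])
        } else {
            None
        }
    }
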
@ -4,10 +4,10 @@ use std::path::Path;
use core::PackageId; use core::PackageId;
use hex; use hex;
use sources::registry::{RegistryData, RegistryConfig}; use sources::registry::{RegistryConfig, RegistryData};
use util::FileLock; use util::FileLock;
use util::paths; use util::paths;
use util::{Config, Sha256, Filesystem}; use util::{Config, Filesystem, Sha256};
use util::errors::{CargoResult, CargoResultExt}; use util::errors::{CargoResult, CargoResultExt};
pub struct LocalRegistry<'cfg> { pub struct LocalRegistry<'cfg> {
@ -18,9 +18,7 @@ pub struct LocalRegistry<'cfg> {
} }
impl<'cfg> LocalRegistry<'cfg> { impl<'cfg> LocalRegistry<'cfg> {
pub fn new(root: &Path, pub fn new(root: &Path, config: &'cfg Config, name: &str) -> LocalRegistry<'cfg> {
config: &'cfg Config,
name: &str) -> LocalRegistry<'cfg> {
LocalRegistry { LocalRegistry {
src_path: config.registry_source_path().join(name), src_path: config.registry_source_path().join(name),
index_path: Filesystem::new(root.join("index")), index_path: Filesystem::new(root.join("index")),
@ -35,10 +33,12 @@ impl<'cfg> RegistryData for LocalRegistry<'cfg> {
&self.index_path &self.index_path
} }
fn load(&self, fn load(
root: &Path, &self,
path: &Path, root: &Path,
data: &mut FnMut(&[u8]) -> CargoResult<()>) -> CargoResult<()> { path: &Path,
data: &mut FnMut(&[u8]) -> CargoResult<()>,
) -> CargoResult<()> {
data(&paths::read_bytes(&root.join(path))?) data(&paths::read_bytes(&root.join(path))?)
} }
@ -54,29 +54,27 @@ impl<'cfg> RegistryData for LocalRegistry<'cfg> {
// these directories exist. // these directories exist.
let root = self.root.clone().into_path_unlocked(); let root = self.root.clone().into_path_unlocked();
if !root.is_dir() { if !root.is_dir() {
bail!("local registry path is not a directory: {}", bail!("local registry path is not a directory: {}", root.display())
root.display())
} }
let index_path = self.index_path.clone().into_path_unlocked(); let index_path = self.index_path.clone().into_path_unlocked();
if !index_path.is_dir() { if !index_path.is_dir() {
bail!("local registry index path is not a directory: {}", bail!(
index_path.display()) "local registry index path is not a directory: {}",
index_path.display()
)
} }
Ok(()) Ok(())
} }
fn download(&mut self, pkg: &PackageId, checksum: &str) fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<FileLock> {
-> CargoResult<FileLock> {
let crate_file = format!("{}-{}.crate", pkg.name(), pkg.version()); let crate_file = format!("{}-{}.crate", pkg.name(), pkg.version());
let mut crate_file = self.root.open_ro(&crate_file, let mut crate_file = self.root.open_ro(&crate_file, self.config, "crate file")?;
self.config,
"crate file")?;
// If we've already got an unpacked version of this crate, then skip the // If we've already got an unpacked version of this crate, then skip the
// checksum below as it is in theory already verified. // checksum below as it is in theory already verified.
let dst = format!("{}-{}", pkg.name(), pkg.version()); let dst = format!("{}-{}", pkg.name(), pkg.version());
if self.src_path.join(dst).into_path_unlocked().exists() { if self.src_path.join(dst).into_path_unlocked().exists() {
return Ok(crate_file) return Ok(crate_file);
} }
self.config.shell().status("Unpacking", pkg)?; self.config.shell().status("Unpacking", pkg)?;
@ -86,11 +84,11 @@ impl<'cfg> RegistryData for LocalRegistry<'cfg> {
let mut state = Sha256::new(); let mut state = Sha256::new();
let mut buf = [0; 64 * 1024]; let mut buf = [0; 64 * 1024];
loop { loop {
let n = crate_file.read(&mut buf).chain_err(|| { let n = crate_file
format!("failed to read `{}`", crate_file.path().display()) .read(&mut buf)
})?; .chain_err(|| format!("failed to read `{}`", crate_file.path().display()))?;
if n == 0 { if n == 0 {
break break;
} }
state.update(&buf[..n]); state.update(&buf[..n]);
} }
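
The loop above streams the crate file through SHA-256 in 64 KiB chunks before the result is compared against the index checksum. An equivalent standalone sketch, assuming the sha2 crate in place of cargo's internal Sha256 wrapper:

    use sha2::{Digest, Sha256};
    use std::fs::File;
    use std::io::Read;
    use std::path::Path;

    // Hash a file in 64 KiB chunks, as the verification loop above does.
    fn sha256_hex(path: &Path) -> std::io::Result<String> {
        let mut file = File::open(path)?;
        let mut hasher = Sha256::new();
        let mut buf = [0u8; 64 * 1024];
        loop {
            let n = file.read(&mut buf)?;
            if n == 0 {
                break;
            }
            hasher.update(&buf[..n]);
        }
        Ok(hasher.finalize().iter().map(|b| format!("{:02x}", b)).collect())
    }
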
@ -162,17 +162,17 @@ use std::borrow::Cow;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::fmt; use std::fmt;
use std::fs::File; use std::fs::File;
use std::path::{PathBuf, Path}; use std::path::{Path, PathBuf};
use flate2::read::GzDecoder; use flate2::read::GzDecoder;
use semver::Version; use semver::Version;
use serde::de; use serde::de;
use tar::Archive; use tar::Archive;
use core::{Source, SourceId, PackageId, Package, Summary, Registry}; use core::{Package, PackageId, Registry, Source, SourceId, Summary};
use core::dependency::{Dependency, Kind}; use core::dependency::{Dependency, Kind};
use sources::PathSource; use sources::PathSource;
use util::{CargoResult, Config, internal, FileLock, Filesystem}; use util::{internal, CargoResult, Config, FileLock, Filesystem};
use util::errors::CargoResultExt; use util::errors::CargoResultExt;
use util::hex; use util::hex;
use util::to_url::ToUrl; use util::to_url::ToUrl;
@ -220,8 +220,7 @@ struct RegistryPackage<'a> {
features: BTreeMap<String, Vec<String>>, features: BTreeMap<String, Vec<String>>,
cksum: String, cksum: String,
yanked: Option<bool>, yanked: Option<bool>,
#[serde(default)] #[serde(default)] links: Option<String>,
links: Option<String>,
} }
struct DependencyList { struct DependencyList {
@ -242,17 +241,19 @@ struct RegistryDependency<'a> {
pub trait RegistryData { pub trait RegistryData {
fn index_path(&self) -> &Filesystem; fn index_path(&self) -> &Filesystem;
fn load(&self, fn load(
_root: &Path, &self,
path: &Path, _root: &Path,
data: &mut FnMut(&[u8]) -> CargoResult<()>) -> CargoResult<()>; path: &Path,
data: &mut FnMut(&[u8]) -> CargoResult<()>,
) -> CargoResult<()>;
fn config(&mut self) -> CargoResult<Option<RegistryConfig>>; fn config(&mut self) -> CargoResult<Option<RegistryConfig>>;
fn update_index(&mut self) -> CargoResult<()>; fn update_index(&mut self) -> CargoResult<()>;
fn download(&mut self, fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<FileLock>;
pkg: &PackageId,
checksum: &str) -> CargoResult<FileLock>;
fn is_crate_downloaded(&self, _pkg: &PackageId) -> bool { true } fn is_crate_downloaded(&self, _pkg: &PackageId) -> bool {
true
}
} }
mod index; mod index;
@ -266,35 +267,31 @@ fn short_name(id: &SourceId) -> String {
} }
impl<'cfg> RegistrySource<'cfg> { impl<'cfg> RegistrySource<'cfg> {
pub fn remote(source_id: &SourceId, pub fn remote(source_id: &SourceId, config: &'cfg Config) -> RegistrySource<'cfg> {
config: &'cfg Config) -> RegistrySource<'cfg> {
let name = short_name(source_id); let name = short_name(source_id);
let ops = remote::RemoteRegistry::new(source_id, config, &name); let ops = remote::RemoteRegistry::new(source_id, config, &name);
RegistrySource::new(source_id, config, &name, Box::new(ops), true) RegistrySource::new(source_id, config, &name, Box::new(ops), true)
} }
pub fn local(source_id: &SourceId, pub fn local(source_id: &SourceId, path: &Path, config: &'cfg Config) -> RegistrySource<'cfg> {
path: &Path,
config: &'cfg Config) -> RegistrySource<'cfg> {
let name = short_name(source_id); let name = short_name(source_id);
let ops = local::LocalRegistry::new(path, config, &name); let ops = local::LocalRegistry::new(path, config, &name);
RegistrySource::new(source_id, config, &name, Box::new(ops), false) RegistrySource::new(source_id, config, &name, Box::new(ops), false)
} }
fn new(source_id: &SourceId, fn new(
config: &'cfg Config, source_id: &SourceId,
name: &str, config: &'cfg Config,
ops: Box<RegistryData + 'cfg>, name: &str,
index_locked: bool) -> RegistrySource<'cfg> { ops: Box<RegistryData + 'cfg>,
index_locked: bool,
) -> RegistrySource<'cfg> {
RegistrySource { RegistrySource {
src_path: config.registry_source_path().join(name), src_path: config.registry_source_path().join(name),
config, config,
source_id: source_id.clone(), source_id: source_id.clone(),
updated: false, updated: false,
index: index::RegistryIndex::new(source_id, index: index::RegistryIndex::new(source_id, ops.index_path(), config, index_locked),
ops.index_path(),
config,
index_locked),
index_locked, index_locked,
ops, ops,
} }
@ -311,12 +308,9 @@ impl<'cfg> RegistrySource<'cfg> {
/// compiled. /// compiled.
/// ///
/// No action is taken if the source looks like it's already unpacked. /// No action is taken if the source looks like it's already unpacked.
fn unpack_package(&self, fn unpack_package(&self, pkg: &PackageId, tarball: &FileLock) -> CargoResult<PathBuf> {
pkg: &PackageId, let dst = self.src_path
tarball: &FileLock) .join(&format!("{}-{}", pkg.name(), pkg.version()));
-> CargoResult<PathBuf> {
let dst = self.src_path.join(&format!("{}-{}", pkg.name(),
pkg.version()));
dst.create_dir()?; dst.create_dir()?;
// Note that we've already got the `tarball` locked above, and that // Note that we've already got the `tarball` locked above, and that
// implies a lock on the unpacked destination as well, so this access // implies a lock on the unpacked destination as well, so this access
@ -324,7 +318,7 @@ impl<'cfg> RegistrySource<'cfg> {
let dst = dst.into_path_unlocked(); let dst = dst.into_path_unlocked();
let ok = dst.join(".cargo-ok"); let ok = dst.join(".cargo-ok");
if ok.exists() { if ok.exists() {
return Ok(dst) return Ok(dst);
} }
let gz = GzDecoder::new(tarball.file()); let gz = GzDecoder::new(tarball.file());
@ -333,7 +327,8 @@ impl<'cfg> RegistrySource<'cfg> {
let parent = dst.parent().unwrap(); let parent = dst.parent().unwrap();
for entry in tar.entries()? { for entry in tar.entries()? {
let mut entry = entry.chain_err(|| "failed to iterate over archive")?; let mut entry = entry.chain_err(|| "failed to iterate over archive")?;
let entry_path = entry.path() let entry_path = entry
.path()
.chain_err(|| "failed to read entry path")? .chain_err(|| "failed to read entry path")?
.into_owned(); .into_owned();
@ -344,15 +339,18 @@ impl<'cfg> RegistrySource<'cfg> {
// crates.io should also block uploads with these sorts of tarballs, // crates.io should also block uploads with these sorts of tarballs,
// but be extra sure by adding a check here as well. // but be extra sure by adding a check here as well.
if !entry_path.starts_with(prefix) { if !entry_path.starts_with(prefix) {
bail!("invalid tarball downloaded, contains \ bail!(
a file at {:?} which isn't under {:?}", "invalid tarball downloaded, contains \
entry_path, prefix) a file at {:?} which isn't under {:?}",
entry_path,
prefix
)
} }
// Once that's verified, unpack the entry as usual. // Once that's verified, unpack the entry as usual.
entry.unpack_in(parent).chain_err(|| { entry
format!("failed to unpack entry at `{}`", entry_path.display()) .unpack_in(parent)
})?; .chain_err(|| format!("failed to unpack entry at `{}`", entry_path.display()))?;
} }
File::create(&ok)?; File::create(&ok)?;
Ok(dst.clone()) Ok(dst.clone())
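
unpack_package refuses any tar entry whose path escapes the expected `{name}-{version}/` prefix before extracting it. A standalone sketch of that defensive unpack, assuming the same tar and flate2 crates the code above uses:

    use flate2::read::GzDecoder;
    use std::error::Error;
    use std::fs::File;
    use std::path::Path;
    use tar::Archive;

    // Unpack a .crate (a .tar.gz) under `parent`, rejecting entries that
    // escape the expected `foo-0.1.0/`-style prefix, as the loop above does.
    fn unpack_crate(tarball: &Path, parent: &Path, prefix: &str) -> Result<(), Box<dyn Error>> {
        let gz = GzDecoder::new(File::open(tarball)?);
        let mut tar = Archive::new(gz);
        for entry in tar.entries()? {
            let mut entry = entry?;
            let path = entry.path()?.into_owned();
            if !path.starts_with(prefix) {
                return Err(format!("entry {:?} escapes {:?}", path, prefix).into());
            }
            entry.unpack_in(parent)?;
        }
        Ok(())
    }
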
@ -361,18 +359,14 @@ impl<'cfg> RegistrySource<'cfg> {
fn do_update(&mut self) -> CargoResult<()> { fn do_update(&mut self) -> CargoResult<()> {
self.ops.update_index()?; self.ops.update_index()?;
let path = self.ops.index_path(); let path = self.ops.index_path();
self.index = index::RegistryIndex::new(&self.source_id, self.index =
path, index::RegistryIndex::new(&self.source_id, path, self.config, self.index_locked);
self.config,
self.index_locked);
Ok(()) Ok(())
} }
} }
impl<'cfg> Registry for RegistrySource<'cfg> { impl<'cfg> Registry for RegistrySource<'cfg> {
fn query(&mut self, fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
dep: &Dependency,
f: &mut FnMut(Summary)) -> CargoResult<()> {
// If this is a precise dependency, then it came from a lockfile and in // If this is a precise dependency, then it came from a lockfile and in
// theory the registry is known to contain this version. If, however, we // theory the registry is known to contain this version. If, however, we
// come back with no summaries, then our registry may need to be // come back with no summaries, then our registry may need to be
@ -384,7 +378,7 @@ impl<'cfg> Registry for RegistrySource<'cfg> {
f(s); f(s);
})?; })?;
if called { if called {
return Ok(()) return Ok(());
} else { } else {
self.do_update()?; self.do_update()?;
} }
@ -424,9 +418,8 @@ impl<'cfg> Source for RegistrySource<'cfg> {
fn download(&mut self, package: &PackageId) -> CargoResult<Package> { fn download(&mut self, package: &PackageId) -> CargoResult<Package> {
let hash = self.index.hash(package, &mut *self.ops)?; let hash = self.index.hash(package, &mut *self.ops)?;
let path = self.ops.download(package, &hash)?; let path = self.ops.download(package, &hash)?;
let path = self.unpack_package(package, &path).chain_err(|| { let path = self.unpack_package(package, &path)
internal(format!("failed to unpack package `{}`", package)) .chain_err(|| internal(format!("failed to unpack package `{}`", package)))?;
})?;
let mut src = PathSource::new(&path, &self.source_id, self.config); let mut src = PathSource::new(&path, &self.source_id, self.config);
src.update()?; src.update()?;
let pkg = src.download(package)?; let pkg = src.download(package)?;
@ -436,9 +429,11 @@ impl<'cfg> Source for RegistrySource<'cfg> {
// *summary* loaded from the Cargo.toml we just downloaded with the one // *summary* loaded from the Cargo.toml we just downloaded with the one
// we loaded from the index. // we loaded from the index.
let summaries = self.index.summaries(&*package.name(), &mut *self.ops)?; let summaries = self.index.summaries(&*package.name(), &mut *self.ops)?;
let summary = summaries.iter().map(|s| &s.0).find(|s| { let summary = summaries
s.package_id() == package .iter()
}).expect("summary not found"); .map(|s| &s.0)
.find(|s| s.package_id() == package)
.expect("summary not found");
let mut manifest = pkg.manifest().clone(); let mut manifest = pkg.manifest().clone();
manifest.set_summary(summary.clone()); manifest.set_summary(summary.clone());
Ok(Package::new(manifest, pkg.manifest_path())) Ok(Package::new(manifest, pkg.manifest_path()))
@ -463,7 +458,8 @@ scoped_thread_local!(static DEFAULT_ID: SourceId);
impl<'de> de::Deserialize<'de> for DependencyList { impl<'de> de::Deserialize<'de> for DependencyList {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where D: de::Deserializer<'de>, where
D: de::Deserializer<'de>,
{ {
return deserializer.deserialize_seq(Visitor); return deserializer.deserialize_seq(Visitor);
@ -477,16 +473,15 @@ impl<'de> de::Deserialize<'de> for DependencyList {
} }
fn visit_seq<A>(self, mut seq: A) -> Result<DependencyList, A::Error> fn visit_seq<A>(self, mut seq: A) -> Result<DependencyList, A::Error>
where A: de::SeqAccess<'de>, where
A: de::SeqAccess<'de>,
{ {
let mut ret = Vec::new(); let mut ret = Vec::new();
if let Some(size) = seq.size_hint() { if let Some(size) = seq.size_hint() {
ret.reserve(size); ret.reserve(size);
} }
while let Some(element) = seq.next_element::<RegistryDependency>()? { while let Some(element) = seq.next_element::<RegistryDependency>()? {
ret.push(parse_registry_dependency(element).map_err(|e| { ret.push(parse_registry_dependency(element).map_err(|e| de::Error::custom(e))?);
de::Error::custom(e)
})?);
} }
Ok(DependencyList { inner: ret }) Ok(DependencyList { inner: ret })
@ -496,18 +491,22 @@ impl<'de> de::Deserialize<'de> for DependencyList {
} }
/// Converts an encoded dependency in the registry to a cargo dependency /// Converts an encoded dependency in the registry to a cargo dependency
fn parse_registry_dependency(dep: RegistryDependency) fn parse_registry_dependency(dep: RegistryDependency) -> CargoResult<Dependency> {
-> CargoResult<Dependency> {
let RegistryDependency { let RegistryDependency {
name, req, mut features, optional, default_features, target, kind, registry name,
req,
mut features,
optional,
default_features,
target,
kind,
registry,
} = dep; } = dep;
let id = if let Some(registry) = registry { let id = if let Some(registry) = registry {
SourceId::for_registry(&registry.to_url()?)? SourceId::for_registry(&registry.to_url()?)?
} else { } else {
DEFAULT_ID.with(|id| { DEFAULT_ID.with(|id| id.clone())
id.clone()
})
}; };
let mut dep = Dependency::parse_no_deprecated(&name, Some(&req), &id)?; let mut dep = Dependency::parse_no_deprecated(&name, Some(&req), &id)?;
@ -530,10 +529,10 @@ fn parse_registry_dependency(dep: RegistryDependency)
features.retain(|s| !s.is_empty()); features.retain(|s| !s.is_empty());
dep.set_optional(optional) dep.set_optional(optional)
.set_default_features(default_features) .set_default_features(default_features)
.set_features(features) .set_features(features)
.set_platform(platform) .set_platform(platform)
.set_kind(kind); .set_kind(kind);
Ok(dep) Ok(dep)
} }
@ -1,4 +1,4 @@
use std::cell::{RefCell, Ref, Cell}; use std::cell::{Cell, Ref, RefCell};
use std::fmt::Write as FmtWrite; use std::fmt::Write as FmtWrite;
use std::io::SeekFrom; use std::io::SeekFrom;
use std::io::prelude::*; use std::io::prelude::*;
@ -13,10 +13,10 @@ use lazycell::LazyCell;
use core::{PackageId, SourceId}; use core::{PackageId, SourceId};
use sources::git; use sources::git;
use sources::registry::{RegistryData, RegistryConfig, INDEX_LOCK, CRATE_TEMPLATE, VERSION_TEMPLATE}; use sources::registry::{RegistryConfig, RegistryData, CRATE_TEMPLATE, INDEX_LOCK, VERSION_TEMPLATE};
use util::network; use util::network;
use util::{FileLock, Filesystem}; use util::{FileLock, Filesystem};
use util::{Config, Sha256, ToUrl, Progress}; use util::{Config, Progress, Sha256, ToUrl};
use util::errors::{CargoResult, CargoResultExt, HttpNot200}; use util::errors::{CargoResult, CargoResultExt, HttpNot200};
pub struct RemoteRegistry<'cfg> { pub struct RemoteRegistry<'cfg> {
@ -30,8 +30,7 @@ pub struct RemoteRegistry<'cfg> {
} }
impl<'cfg> RemoteRegistry<'cfg> { impl<'cfg> RemoteRegistry<'cfg> {
pub fn new(source_id: &SourceId, config: &'cfg Config, name: &str) pub fn new(source_id: &SourceId, config: &'cfg Config, name: &str) -> RemoteRegistry<'cfg> {
-> RemoteRegistry<'cfg> {
RemoteRegistry { RemoteRegistry {
index_path: config.registry_index_path().join(name), index_path: config.registry_index_path().join(name),
cache_path: config.registry_cache_path().join(name), cache_path: config.registry_cache_path().join(name),
@ -49,13 +48,13 @@ impl<'cfg> RemoteRegistry<'cfg> {
// Fast path without a lock // Fast path without a lock
if let Ok(repo) = git2::Repository::open(&path) { if let Ok(repo) = git2::Repository::open(&path) {
return Ok(repo) return Ok(repo);
} }
// Ok, now we need to lock and try the whole thing over again. // Ok, now we need to lock and try the whole thing over again.
let lock = self.index_path.open_rw(Path::new(INDEX_LOCK), let lock =
self.config, self.index_path
"the registry index")?; .open_rw(Path::new(INDEX_LOCK), self.config, "the registry index")?;
match git2::Repository::open(&path) { match git2::Repository::open(&path) {
Ok(repo) => Ok(repo), Ok(repo) => Ok(repo),
Err(_) => { Err(_) => {
@ -90,7 +89,7 @@ impl<'cfg> RemoteRegistry<'cfg> {
{ {
let tree = self.tree.borrow(); let tree = self.tree.borrow();
if tree.is_some() { if tree.is_some() {
return Ok(Ref::map(tree, |s| s.as_ref().unwrap())) return Ok(Ref::map(tree, |s| s.as_ref().unwrap()));
} }
} }
let repo = self.repo()?; let repo = self.repo()?;
@ -109,9 +108,7 @@ impl<'cfg> RemoteRegistry<'cfg> {
// (`RemoteRegistry`) so we then just need to ensure that the tree is // (`RemoteRegistry`) so we then just need to ensure that the tree is
// destroyed first in the destructor, hence the destructor on // destroyed first in the destructor, hence the destructor on
// `RemoteRegistry` below. // `RemoteRegistry` below.
let tree = unsafe { let tree = unsafe { mem::transmute::<git2::Tree, git2::Tree<'static>>(tree) };
mem::transmute::<git2::Tree, git2::Tree<'static>>(tree)
};
*self.tree.borrow_mut() = Some(tree); *self.tree.borrow_mut() = Some(tree);
Ok(Ref::map(self.tree.borrow(), |s| s.as_ref().unwrap())) Ok(Ref::map(self.tree.borrow(), |s| s.as_ref().unwrap()))
} }
@ -122,10 +119,12 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
&self.index_path &self.index_path
} }
fn load(&self, fn load(
_root: &Path, &self,
path: &Path, _root: &Path,
data: &mut FnMut(&[u8]) -> CargoResult<()>) -> CargoResult<()> { path: &Path,
data: &mut FnMut(&[u8]) -> CargoResult<()>,
) -> CargoResult<()> {
// Note that the index calls this method and the filesystem is locked // Note that the index calls this method and the filesystem is locked
// in the index, so we don't need to worry about an `update_index` // in the index, so we don't need to worry about an `update_index`
// happening in a different process. // happening in a different process.
@ -142,9 +141,9 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
fn config(&mut self) -> CargoResult<Option<RegistryConfig>> { fn config(&mut self) -> CargoResult<Option<RegistryConfig>> {
self.repo()?; // create intermediate dirs and initialize the repo self.repo()?; // create intermediate dirs and initialize the repo
let _lock = self.index_path.open_ro(Path::new(INDEX_LOCK), let _lock =
self.config, self.index_path
"the registry index")?; .open_ro(Path::new(INDEX_LOCK), self.config, "the registry index")?;
let mut config = None; let mut config = None;
self.load(Path::new(""), Path::new("config.json"), &mut |json| { self.load(Path::new(""), Path::new("config.json"), &mut |json| {
config = Some(serde_json::from_slice(json)?); config = Some(serde_json::from_slice(json)?);
@ -172,23 +171,23 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
self.repo()?; self.repo()?;
self.head.set(None); self.head.set(None);
*self.tree.borrow_mut() = None; *self.tree.borrow_mut() = None;
let _lock = self.index_path.open_rw(Path::new(INDEX_LOCK), let _lock =
self.config, self.index_path
"the registry index")?; .open_rw(Path::new(INDEX_LOCK), self.config, "the registry index")?;
self.config.shell().status("Updating", self.source_id.display_registry())?; self.config
.shell()
.status("Updating", self.source_id.display_registry())?;
// git fetch origin master // git fetch origin master
let url = self.source_id.url(); let url = self.source_id.url();
let refspec = "refs/heads/master:refs/remotes/origin/master"; let refspec = "refs/heads/master:refs/remotes/origin/master";
let repo = self.repo.borrow_mut().unwrap(); let repo = self.repo.borrow_mut().unwrap();
git::fetch(repo, url, refspec, self.config).chain_err(|| { git::fetch(repo, url, refspec, self.config)
format!("failed to fetch `{}`", url) .chain_err(|| format!("failed to fetch `{}`", url))?;
})?;
Ok(()) Ok(())
} }
fn download(&mut self, pkg: &PackageId, checksum: &str) fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<FileLock> {
-> CargoResult<FileLock> {
let filename = format!("{}-{}.crate", pkg.name(), pkg.version()); let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
let path = Path::new(&filename); let path = Path::new(&filename);
@ -201,13 +200,13 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
if let Ok(dst) = self.cache_path.open_ro(path, self.config, &filename) { if let Ok(dst) = self.cache_path.open_ro(path, self.config, &filename) {
let meta = dst.file().metadata()?; let meta = dst.file().metadata()?;
if meta.len() > 0 { if meta.len() > 0 {
return Ok(dst) return Ok(dst);
} }
} }
let mut dst = self.cache_path.open_rw(path, self.config, &filename)?; let mut dst = self.cache_path.open_rw(path, self.config, &filename)?;
let meta = dst.file().metadata()?; let meta = dst.file().metadata()?;
if meta.len() > 0 { if meta.len() > 0 {
return Ok(dst) return Ok(dst);
} }
self.config.shell().status("Downloading", pkg)?; self.config.shell().status("Downloading", pkg)?;
@ -216,8 +215,7 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
if !url.contains(CRATE_TEMPLATE) && !url.contains(VERSION_TEMPLATE) { if !url.contains(CRATE_TEMPLATE) && !url.contains(VERSION_TEMPLATE) {
write!(url, "/{}/{}/download", CRATE_TEMPLATE, VERSION_TEMPLATE).unwrap(); write!(url, "/{}/{}/download", CRATE_TEMPLATE, VERSION_TEMPLATE).unwrap();
} }
let url = url let url = url.replace(CRATE_TEMPLATE, &*pkg.name())
.replace(CRATE_TEMPLATE, &*pkg.name())
.replace(VERSION_TEMPLATE, &pkg.version().to_string()) .replace(VERSION_TEMPLATE, &pkg.version().to_string())
.to_url()?; .to_url()?;
@ -251,7 +249,10 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
let code = handle.response_code()?; let code = handle.response_code()?;
if code != 200 && code != 0 { if code != 200 && code != 0 {
let url = handle.effective_url()?.unwrap_or(&url); let url = handle.effective_url()?.unwrap_or(&url);
Err(HttpNot200 { code, url: url.to_string() }.into()) Err(HttpNot200 {
code,
url: url.to_string(),
}.into())
} else { } else {
Ok(()) Ok(())
} }
@ -267,19 +268,17 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
Ok(dst) Ok(dst)
} }
fn is_crate_downloaded(&self, pkg: &PackageId) -> bool { fn is_crate_downloaded(&self, pkg: &PackageId) -> bool {
let filename = format!("{}-{}.crate", pkg.name(), pkg.version()); let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
let path = Path::new(&filename); let path = Path::new(&filename);
if let Ok(dst) = self.cache_path.open_ro(path, self.config, &filename) { if let Ok(dst) = self.cache_path.open_ro(path, self.config, &filename) {
if let Ok(meta) = dst.file().metadata(){ if let Ok(meta) = dst.file().metadata() {
return meta.len() > 0; return meta.len() > 0;
} }
} }
false false
} }
} }
impl<'cfg> Drop for RemoteRegistry<'cfg> { impl<'cfg> Drop for RemoteRegistry<'cfg> {
@ -1,4 +1,4 @@
use core::{Source, Registry, PackageId, Package, Dependency, Summary, SourceId}; use core::{Dependency, Package, PackageId, Registry, Source, SourceId, Summary};
use util::errors::{CargoResult, CargoResultExt}; use util::errors::{CargoResult, CargoResultExt};
pub struct ReplacedSource<'cfg> { pub struct ReplacedSource<'cfg> {
@ -8,9 +8,11 @@ pub struct ReplacedSource<'cfg> {
} }
impl<'cfg> ReplacedSource<'cfg> { impl<'cfg> ReplacedSource<'cfg> {
pub fn new(to_replace: &SourceId, pub fn new(
replace_with: &SourceId, to_replace: &SourceId,
src: Box<Source + 'cfg>) -> ReplacedSource<'cfg> { replace_with: &SourceId,
src: Box<Source + 'cfg>,
) -> ReplacedSource<'cfg> {
ReplacedSource { ReplacedSource {
to_replace: to_replace.clone(), to_replace: to_replace.clone(),
replace_with: replace_with.clone(), replace_with: replace_with.clone(),
@ -20,18 +22,15 @@ impl<'cfg> ReplacedSource<'cfg> {
} }
impl<'cfg> Registry for ReplacedSource<'cfg> { impl<'cfg> Registry for ReplacedSource<'cfg> {
fn query(&mut self, fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
dep: &Dependency,
f: &mut FnMut(Summary)) -> CargoResult<()> {
let (replace_with, to_replace) = (&self.replace_with, &self.to_replace); let (replace_with, to_replace) = (&self.replace_with, &self.to_replace);
let dep = dep.clone().map_source(to_replace, replace_with); let dep = dep.clone().map_source(to_replace, replace_with);
self.inner.query(&dep, &mut |summary| { self.inner
f(summary.map_source(replace_with, to_replace)) .query(&dep, &mut |summary| {
}).chain_err(|| { f(summary.map_source(replace_with, to_replace))
format!("failed to query replaced source {}", })
self.to_replace) .chain_err(|| format!("failed to query replaced source {}", self.to_replace))?;
})?;
Ok(()) Ok(())
} }
@ -50,19 +49,17 @@ impl<'cfg> Source for ReplacedSource<'cfg> {
} }
fn update(&mut self) -> CargoResult<()> { fn update(&mut self) -> CargoResult<()> {
self.inner.update().chain_err(|| { self.inner
format!("failed to update replaced source {}", .update()
self.to_replace) .chain_err(|| format!("failed to update replaced source {}", self.to_replace))?;
})?;
Ok(()) Ok(())
} }
fn download(&mut self, id: &PackageId) -> CargoResult<Package> { fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
let id = id.with_source_id(&self.replace_with); let id = id.with_source_id(&self.replace_with);
let pkg = self.inner.download(&id).chain_err(|| { let pkg = self.inner
format!("failed to download replaced source {}", .download(&id)
self.to_replace) .chain_err(|| format!("failed to download replaced source {}", self.to_replace))?;
})?;
Ok(pkg.map_source(&self.replace_with, &self.to_replace)) Ok(pkg.map_source(&self.replace_with, &self.to_replace))
} }
@ -77,8 +77,10 @@ impl FromStr for CfgExpr {
let mut p = Parser::new(s); let mut p = Parser::new(s);
let e = p.expr()?; let e = p.expr()?;
if p.t.next().is_some() { if p.t.next().is_some() {
bail!("can only have one cfg-expression, consider using all() or \ bail!(
any() explicitly") "can only have one cfg-expression, consider using all() or \
any() explicitly"
)
} }
Ok(e) Ok(e)
} }
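
The trailing check above enforces that the whole input is a single cfg-expression. Illustrative inputs against the internal CfgExpr type (not a public API), shown as a hedged usage sketch:

    // Inside cargo, where CfgExpr is in scope (FromStr is implemented above):
    // let e: CfgExpr = "all(unix, target_pointer_width = \"64\")".parse()?;
    //
    // Accepted single expressions:  unix
    //                               not(windows)
    //                               any(foo, bar = "baz")
    // Rejected: `unix, windows` -- two top-level expressions; wrap them in
    // all() or any(), as the error message above suggests.
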
@ -121,8 +123,7 @@ impl<'a> Parser<'a> {
fn expr(&mut self) -> CargoResult<CfgExpr> { fn expr(&mut self) -> CargoResult<CfgExpr> {
match self.t.peek() { match self.t.peek() {
Some(&Ok(Token::Ident(op @ "all"))) | Some(&Ok(Token::Ident(op @ "all"))) | Some(&Ok(Token::Ident(op @ "any"))) => {
Some(&Ok(Token::Ident(op @ "any"))) => {
self.t.next(); self.t.next();
let mut e = Vec::new(); let mut e = Vec::new();
self.eat(Token::LeftParen)?; self.eat(Token::LeftParen)?;
@ -130,7 +131,7 @@ impl<'a> Parser<'a> {
e.push(self.expr()?); e.push(self.expr()?);
if !self.try(Token::Comma) { if !self.try(Token::Comma) {
self.eat(Token::RightParen)?; self.eat(Token::RightParen)?;
break break;
} }
} }
if op == "all" { if op == "all" {
@ -147,11 +148,11 @@ impl<'a> Parser<'a> {
Ok(CfgExpr::Not(Box::new(e))) Ok(CfgExpr::Not(Box::new(e)))
} }
Some(&Ok(..)) => self.cfg().map(CfgExpr::Value), Some(&Ok(..)) => self.cfg().map(CfgExpr::Value),
Some(&Err(..)) => { Some(&Err(..)) => Err(self.t.next().unwrap().err().unwrap()),
Err(self.t.next().unwrap().err().unwrap()) None => bail!(
} "expected start of a cfg expression, \
None => bail!("expected start of a cfg expression, \ found nothing"
found nothing"), ),
} }
} }
@ -161,8 +162,7 @@ impl<'a> Parser<'a> {
let e = if self.try(Token::Equals) { let e = if self.try(Token::Equals) {
let val = match self.t.next() { let val = match self.t.next() {
Some(Ok(Token::String(s))) => s, Some(Ok(Token::String(s))) => s,
Some(Ok(t)) => bail!("expected a string, found {}", Some(Ok(t)) => bail!("expected a string, found {}", t.classify()),
t.classify()),
Some(Err(e)) => return Err(e), Some(Err(e)) => return Err(e),
None => bail!("expected a string, found nothing"), None => bail!("expected a string, found nothing"),
}; };
@ -190,8 +190,7 @@ impl<'a> Parser<'a> {
fn eat(&mut self, token: Token<'a>) -> CargoResult<()> { fn eat(&mut self, token: Token<'a>) -> CargoResult<()> {
match self.t.next() { match self.t.next() {
Some(Ok(ref t)) if token == *t => Ok(()), Some(Ok(ref t)) if token == *t => Ok(()),
Some(Ok(t)) => bail!("expected {}, found {}", token.classify(), Some(Ok(t)) => bail!("expected {}, found {}", token.classify(), t.classify()),
t.classify()),
Some(Err(e)) => Err(e), Some(Err(e)) => Err(e),
None => bail!("expected {}, but cfg expr ended", token.classify()), None => bail!("expected {}, but cfg expr ended", token.classify()),
} }
@ -212,28 +211,31 @@ impl<'a> Iterator for Tokenizer<'a> {
Some((start, '"')) => { Some((start, '"')) => {
while let Some((end, ch)) = self.s.next() { while let Some((end, ch)) = self.s.next() {
if ch == '"' { if ch == '"' {
return Some(Ok(Token::String(&self.orig[start+1..end]))) return Some(Ok(Token::String(&self.orig[start + 1..end])));
} }
} }
return Some(Err(format_err!("unterminated string in cfg"))) return Some(Err(format_err!("unterminated string in cfg")));
} }
Some((start, ch)) if is_ident_start(ch) => { Some((start, ch)) if is_ident_start(ch) => {
while let Some(&(end, ch)) = self.s.peek() { while let Some(&(end, ch)) = self.s.peek() {
if !is_ident_rest(ch) { if !is_ident_rest(ch) {
return Some(Ok(Token::Ident(&self.orig[start..end]))) return Some(Ok(Token::Ident(&self.orig[start..end])));
} else { } else {
self.s.next(); self.s.next();
} }
} }
return Some(Ok(Token::Ident(&self.orig[start..]))) return Some(Ok(Token::Ident(&self.orig[start..])));
} }
Some((_, ch)) => { Some((_, ch)) => {
return Some(Err(format_err!("unexpected character in \ return Some(Err(format_err!(
cfg `{}`, expected parens, \ "unexpected character in \
a comma, an identifier, or \ cfg `{}`, expected parens, \
a string", ch))) a comma, an identifier, or \
a string",
ch
)))
} }
None => return None None => return None,
} }
} }
} }
@ -19,12 +19,12 @@ use toml;
use lazycell::LazyCell; use lazycell::LazyCell;
use core::shell::Verbosity; use core::shell::Verbosity;
use core::{Shell, CliUnstable, SourceId}; use core::{CliUnstable, Shell, SourceId};
use ops; use ops;
use url::Url; use url::Url;
use util::ToUrl; use util::ToUrl;
use util::Rustc; use util::Rustc;
use util::errors::{CargoResult, CargoResultExt, CargoError, internal}; use util::errors::{internal, CargoError, CargoResult, CargoResultExt};
use util::paths; use util::paths;
use util::toml as cargo_toml; use util::toml as cargo_toml;
use util::Filesystem; use util::Filesystem;
@ -68,9 +68,7 @@ pub struct Config {
} }
impl Config { impl Config {
pub fn new(shell: Shell, pub fn new(shell: Shell, cwd: PathBuf, homedir: PathBuf) -> Config {
cwd: PathBuf,
homedir: PathBuf) -> Config {
static mut GLOBAL_JOBSERVER: *mut jobserver::Client = 0 as *mut _; static mut GLOBAL_JOBSERVER: *mut jobserver::Client = 0 as *mut _;
static INIT: Once = ONCE_INIT; static INIT: Once = ONCE_INIT;
@ -108,18 +106,21 @@ impl Config {
pub fn default() -> CargoResult<Config> { pub fn default() -> CargoResult<Config> {
let shell = Shell::new(); let shell = Shell::new();
let cwd = env::current_dir().chain_err(|| { let cwd =
"couldn't get the current directory of the process" env::current_dir().chain_err(|| "couldn't get the current directory of the process")?;
})?;
let homedir = homedir(&cwd).ok_or_else(|| { let homedir = homedir(&cwd).ok_or_else(|| {
format_err!("Cargo couldn't find your home directory. \ format_err!(
This probably means that $HOME was not set.") "Cargo couldn't find your home directory. \
This probably means that $HOME was not set."
)
})?; })?;
Ok(Config::new(shell, cwd, homedir)) Ok(Config::new(shell, cwd, homedir))
} }
/// The user's cargo home directory (OS-dependent) /// The user's cargo home directory (OS-dependent)
pub fn home(&self) -> &Filesystem { &self.home_path } pub fn home(&self) -> &Filesystem {
&self.home_path
}
/// The cargo git directory (`<cargo_home>/git`) /// The cargo git directory (`<cargo_home>/git`)
pub fn git_path(&self) -> Filesystem { pub fn git_path(&self) -> Filesystem {
@ -148,66 +149,74 @@ impl Config {
/// Get the path to the `rustdoc` executable /// Get the path to the `rustdoc` executable
pub fn rustdoc(&self) -> CargoResult<&Path> { pub fn rustdoc(&self) -> CargoResult<&Path> {
self.rustdoc.try_borrow_with(|| self.get_tool("rustdoc")).map(AsRef::as_ref) self.rustdoc
.try_borrow_with(|| self.get_tool("rustdoc"))
.map(AsRef::as_ref)
} }
/// Get the path to the `rustc` executable /// Get the path to the `rustc` executable
pub fn rustc(&self) -> CargoResult<&Rustc> { pub fn rustc(&self) -> CargoResult<&Rustc> {
self.rustc.try_borrow_with(|| Rustc::new(self.get_tool("rustc")?, self.rustc.try_borrow_with(|| {
self.maybe_get_tool("rustc_wrapper")?)) Rustc::new(
self.get_tool("rustc")?,
self.maybe_get_tool("rustc_wrapper")?,
)
})
} }
/// Get the path to the `cargo` executable /// Get the path to the `cargo` executable
pub fn cargo_exe(&self) -> CargoResult<&Path> { pub fn cargo_exe(&self) -> CargoResult<&Path> {
self.cargo_exe.try_borrow_with(|| { self.cargo_exe
fn from_current_exe() -> CargoResult<PathBuf> { .try_borrow_with(|| {
// Try fetching the path to `cargo` using env::current_exe(). fn from_current_exe() -> CargoResult<PathBuf> {
// The method varies per operating system and might fail; in particular, // Try fetching the path to `cargo` using env::current_exe().
// it depends on /proc being mounted on Linux, and some environments // The method varies per operating system and might fail; in particular,
// (like containers or chroots) may not have that available. // it depends on /proc being mounted on Linux, and some environments
let exe = env::current_exe()?.canonicalize()?; // (like containers or chroots) may not have that available.
Ok(exe) let exe = env::current_exe()?.canonicalize()?;
} Ok(exe)
fn from_argv() -> CargoResult<PathBuf> {
// Grab argv[0] and attempt to resolve it to an absolute path.
// If argv[0] has one component, it must have come from a PATH lookup,
// so probe PATH in that case.
// Otherwise, it has multiple components and is either:
// - a relative path (e.g. `./cargo`, `target/debug/cargo`), or
// - an absolute path (e.g. `/usr/local/bin/cargo`).
// In either case, Path::canonicalize will return the full absolute path
// to the target if it exists
let argv0 = env::args_os()
.map(PathBuf::from)
.next()
.ok_or(format_err!("no argv[0]"))?;
if argv0.components().count() == 1 {
probe_path(argv0)
} else {
Ok(argv0.canonicalize()?)
} }
}
fn probe_path(argv0: PathBuf) -> CargoResult<PathBuf> { fn from_argv() -> CargoResult<PathBuf> {
let paths = env::var_os("PATH").ok_or(format_err!("no PATH"))?; // Grab argv[0] and attempt to resolve it to an absolute path.
for path in env::split_paths(&paths) { // If argv[0] has one component, it must have come from a PATH lookup,
let candidate = PathBuf::from(path).join(&argv0); // so probe PATH in that case.
if candidate.is_file() { // Otherwise, it has multiple components and is either:
// PATH may have a component like "." in it, so we still need to // - a relative path (e.g. `./cargo`, `target/debug/cargo`), or
// canonicalize. // - an absolute path (e.g. `/usr/local/bin/cargo`).
return Ok(candidate.canonicalize()?) // In either case, Path::canonicalize will return the full absolute path
// to the target if it exists
let argv0 = env::args_os()
.map(PathBuf::from)
.next()
.ok_or(format_err!("no argv[0]"))?;
if argv0.components().count() == 1 {
probe_path(argv0)
} else {
Ok(argv0.canonicalize()?)
} }
} }
bail!("no cargo executable candidate found in PATH") fn probe_path(argv0: PathBuf) -> CargoResult<PathBuf> {
} let paths = env::var_os("PATH").ok_or(format_err!("no PATH"))?;
for path in env::split_paths(&paths) {
let candidate = PathBuf::from(path).join(&argv0);
if candidate.is_file() {
// PATH may have a component like "." in it, so we still need to
// canonicalize.
return Ok(candidate.canonicalize()?);
}
}
let exe = from_current_exe() bail!("no cargo executable candidate found in PATH")
.or_else(|_| from_argv()) }
.chain_err(|| "couldn't get the path to cargo executable")?;
Ok(exe) let exe = from_current_exe()
}).map(AsRef::as_ref) .or_else(|_| from_argv())
.chain_err(|| "couldn't get the path to cargo executable")?;
Ok(exe)
})
.map(AsRef::as_ref)
} }
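
cargo_exe above tries env::current_exe() first and falls back to argv[0], probing PATH only when argv[0] is a bare name. A std-only sketch of the fallback half:

    use std::env;
    use std::path::PathBuf;

    // A bare argv[0] is looked up on PATH; anything with more components
    // (relative or absolute) is canonicalized directly, as above.
    fn exe_from_argv() -> Option<PathBuf> {
        let argv0 = env::args_os().map(PathBuf::from).next()?;
        if argv0.components().count() == 1 {
            let paths = env::var_os("PATH")?;
            env::split_paths(&paths)
                .map(|dir| dir.join(&argv0))
                .find(|candidate| candidate.is_file())
                .and_then(|candidate| candidate.canonicalize().ok())
        } else {
            argv0.canonicalize().ok()
        }
    }
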
pub fn values(&self) -> CargoResult<&HashMap<String, ConfigValue>> { pub fn values(&self) -> CargoResult<&HashMap<String, ConfigValue>> {
@ -224,7 +233,9 @@ impl Config {
} }
} }
pub fn cwd(&self) -> &Path { &self.cwd } pub fn cwd(&self) -> &Path {
&self.cwd
}
pub fn target_dir(&self) -> CargoResult<Option<Filesystem>> { pub fn target_dir(&self) -> CargoResult<Option<Filesystem>> {
if let Some(dir) = env::var_os("CARGO_TARGET_DIR") { if let Some(dir) = env::var_os("CARGO_TARGET_DIR") {
@ -252,16 +263,19 @@ impl Config {
None => return Ok(None), None => return Ok(None),
} }
} }
CV::Integer(_, ref path) | CV::Integer(_, ref path)
CV::String(_, ref path) | | CV::String(_, ref path)
CV::List(_, ref path) | | CV::List(_, ref path)
CV::Boolean(_, ref path) => { | CV::Boolean(_, ref path) => {
let idx = key.split('.').take(i) let idx = key.split('.').take(i).fold(0, |n, s| n + s.len()) + i - 1;
.fold(0, |n, s| n + s.len()) + i - 1;
let key_so_far = &key[..idx]; let key_so_far = &key[..idx];
bail!("expected table for configuration key `{}`, \ bail!(
but found {} in {}", "expected table for configuration key `{}`, \
key_so_far, val.desc(), path.display()) but found {} in {}",
key_so_far,
val.desc(),
path.display()
)
} }
} }
} }
@ -269,35 +283,32 @@ impl Config {
} }
fn get_env<V: FromStr>(&self, key: &str) -> CargoResult<Option<Value<V>>> fn get_env<V: FromStr>(&self, key: &str) -> CargoResult<Option<Value<V>>>
where CargoError: From<V::Err> where
CargoError: From<V::Err>,
{ {
let key = key.replace(".", "_") let key = key.replace(".", "_")
.replace("-", "_") .replace("-", "_")
.chars() .chars()
.flat_map(|c| c.to_uppercase()) .flat_map(|c| c.to_uppercase())
.collect::<String>(); .collect::<String>();
match env::var(&format!("CARGO_{}", key)) { match env::var(&format!("CARGO_{}", key)) {
Ok(value) => { Ok(value) => Ok(Some(Value {
Ok(Some(Value { val: value.parse()?,
val: value.parse()?, definition: Definition::Environment,
definition: Definition::Environment, })),
}))
}
Err(..) => Ok(None), Err(..) => Ok(None),
} }
} }
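The mangling in get_env maps a dotted config key onto an environment variable name; a minimal sketch of that transformation (the helper name is hypothetical):

fn env_key(key: &str) -> String {
    let upper: String = key.replace(".", "_")
        .replace("-", "_")
        .chars()
        .flat_map(|c| c.to_uppercase())
        .collect();
    format!("CARGO_{}", upper)
}

fn main() {
    // "build.target-dir" is looked up as CARGO_BUILD_TARGET_DIR.
    assert_eq!(env_key("build.target-dir"), "CARGO_BUILD_TARGET_DIR");
}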
pub fn get_string(&self, key: &str) -> CargoResult<Option<Value<String>>> { pub fn get_string(&self, key: &str) -> CargoResult<Option<Value<String>>> {
if let Some(v) = self.get_env(key)? { if let Some(v) = self.get_env(key)? {
return Ok(Some(v)) return Ok(Some(v));
} }
match self.get(key)? { match self.get(key)? {
Some(CV::String(i, path)) => { Some(CV::String(i, path)) => Ok(Some(Value {
Ok(Some(Value { val: i,
val: i, definition: Definition::Path(path),
definition: Definition::Path(path), })),
}))
}
Some(val) => self.expected("string", key, val), Some(val) => self.expected("string", key, val),
None => Ok(None), None => Ok(None),
} }
@ -305,23 +316,20 @@ impl Config {
pub fn get_bool(&self, key: &str) -> CargoResult<Option<Value<bool>>> { pub fn get_bool(&self, key: &str) -> CargoResult<Option<Value<bool>>> {
if let Some(v) = self.get_env(key)? { if let Some(v) = self.get_env(key)? {
return Ok(Some(v)) return Ok(Some(v));
} }
match self.get(key)? { match self.get(key)? {
Some(CV::Boolean(b, path)) => { Some(CV::Boolean(b, path)) => Ok(Some(Value {
Ok(Some(Value { val: b,
val: b, definition: Definition::Path(path),
definition: Definition::Path(path), })),
}))
}
Some(val) => self.expected("bool", key, val), Some(val) => self.expected("bool", key, val),
None => Ok(None), None => Ok(None),
} }
} }
fn string_to_path(&self, value: String, definition: &Definition) -> PathBuf { fn string_to_path(&self, value: String, definition: &Definition) -> PathBuf {
let is_path = value.contains('/') || let is_path = value.contains('/') || (cfg!(windows) && value.contains('\\'));
(cfg!(windows) && value.contains('\\'));
if is_path { if is_path {
definition.root(self).join(value) definition.root(self).join(value)
} else { } else {
@ -334,79 +342,74 @@ impl Config {
if let Some(val) = self.get_string(key)? { if let Some(val) = self.get_string(key)? {
Ok(Some(Value { Ok(Some(Value {
val: self.string_to_path(val.val, &val.definition), val: self.string_to_path(val.val, &val.definition),
definition: val.definition definition: val.definition,
})) }))
} else { } else {
Ok(None) Ok(None)
} }
} }
pub fn get_path_and_args(&self, key: &str) pub fn get_path_and_args(
-> CargoResult<Option<Value<(PathBuf, Vec<String>)>>> { &self,
key: &str,
) -> CargoResult<Option<Value<(PathBuf, Vec<String>)>>> {
if let Some(mut val) = self.get_list_or_split_string(key)? { if let Some(mut val) = self.get_list_or_split_string(key)? {
if !val.val.is_empty() { if !val.val.is_empty() {
return Ok(Some(Value { return Ok(Some(Value {
val: (self.string_to_path(val.val.remove(0), &val.definition), val.val), val: (
definition: val.definition self.string_to_path(val.val.remove(0), &val.definition),
val.val,
),
definition: val.definition,
})); }));
} }
} }
Ok(None) Ok(None)
} }
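get_path_and_args peels off the first whitespace-separated token as the program and keeps the rest as arguments; a sketch with a hypothetical runner value:

fn main() {
    let mut parts = "qemu-arm -L /usr/arm".split(' ').map(str::to_string);
    let program = parts.next().unwrap();
    let args: Vec<String> = parts.collect();
    assert_eq!(program, "qemu-arm");
    assert_eq!(args, vec!["-L", "/usr/arm"]);
}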
pub fn get_list(&self, key: &str) pub fn get_list(&self, key: &str) -> CargoResult<Option<Value<Vec<(String, PathBuf)>>>> {
-> CargoResult<Option<Value<Vec<(String, PathBuf)>>>> {
match self.get(key)? { match self.get(key)? {
Some(CV::List(i, path)) => { Some(CV::List(i, path)) => Ok(Some(Value {
Ok(Some(Value { val: i,
val: i, definition: Definition::Path(path),
definition: Definition::Path(path), })),
}))
}
Some(val) => self.expected("list", key, val), Some(val) => self.expected("list", key, val),
None => Ok(None), None => Ok(None),
} }
} }
pub fn get_list_or_split_string(&self, key: &str) pub fn get_list_or_split_string(&self, key: &str) -> CargoResult<Option<Value<Vec<String>>>> {
-> CargoResult<Option<Value<Vec<String>>>> {
match self.get_env::<String>(key) { match self.get_env::<String>(key) {
Ok(Some(value)) => Ok(Some(value)) => {
return Ok(Some(Value { return Ok(Some(Value {
val: value.val.split(' ').map(str::to_string).collect(), val: value.val.split(' ').map(str::to_string).collect(),
definition: value.definition definition: value.definition,
})), }))
}
Err(err) => return Err(err), Err(err) => return Err(err),
Ok(None) => (), Ok(None) => (),
} }
match self.get(key)? { match self.get(key)? {
Some(CV::List(i, path)) => { Some(CV::List(i, path)) => Ok(Some(Value {
Ok(Some(Value { val: i.into_iter().map(|(s, _)| s).collect(),
val: i.into_iter().map(|(s, _)| s).collect(), definition: Definition::Path(path),
definition: Definition::Path(path), })),
})) Some(CV::String(i, path)) => Ok(Some(Value {
} val: i.split(' ').map(str::to_string).collect(),
Some(CV::String(i, path)) => { definition: Definition::Path(path),
Ok(Some(Value { })),
val: i.split(' ').map(str::to_string).collect(),
definition: Definition::Path(path),
}))
}
Some(val) => self.expected("list or string", key, val), Some(val) => self.expected("list or string", key, val),
None => Ok(None), None => Ok(None),
} }
} }
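Both spellings normalize to the same vector here: a space-separated string (for example from an environment variable) and a TOML list; a quick check of that equivalence:

fn main() {
    let from_string: Vec<String> = "-C opt-level=2".split(' ').map(str::to_string).collect();
    let from_list = vec!["-C".to_string(), "opt-level=2".to_string()];
    assert_eq!(from_string, from_list);
}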
pub fn get_table(&self, key: &str) pub fn get_table(&self, key: &str) -> CargoResult<Option<Value<HashMap<String, CV>>>> {
-> CargoResult<Option<Value<HashMap<String, CV>>>> {
match self.get(key)? { match self.get(key)? {
Some(CV::Table(i, path)) => { Some(CV::Table(i, path)) => Ok(Some(Value {
Ok(Some(Value { val: i,
val: i, definition: Definition::Path(path),
definition: Definition::Path(path), })),
}))
}
Some(val) => self.expected("table", key, val), Some(val) => self.expected("table", key, val),
None => Ok(None), None => Ok(None),
} }
@ -414,15 +417,13 @@ impl Config {
pub fn get_i64(&self, key: &str) -> CargoResult<Option<Value<i64>>> { pub fn get_i64(&self, key: &str) -> CargoResult<Option<Value<i64>>> {
if let Some(v) = self.get_env(key)? { if let Some(v) = self.get_env(key)? {
return Ok(Some(v)) return Ok(Some(v));
} }
match self.get(key)? { match self.get(key)? {
Some(CV::Integer(i, path)) => { Some(CV::Integer(i, path)) => Ok(Some(Value {
Ok(Some(Value { val: i,
val: i, definition: Definition::Path(path),
definition: Definition::Path(path), })),
}))
}
Some(val) => self.expected("integer", key, val), Some(val) => self.expected("integer", key, val),
None => Ok(None), None => Ok(None),
} }
@ -433,8 +434,11 @@ impl Config {
Some(v) => { Some(v) => {
let value = v.val; let value = v.val;
if value < 0 { if value < 0 {
bail!("net.retry must be positive, but found {} in {}", bail!(
v.val, v.definition) "net.retry must be positive, but found {} in {}",
v.val,
v.definition
)
} else { } else {
Ok(value) Ok(value)
} }
@ -444,20 +448,21 @@ impl Config {
} }
pub fn expected<T>(&self, ty: &str, key: &str, val: CV) -> CargoResult<T> { pub fn expected<T>(&self, ty: &str, key: &str, val: CV) -> CargoResult<T> {
val.expected(ty, key).map_err(|e| { val.expected(ty, key)
format_err!("invalid configuration for key `{}`\n{}", key, e) .map_err(|e| format_err!("invalid configuration for key `{}`\n{}", key, e))
})
} }
pub fn configure(&mut self, pub fn configure(
verbose: u32, &mut self,
quiet: Option<bool>, verbose: u32,
color: &Option<String>, quiet: Option<bool>,
frozen: bool, color: &Option<String>,
locked: bool, frozen: bool,
unstable_flags: &[String]) -> CargoResult<()> { locked: bool,
unstable_flags: &[String],
) -> CargoResult<()> {
let extra_verbose = verbose >= 2; let extra_verbose = verbose >= 2;
let verbose = if verbose == 0 {None} else {Some(true)}; let verbose = if verbose == 0 { None } else { Some(true) };
// Ignore errors in the configuration files. // Ignore errors in the configuration files.
let cfg_verbose = self.get_bool("term.verbose").unwrap_or(None).map(|v| v.val); let cfg_verbose = self.get_bool("term.verbose").unwrap_or(None).map(|v| v.val);
@ -466,8 +471,7 @@ impl Config {
let color = color.as_ref().or_else(|| cfg_color.as_ref()); let color = color.as_ref().or_else(|| cfg_color.as_ref());
let verbosity = match (verbose, cfg_verbose, quiet) { let verbosity = match (verbose, cfg_verbose, quiet) {
(Some(true), _, None) | (Some(true), _, None) | (None, Some(true), None) => Verbosity::Verbose,
(None, Some(true), None) => Verbosity::Verbose,
// command line takes precedence over configuration, so ignore the // command line takes precedence over configuration, so ignore the
// configuration. // configuration.
@ -482,11 +486,10 @@ impl Config {
// Can't actually get `Some(false)` as a value from the command // Can't actually get `Some(false)` as a value from the command
// line, so just ignore them here to appease exhaustiveness checking // line, so just ignore them here to appease exhaustiveness checking
// in match statements. // in match statements.
(Some(false), _, _) | (Some(false), _, _)
(_, _, Some(false)) | | (_, _, Some(false))
| (None, Some(false), None)
(None, Some(false), None) | | (None, None, None) => Verbosity::Normal,
(None, None, None) => Verbosity::Normal,
}; };
self.shell().set_verbosity(verbosity); self.shell().set_verbosity(verbosity);
@ -526,23 +529,19 @@ impl Config {
walk_tree(&self.cwd, |path| { walk_tree(&self.cwd, |path| {
let mut contents = String::new(); let mut contents = String::new();
let mut file = File::open(&path)?; let mut file = File::open(&path)?;
file.read_to_string(&mut contents).chain_err(|| { file.read_to_string(&mut contents)
format!("failed to read configuration file `{}`", .chain_err(|| format!("failed to read configuration file `{}`", path.display()))?;
path.display()) let toml = cargo_toml::parse(&contents, path, self).chain_err(|| {
})?; format!("could not parse TOML configuration in `{}`", path.display())
let toml = cargo_toml::parse(&contents,
path,
self).chain_err(|| {
format!("could not parse TOML configuration in `{}`",
path.display())
})?; })?;
let value = CV::from_toml(path, toml).chain_err(|| { let value = CV::from_toml(path, toml).chain_err(|| {
format!("failed to load TOML configuration from `{}`", format!(
path.display()) "failed to load TOML configuration from `{}`",
})?; path.display()
cfg.merge(value).chain_err(|| { )
format!("failed to merge configuration at `{}`", path.display())
})?; })?;
cfg.merge(value)
.chain_err(|| format!("failed to merge configuration at `{}`", path.display()))?;
Ok(()) Ok(())
}).chain_err(|| "Couldn't load Cargo configuration")?; }).chain_err(|| "Couldn't load Cargo configuration")?;
@ -555,16 +554,18 @@ impl Config {
/// Gets the index for a registry. /// Gets the index for a registry.
pub fn get_registry_index(&self, registry: &str) -> CargoResult<Url> { pub fn get_registry_index(&self, registry: &str) -> CargoResult<Url> {
Ok(match self.get_string(&format!("registries.{}.index", registry))? { Ok(
Some(index) => { match self.get_string(&format!("registries.{}.index", registry))? {
let url = index.val.to_url()?; Some(index) => {
if url.username() != "" || url.password().is_some() { let url = index.val.to_url()?;
bail!("Registry URLs may not contain credentials"); if url.username() != "" || url.password().is_some() {
bail!("Registry URLs may not contain credentials");
}
url
} }
url None => bail!("No index found for registry: `{}`", registry),
} },
None => bail!("No index found for registry: `{}`", registry), )
})
} }
/// Loads credentials config from the credentials file into the ConfigValue object, if present. /// Loads credentials config from the credentials file into the ConfigValue object, if present.
@ -578,17 +579,24 @@ impl Config {
let mut contents = String::new(); let mut contents = String::new();
let mut file = File::open(&credentials)?; let mut file = File::open(&credentials)?;
file.read_to_string(&mut contents).chain_err(|| { file.read_to_string(&mut contents).chain_err(|| {
format!("failed to read configuration file `{}`", credentials.display()) format!(
"failed to read configuration file `{}`",
credentials.display()
)
})?; })?;
let toml = cargo_toml::parse(&contents, let toml = cargo_toml::parse(&contents, &credentials, self).chain_err(|| {
&credentials, format!(
self).chain_err(|| { "could not parse TOML configuration in `{}`",
format!("could not parse TOML configuration in `{}`", credentials.display()) credentials.display()
)
})?; })?;
let mut value = CV::from_toml(&credentials, toml).chain_err(|| { let mut value = CV::from_toml(&credentials, toml).chain_err(|| {
format!("failed to load TOML configuration from `{}`", credentials.display()) format!(
"failed to load TOML configuration from `{}`",
credentials.display()
)
})?; })?;
// backwards compatibility for old .cargo/credentials layout // backwards compatibility for old .cargo/credentials layout
@ -618,7 +626,9 @@ impl Config {
/// Look for a path for `tool` in an environment variable or config path, but return `None` /// Look for a path for `tool` in an environment variable or config path, but return `None`
/// if it's not present. /// if it's not present.
fn maybe_get_tool(&self, tool: &str) -> CargoResult<Option<PathBuf>> { fn maybe_get_tool(&self, tool: &str) -> CargoResult<Option<PathBuf>> {
let var = tool.chars().flat_map(|c| c.to_uppercase()).collect::<String>(); let var = tool.chars()
.flat_map(|c| c.to_uppercase())
.collect::<String>();
if let Some(tool_path) = env::var_os(&var) { if let Some(tool_path) = env::var_os(&var) {
let maybe_relative = match tool_path.to_str() { let maybe_relative = match tool_path.to_str() {
Some(s) => s.contains("/") || s.contains("\\"), Some(s) => s.contains("/") || s.contains("\\"),
@ -629,7 +639,7 @@ impl Config {
} else { } else {
PathBuf::from(tool_path) PathBuf::from(tool_path)
}; };
return Ok(Some(path)) return Ok(Some(path));
} }
let var = format!("build.{}", tool); let var = format!("build.{}", tool);
@ -652,9 +662,8 @@ impl Config {
} }
pub fn http(&self) -> CargoResult<&RefCell<Easy>> { pub fn http(&self) -> CargoResult<&RefCell<Easy>> {
let http = self.easy.try_borrow_with(|| { let http = self.easy
ops::http_handle(self).map(RefCell::new) .try_borrow_with(|| ops::http_handle(self).map(RefCell::new))?;
})?;
{ {
let mut http = http.borrow_mut(); let mut http = http.borrow_mut();
http.reset(); http.reset();
@ -664,7 +673,8 @@ impl Config {
} }
pub fn crates_io_source_id<F>(&self, f: F) -> CargoResult<SourceId> pub fn crates_io_source_id<F>(&self, f: F) -> CargoResult<SourceId>
where F: FnMut() -> CargoResult<SourceId> where
F: FnMut() -> CargoResult<SourceId>,
{ {
Ok(self.crates_io_source_id.try_borrow_with(f)?.clone()) Ok(self.crates_io_source_id.try_borrow_with(f)?.clone())
} }
@ -673,10 +683,10 @@ impl Config {
#[derive(Eq, PartialEq, Clone, Copy)] #[derive(Eq, PartialEq, Clone, Copy)]
pub enum Location { pub enum Location {
Project, Project,
Global Global,
} }
#[derive(Eq,PartialEq,Clone,Deserialize)] #[derive(Eq, PartialEq, Clone, Deserialize)]
pub enum ConfigValue { pub enum ConfigValue {
Integer(i64, PathBuf), Integer(i64, PathBuf),
String(String, PathBuf), String(String, PathBuf),
@ -698,16 +708,15 @@ pub enum Definition {
impl fmt::Debug for ConfigValue { impl fmt::Debug for ConfigValue {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self { match *self {
CV::Integer(i, ref path) => write!(f, "{} (from {})", i, CV::Integer(i, ref path) => write!(f, "{} (from {})", i, path.display()),
path.display()), CV::Boolean(b, ref path) => write!(f, "{} (from {})", b, path.display()),
CV::Boolean(b, ref path) => write!(f, "{} (from {})", b, CV::String(ref s, ref path) => write!(f, "{} (from {})", s, path.display()),
path.display()),
CV::String(ref s, ref path) => write!(f, "{} (from {})", s,
path.display()),
CV::List(ref list, ref path) => { CV::List(ref list, ref path) => {
write!(f, "[")?; write!(f, "[")?;
for (i, &(ref s, ref path)) in list.iter().enumerate() { for (i, &(ref s, ref path)) in list.iter().enumerate() {
if i > 0 { write!(f, ", ")?; } if i > 0 {
write!(f, ", ")?;
}
write!(f, "{} (from {})", s, path.display())?; write!(f, "{} (from {})", s, path.display())?;
} }
write!(f, "] (from {})", path.display()) write!(f, "] (from {})", path.display())
@ -738,25 +747,29 @@ impl ConfigValue {
toml::Value::String(val) => Ok(CV::String(val, path.to_path_buf())), toml::Value::String(val) => Ok(CV::String(val, path.to_path_buf())),
toml::Value::Boolean(b) => Ok(CV::Boolean(b, path.to_path_buf())), toml::Value::Boolean(b) => Ok(CV::Boolean(b, path.to_path_buf())),
toml::Value::Integer(i) => Ok(CV::Integer(i, path.to_path_buf())), toml::Value::Integer(i) => Ok(CV::Integer(i, path.to_path_buf())),
toml::Value::Array(val) => { toml::Value::Array(val) => Ok(CV::List(
Ok(CV::List(val.into_iter().map(|toml| { val.into_iter()
match toml { .map(|toml| match toml {
toml::Value::String(val) => Ok((val, path.to_path_buf())), toml::Value::String(val) => Ok((val, path.to_path_buf())),
v => bail!("expected string but found {} in list", v => bail!("expected string but found {} in list", v.type_str()),
v.type_str()), })
} .collect::<CargoResult<_>>()?,
}).collect::<CargoResult<_>>()?, path.to_path_buf())) path.to_path_buf(),
} )),
toml::Value::Table(val) => { toml::Value::Table(val) => Ok(CV::Table(
Ok(CV::Table(val.into_iter().map(|(key, value)| { val.into_iter()
let value = CV::from_toml(path, value).chain_err(|| { .map(|(key, value)| {
format!("failed to parse key `{}`", key) let value = CV::from_toml(path, value)
})?; .chain_err(|| format!("failed to parse key `{}`", key))?;
Ok((key, value)) Ok((key, value))
}).collect::<CargoResult<_>>()?, path.to_path_buf())) })
} .collect::<CargoResult<_>>()?,
v => bail!("found TOML configuration value of unknown type `{}`", path.to_path_buf(),
v.type_str()), )),
v => bail!(
"found TOML configuration value of unknown type `{}`",
v.type_str()
),
} }
} }
@ -765,21 +778,20 @@ impl ConfigValue {
CV::Boolean(s, _) => toml::Value::Boolean(s), CV::Boolean(s, _) => toml::Value::Boolean(s),
CV::String(s, _) => toml::Value::String(s), CV::String(s, _) => toml::Value::String(s),
CV::Integer(i, _) => toml::Value::Integer(i), CV::Integer(i, _) => toml::Value::Integer(i),
CV::List(l, _) => toml::Value::Array(l CV::List(l, _) => {
.into_iter() toml::Value::Array(l.into_iter().map(|(s, _)| toml::Value::String(s)).collect())
.map(|(s, _)| toml::Value::String(s)) }
.collect()), CV::Table(l, _) => {
CV::Table(l, _) => toml::Value::Table(l.into_iter() toml::Value::Table(l.into_iter().map(|(k, v)| (k, v.into_toml())).collect())
.map(|(k, v)| (k, v.into_toml())) }
.collect()),
} }
} }
fn merge(&mut self, from: ConfigValue) -> CargoResult<()> { fn merge(&mut self, from: ConfigValue) -> CargoResult<()> {
match (self, from) { match (self, from) {
(&mut CV::String(..), CV::String(..)) | (&mut CV::String(..), CV::String(..))
(&mut CV::Integer(..), CV::Integer(..)) | | (&mut CV::Integer(..), CV::Integer(..))
(&mut CV::Boolean(..), CV::Boolean(..)) => {} | (&mut CV::Boolean(..), CV::Boolean(..)) => {}
(&mut CV::List(ref mut old, _), CV::List(ref mut new, _)) => { (&mut CV::List(ref mut old, _), CV::List(ref mut new, _)) => {
let new = mem::replace(new, Vec::new()); let new = mem::replace(new, Vec::new());
old.extend(new.into_iter()); old.extend(new.into_iter());
@ -792,23 +804,29 @@ impl ConfigValue {
let path = value.definition_path().to_path_buf(); let path = value.definition_path().to_path_buf();
let entry = entry.get_mut(); let entry = entry.get_mut();
entry.merge(value).chain_err(|| { entry.merge(value).chain_err(|| {
format!("failed to merge key `{}` between \ format!(
files:\n \ "failed to merge key `{}` between \
file 1: {}\n \ files:\n \
file 2: {}", file 1: {}\n \
key, file 2: {}",
entry.definition_path().display(), key,
path.display()) entry.definition_path().display(),
path.display()
)
})?; })?;
} }
Vacant(entry) => { entry.insert(value); } Vacant(entry) => {
entry.insert(value);
}
}; };
} }
} }
(expected, found) => { (expected, found) => {
return Err(internal(format!("expected {}, but found {}", return Err(internal(format!(
expected.desc(), found.desc()))) "expected {}, but found {}",
expected.desc(),
found.desc()
)))
} }
} }
@ -829,8 +847,7 @@ impl ConfigValue {
} }
} }
pub fn table(&self, key: &str) pub fn table(&self, key: &str) -> CargoResult<(&HashMap<String, ConfigValue>, &Path)> {
-> CargoResult<(&HashMap<String, ConfigValue>, &Path)> {
match *self { match *self {
CV::Table(ref table, ref p) => Ok((table, p)), CV::Table(ref table, ref p) => Ok((table, p)),
_ => self.expected("table", key), _ => self.expected("table", key),
@ -862,19 +879,23 @@ impl ConfigValue {
} }
pub fn definition_path(&self) -> &Path { pub fn definition_path(&self) -> &Path {
match *self { match *self {
CV::Boolean(_, ref p) | CV::Boolean(_, ref p)
CV::Integer(_, ref p) | | CV::Integer(_, ref p)
CV::String(_, ref p) | | CV::String(_, ref p)
CV::List(_, ref p) | | CV::List(_, ref p)
CV::Table(_, ref p) => p | CV::Table(_, ref p) => p,
} }
} }
pub fn expected<T>(&self, wanted: &str, key: &str) -> CargoResult<T> { pub fn expected<T>(&self, wanted: &str, key: &str) -> CargoResult<T> {
bail!("expected a {}, but found a {} for `{}` in {}", bail!(
wanted, self.desc(), key, "expected a {}, but found a {} for `{}` in {}",
self.definition_path().display()) wanted,
self.desc(),
key,
self.definition_path().display()
)
} }
} }
@ -901,7 +922,8 @@ pub fn homedir(cwd: &Path) -> Option<PathBuf> {
} }
fn walk_tree<F>(pwd: &Path, mut walk: F) -> CargoResult<()> fn walk_tree<F>(pwd: &Path, mut walk: F) -> CargoResult<()>
where F: FnMut(&Path) -> CargoResult<()> where
F: FnMut(&Path) -> CargoResult<()>,
{ {
let mut stash: HashSet<PathBuf> = HashSet::new(); let mut stash: HashSet<PathBuf> = HashSet::new();
@ -917,8 +939,10 @@ fn walk_tree<F>(pwd: &Path, mut walk: F) -> CargoResult<()>
// in our history to be sure we pick up that standard location for // in our history to be sure we pick up that standard location for
// information. // information.
let home = homedir(pwd).ok_or_else(|| { let home = homedir(pwd).ok_or_else(|| {
format_err!("Cargo couldn't find your home directory. \ format_err!(
This probably means that $HOME was not set.") "Cargo couldn't find your home directory. \
This probably means that $HOME was not set."
)
})?; })?;
let config = home.join("config"); let config = home.join("config");
if !stash.contains(&config) && fs::metadata(&config).is_ok() { if !stash.contains(&config) && fs::metadata(&config).is_ok() {
@ -928,13 +952,11 @@ fn walk_tree<F>(pwd: &Path, mut walk: F) -> CargoResult<()>
Ok(()) Ok(())
} }
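walk_tree visits every ancestor of the working directory before falling back to the home directory; a compact sketch of the candidate paths it considers (the helper is hypothetical):

use std::path::{Path, PathBuf};

// All `.cargo/config` candidates from `pwd` up to the filesystem root.
fn config_candidates(pwd: &Path) -> Vec<PathBuf> {
    pwd.ancestors()
        .map(|dir| dir.join(".cargo").join("config"))
        .collect()
}

fn main() {
    // /work/project/src/.cargo/config, /work/project/.cargo/config, ...
    for c in config_candidates(Path::new("/work/project/src")) {
        println!("{}", c.display());
    }
}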
pub fn save_credentials(cfg: &Config, pub fn save_credentials(cfg: &Config, token: String, registry: Option<String>) -> CargoResult<()> {
token: String,
registry: Option<String>) -> CargoResult<()> {
let mut file = { let mut file = {
cfg.home_path.create_dir()?; cfg.home_path.create_dir()?;
cfg.home_path.open_rw(Path::new("credentials"), cfg, cfg.home_path
"credentials' config file")? .open_rw(Path::new("credentials"), cfg, "credentials' config file")?
}; };
let (key, value) = { let (key, value) = {
@ -947,7 +969,10 @@ pub fn save_credentials(cfg: &Config,
if let Some(registry) = registry { if let Some(registry) = registry {
let mut map = HashMap::new(); let mut map = HashMap::new();
map.insert(registry, table); map.insert(registry, table);
("registries".into(), CV::Table(map, file.path().to_path_buf())) (
"registries".into(),
CV::Table(map, file.path().to_path_buf()),
)
} else { } else {
("registry".into(), table) ("registry".into(), table)
} }
@ -955,7 +980,10 @@ pub fn save_credentials(cfg: &Config,
let mut contents = String::new(); let mut contents = String::new();
file.read_to_string(&mut contents).chain_err(|| { file.read_to_string(&mut contents).chain_err(|| {
format!("failed to read configuration file `{}`", file.path().display()) format!(
"failed to read configuration file `{}`",
file.path().display()
)
})?; })?;
let mut toml = cargo_toml::parse(&contents, file.path(), cfg)?; let mut toml = cargo_toml::parse(&contents, file.path(), cfg)?;
@ -964,12 +992,12 @@ pub fn save_credentials(cfg: &Config,
if let Some(token) = toml.as_table_mut().unwrap().remove("token") { if let Some(token) = toml.as_table_mut().unwrap().remove("token") {
let mut map = HashMap::new(); let mut map = HashMap::new();
map.insert("token".to_string(), token); map.insert("token".to_string(), token);
toml.as_table_mut().unwrap().insert("registry".into(), map.into()); toml.as_table_mut()
.unwrap()
.insert("registry".into(), map.into());
} }
toml.as_table_mut() toml.as_table_mut().unwrap().insert(key, value.into_toml());
.unwrap()
.insert(key, value.into_toml());
let contents = toml.to_string(); let contents = toml.to_string();
file.seek(SeekFrom::Start(0))?; file.seek(SeekFrom::Start(0))?;
@ -980,7 +1008,7 @@ pub fn save_credentials(cfg: &Config,
return Ok(()); return Ok(());
#[cfg(unix)] #[cfg(unix)]
fn set_permissions(file: & File, mode: u32) -> CargoResult<()> { fn set_permissions(file: &File, mode: u32) -> CargoResult<()> {
use std::os::unix::fs::PermissionsExt; use std::os::unix::fs::PermissionsExt;
let mut perms = file.metadata()?.permissions(); let mut perms = file.metadata()?.permissions();
@ -991,7 +1019,7 @@ pub fn save_credentials(cfg: &Config,
#[cfg(not(unix))] #[cfg(not(unix))]
#[allow(unused)] #[allow(unused)]
fn set_permissions(file: & File, mode: u32) -> CargoResult<()> { fn set_permissions(file: &File, mode: u32) -> CargoResult<()> {
Ok(()) Ok(())
} }
} }
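On Unix the credentials file is restricted to the owner after writing; a standalone sketch of that chmod, assuming the usual 0o600 mode:

#[cfg(unix)]
fn chmod_600(file: &std::fs::File) -> std::io::Result<()> {
    use std::os::unix::fs::PermissionsExt;
    // Owner read/write only; the exact mode is an assumption here.
    let mut perms = file.metadata()?.permissions();
    perms.set_mode(0o600);
    file.set_permissions(perms)
}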

View file

@ -8,7 +8,7 @@ use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use std::hash::Hash; use std::hash::Hash;
pub use self::Freshness::{Fresh, Dirty}; pub use self::Freshness::{Dirty, Fresh};
#[derive(Debug)] #[derive(Debug)]
pub struct DependencyQueue<K: Eq + Hash, V> { pub struct DependencyQueue<K: Eq + Hash, V> {
@ -51,7 +51,10 @@ pub enum Freshness {
impl Freshness { impl Freshness {
pub fn combine(&self, other: Freshness) -> Freshness { pub fn combine(&self, other: Freshness) -> Freshness {
match *self { Fresh => other, Dirty => Dirty } match *self {
Fresh => other,
Dirty => Dirty,
}
} }
} }
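combine makes dirtiness absorbing; a sketch against the Freshness type above:

// Fresh is the identity and Dirty is absorbing: the result is Fresh only
// if every combined input was Fresh.
fn needs_rebuild(upstream: Freshness, this: Freshness) -> bool {
    match upstream.combine(this) {
        Dirty => true,
        Fresh => false,
    }
}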
@ -77,11 +80,7 @@ impl<K: Hash + Eq + Clone, V> DependencyQueue<K, V> {
/// ///
/// It is assumed that any dependencies of this package will eventually also /// It is assumed that any dependencies of this package will eventually also
/// be added to the dependency queue. /// be added to the dependency queue.
pub fn queue(&mut self, pub fn queue(&mut self, fresh: Freshness, key: K, value: V, dependencies: &[K]) -> &mut V {
fresh: Freshness,
key: K,
value: V,
dependencies: &[K]) -> &mut V {
let slot = match self.dep_map.entry(key.clone()) { let slot = match self.dep_map.entry(key.clone()) {
Occupied(v) => return &mut v.into_mut().1, Occupied(v) => return &mut v.into_mut().1,
Vacant(v) => v, Vacant(v) => v,
@ -94,8 +93,9 @@ impl<K: Hash + Eq + Clone, V> DependencyQueue<K, V> {
let mut my_dependencies = HashSet::new(); let mut my_dependencies = HashSet::new();
for dep in dependencies { for dep in dependencies {
my_dependencies.insert(dep.clone()); my_dependencies.insert(dep.clone());
let rev = self.reverse_dep_map.entry(dep.clone()) let rev = self.reverse_dep_map
.or_insert_with(HashSet::new); .entry(dep.clone())
.or_insert_with(HashSet::new);
rev.insert(key.clone()); rev.insert(key.clone());
} }
&mut slot.insert((my_dependencies, value)).1 &mut slot.insert((my_dependencies, value)).1
@ -122,12 +122,13 @@ impl<K: Hash + Eq + Clone, V> DependencyQueue<K, V> {
results.insert(key.clone(), IN_PROGRESS); results.insert(key.clone(), IN_PROGRESS);
let depth = 1 + map.get(&key) let depth = 1
.into_iter() + map.get(&key)
.flat_map(|it| it) .into_iter()
.map(|dep| depth(dep, map, results)) .flat_map(|it| it)
.max() .map(|dep| depth(dep, map, results))
.unwrap_or(0); .max()
.unwrap_or(0);
*results.get_mut(key).unwrap() = depth; *results.get_mut(key).unwrap() = depth;
@ -150,16 +151,21 @@ impl<K: Hash + Eq + Clone, V> DependencyQueue<K, V> {
// TODO: it'd be best here to throw in a heuristic of crate size as // TODO: it'd be best here to throw in a heuristic of crate size as
// well. For example how long did this crate historically take to // well. For example how long did this crate historically take to
// compile? How large is its source code? etc. // compile? How large is its source code? etc.
let next = self.dep_map.iter() let next = self.dep_map
.iter()
.filter(|&(_, &(ref deps, _))| deps.is_empty()) .filter(|&(_, &(ref deps, _))| deps.is_empty())
.map(|(key, _)| key.clone()) .map(|(key, _)| key.clone())
.max_by_key(|k| self.depth[k]); .max_by_key(|k| self.depth[k]);
let key = match next { let key = match next {
Some(key) => key, Some(key) => key,
None => return None None => return None,
}; };
let (_, data) = self.dep_map.remove(&key).unwrap(); let (_, data) = self.dep_map.remove(&key).unwrap();
let fresh = if self.dirty.contains(&key) {Dirty} else {Fresh}; let fresh = if self.dirty.contains(&key) {
Dirty
} else {
Fresh
};
self.pending.insert(key.clone()); self.pending.insert(key.clone());
Some((fresh, key, data)) Some((fresh, key, data))
} }

View file

@ -1,7 +1,7 @@
#![allow(unknown_lints)] #![allow(unknown_lints)]
use std::fmt; use std::fmt;
use std::process::{Output, ExitStatus}; use std::process::{ExitStatus, Output};
use std::str; use std::str;
use core::{TargetKind, Workspace}; use core::{TargetKind, Workspace};
@ -13,16 +13,19 @@ pub type CargoResult<T> = Result<T, Error>;
pub trait CargoResultExt<T, E> { pub trait CargoResultExt<T, E> {
fn chain_err<F, D>(self, f: F) -> Result<T, Context<D>> fn chain_err<F, D>(self, f: F) -> Result<T, Context<D>>
where F: FnOnce() -> D, where
D: fmt::Display + Send + Sync + 'static; F: FnOnce() -> D,
D: fmt::Display + Send + Sync + 'static;
} }
impl<T, E> CargoResultExt<T, E> for Result<T, E> impl<T, E> CargoResultExt<T, E> for Result<T, E>
where E: Into<Error>, where
E: Into<Error>,
{ {
fn chain_err<F, D>(self, f: F) -> Result<T, Context<D>> fn chain_err<F, D>(self, f: F) -> Result<T, Context<D>>
where F: FnOnce() -> D, where
D: fmt::Display + Send + Sync + 'static, F: FnOnce() -> D,
D: fmt::Display + Send + Sync + 'static,
{ {
self.map_err(|failure| { self.map_err(|failure| {
let context = f(); let context = f();
@ -93,7 +96,11 @@ pub struct CargoTestError {
pub enum Test { pub enum Test {
Multiple, Multiple,
Doc, Doc,
UnitTest{kind: TargetKind, name: String, pkg_name: String} UnitTest {
kind: TargetKind,
name: String,
pkg_name: String,
},
} }
impl CargoTestError { impl CargoTestError {
@ -101,9 +108,11 @@ impl CargoTestError {
if errors.is_empty() { if errors.is_empty() {
panic!("Cannot create CargoTestError from empty Vec") panic!("Cannot create CargoTestError from empty Vec")
} }
let desc = errors.iter().map(|error| error.desc.clone()) let desc = errors
.collect::<Vec<String>>() .iter()
.join("\n"); .map(|error| error.desc.clone())
.collect::<Vec<String>>()
.join("\n");
CargoTestError { CargoTestError {
test, test,
desc, desc,
@ -114,7 +123,11 @@ impl CargoTestError {
pub fn hint(&self, ws: &Workspace) -> String { pub fn hint(&self, ws: &Workspace) -> String {
match self.test { match self.test {
Test::UnitTest{ref kind, ref name, ref pkg_name} => { Test::UnitTest {
ref kind,
ref name,
ref pkg_name,
} => {
let pkg_info = if ws.members().count() > 1 && ws.is_virtual() { let pkg_info = if ws.members().count() > 1 && ws.is_virtual() {
format!("-p {} ", pkg_name) format!("-p {} ", pkg_name)
} else { } else {
@ -122,21 +135,24 @@ impl CargoTestError {
}; };
match *kind { match *kind {
TargetKind::Bench => TargetKind::Bench => {
format!("test failed, to rerun pass '{}--bench {}'", pkg_info, name), format!("test failed, to rerun pass '{}--bench {}'", pkg_info, name)
TargetKind::Bin => }
format!("test failed, to rerun pass '{}--bin {}'", pkg_info, name), TargetKind::Bin => {
TargetKind::Lib(_) => format!("test failed, to rerun pass '{}--bin {}'", pkg_info, name)
format!("test failed, to rerun pass '{}--lib'", pkg_info), }
TargetKind::Test => TargetKind::Lib(_) => format!("test failed, to rerun pass '{}--lib'", pkg_info),
format!("test failed, to rerun pass '{}--test {}'", pkg_info, name), TargetKind::Test => {
TargetKind::ExampleBin | TargetKind::ExampleLib(_) => format!("test failed, to rerun pass '{}--test {}'", pkg_info, name)
format!("test failed, to rerun pass '{}--example {}", pkg_info, name), }
_ => "test failed.".into() TargetKind::ExampleBin | TargetKind::ExampleLib(_) => {
format!("test failed, to rerun pass '{}--example {}", pkg_info, name)
}
_ => "test failed.".into(),
} }
}, }
Test::Doc => "test failed, to rerun pass '--doc'".into(), Test::Doc => "test failed, to rerun pass '--doc'".into(),
_ => "test failed.".into() _ => "test failed.".into(),
} }
} }
} }
@ -150,17 +166,25 @@ pub type CliResult = Result<(), CliError>;
pub struct CliError { pub struct CliError {
pub error: Option<CargoError>, pub error: Option<CargoError>,
pub unknown: bool, pub unknown: bool,
pub exit_code: i32 pub exit_code: i32,
} }
impl CliError { impl CliError {
pub fn new(error: CargoError, code: i32) -> CliError { pub fn new(error: CargoError, code: i32) -> CliError {
let unknown = error.downcast_ref::<Internal>().is_some(); let unknown = error.downcast_ref::<Internal>().is_some();
CliError { error: Some(error), exit_code: code, unknown } CliError {
error: Some(error),
exit_code: code,
unknown,
}
} }
pub fn code(code: i32) -> CliError { pub fn code(code: i32) -> CliError {
CliError { error: None, exit_code: code, unknown: false } CliError {
error: None,
exit_code: code,
unknown: false,
}
} }
} }
@ -177,14 +201,14 @@ impl From<clap::Error> for CliError {
} }
} }
// ============================================================================= // =============================================================================
// Construction helpers // Construction helpers
pub fn process_error(msg: &str, pub fn process_error(
status: Option<&ExitStatus>, msg: &str,
output: Option<&Output>) -> ProcessError status: Option<&ExitStatus>,
{ output: Option<&Output>,
) -> ProcessError {
let exit = match status { let exit = match status {
Some(s) => status_to_string(s), Some(s) => status_to_string(s),
None => "never executed".to_string(), None => "never executed".to_string(),

View file

@ -1,16 +1,16 @@
use std::fs::{self, File, OpenOptions}; use std::fs::{self, File, OpenOptions};
use std::io::{Seek, Read, Write, SeekFrom}; use std::io::{Read, Seek, SeekFrom, Write};
use std::io; use std::io;
use std::path::{Path, PathBuf, Display}; use std::path::{Display, Path, PathBuf};
use termcolor::Color::Cyan; use termcolor::Color::Cyan;
use fs2::{FileExt, lock_contended_error}; use fs2::{lock_contended_error, FileExt};
#[allow(unused_imports)] #[allow(unused_imports)]
use libc; use libc;
use util::Config; use util::Config;
use util::paths; use util::paths;
use util::errors::{CargoResult, CargoResultExt, CargoError}; use util::errors::{CargoError, CargoResult, CargoResultExt};
pub struct FileLock { pub struct FileLock {
f: Option<File>, f: Option<File>,
@ -55,7 +55,7 @@ impl FileLock {
for entry in path.parent().unwrap().read_dir()? { for entry in path.parent().unwrap().read_dir()? {
let entry = entry?; let entry = entry?;
if Some(&entry.file_name()[..]) == path.file_name() { if Some(&entry.file_name()[..]) == path.file_name() {
continue continue;
} }
let kind = entry.file_type()?; let kind = entry.file_type()?;
if kind.is_dir() { if kind.is_dir() {
@ -160,17 +160,17 @@ impl Filesystem {
/// ///
/// The returned file can be accessed to look at the path and also has /// The returned file can be accessed to look at the path and also has
/// read/write access to the underlying file. /// read/write access to the underlying file.
pub fn open_rw<P>(&self, pub fn open_rw<P>(&self, path: P, config: &Config, msg: &str) -> CargoResult<FileLock>
path: P, where
config: &Config, P: AsRef<Path>,
msg: &str) -> CargoResult<FileLock>
where P: AsRef<Path>
{ {
self.open(path.as_ref(), self.open(
OpenOptions::new().read(true).write(true).create(true), path.as_ref(),
State::Exclusive, OpenOptions::new().read(true).write(true).create(true),
config, State::Exclusive,
msg) config,
msg,
)
} }
/// Opens shared access to a file, returning the locked version of a file. /// Opens shared access to a file, returning the locked version of a file.
@ -182,55 +182,60 @@ impl Filesystem {
/// The returned file can be accessed to look at the path and also has read /// The returned file can be accessed to look at the path and also has read
/// access to the underlying file. Any writes to the file will return an /// access to the underlying file. Any writes to the file will return an
/// error. /// error.
pub fn open_ro<P>(&self, pub fn open_ro<P>(&self, path: P, config: &Config, msg: &str) -> CargoResult<FileLock>
path: P, where
config: &Config, P: AsRef<Path>,
msg: &str) -> CargoResult<FileLock>
where P: AsRef<Path>
{ {
self.open(path.as_ref(), self.open(
OpenOptions::new().read(true), path.as_ref(),
State::Shared, OpenOptions::new().read(true),
config, State::Shared,
msg) config,
msg,
)
} }
fn open(&self, fn open(
path: &Path, &self,
opts: &OpenOptions, path: &Path,
state: State, opts: &OpenOptions,
config: &Config, state: State,
msg: &str) -> CargoResult<FileLock> { config: &Config,
msg: &str,
) -> CargoResult<FileLock> {
let path = self.root.join(path); let path = self.root.join(path);
// If we want an exclusive lock then if we fail because of NotFound it's // If we want an exclusive lock then if we fail because of NotFound it's
// likely because an intermediate directory didn't exist, so try to // likely because an intermediate directory didn't exist, so try to
// create the directory and then continue. // create the directory and then continue.
let f = opts.open(&path).or_else(|e| { let f = opts.open(&path)
if e.kind() == io::ErrorKind::NotFound && state == State::Exclusive { .or_else(|e| {
create_dir_all(path.parent().unwrap())?; if e.kind() == io::ErrorKind::NotFound && state == State::Exclusive {
opts.open(&path) create_dir_all(path.parent().unwrap())?;
} else { opts.open(&path)
Err(e) } else {
} Err(e)
}).chain_err(|| { }
format!("failed to open: {}", path.display()) })
})?; .chain_err(|| format!("failed to open: {}", path.display()))?;
match state { match state {
State::Exclusive => { State::Exclusive => {
acquire(config, msg, &path, acquire(config, msg, &path, &|| f.try_lock_exclusive(), &|| {
&|| f.try_lock_exclusive(), f.lock_exclusive()
&|| f.lock_exclusive())?; })?;
} }
State::Shared => { State::Shared => {
acquire(config, msg, &path, acquire(config, msg, &path, &|| f.try_lock_shared(), &|| {
&|| f.try_lock_shared(), f.lock_shared()
&|| f.lock_shared())?; })?;
} }
State::Unlocked => {} State::Unlocked => {}
} }
Ok(FileLock { f: Some(f), path, state }) Ok(FileLock {
f: Some(f),
path,
state,
})
} }
} }
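The lock is first attempted without blocking so the "Blocking" status line can be printed before waiting; a standalone sketch of that pattern using the fs2 crate:

use fs2::{lock_contended_error, FileExt};
use std::{fs::File, io};

fn lock_exclusive_with_notice(f: &File, name: &str) -> io::Result<()> {
    match f.try_lock_exclusive() {
        Ok(()) => Ok(()),
        // Contended: announce that we are about to block, then wait.
        Err(ref e) if e.raw_os_error() == lock_contended_error().raw_os_error() => {
            eprintln!("Blocking: waiting for file lock on {}", name);
            f.lock_exclusive()
        }
        Err(e) => Err(e),
    }
}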
@ -261,12 +266,13 @@ impl PartialEq<Filesystem> for Path {
/// ///
/// Returns an error if the lock could not be acquired or if any error other /// Returns an error if the lock could not be acquired or if any error other
/// than a contention error happens. /// than a contention error happens.
fn acquire(config: &Config, fn acquire(
msg: &str, config: &Config,
path: &Path, msg: &str,
try: &Fn() -> io::Result<()>, path: &Path,
block: &Fn() -> io::Result<()>) -> CargoResult<()> { try: &Fn() -> io::Result<()>,
block: &Fn() -> io::Result<()>,
) -> CargoResult<()> {
// File locking on Unix is currently implemented via `flock`, which is known // File locking on Unix is currently implemented via `flock`, which is known
// to be broken on NFS. We could in theory just ignore errors that happen on // to be broken on NFS. We could in theory just ignore errors that happen on
// NFS, but apparently the failure mode [1] for `flock` on NFS is **blocking // NFS, but apparently the failure mode [1] for `flock` on NFS is **blocking
@ -278,7 +284,7 @@ fn acquire(config: &Config,
// //
// [1]: https://github.com/rust-lang/cargo/issues/2615 // [1]: https://github.com/rust-lang/cargo/issues/2615
if is_on_nfs_mount(path) { if is_on_nfs_mount(path) {
return Ok(()) return Ok(());
} }
match try() { match try() {
@ -289,25 +295,29 @@ fn acquire(config: &Config,
// implement file locking. We detect that here via the return value of // implement file locking. We detect that here via the return value of
// locking (e.g. inspecting errno). // locking (e.g. inspecting errno).
#[cfg(unix)] #[cfg(unix)]
Err(ref e) if e.raw_os_error() == Some(libc::ENOTSUP) => return Ok(()), Err(ref e) if e.raw_os_error() == Some(libc::ENOTSUP) =>
{
return Ok(())
}
#[cfg(target_os = "linux")] #[cfg(target_os = "linux")]
Err(ref e) if e.raw_os_error() == Some(libc::ENOSYS) => return Ok(()), Err(ref e) if e.raw_os_error() == Some(libc::ENOSYS) =>
{
return Ok(())
}
Err(e) => { Err(e) => {
if e.raw_os_error() != lock_contended_error().raw_os_error() { if e.raw_os_error() != lock_contended_error().raw_os_error() {
let e = CargoError::from(e); let e = CargoError::from(e);
let cx = format!("failed to lock file: {}", path.display()); let cx = format!("failed to lock file: {}", path.display());
return Err(e.context(cx).into()) return Err(e.context(cx).into());
} }
} }
} }
let msg = format!("waiting for file lock on {}", msg); let msg = format!("waiting for file lock on {}", msg);
config.shell().status_with_color("Blocking", &msg, Cyan)?; config.shell().status_with_color("Blocking", &msg, Cyan)?;
block().chain_err(|| { block().chain_err(|| format!("failed to lock file: {}", path.display()))?;
format!("failed to lock file: {}", path.display())
})?;
return Ok(()); return Ok(());
#[cfg(all(target_os = "linux", not(target_env = "musl")))] #[cfg(all(target_os = "linux", not(target_env = "musl")))]
@ -341,7 +351,7 @@ fn create_dir_all(path: &Path) -> io::Result<()> {
Err(e) => { Err(e) => {
if e.kind() == io::ErrorKind::NotFound { if e.kind() == io::ErrorKind::NotFound {
if let Some(p) = path.parent() { if let Some(p) = path.parent() {
return create_dir_all(p).and_then(|()| create_dir(path)) return create_dir_all(p).and_then(|()| create_dir(path));
} }
} }
Err(e) Err(e)

View file

@ -4,12 +4,12 @@ use std::collections::hash_set::{HashSet, Iter};
use std::collections::hash_map::{HashMap, Keys}; use std::collections::hash_map::{HashMap, Keys};
pub struct Graph<N> { pub struct Graph<N> {
nodes: HashMap<N, HashSet<N>> nodes: HashMap<N, HashSet<N>>,
} }
enum Mark { enum Mark {
InProgress, InProgress,
Done Done,
} }
pub type Nodes<'a, N> = Keys<'a, N, HashSet<N>>; pub type Nodes<'a, N> = Keys<'a, N, HashSet<N>>;
@ -17,17 +17,23 @@ pub type Edges<'a, N> = Iter<'a, N>;
impl<N: Eq + Hash + Clone> Graph<N> { impl<N: Eq + Hash + Clone> Graph<N> {
pub fn new() -> Graph<N> { pub fn new() -> Graph<N> {
Graph { nodes: HashMap::new() } Graph {
nodes: HashMap::new(),
}
} }
pub fn add(&mut self, node: N, children: &[N]) { pub fn add(&mut self, node: N, children: &[N]) {
self.nodes.entry(node) self.nodes
.entry(node)
.or_insert_with(HashSet::new) .or_insert_with(HashSet::new)
.extend(children.iter().cloned()); .extend(children.iter().cloned());
} }
pub fn link(&mut self, node: N, child: N) { pub fn link(&mut self, node: N, child: N) {
self.nodes.entry(node).or_insert_with(HashSet::new).insert(child); self.nodes
.entry(node)
.or_insert_with(HashSet::new)
.insert(child);
} }
pub fn get_nodes(&self) -> &HashMap<N, HashSet<N>> { pub fn get_nodes(&self) -> &HashMap<N, HashSet<N>> {
@ -118,12 +124,16 @@ impl<N: fmt::Display + Eq + Hash> fmt::Debug for Graph<N> {
} }
impl<N: Eq + Hash> PartialEq for Graph<N> { impl<N: Eq + Hash> PartialEq for Graph<N> {
fn eq(&self, other: &Graph<N>) -> bool { self.nodes.eq(&other.nodes) } fn eq(&self, other: &Graph<N>) -> bool {
self.nodes.eq(&other.nodes)
}
} }
impl<N: Eq + Hash> Eq for Graph<N> {} impl<N: Eq + Hash> Eq for Graph<N> {}
impl<N: Eq + Hash + Clone> Clone for Graph<N> { impl<N: Eq + Hash + Clone> Clone for Graph<N> {
fn clone(&self) -> Graph<N> { fn clone(&self) -> Graph<N> {
Graph { nodes: self.nodes.clone() } Graph {
nodes: self.nodes.clone(),
}
} }
} }
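Typical use of this adjacency-set graph, assuming only the methods shown here:

fn main() {
    let mut g = Graph::new();
    g.add("a", &["b", "c"]); // edges a -> b, a -> c
    g.link("b", "c");        // edge  b -> c
    assert!(g.get_nodes()["a"].contains("b"));
}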

View file

@ -1,12 +1,12 @@
#![allow(deprecated)] #![allow(deprecated)]
use hex; use hex;
use std::hash::{Hasher, Hash, SipHasher}; use std::hash::{Hash, Hasher, SipHasher};
pub fn to_hex(num: u64) -> String { pub fn to_hex(num: u64) -> String {
hex::encode(&[ hex::encode(&[
(num >> 0) as u8, (num >> 0) as u8,
(num >> 8) as u8, (num >> 8) as u8,
(num >> 16) as u8, (num >> 16) as u8,
(num >> 24) as u8, (num >> 24) as u8,
(num >> 32) as u8, (num >> 32) as u8,

View file

@ -21,7 +21,7 @@ pub fn find_project_manifest(pwd: &Path, file: &str) -> CargoResult<PathBuf> {
loop { loop {
let manifest = current.join(file); let manifest = current.join(file);
if fs::metadata(&manifest).is_ok() { if fs::metadata(&manifest).is_ok() {
return Ok(manifest) return Ok(manifest);
} }
match current.parent() { match current.parent() {
@ -30,13 +30,15 @@ pub fn find_project_manifest(pwd: &Path, file: &str) -> CargoResult<PathBuf> {
} }
} }
bail!("could not find `{}` in `{}` or any parent directory", bail!(
file, pwd.display()) "could not find `{}` in `{}` or any parent directory",
file,
pwd.display()
)
} }
/// Find the root Cargo.toml /// Find the root Cargo.toml
pub fn find_root_manifest_for_wd(manifest_path: Option<&str>, cwd: &Path) pub fn find_root_manifest_for_wd(manifest_path: Option<&str>, cwd: &Path) -> CargoResult<PathBuf> {
-> CargoResult<PathBuf> {
match manifest_path { match manifest_path {
Some(path) => { Some(path) => {
let absolute_path = paths::normalize_path(&cwd.join(&path)); let absolute_path = paths::normalize_path(&cwd.join(&path));
@ -47,7 +49,7 @@ pub fn find_root_manifest_for_wd(manifest_path: Option<&str>, cwd: &Path)
bail!("manifest path `{}` does not exist", path) bail!("manifest path `{}` does not exist", path)
} }
Ok(absolute_path) Ok(absolute_path)
}, }
None => find_project_manifest(cwd, "Cargo.toml"), None => find_project_manifest(cwd, "Cargo.toml"),
} }
} }

View file

@ -86,7 +86,7 @@ mod imp {
let job = CreateJobObjectW(0 as *mut _, 0 as *const _); let job = CreateJobObjectW(0 as *mut _, 0 as *const _);
if job.is_null() { if job.is_null() {
return None return None;
} }
let job = Handle { inner: job }; let job = Handle { inner: job };
@ -96,14 +96,15 @@ mod imp {
// our children will reside in the job once we spawn a process. // our children will reside in the job once we spawn a process.
let mut info: JOBOBJECT_EXTENDED_LIMIT_INFORMATION; let mut info: JOBOBJECT_EXTENDED_LIMIT_INFORMATION;
info = mem::zeroed(); info = mem::zeroed();
info.BasicLimitInformation.LimitFlags = info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE; let r = SetInformationJobObject(
let r = SetInformationJobObject(job.inner, job.inner,
JobObjectExtendedLimitInformation, JobObjectExtendedLimitInformation,
&mut info as *mut _ as LPVOID, &mut info as *mut _ as LPVOID,
mem::size_of_val(&info) as DWORD); mem::size_of_val(&info) as DWORD,
);
if r == 0 { if r == 0 {
return None return None;
} }
// Assign our process to this job object, meaning that our children will // Assign our process to this job object, meaning that our children will
@ -111,7 +112,7 @@ mod imp {
let me = GetCurrentProcess(); let me = GetCurrentProcess();
let r = AssignProcessToJobObject(job.inner, me); let r = AssignProcessToJobObject(job.inner, me);
if r == 0 { if r == 0 {
return None return None;
} }
Some(Setup { job }) Some(Setup { job })
@ -140,13 +141,13 @@ mod imp {
let mut info: JOBOBJECT_EXTENDED_LIMIT_INFORMATION; let mut info: JOBOBJECT_EXTENDED_LIMIT_INFORMATION;
info = mem::zeroed(); info = mem::zeroed();
let r = SetInformationJobObject( let r = SetInformationJobObject(
self.job.inner, self.job.inner,
JobObjectExtendedLimitInformation, JobObjectExtendedLimitInformation,
&mut info as *mut _ as LPVOID, &mut info as *mut _ as LPVOID,
mem::size_of_val(&info) as DWORD); mem::size_of_val(&info) as DWORD,
);
if r == 0 { if r == 0 {
info!("failed to configure job object to defaults: {}", info!("failed to configure job object to defaults: {}", last_err());
last_err());
} }
} }
} }
@ -162,14 +163,15 @@ mod imp {
let mut jobs: Jobs = mem::zeroed(); let mut jobs: Jobs = mem::zeroed();
let r = QueryInformationJobObject( let r = QueryInformationJobObject(
self.job.inner, self.job.inner,
JobObjectBasicProcessIdList, JobObjectBasicProcessIdList,
&mut jobs as *mut _ as LPVOID, &mut jobs as *mut _ as LPVOID,
mem::size_of_val(&jobs) as DWORD, mem::size_of_val(&jobs) as DWORD,
0 as *mut _); 0 as *mut _,
);
if r == 0 { if r == 0 {
info!("failed to query job object: {}", last_err()); info!("failed to query job object: {}", last_err());
return false return false;
} }
let mut killed = false; let mut killed = false;
@ -177,46 +179,44 @@ mod imp {
assert!(list.len() > 0); assert!(list.len() > 0);
info!("found {} remaining processes", list.len() - 1); info!("found {} remaining processes", list.len() - 1);
let list = list.iter().filter(|&&id| { let list = list.iter()
// let's not kill ourselves .filter(|&&id| {
id as DWORD != GetCurrentProcessId() // let's not kill ourselves
}).filter_map(|&id| { id as DWORD != GetCurrentProcessId()
// Open the process with the necessary rights, and if this })
// fails then we probably raced with the process exiting so we .filter_map(|&id| {
// ignore the problem. // Open the process with the necessary rights, and if this
let flags = PROCESS_QUERY_INFORMATION | // fails then we probably raced with the process exiting so we
PROCESS_TERMINATE | // ignore the problem.
SYNCHRONIZE; let flags = PROCESS_QUERY_INFORMATION | PROCESS_TERMINATE | SYNCHRONIZE;
let p = OpenProcess(flags, FALSE, id as DWORD); let p = OpenProcess(flags, FALSE, id as DWORD);
if p.is_null() { if p.is_null() {
None None
} else { } else {
Some(Handle { inner: p }) Some(Handle { inner: p })
} }
}).filter(|p| { })
// Test if this process was actually in the job object or not. .filter(|p| {
// If it's not then we likely raced with something else // Test if this process was actually in the job object or not.
// recycling this PID, so we just skip this step. // If it's not then we likely raced with something else
let mut res = 0; // recycling this PID, so we just skip this step.
let r = IsProcessInJob(p.inner, self.job.inner, &mut res); let mut res = 0;
if r == 0 { let r = IsProcessInJob(p.inner, self.job.inner, &mut res);
info!("failed to test is process in job: {}", last_err()); if r == 0 {
return false info!("failed to test is process in job: {}", last_err());
} return false;
res == TRUE }
}); res == TRUE
});
for p in list { for p in list {
// Load the file which this process was spawned from. We then // Load the file which this process was spawned from. We then
// later use this for identification purposes. // later use this for identification purposes.
let mut buf = [0; 1024]; let mut buf = [0; 1024];
let r = GetProcessImageFileNameW(p.inner, let r = GetProcessImageFileNameW(p.inner, buf.as_mut_ptr(), buf.len() as DWORD);
buf.as_mut_ptr(),
buf.len() as DWORD);
if r == 0 { if r == 0 {
info!("failed to get image name: {}", last_err()); info!("failed to get image name: {}", last_err());
continue continue;
} }
let s = OsString::from_wide(&buf[..r as usize]); let s = OsString::from_wide(&buf[..r as usize]);
info!("found remaining: {:?}", s); info!("found remaining: {:?}", s);
@ -235,7 +235,7 @@ mod imp {
if let Some(s) = s.to_str() { if let Some(s) = s.to_str() {
if s.contains("mspdbsrv") { if s.contains("mspdbsrv") {
info!("\toops, this is mspdbsrv"); info!("\toops, this is mspdbsrv");
continue continue;
} }
} }
@ -252,7 +252,7 @@ mod imp {
let r = WaitForSingleObject(p.inner, INFINITE); let r = WaitForSingleObject(p.inner, INFINITE);
if r != 0 { if r != 0 {
info!("failed to wait for process to die: {}", last_err()); info!("failed to wait for process to die: {}", last_err());
return false return false;
} }
killed = true; killed = true;
} }
@ -263,7 +263,9 @@ mod imp {
impl Drop for Handle { impl Drop for Handle {
fn drop(&mut self) { fn drop(&mut self) {
unsafe { CloseHandle(self.inner); } unsafe {
CloseHandle(self.inner);
}
} }
} }
} }

View file

@ -1,19 +1,21 @@
use std::cmp; use std::cmp;
pub fn lev_distance(me: &str, t: &str) -> usize { pub fn lev_distance(me: &str, t: &str) -> usize {
if me.is_empty() { return t.chars().count(); } if me.is_empty() {
if t.is_empty() { return me.chars().count(); } return t.chars().count();
}
if t.is_empty() {
return me.chars().count();
}
let mut dcol = (0..t.len() + 1).collect::<Vec<_>>(); let mut dcol = (0..t.len() + 1).collect::<Vec<_>>();
let mut t_last = 0; let mut t_last = 0;
for (i, sc) in me.chars().enumerate() { for (i, sc) in me.chars().enumerate() {
let mut current = i; let mut current = i;
dcol[0] = current + 1; dcol[0] = current + 1;
for (j, tc) in t.chars().enumerate() { for (j, tc) in t.chars().enumerate() {
let next = dcol[j + 1]; let next = dcol[j + 1];
if sc == tc { if sc == tc {
@ -33,11 +35,12 @@ pub fn lev_distance(me: &str, t: &str) -> usize {
#[test] #[test]
fn test_lev_distance() { fn test_lev_distance() {
use std::char::{ from_u32, MAX }; use std::char::{from_u32, MAX};
// Test bytelength agnosticity // Test bytelength agnosticity
for c in (0u32..MAX as u32) for c in (0u32..MAX as u32)
.filter_map(|i| from_u32(i)) .filter_map(|i| from_u32(i))
.map(|i| i.to_string()) { .map(|i| i.to_string())
{
assert_eq!(lev_distance(&c, &c), 0); assert_eq!(lev_distance(&c, &c), 0);
} }
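A couple of concrete distances for intuition:

fn main() {
    assert_eq!(lev_distance("kitten", "sitting"), 3); // classic example
    assert_eq!(lev_distance("buidl", "build"), 2);    // two substitutions
}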

View file

@ -1,7 +1,7 @@
use serde::ser; use serde::ser;
use serde_json::{self, Value}; use serde_json::{self, Value};
use core::{PackageId, Target, Profile}; use core::{PackageId, Profile, Target};
pub trait Message: ser::Serialize { pub trait Message: ser::Serialize {
fn reason(&self) -> &str; fn reason(&self) -> &str;

View file

@ -1,21 +1,21 @@
pub use self::cfg::{Cfg, CfgExpr}; pub use self::cfg::{Cfg, CfgExpr};
pub use self::config::{Config, ConfigValue, homedir}; pub use self::config::{homedir, Config, ConfigValue};
pub use self::dependency_queue::{DependencyQueue, Fresh, Dirty, Freshness}; pub use self::dependency_queue::{DependencyQueue, Dirty, Fresh, Freshness};
pub use self::errors::{CargoResult, CargoResultExt, CargoError, Test, CliResult}; pub use self::errors::{CargoError, CargoResult, CargoResultExt, CliResult, Test};
pub use self::errors::{CliError, ProcessError, CargoTestError}; pub use self::errors::{CargoTestError, CliError, ProcessError};
pub use self::errors::{process_error, internal}; pub use self::errors::{internal, process_error};
pub use self::flock::{FileLock, Filesystem}; pub use self::flock::{FileLock, Filesystem};
pub use self::graph::Graph; pub use self::graph::Graph;
pub use self::hex::{to_hex, short_hash, hash_u64}; pub use self::hex::{short_hash, to_hex, hash_u64};
pub use self::lev_distance::{lev_distance}; pub use self::lev_distance::lev_distance;
pub use self::paths::{join_paths, path2bytes, bytes2path, dylib_path}; pub use self::paths::{dylib_path, join_paths, bytes2path, path2bytes};
pub use self::paths::{normalize_path, dylib_path_envvar, without_prefix}; pub use self::paths::{dylib_path_envvar, normalize_path, without_prefix};
pub use self::process_builder::{process, ProcessBuilder}; pub use self::process_builder::{process, ProcessBuilder};
pub use self::rustc::Rustc; pub use self::rustc::Rustc;
pub use self::sha256::Sha256; pub use self::sha256::Sha256;
pub use self::to_semver::ToSemver; pub use self::to_semver::ToSemver;
pub use self::to_url::ToUrl; pub use self::to_url::ToUrl;
pub use self::vcs::{GitRepo, HgRepo, PijulRepo, FossilRepo}; pub use self::vcs::{FossilRepo, GitRepo, HgRepo, PijulRepo};
pub use self::read2::read2; pub use self::read2::read2;
pub use self::progress::Progress; pub use self::progress::Progress;

View file

@ -10,23 +10,21 @@ fn maybe_spurious(err: &Error) -> bool {
for e in err.causes() { for e in err.causes() {
if let Some(git_err) = e.downcast_ref::<git2::Error>() { if let Some(git_err) = e.downcast_ref::<git2::Error>() {
match git_err.class() { match git_err.class() {
git2::ErrorClass::Net | git2::ErrorClass::Net | git2::ErrorClass::Os => return true,
git2::ErrorClass::Os => return true, _ => (),
_ => ()
} }
} }
if let Some(curl_err) = e.downcast_ref::<curl::Error>() { if let Some(curl_err) = e.downcast_ref::<curl::Error>() {
if curl_err.is_couldnt_connect() || if curl_err.is_couldnt_connect() || curl_err.is_couldnt_resolve_proxy()
curl_err.is_couldnt_resolve_proxy() || || curl_err.is_couldnt_resolve_host()
curl_err.is_couldnt_resolve_host() || || curl_err.is_operation_timedout() || curl_err.is_recv_error()
curl_err.is_operation_timedout() || {
curl_err.is_recv_error() { return true;
return true
} }
} }
if let Some(not_200) = e.downcast_ref::<HttpNot200>() { if let Some(not_200) = e.downcast_ref::<HttpNot200>() {
if 500 <= not_200.code && not_200.code < 600 { if 500 <= not_200.code && not_200.code < 600 {
return true return true;
} }
} }
} }
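The cause-chain walk above can be sketched in isolation. `Transient` below is a hypothetical error type standing in for the git2 and curl cases; `causes()` and `downcast_ref` are the same failure-crate calls the function already uses.

```
#[macro_use]
extern crate failure;

use failure::Error;

// Hypothetical transient error type.
#[derive(Debug, Fail)]
#[fail(display = "transient failure: {}", msg)]
struct Transient {
    msg: String,
}

// Walk the whole cause chain; retry only if some cause is transient.
fn is_spurious(err: &Error) -> bool {
    for e in err.causes() {
        if e.downcast_ref::<Transient>().is_some() {
            return true;
        }
    }
    false
}
```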
@@ -47,15 +45,19 @@ fn maybe_spurious(err: &Error) -> bool {
/// cargo_result = network::with_retry(&config, || something.download()); /// cargo_result = network::with_retry(&config, || something.download());
/// ``` /// ```
pub fn with_retry<T, F>(config: &Config, mut callback: F) -> CargoResult<T> pub fn with_retry<T, F>(config: &Config, mut callback: F) -> CargoResult<T>
where F: FnMut() -> CargoResult<T> where
F: FnMut() -> CargoResult<T>,
{ {
let mut remaining = config.net_retry()?; let mut remaining = config.net_retry()?;
loop { loop {
match callback() { match callback() {
Ok(ret) => return Ok(ret), Ok(ret) => return Ok(ret),
Err(ref e) if maybe_spurious(e) && remaining > 0 => { Err(ref e) if maybe_spurious(e) && remaining > 0 => {
let msg = format!("spurious network error ({} tries \ let msg = format!(
remaining): {}", remaining, e); "spurious network error ({} tries \
remaining): {}",
remaining, e
);
config.shell().warn(msg)?; config.shell().warn(msg)?;
remaining -= 1; remaining -= 1;
} }
@@ -67,8 +69,14 @@ pub fn with_retry<T, F>(config: &Config, mut callback: F) -> CargoResult<T>
#[test] #[test]
fn with_retry_repeats_the_call_then_works() { fn with_retry_repeats_the_call_then_works() {
//Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry //Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry
let error1 = HttpNot200 { code: 501, url: "Uri".to_string() }.into(); let error1 = HttpNot200 {
let error2 = HttpNot200 { code: 502, url: "Uri".to_string() }.into(); code: 501,
url: "Uri".to_string(),
}.into();
let error2 = HttpNot200 {
code: 502,
url: "Uri".to_string(),
}.into();
let mut results: Vec<CargoResult<()>> = vec![Ok(()), Err(error1), Err(error2)]; let mut results: Vec<CargoResult<()>> = vec![Ok(()), Err(error1), Err(error2)];
let config = Config::default().unwrap(); let config = Config::default().unwrap();
let result = with_retry(&config, || results.pop().unwrap()); let result = with_retry(&config, || results.pop().unwrap());
@@ -81,9 +89,15 @@ fn with_retry_finds_nested_spurious_errors() {
//Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry //Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry
//String error messages are not considered spurious //String error messages are not considered spurious
let error1 = CargoError::from(HttpNot200 { code: 501, url: "Uri".to_string() }); let error1 = CargoError::from(HttpNot200 {
code: 501,
url: "Uri".to_string(),
});
let error1 = CargoError::from(error1.context("A non-spurious wrapping err")); let error1 = CargoError::from(error1.context("A non-spurious wrapping err"));
let error2 = CargoError::from(HttpNot200 { code: 502, url: "Uri".to_string() }); let error2 = CargoError::from(HttpNot200 {
code: 502,
url: "Uri".to_string(),
});
let error2 = CargoError::from(error2.context("A second chained error")); let error2 = CargoError::from(error2.context("A second chained error"));
let mut results: Vec<CargoResult<()>> = vec![Ok(()), Err(error1), Err(error2)]; let mut results: Vec<CargoResult<()>> = vec![Ok(()), Err(error1), Err(error2)];
let config = Config::default().unwrap(); let config = Config::default().unwrap();
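Setting aside cargo's `Config` and `CargoResult` types, the retry loop and the scripted-results tests above reduce to the following sketch; `with_retry_generic` and the `is_spurious` parameter are assumed names for this illustration, not cargo APIs.

```
// Generic form of the retry loop: retry while the caller's predicate
// classifies the error as transient and attempts remain; otherwise
// propagate the error immediately.
fn with_retry_generic<T, E, P, F>(
    mut remaining: u32,
    is_spurious: P,
    mut callback: F,
) -> Result<T, E>
where
    P: Fn(&E) -> bool,
    F: FnMut() -> Result<T, E>,
{
    loop {
        match callback() {
            Ok(ret) => return Ok(ret),
            Err(ref e) if is_spurious(e) && remaining > 0 => remaining -= 1,
            Err(e) => return Err(e),
        }
    }
}

#[test]
fn retries_scripted_results() {
    // Same pattern as the tests above: a Vec acts as a stack of canned
    // outcomes, popped once per attempt.
    let mut results: Vec<Result<(), &str>> = vec![Ok(()), Err("503"), Err("502")];
    let result = with_retry_generic(2, |_| true, || results.pop().unwrap());
    assert_eq!(result, Ok(()));
}
```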
Some files were not shown because too many files have changed in this diff