Merge remote-tracking branch 'origin/master' into proptest

This commit is contained in:
Eh2406 2018-09-19 22:04:11 -04:00
commit 40d9de46d6
124 changed files with 3824 additions and 2188 deletions

44
.github/stale.yml vendored Normal file
View file

@ -0,0 +1,44 @@
# Default values: https://probot.github.io/apps/stale/#installation
daysUntilStale: 180
exemptLabels:
- C-tracking-issue # keep tracking issues open
- C-feature-request # keep feature requests open (at least for now)
- "Feature accepted" # keep accepted features
staleLabel: stale
markComment: >
As there hasn't been any activity here in over 6 months I've marked this as
stale and if no further activity happens for 7 days I will close it.
[I'm a bot](https://github.com/probot/stale) so this may be in error! If this
issue should remain open, could someone (the author, a team member, or any
interested party) please comment to that effect?
The team would be especially grateful if such a comment included details such
as:
* Is this still relevant?
* If so, what is blocking it?
* Is it known what could be done to help move this forward?
Thank you for contributing!
(The cargo team is currently evaluating the use of Stale bot, and using #6035
as the tracking issue to gather feedback.)
If you're reading this comment from the distant future, fear not if this
was closed automatically. If you believe it's still an issue please leave a
comment and a team member can reopen this issue. Opening a new issue is also
acceptable!
closeComment: >
As I didn't see any updates I'm going to close this due to being stale.
Please see the previous comment for more information!
limitPerRun: 1 # 1 per hour, so 24 per day

View file

@ -18,13 +18,16 @@ matrix:
include:
- env: TARGET=x86_64-unknown-linux-gnu
ALT=i686-unknown-linux-gnu
if: branch != master OR type = pull_request
- env: TARGET=x86_64-apple-darwin
ALT=i686-apple-darwin
os: osx
if: branch != master OR type = pull_request
- env: TARGET=x86_64-unknown-linux-gnu
ALT=i686-unknown-linux-gnu
rust: beta
if: branch != master OR type = pull_request
# Minimum Rust supported channel. We enable these to make sure we
# continue to work on the advertised minimum Rust version.
@ -38,6 +41,7 @@ matrix:
- cargo +nightly generate-lockfile -Z minimal-versions
- cargo -V
- cargo test
if: branch != master OR type = pull_request
- env: TARGET=x86_64-unknown-linux-gnu
ALT=i686-unknown-linux-gnu
@ -48,6 +52,7 @@ matrix:
- cargo test
- cargo doc --no-deps
- (cd src/doc && mdbook build --dest-dir ../../target/doc)
if: branch != master OR type = pull_request
exclude:
- rust: stable

View file

@ -4,17 +4,27 @@ This document gives a high level overview of Cargo internals. You may
find it useful if you want to contribute to Cargo or if you are
interested in the inner workings of Cargo.
The purpose of Cargo is to formalize a canonical Rust workflow, by automating
the standard tasks associated with distributing software. Cargo simplifies
structuring a new project, adding dependencies, writing and running unit tests,
and more.
## Subcommands
Cargo is organized as a set of `clap` subcommands. All subcommands live in
Cargo is a single binary composed of a set of [`clap`][] subcommands. All subcommands live in
`src/bin/cargo/commands` directory. `src/bin/cargo/main.rs` is the entry point.
A typical subcommand, such as `src/bin/cargo/commands/build.rs`, parses command line
options, reads the configuration files, discovers the Cargo project in
the current directory and delegates the actual implementation to one
Each subcommand, such as `src/bin/cargo/commands/build.rs`, has its own API
interface, similarly to Git's, parsing command line options, reading the
configuration files, discovering the Cargo project in the current directory and
delegating the actual implementation to one
of the functions in `src/cargo/ops/mod.rs`. This short file is a good
place to find out about most of the things that Cargo can do.
Subcommands are designed to pipe to one another, and custom subcommands make
Cargo easy to extend and attach tools to.
[`clap`]: https://clap.rs/
## Important Data Structures

View file

@ -1,6 +1,6 @@
[package]
name = "cargo"
version = "0.30.0"
version = "0.32.0"
authors = ["Yehuda Katz <wycats@gmail.com>",
"Carl Lerche <me@carllerche.com>",
"Alex Crichton <alex@alexcrichton.com>"]
@ -18,11 +18,12 @@ path = "src/cargo/lib.rs"
[dependencies]
atty = "0.2"
crates-io = { path = "src/crates-io", version = "0.18" }
bytesize = "1.0"
crates-io = { path = "src/crates-io", version = "0.20" }
crossbeam-utils = "0.5"
crypto-hash = "0.3.1"
curl = "0.4.13"
env_logger = "0.5.4"
curl = { version = "0.4.17", features = ['http2'] }
env_logger = "0.5.11"
failure = "0.1.2"
filetime = "0.2"
flate2 = "1.0"

View file

@ -10,6 +10,7 @@ environment:
CFG_DISABLE_CROSS_TESTS: 1
install:
- if NOT defined APPVEYOR_PULL_REQUEST_NUMBER if "%APPVEYOR_REPO_BRANCH%" == "master" appveyor exit
- appveyor-retry appveyor DownloadFile https://win.rustup.rs/ -FileName rustup-init.exe
- rustup-init.exe -y --default-host x86_64-pc-windows-msvc --default-toolchain nightly
- set PATH=%PATH%;C:\Users\appveyor\.cargo\bin

View file

@ -34,7 +34,6 @@ Available unstable (nightly-only) flags:
-Z offline -- Offline mode that does not perform network requests
-Z unstable-options -- Allow the usage of unstable options such as --registry
-Z config-profile -- Read profiles from .cargo/config files
-Z compile-progress -- Display a progress bar while compiling
Run with 'cargo -Z [FLAG] [SUBCOMMAND]'"
);

View file

@ -147,8 +147,14 @@ pub trait AppExt: Sized {
a global configuration.",
).value_name("VCS")
.possible_values(&["git", "hg", "pijul", "fossil", "none"]),
)._arg(opt("bin", "Use a binary (application) template [default]"))
)
._arg(opt("bin", "Use a binary (application) template [default]"))
._arg(opt("lib", "Use a library template"))
._arg(
opt("edition", "Edition to set for the crate generated")
.possible_values(&["2015", "2018"])
.value_name("YEAR")
)
._arg(
opt(
"name",
@ -309,6 +315,7 @@ pub trait ArgMatchesExt {
),
target_rustdoc_args: None,
target_rustc_args: None,
local_rustdoc_args: None,
export_dir: None,
};
Ok(opts)
@ -339,6 +346,7 @@ pub trait ArgMatchesExt {
self._is_present("lib"),
self.value_of_path("path", config).unwrap(),
self._value_of("name").map(|s| s.to_string()),
self._value_of("edition").map(|s| s.to_string()),
)
}

View file

@ -26,7 +26,7 @@ pub fn cli() -> App {
"Benchmark all tests",
"Benchmark only the specified bench target",
"Benchmark all benches",
"Benchmark all targets (default)",
"Benchmark all targets",
)
.arg(opt("no-run", "Compile, but don't run benchmarks"))
.arg_package_spec(
@ -78,7 +78,6 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let ops = TestOptions {
no_run: args.is_present("no-run"),
no_fail_fast: args.is_present("no-fail-fast"),
only_doc: false,
compile_opts,
};

View file

@ -22,7 +22,7 @@ pub fn cli() -> App {
"Build all tests",
"Build only the specified bench target",
"Build all benches",
"Build all targets (lib and bin targets by default)",
"Build all targets",
)
.arg_release("Build artifacts in release mode, with optimizations")
.arg_features()

View file

@ -21,7 +21,7 @@ pub fn cli() -> App {
"Check all tests",
"Check only the specified bench target",
"Check all benches",
"Check all targets (lib and bin targets by default)",
"Check all targets",
)
.arg_release("Check artifacts in release mode, with optimizations")
.arg(opt("profile", "Profile to build the selected target for").value_name("PROFILE"))

View file

@ -51,7 +51,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
deps: !args.is_present("no-deps"),
};
let mut compile_opts = args.compile_options(config, mode)?;
compile_opts.target_rustdoc_args = if args.is_present("document-private-items") {
compile_opts.local_rustdoc_args = if args.is_present("document-private-items") {
Some(vec!["--document-private-items".to_string()])
} else {
None

View file

@ -21,7 +21,7 @@ pub fn cli() -> App {
"Fix all tests",
"Fix only the specified bench target",
"Fix all benches",
"Fix all targets (lib and bin targets by default)",
"Fix all targets (default)",
)
.arg_release("Fix artifacts in release mode, with optimizations")
.arg(opt("profile", "Profile to build the selected target for").value_name("PROFILE"))
@ -73,7 +73,7 @@ pub fn cli() -> App {
)
.after_help(
"\
This Cargo subcommmand will automatically take rustc's suggestions from
This Cargo subcommand will automatically take rustc's suggestions from
diagnostics like warnings and apply them to your source code. This is intended
to help automate tasks that rustc itself already knows how to tell you to fix!
The `cargo fix` subcommand is also being developed for the Rust 2018 edition

View file

@ -75,6 +75,11 @@ continuous integration systems.",
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let mut compile_opts = args.compile_options(config, CompileMode::Build)?;
// for `cargo-install` we want to use what the user specified via `--target` and ignore what's
// in `.cargo/config` and what the environment says
compile_opts.build_config.requested_target = args.target();
compile_opts.build_config.release = !args.is_present("debug");
let krates = args.values_of("crate")

View file

@ -19,7 +19,7 @@ pub fn cli() -> App {
"Build all tests",
"Build only the specified bench target",
"Build all benches",
"Build all targets (lib and bin targets by default)",
"Build all targets",
)
.arg_release("Build artifacts in release mode, with optimizations")
.arg(opt("profile", "Profile to build the selected target for").value_name("PROFILE"))

View file

@ -23,7 +23,7 @@ pub fn cli() -> App {
"Build all tests",
"Build only the specified bench target",
"Build all benches",
"Build all targets (default)",
"Build all targets",
)
.arg_release("Build artifacts in release mode, with optimizations")
.arg_features()

View file

@ -1,6 +1,6 @@
use command_prelude::*;
use cargo::ops;
use cargo::ops::{self, CompileFilter};
pub fn cli() -> App {
subcommand("test")
@ -27,7 +27,7 @@ pub fn cli() -> App {
"Test all tests",
"Test only the specified bench target",
"Test all benches",
"Test all targets (default)",
"Test all targets",
)
.arg(opt("doc", "Test only this library's documentation"))
.arg(opt("no-run", "Compile, but don't run tests"))
@ -92,8 +92,12 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let ws = args.workspace(config)?;
let mut compile_opts = args.compile_options(config, CompileMode::Test)?;
let doc = args.is_present("doc");
if doc {
if let CompileFilter::Only { .. } = compile_opts.filter {
return Err(CliError::new(format_err!("Can't mix --doc with other target selecting options"), 101))
}
compile_opts.build_config.mode = CompileMode::Doctest;
compile_opts.filter = ops::CompileFilter::new(
true,
@ -112,7 +116,6 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
let ops = ops::TestOptions {
no_run: args.is_present("no-run"),
no_fail_fast: args.is_present("no-fail-fast"),
only_doc: doc,
compile_opts,
};

View file

@ -16,6 +16,8 @@ pub struct BuildConfig {
pub mode: CompileMode,
/// Whether to print std output in json format (for machine reading)
pub message_format: MessageFormat,
/// Force cargo to do a full rebuild and treat each target as changed.
pub force_rebuild: bool,
/// Output a build plan to stdout instead of actually compiling.
pub build_plan: bool,
/// Use Cargo itself as the wrapper around rustc, only used for `cargo fix`
@ -79,6 +81,7 @@ impl BuildConfig {
release: false,
mode,
message_format: MessageFormat::Human,
force_rebuild: false,
build_plan: false,
cargo_as_rustc_wrapper: false,
extra_rustc_env: Vec::new(),

View file

@ -1,11 +1,11 @@
use std::collections::HashMap;
use std::env;
use std::path::{Path, PathBuf};
use std::str::{self, FromStr};
use std::str;
use core::profiles::Profiles;
use core::{Dependency, Workspace};
use core::{Package, PackageId, PackageSet, Resolve};
use core::{PackageId, PackageSet, Resolve};
use util::errors::CargoResult;
use util::{profile, Cfg, CfgExpr, Config, Rustc};
@ -24,12 +24,8 @@ pub struct BuildContext<'a, 'cfg: 'a> {
pub resolve: &'a Resolve,
pub profiles: &'a Profiles,
pub build_config: &'a BuildConfig,
/// This is a workaround to carry the extra compiler args for either
/// `rustc` or `rustdoc` given on the command-line for the commands `cargo
/// rustc` and `cargo rustdoc`. These commands only support one target,
/// but we don't want the args passed to any dependencies, so we include
/// the `Unit` corresponding to the top-level target.
pub extra_compiler_args: Option<(Unit<'a>, Vec<String>)>,
/// Extra compiler args for either `rustc` or `rustdoc`.
pub extra_compiler_args: HashMap<Unit<'a>, Vec<String>>,
pub packages: &'a PackageSet<'cfg>,
/// Information about the compiler
@ -51,7 +47,7 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
config: &'cfg Config,
build_config: &'a BuildConfig,
profiles: &'a Profiles,
extra_compiler_args: Option<(Unit<'a>, Vec<String>)>,
extra_compiler_args: HashMap<Unit<'a>, Vec<String>>,
) -> CargoResult<BuildContext<'a, 'cfg>> {
let incremental_env = match env::var("CARGO_INCREMENTAL") {
Ok(v) => Some(v == "1"),
@ -111,11 +107,6 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
platform.matches(name, info.cfg())
}
/// Gets a package for the given package id.
pub fn get_package(&self, id: &PackageId) -> CargoResult<&'a Package> {
self.packages.get(id)
}
/// Get the user-specified linker for a particular host or target
pub fn linker(&self, kind: Kind) -> Option<&Path> {
self.target_config(kind).linker.as_ref().map(|s| s.as_ref())
@ -200,24 +191,7 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> {
}
pub fn extra_args_for(&self, unit: &Unit<'a>) -> Option<&Vec<String>> {
if let Some((ref args_unit, ref args)) = self.extra_compiler_args {
if args_unit == unit {
return Some(args);
}
}
None
}
/// Return the list of filenames read by cargo to generate the BuildContext
/// (all Cargo.toml, etc).
pub fn inputs(&self) -> CargoResult<Vec<PathBuf>> {
let mut inputs = Vec::new();
for id in self.packages.package_ids() {
let pkg = self.get_package(id)?;
inputs.push(pkg.manifest_path().to_path_buf());
}
inputs.sort();
Ok(inputs)
self.extra_compiler_args.get(unit)
}
}
@ -393,16 +367,9 @@ fn env_args(
// ...including target.'cfg(...)'.rustflags
if let Some(target_cfg) = target_cfg {
if let Some(table) = config.get_table("target")? {
let cfgs = table.val.keys().filter_map(|t| {
if t.starts_with("cfg(") && t.ends_with(')') {
let cfg = &t[4..t.len() - 1];
CfgExpr::from_str(cfg).ok().and_then(|c| {
if c.matches(target_cfg) {
Some(t)
} else {
None
}
})
let cfgs = table.val.keys().filter_map(|key| {
if CfgExpr::matches_key(key, target_cfg) {
Some(key)
} else {
None
}

View file

@ -4,10 +4,9 @@ use std::ffi::OsStr;
use std::path::PathBuf;
use semver::Version;
use lazycell::LazyCell;
use core::{Feature, Package, PackageId, Target, TargetKind};
use util::{self, join_paths, process, CargoResult, Config, ProcessBuilder};
use core::{Edition, Package, PackageId, Target, TargetKind};
use util::{self, join_paths, process, CargoResult, CfgExpr, Config, ProcessBuilder};
use super::BuildContext;
pub struct Doctest {
@ -77,23 +76,31 @@ pub struct Compilation<'cfg> {
config: &'cfg Config,
rustc_process: ProcessBuilder,
target_runner: LazyCell<Option<(PathBuf, Vec<String>)>>,
target_runner: Option<(PathBuf, Vec<String>)>,
}
impl<'cfg> Compilation<'cfg> {
pub fn new<'a>(bcx: &BuildContext<'a, 'cfg>) -> CargoResult<Compilation<'cfg>> {
let mut rustc = bcx.rustc.process();
// If we're using cargo as a rustc wrapper then we're in a situation
// like `cargo fix`. For now just disregard the `RUSTC_WRAPPER` env var
// (which is typically set to `sccache` for now). Eventually we'll
// probably want to implement `RUSTC_WRAPPER` for `cargo fix`, but we'll
// leave that open as a bug for now.
let mut rustc = if bcx.build_config.cargo_as_rustc_wrapper {
let mut rustc = bcx.rustc.process_no_wrapper();
let prog = rustc.get_program().to_owned();
rustc.env("RUSTC", prog);
rustc.program(env::current_exe()?);
rustc
} else {
bcx.rustc.process()
};
for (k, v) in bcx.build_config.extra_rustc_env.iter() {
rustc.env(k, v);
}
for arg in bcx.build_config.extra_rustc_args.iter() {
rustc.arg(arg);
}
if bcx.build_config.cargo_as_rustc_wrapper {
let prog = rustc.get_program().to_owned();
rustc.env("RUSTC", prog);
rustc.program(env::current_exe()?);
}
let srv = bcx.build_config.rustfix_diagnostic_server.borrow();
if let Some(server) = &*srv {
server.configure(&mut rustc);
@ -116,15 +123,14 @@ impl<'cfg> Compilation<'cfg> {
rustc_process: rustc,
host: bcx.host_triple().to_string(),
target: bcx.target_triple().to_string(),
target_runner: LazyCell::new(),
target_runner: target_runner(&bcx)?,
})
}
/// See `process`.
pub fn rustc_process(&self, pkg: &Package, target: &Target) -> CargoResult<ProcessBuilder> {
let mut p = self.fill_env(self.rustc_process.clone(), pkg, true)?;
let manifest = pkg.manifest();
if manifest.features().is_enabled(Feature::edition()) {
if target.edition() != Edition::Edition2015 {
p.arg(format!("--edition={}", target.edition()));
}
Ok(p)
@ -133,8 +139,7 @@ impl<'cfg> Compilation<'cfg> {
/// See `process`.
pub fn rustdoc_process(&self, pkg: &Package, target: &Target) -> CargoResult<ProcessBuilder> {
let mut p = self.fill_env(process(&*self.config.rustdoc()?), pkg, false)?;
let manifest = pkg.manifest();
if manifest.features().is_enabled(Feature::edition()) {
if target.edition() != Edition::Edition2015 {
p.arg("-Zunstable-options");
p.arg(format!("--edition={}", target.edition()));
}
@ -150,11 +155,8 @@ impl<'cfg> Compilation<'cfg> {
self.fill_env(process(cmd), pkg, true)
}
fn target_runner(&self) -> CargoResult<&Option<(PathBuf, Vec<String>)>> {
self.target_runner.try_borrow_with(|| {
let key = format!("target.{}.runner", self.target);
Ok(self.config.get_path_and_args(&key)?.map(|v| v.val))
})
fn target_runner(&self) -> &Option<(PathBuf, Vec<String>)> {
&self.target_runner
}
/// See `process`.
@ -163,7 +165,7 @@ impl<'cfg> Compilation<'cfg> {
cmd: T,
pkg: &Package,
) -> CargoResult<ProcessBuilder> {
let builder = if let Some((ref runner, ref args)) = *self.target_runner()? {
let builder = if let Some((ref runner, ref args)) = *self.target_runner() {
let mut builder = process(runner);
builder.args(args);
builder.arg(cmd);
@ -257,3 +259,39 @@ fn pre_version_component(v: &Version) -> String {
ret
}
fn target_runner(bcx: &BuildContext) -> CargoResult<Option<(PathBuf, Vec<String>)>> {
let target = bcx.target_triple();
// try target.{}.runner
let key = format!("target.{}.runner", target);
if let Some(v) = bcx.config.get_path_and_args(&key)? {
return Ok(Some(v.val));
}
// try target.'cfg(...)'.runner
if let Some(target_cfg) = bcx.target_info.cfg() {
if let Some(table) = bcx.config.get_table("target")? {
let mut matching_runner = None;
for key in table.val.keys() {
if CfgExpr::matches_key(key, target_cfg) {
let key = format!("target.{}.runner", key);
if let Some(runner) = bcx.config.get_path_and_args(&key)? {
// more than one match, error out
if matching_runner.is_some() {
bail!("several matching instances of `target.'cfg(..)'.runner` \
in `.cargo/config`")
}
matching_runner = Some(runner.val);
}
}
}
return Ok(matching_runner);
}
}
Ok(None)
}

View file

@ -99,6 +99,7 @@ pub struct Context<'a, 'cfg: 'a> {
primary_packages: HashSet<&'a PackageId>,
unit_dependencies: HashMap<Unit<'a>, Vec<Unit<'a>>>,
files: Option<CompilationFiles<'a, 'cfg>>,
package_cache: HashMap<&'a PackageId, &'a Package>,
}
impl<'a, 'cfg> Context<'a, 'cfg> {
@ -133,6 +134,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
primary_packages: HashSet::new(),
unit_dependencies: HashMap::new(),
files: None,
package_cache: HashMap::new(),
})
}
@ -157,14 +159,15 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
// part of this, that's all done next as part of the `execute`
// function which will run everything in order with proper
// parallelism.
super::compile(&mut self, &mut queue, &mut plan, unit, exec)?;
let force_rebuild = self.bcx.build_config.force_rebuild;
super::compile(&mut self, &mut queue, &mut plan, unit, exec, force_rebuild)?;
}
// Now that we've figured out everything that we're going to do, do it!
queue.execute(&mut self, &mut plan)?;
if build_plan {
plan.set_inputs(self.bcx.inputs()?);
plan.set_inputs(self.inputs()?);
plan.output_plan();
}
@ -325,7 +328,12 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
};
self.primary_packages.extend(units.iter().map(|u| u.pkg.package_id()));
build_unit_dependencies(units, self.bcx, &mut self.unit_dependencies)?;
build_unit_dependencies(
units,
self.bcx,
&mut self.unit_dependencies,
&mut self.package_cache,
)?;
self.build_used_in_plugin_map(units)?;
let files = CompilationFiles::new(
units,
@ -494,6 +502,25 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
pub fn is_primary_package(&self, unit: &Unit<'a>) -> bool {
self.primary_packages.contains(unit.pkg.package_id())
}
/// Gets a package for the given package id.
pub fn get_package(&self, id: &PackageId) -> CargoResult<&'a Package> {
self.package_cache.get(id)
.cloned()
.ok_or_else(|| format_err!("failed to find {}", id))
}
/// Return the list of filenames read by cargo to generate the BuildContext
/// (all Cargo.toml, etc).
pub fn inputs(&self) -> CargoResult<Vec<PathBuf>> {
let mut inputs = Vec::new();
for id in self.bcx.packages.package_ids() {
let pkg = self.get_package(id)?;
inputs.push(pkg.manifest_path().to_path_buf());
}
inputs.sort();
Ok(inputs)
}
}
#[derive(Default)]

View file

@ -15,46 +15,75 @@
//! (for example, with and without tests), so we actually build a dependency
//! graph of `Unit`s, which capture these properties.
use std::cell::RefCell;
use std::collections::{HashMap, HashSet};
use CargoResult;
use core::dependency::Kind as DepKind;
use core::profiles::ProfileFor;
use core::{Package, Target};
use core::{Package, Target, PackageId};
use core::package::Downloads;
use super::{BuildContext, CompileMode, Kind, Unit};
struct State<'a: 'tmp, 'cfg: 'a, 'tmp> {
bcx: &'tmp BuildContext<'a, 'cfg>,
deps: &'tmp mut HashMap<Unit<'a>, Vec<Unit<'a>>>,
pkgs: RefCell<&'tmp mut HashMap<&'a PackageId, &'a Package>>,
waiting_on_download: HashSet<&'a PackageId>,
downloads: Downloads<'a, 'cfg>,
}
pub fn build_unit_dependencies<'a, 'cfg>(
roots: &[Unit<'a>],
bcx: &BuildContext<'a, 'cfg>,
deps: &mut HashMap<Unit<'a>, Vec<Unit<'a>>>,
pkgs: &mut HashMap<&'a PackageId, &'a Package>,
) -> CargoResult<()> {
assert!(deps.is_empty(), "can only build unit deps once");
for unit in roots.iter() {
// Dependencies of tests/benches should not have `panic` set.
// We check the global test mode to see if we are running in `cargo
// test` in which case we ensure all dependencies have `panic`
// cleared, and avoid building the lib thrice (once with `panic`, once
// without, once for --test). In particular, the lib included for
// doctests and examples are `Build` mode here.
let profile_for = if unit.mode.is_any_test() || bcx.build_config.test() {
ProfileFor::TestDependency
} else {
ProfileFor::Any
};
deps_of(unit, bcx, deps, profile_for)?;
}
trace!("ALL UNIT DEPENDENCIES {:#?}", deps);
let mut state = State {
bcx,
deps,
pkgs: RefCell::new(pkgs),
waiting_on_download: HashSet::new(),
downloads: bcx.packages.enable_download()?,
};
connect_run_custom_build_deps(bcx, deps);
loop {
for unit in roots.iter() {
state.get(unit.pkg.package_id())?;
// Dependencies of tests/benches should not have `panic` set.
// We check the global test mode to see if we are running in `cargo
// test` in which case we ensure all dependencies have `panic`
// cleared, and avoid building the lib thrice (once with `panic`, once
// without, once for --test). In particular, the lib included for
// doctests and examples are `Build` mode here.
let profile_for = if unit.mode.is_any_test() || bcx.build_config.test() {
ProfileFor::TestDependency
} else {
ProfileFor::Any
};
deps_of(unit, &mut state, profile_for)?;
}
if state.waiting_on_download.len() > 0 {
state.finish_some_downloads()?;
state.deps.clear();
} else {
break
}
}
trace!("ALL UNIT DEPENDENCIES {:#?}", state.deps);
connect_run_custom_build_deps(&mut state);
Ok(())
}
fn deps_of<'a, 'cfg>(
fn deps_of<'a, 'cfg, 'tmp>(
unit: &Unit<'a>,
bcx: &BuildContext<'a, 'cfg>,
deps: &mut HashMap<Unit<'a>, Vec<Unit<'a>>>,
state: &mut State<'a, 'cfg, 'tmp>,
profile_for: ProfileFor,
) -> CargoResult<()> {
// Currently the `deps` map does not include `profile_for`. This should
@ -63,12 +92,12 @@ fn deps_of<'a, 'cfg>(
// `TestDependency`. `CustomBuild` should also be fine since if the
// requested unit's settings are the same as `Any`, `CustomBuild` can't
// affect anything else in the hierarchy.
if !deps.contains_key(unit) {
let unit_deps = compute_deps(unit, bcx, profile_for)?;
if !state.deps.contains_key(unit) {
let unit_deps = compute_deps(unit, state, profile_for)?;
let to_insert: Vec<_> = unit_deps.iter().map(|&(unit, _)| unit).collect();
deps.insert(*unit, to_insert);
state.deps.insert(*unit, to_insert);
for (unit, profile_for) in unit_deps {
deps_of(&unit, bcx, deps, profile_for)?;
deps_of(&unit, state, profile_for)?;
}
}
Ok(())
@ -78,63 +107,82 @@ fn deps_of<'a, 'cfg>(
/// for that package.
/// This returns a vec of `(Unit, ProfileFor)` pairs. The `ProfileFor`
/// is the profile type that should be used for dependencies of the unit.
fn compute_deps<'a, 'cfg>(
fn compute_deps<'a, 'cfg, 'tmp>(
unit: &Unit<'a>,
bcx: &BuildContext<'a, 'cfg>,
state: &mut State<'a, 'cfg, 'tmp>,
profile_for: ProfileFor,
) -> CargoResult<Vec<(Unit<'a>, ProfileFor)>> {
if unit.mode.is_run_custom_build() {
return compute_deps_custom_build(unit, bcx);
return compute_deps_custom_build(unit, state.bcx);
} else if unit.mode.is_doc() && !unit.mode.is_any_test() {
// Note: This does not include Doctest.
return compute_deps_doc(unit, bcx);
return compute_deps_doc(unit, state);
}
let bcx = state.bcx;
let id = unit.pkg.package_id();
let deps = bcx.resolve.deps(id);
let mut ret = deps.filter(|&(_id, deps)| {
assert!(!deps.is_empty());
deps.iter().any(|dep| {
// If this target is a build command, then we only want build
// dependencies, otherwise we want everything *other than* build
// dependencies.
if unit.target.is_custom_build() != dep.is_build() {
return false;
}
let deps = bcx.resolve.deps(id)
.filter(|&(_id, deps)| {
assert!(!deps.is_empty());
deps.iter().any(|dep| {
// If this target is a build command, then we only want build
// dependencies, otherwise we want everything *other than* build
// dependencies.
if unit.target.is_custom_build() != dep.is_build() {
return false;
}
// If this dependency is *not* a transitive dependency, then it
// only applies to test/example targets
if !dep.is_transitive() && !unit.target.is_test() && !unit.target.is_example()
&& !unit.mode.is_any_test()
{
return false;
}
// If this dependency is *not* a transitive dependency, then it
// only applies to test/example targets
if !dep.is_transitive() &&
!unit.target.is_test() &&
!unit.target.is_example() &&
!unit.mode.is_any_test()
{
return false;
}
// If this dependency is only available for certain platforms,
// make sure we're only enabling it for that platform.
if !bcx.dep_platform_activated(dep, unit.kind) {
return false;
}
// If this dependency is only available for certain platforms,
// make sure we're only enabling it for that platform.
if !bcx.dep_platform_activated(dep, unit.kind) {
return false;
}
// If the dependency is optional, then we're only activating it
// if the corresponding feature was activated
if dep.is_optional() && !bcx.resolve.features(id).contains(&*dep.name_in_toml()) {
return false;
}
// If the dependency is optional, then we're only activating it
// if the corresponding feature was activated
if dep.is_optional() &&
!bcx.resolve.features(id).contains(&*dep.name_in_toml())
{
return false;
}
// If we've gotten past all that, then this dependency is
// actually used!
true
})
}).filter_map(|(id, _)| match bcx.get_package(id) {
Ok(pkg) => pkg.targets().iter().find(|t| t.is_lib()).map(|t| {
let mode = check_or_build_mode(unit.mode, t);
let unit = new_unit(bcx, pkg, t, profile_for, unit.kind.for_target(t), mode);
Ok((unit, profile_for))
}),
Err(e) => Some(Err(e)),
})
.collect::<CargoResult<Vec<_>>>()?;
// If we've gotten past all that, then this dependency is
// actually used!
true
})
});
let mut ret = Vec::new();
for (id, _) in deps {
let pkg = match state.get(id)? {
Some(pkg) => pkg,
None => continue,
};
let lib = match pkg.targets().iter().find(|t| t.is_lib()) {
Some(t) => t,
None => continue,
};
let mode = check_or_build_mode(unit.mode, lib);
let unit = new_unit(
bcx,
pkg,
lib,
profile_for,
unit.kind.for_target(lib),
mode,
);
ret.push((unit, profile_for));
}
// If this target is a build script, then what we've collected so far is
// all we need. If this isn't a build script, then it depends on the
@ -221,10 +269,11 @@ fn compute_deps_custom_build<'a, 'cfg>(
}
/// Returns the dependencies necessary to document a package
fn compute_deps_doc<'a, 'cfg>(
fn compute_deps_doc<'a, 'cfg, 'tmp>(
unit: &Unit<'a>,
bcx: &BuildContext<'a, 'cfg>,
state: &mut State<'a, 'cfg, 'tmp>,
) -> CargoResult<Vec<(Unit<'a>, ProfileFor)>> {
let bcx = state.bcx;
let deps = bcx.resolve
.deps(unit.pkg.package_id())
.filter(|&(_id, deps)| {
@ -232,15 +281,17 @@ fn compute_deps_doc<'a, 'cfg>(
DepKind::Normal => bcx.dep_platform_activated(dep, unit.kind),
_ => false,
})
})
.map(|(id, _deps)| bcx.get_package(id));
});
// To document a library, we depend on dependencies actually being
// built. If we're documenting *all* libraries, then we also depend on
// the documentation of the library being built.
let mut ret = Vec::new();
for dep in deps {
let dep = dep?;
for (id, _deps) in deps {
let dep = match state.get(id)? {
Some(dep) => dep,
None => continue,
};
let lib = match dep.targets().iter().find(|t| t.is_lib()) {
Some(lib) => lib,
None => continue,
@ -288,7 +339,14 @@ fn maybe_lib<'a>(
) -> Option<(Unit<'a>, ProfileFor)> {
unit.pkg.targets().iter().find(|t| t.linkable()).map(|t| {
let mode = check_or_build_mode(unit.mode, t);
let unit = new_unit(bcx, unit.pkg, t, profile_for, unit.kind.for_target(t), mode);
let unit = new_unit(
bcx,
unit.pkg,
t,
profile_for,
unit.kind.for_target(t),
mode,
);
(unit, profile_for)
})
}
@ -373,10 +431,7 @@ fn new_unit<'a>(
///
/// Here we take the entire `deps` map and add more dependencies from execution
/// of one build script to execution of another build script.
fn connect_run_custom_build_deps<'a>(
bcx: &BuildContext,
deps: &mut HashMap<Unit<'a>, Vec<Unit<'a>>>,
) {
fn connect_run_custom_build_deps(state: &mut State) {
let mut new_deps = Vec::new();
{
@ -386,7 +441,7 @@ fn connect_run_custom_build_deps<'a>(
// have the build script as the key and the library would be in the
// value's set.
let mut reverse_deps = HashMap::new();
for (unit, deps) in deps.iter() {
for (unit, deps) in state.deps.iter() {
for dep in deps {
if dep.mode == CompileMode::RunCustomBuild {
reverse_deps.entry(dep)
@ -405,7 +460,7 @@ fn connect_run_custom_build_deps<'a>(
// `links`, then we depend on that package's build script! Here we use
// `dep_build_script` to manufacture an appropriate build script unit to
// depend on.
for unit in deps.keys().filter(|k| k.mode == CompileMode::RunCustomBuild) {
for unit in state.deps.keys().filter(|k| k.mode == CompileMode::RunCustomBuild) {
let reverse_deps = match reverse_deps.get(unit) {
Some(set) => set,
None => continue,
@ -413,13 +468,13 @@ fn connect_run_custom_build_deps<'a>(
let to_add = reverse_deps
.iter()
.flat_map(|reverse_dep| deps[reverse_dep].iter())
.flat_map(|reverse_dep| state.deps[reverse_dep].iter())
.filter(|other| {
other.pkg != unit.pkg &&
other.target.linkable() &&
other.pkg.manifest().links().is_some()
})
.filter_map(|other| dep_build_script(other, bcx).map(|p| p.0))
.filter_map(|other| dep_build_script(other, state.bcx).map(|p| p.0))
.collect::<HashSet<_>>();
if !to_add.is_empty() {
@ -430,6 +485,50 @@ fn connect_run_custom_build_deps<'a>(
// And finally, add in all the missing dependencies!
for (unit, new_deps) in new_deps {
deps.get_mut(&unit).unwrap().extend(new_deps);
state.deps.get_mut(&unit).unwrap().extend(new_deps);
}
}
impl<'a, 'cfg, 'tmp> State<'a, 'cfg, 'tmp> {
    /// Looks up the package for `id`, starting a download if it isn't
    /// resident yet.
    ///
    /// Returns `Ok(None)` while the package is still in flight; callers
    /// retry after `finish_some_downloads` completes some transfers.
    fn get(&mut self, id: &'a PackageId) -> CargoResult<Option<&'a Package>> {
        let mut pkgs = self.pkgs.borrow_mut();
        // Fast path: we already have this package cached.
        if let Some(pkg) = pkgs.get(id) {
            return Ok(Some(pkg));
        }
        // If the insert reports the id was already present, a download for
        // it is in progress — just report "not ready yet".
        if !self.waiting_on_download.insert(id) {
            return Ok(None);
        }
        // Kick off the download; it may complete immediately if the source
        // already has the package on disk.
        match self.downloads.start(id)? {
            Some(pkg) => {
                pkgs.insert(id, pkg);
                self.waiting_on_download.remove(id);
                Ok(Some(pkg))
            }
            None => Ok(None),
        }
    }

    /// Completes at least one downloading, maybe waiting for more to complete.
    ///
    /// Blocks the current thread until at least one crate finishes
    /// downloading. While the queue stays deep enough (5 or more pending)
    /// it keeps draining completions to "fill the network pipe"; once only
    /// a handful remain it returns so the caller can recompute the unit
    /// dependency graph and queue up more packages to download.
    fn finish_some_downloads(&mut self) -> CargoResult<()> {
        assert!(self.downloads.remaining() > 0);
        loop {
            let pkg = self.downloads.wait()?;
            self.waiting_on_download.remove(pkg.package_id());
            self.pkgs.borrow_mut().insert(pkg.package_id(), pkg);
            // Fewer than 5 left in flight: hand control back so more
            // downloads can be discovered and enqueued.
            if self.downloads.remaining() < 5 {
                return Ok(());
            }
        }
    }
}

View file

@ -14,6 +14,7 @@ use jobserver::{Acquired, HelperThread};
use core::profiles::Profile;
use core::{PackageId, Target, TargetKind};
use handle_error;
use util;
use util::{internal, profile, CargoResult, CargoResultExt, ProcessBuilder};
use util::{Config, DependencyQueue, Dirty, Fresh, Freshness};
use util::{Progress, ProgressStyle};
@ -237,9 +238,6 @@ impl<'a> JobQueue<'a> {
// currently a pretty big task. This is issue #5695.
let mut error = None;
let mut progress = Progress::with_style("Building", ProgressStyle::Ratio, cx.bcx.config);
if !cx.bcx.config.cli_unstable().compile_progress {
progress.disable();
}
let total = self.queue.len();
loop {
// Dequeue as much work as we can, learning about everything
@ -371,16 +369,7 @@ impl<'a> JobQueue<'a> {
opt_type += " + debuginfo";
}
let time_elapsed = {
let duration = cx.bcx.config.creation_time().elapsed();
let secs = duration.as_secs();
if secs >= 60 {
format!("{}m {:02}s", secs / 60, secs % 60)
} else {
format!("{}.{:02}s", secs, duration.subsec_nanos() / 10_000_000)
}
};
let time_elapsed = util::elapsed(cx.bcx.config.creation_time().elapsed());
if self.queue.is_empty() {
let message = format!(
@ -538,7 +527,7 @@ impl<'a> Key<'a> {
fn dependencies<'cfg>(&self, cx: &Context<'a, 'cfg>) -> CargoResult<Vec<Key<'a>>> {
let unit = Unit {
pkg: cx.bcx.get_package(self.pkg)?,
pkg: cx.get_package(self.pkg)?,
target: self.target,
profile: self.profile,
kind: self.kind,

View file

@ -10,11 +10,10 @@ use serde_json;
use core::manifest::TargetSourcePath;
use core::profiles::{Lto, Profile};
use core::shell::ColorChoice;
use core::{PackageId, Target};
use util::errors::{CargoResult, CargoResultExt, Internal};
use util::paths;
use util::{self, machine_message, Freshness, ProcessBuilder};
use util::{self, machine_message, Freshness, ProcessBuilder, process};
use util::{internal, join_paths, profile};
use self::build_plan::BuildPlan;
@ -129,6 +128,7 @@ fn compile<'a, 'cfg: 'a>(
plan: &mut BuildPlan,
unit: &Unit<'a>,
exec: &Arc<Executor>,
force_rebuild: bool,
) -> CargoResult<()> {
let bcx = cx.bcx;
let build_plan = bcx.build_config.build_plan;
@ -164,7 +164,7 @@ fn compile<'a, 'cfg: 'a>(
let dirty = work.then(link_targets(cx, unit, false)?).then(dirty);
let fresh = link_targets(cx, unit, true)?.then(fresh);
if exec.force_rebuild(unit) {
if exec.force_rebuild(unit) || force_rebuild {
freshness = Freshness::Dirty;
}
@ -175,7 +175,7 @@ fn compile<'a, 'cfg: 'a>(
// Be sure to compile all dependencies of this target as well.
for unit in cx.dep_targets(unit).iter() {
compile(cx, jobs, plan, unit, exec)?;
compile(cx, jobs, plan, unit, exec, false)?;
}
if build_plan {
plan.add(cx, unit)?;
@ -240,8 +240,6 @@ fn rustc<'a, 'cfg>(
.unwrap_or_else(|| cx.bcx.config.cwd())
.to_path_buf();
let should_capture_output = cx.bcx.config.cli_unstable().compile_progress;
return Ok(Work::new(move |state| {
// Only at runtime have we discovered what the extra -L and -l
// arguments are for native libraries, so we process those here. We
@ -291,12 +289,7 @@ fn rustc<'a, 'cfg>(
} else if build_plan {
state.build_plan(buildkey, rustc.clone(), outputs.clone());
} else {
let exec_result = if should_capture_output {
exec.exec_and_capture_output(rustc, &package_id, &target, mode, state)
} else {
exec.exec(rustc, &package_id, &target, mode)
};
exec_result
exec.exec_and_capture_output(rustc, &package_id, &target, mode, state)
.map_err(Internal::new)
.chain_err(|| format!("Could not compile `{}`.", name))?;
}
@ -590,7 +583,12 @@ fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult
rustdoc.arg("--crate-name").arg(&unit.target.crate_name());
add_path_args(bcx, unit, &mut rustdoc);
add_cap_lints(bcx, unit, &mut rustdoc);
add_color(bcx, &mut rustdoc);
let mut can_add_color_process = process(&*bcx.config.rustdoc()?);
can_add_color_process.args(&["--color", "never", "-V"]);
if bcx.rustc.cached_success(&can_add_color_process)? {
add_color(bcx, &mut rustdoc);
}
if unit.kind != Kind::Host {
if let Some(ref target) = bcx.build_config.requested_target {
@ -628,8 +626,6 @@ fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult
let package_id = unit.pkg.package_id().clone();
let target = unit.target.clone();
let should_capture_output = cx.bcx.config.cli_unstable().compile_progress;
Ok(Work::new(move |state| {
if let Some(output) = build_state.outputs.lock().unwrap().get(&key) {
for cfg in output.cfgs.iter() {
@ -648,10 +644,8 @@ fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult
&mut |line| json_stderr(line, &package_id, &target),
false,
).map(drop)
} else if should_capture_output {
state.capture_output(&rustdoc, false).map(drop)
} else {
rustdoc.exec()
state.capture_output(&rustdoc, false).map(drop)
};
exec_result.chain_err(|| format!("Could not document `{}`.", name))?;
Ok(())
@ -708,12 +702,9 @@ fn add_cap_lints(bcx: &BuildContext, unit: &Unit, cmd: &mut ProcessBuilder) {
}
fn add_color(bcx: &BuildContext, cmd: &mut ProcessBuilder) {
let capture_output = bcx.config.cli_unstable().compile_progress;
let shell = bcx.config.shell();
if capture_output || shell.color_choice() != ColorChoice::CargoAuto {
let color = if shell.supports_color() { "always" } else { "never" };
cmd.args(&["--color", color]);
}
let color = if shell.supports_color() { "always" } else { "never" };
cmd.args(&["--color", color]);
}
fn add_error_format(bcx: &BuildContext, cmd: &mut ProcessBuilder) {

View file

@ -28,7 +28,7 @@ struct Inner {
specified_req: bool,
kind: Kind,
only_match_name: bool,
rename: Option<InternedString>,
explicit_name_in_toml: Option<InternedString>,
optional: bool,
default_features: bool,
@ -73,7 +73,7 @@ impl ser::Serialize for Dependency {
uses_default_features: self.uses_default_features(),
features: self.features(),
target: self.platform(),
rename: self.rename().map(|s| s.as_str()),
rename: self.explicit_name_in_toml().map(|s| s.as_str()),
}.serialize(s)
}
}
@ -199,7 +199,7 @@ impl Dependency {
default_features: true,
specified_req: false,
platform: None,
rename: None,
explicit_name_in_toml: None,
}),
}
}
@ -229,7 +229,7 @@ impl Dependency {
/// foo = { version = "0.1", package = 'bar' }
/// ```
pub fn name_in_toml(&self) -> InternedString {
self.rename().unwrap_or(self.inner.name)
self.explicit_name_in_toml().unwrap_or(self.inner.name)
}
/// The name of the package that this `Dependency` depends on.
@ -285,8 +285,8 @@ impl Dependency {
///
/// If the `package` key is used in `Cargo.toml` then this returns the same
/// value as `name_in_toml`.
pub fn rename(&self) -> Option<InternedString> {
self.inner.rename
pub fn explicit_name_in_toml(&self) -> Option<InternedString> {
self.inner.explicit_name_in_toml
}
pub fn set_kind(&mut self, kind: Kind) -> &mut Dependency {
@ -330,8 +330,8 @@ impl Dependency {
self
}
pub fn set_rename(&mut self, rename: &str) -> &mut Dependency {
Rc::make_mut(&mut self.inner).rename = Some(InternedString::new(rename));
pub fn set_explicit_name_in_toml(&mut self, name: &str) -> &mut Dependency {
Rc::make_mut(&mut self.inner).explicit_name_in_toml = Some(InternedString::new(name));
self
}

View file

@ -85,6 +85,7 @@ impl FromStr for Edition {
}
}
#[derive(PartialEq)]
enum Status {
Stable,
Unstable,
@ -106,7 +107,7 @@ macro_rules! features {
$(
pub fn $feature() -> &'static Feature {
fn get(features: &Features) -> bool {
features.$feature
stab!($stab) == Status::Stable || features.$feature
}
static FEAT: Feature = Feature {
name: stringify!($feature),
@ -173,7 +174,7 @@ features! {
[unstable] alternative_registries: bool,
// Using editions
[unstable] edition: bool,
[stable] edition: bool,
// Renaming a package in the manifest via the `package` key
[unstable] rename_dependency: bool,
@ -317,7 +318,6 @@ pub struct CliUnstable {
pub package_features: bool,
pub advanced_env: bool,
pub config_profile: bool,
pub compile_progress: bool,
}
impl CliUnstable {
@ -354,7 +354,6 @@ impl CliUnstable {
"package-features" => self.package_features = true,
"advanced-env" => self.advanced_env = true,
"config-profile" => self.config_profile = true,
"compile-progress" => self.compile_progress = true,
_ => bail!("unknown `-Z` flag specified: {}", k),
}

View file

@ -1,19 +1,27 @@
use std::cell::{Ref, RefCell};
use std::collections::HashMap;
use std::cell::{Ref, RefCell, Cell};
use std::collections::{HashMap, HashSet};
use std::fmt;
use std::hash;
use std::mem;
use std::path::{Path, PathBuf};
use std::time::{Instant, Duration};
use bytesize::ByteSize;
use curl::easy::{Easy, HttpVersion};
use curl::multi::{Multi, EasyHandle};
use lazycell::LazyCell;
use semver::Version;
use serde::ser;
use toml;
use lazycell::LazyCell;
use core::{Dependency, Manifest, PackageId, SourceId, Target};
use core::{FeatureMap, SourceMap, Summary};
use core::source::MaybePackage;
use core::interning::InternedString;
use util::{internal, lev_distance, Config};
use util::errors::{CargoResult, CargoResultExt};
use ops;
use util::{self, internal, lev_distance, Config, Progress, ProgressStyle};
use util::errors::{CargoResult, CargoResultExt, HttpNot200};
use util::network::Retry;
/// Information about a package that is available somewhere in the file system.
///
@ -236,46 +244,491 @@ impl hash::Hash for Package {
}
}
#[derive(Debug)]
pub struct PackageSet<'cfg> {
    // Every package id in the resolve, each paired with a lazily-filled
    // slot that holds the `Package` once it has been downloaded.
    packages: HashMap<PackageId, LazyCell<Package>>,
    // Sources the ids were resolved from; consulted to start and finish
    // downloads.
    sources: RefCell<SourceMap<'cfg>>,
    config: &'cfg Config,
    // Shared libcurl session driving all concurrent downloads.
    multi: Multi,
    // Guard: at most one `Downloads` view (see `enable_download`) may be
    // alive at a time.
    downloading: Cell<bool>,
    // Whether HTTP/2 multiplexing was enabled via the `http.multiplexing`
    // config key.
    multiplexing: bool,
}
pub struct Downloads<'a, 'cfg: 'a> {
    // Parent set whose `LazyCell` slots get filled as crates finish.
    set: &'a PackageSet<'cfg>,
    // In-flight transfers, keyed by the token registered with libcurl.
    pending: HashMap<usize, (Download, EasyHandle)>,
    // Ids of the packages currently in `pending` (kept in lockstep; see
    // the assertion in `wait`).
    pending_ids: HashSet<PackageId>,
    // Completed `(token, result)` pairs reported by curl, drained by `wait`.
    results: Vec<(usize, CargoResult<()>)>,
    // Next token to hand out for a new transfer.
    next: usize,
    // Retry helper used to re-enqueue spurious network failures.
    retry: Retry<'cfg>,
    // Progress bar; wrapped in `Option` so `Drop` can `take()` it when
    // printing the final summary.
    progress: RefCell<Option<Progress<'cfg>>>,
    // Statistics feeding the final "Downloaded N crates" summary.
    downloads_finished: usize,
    downloaded_bytes: u64,
    // Size (bytes) and name of the largest crate downloaded so far.
    largest: (u64, String),
    // When this download session began.
    start: Instant,
    // Set once everything succeeded; `Drop` suppresses the summary if an
    // error cut the session short.
    success: bool,
}
struct Download {
    // Token this transfer is registered under with curl's `Multi` handle.
    token: usize,
    // The package being fetched.
    id: PackageId,
    // Bytes received so far, appended by the curl write callback.
    data: RefCell<Vec<u8>>,
    // URL the crate is being downloaded from.
    url: String,
    // Human-readable description printed in `Downloaded` status lines.
    descriptor: String,
    // Progress as last reported by curl: expected total bytes and bytes
    // received so far.
    total: Cell<u64>,
    current: Cell<u64>,
    // When this transfer was enqueued; feeds the remaining-time estimate.
    start: Instant,
}
impl<'cfg> PackageSet<'cfg> {
pub fn new(package_ids: &[PackageId], sources: SourceMap<'cfg>) -> PackageSet<'cfg> {
PackageSet {
pub fn new(
package_ids: &[PackageId],
sources: SourceMap<'cfg>,
config: &'cfg Config,
) -> CargoResult<PackageSet<'cfg>> {
// We've enabled the `http2` feature of `curl` in Cargo, so treat
// failures here as fatal as it would indicate a build-time problem.
//
// Note that the multiplexing support is pretty new so we're having it
// off-by-default temporarily.
//
// Also note that pipelining is disabled as curl authors have indicated
// that it's buggy, and we've empirically seen that it's buggy with HTTP
// proxies.
let mut multi = Multi::new();
let multiplexing = config.get::<Option<bool>>("http.multiplexing")?
.unwrap_or(false);
multi.pipelining(false, multiplexing)
.chain_err(|| "failed to enable multiplexing/pipelining in curl")?;
// let's not flood crates.io with connections
multi.set_max_host_connections(2)?;
Ok(PackageSet {
packages: package_ids
.iter()
.map(|id| (id.clone(), LazyCell::new()))
.collect(),
sources: RefCell::new(sources),
}
config,
multi,
downloading: Cell::new(false),
multiplexing,
})
}
pub fn package_ids<'a>(&'a self) -> Box<Iterator<Item = &'a PackageId> + 'a> {
Box::new(self.packages.keys())
}
pub fn get(&self, id: &PackageId) -> CargoResult<&Package> {
let slot = self.packages
.get(id)
.ok_or_else(|| internal(format!("couldn't find `{}` in package set", id)))?;
if let Some(pkg) = slot.borrow() {
return Ok(pkg);
pub fn enable_download<'a>(&'a self) -> CargoResult<Downloads<'a, 'cfg>> {
assert!(!self.downloading.replace(true));
Ok(Downloads {
start: Instant::now(),
set: self,
next: 0,
pending: HashMap::new(),
pending_ids: HashSet::new(),
results: Vec::new(),
retry: Retry::new(self.config)?,
progress: RefCell::new(Some(Progress::with_style(
"Downloading",
ProgressStyle::Ratio,
self.config,
))),
downloads_finished: 0,
downloaded_bytes: 0,
largest: (0, String::new()),
success: false,
})
}
pub fn get_one(&self, id: &PackageId) -> CargoResult<&Package> {
Ok(self.get_many(Some(id))?.remove(0))
}
pub fn get_many<'a>(&self, ids: impl IntoIterator<Item = &'a PackageId>)
-> CargoResult<Vec<&Package>>
{
let mut pkgs = Vec::new();
let mut downloads = self.enable_download()?;
for id in ids {
pkgs.extend(downloads.start(id)?);
}
let mut sources = self.sources.borrow_mut();
let source = sources
.get_mut(id.source_id())
.ok_or_else(|| internal(format!("couldn't find source for `{}`", id)))?;
let pkg = source
.download(id)
.chain_err(|| format_err!("unable to get packages from source"))?;
assert!(slot.fill(pkg).is_ok());
Ok(slot.borrow().unwrap())
while downloads.remaining() > 0 {
pkgs.push(downloads.wait()?);
}
downloads.success = true;
Ok(pkgs)
}
pub fn sources(&self) -> Ref<SourceMap<'cfg>> {
self.sources.borrow()
}
}
impl<'a, 'cfg> Downloads<'a, 'cfg> {
/// Starts to download the package for the `id` specified.
///
/// Returns `None` if the package is queued up for download and will
/// eventually be returned from `wait`. Returns `Some(pkg)` if the
/// package is ready and doesn't need to be downloaded.
pub fn start(&mut self, id: &PackageId) -> CargoResult<Option<&'a Package>> {
    // First up see if we've already cached this package, in which case
    // there's nothing to do.
    let slot = self.set.packages
        .get(id)
        .ok_or_else(|| internal(format!("couldn't find `{}` in package set", id)))?;
    if let Some(pkg) = slot.borrow() {
        return Ok(Some(pkg));
    }

    // Ask the original source for this `PackageId` for the corresponding
    // package. That may immediately come back and tell us that the package
    // is ready, or it could tell us that it needs to be downloaded.
    let mut sources = self.set.sources.borrow_mut();
    let source = sources
        .get_mut(id.source_id())
        .ok_or_else(|| internal(format!("couldn't find source for `{}`", id)))?;
    let pkg = source
        .download(id)
        .chain_err(|| format_err!("unable to get packages from source"))?;
    let (url, descriptor) = match pkg {
        MaybePackage::Ready(pkg) => {
            debug!("{} doesn't need a download", id);
            assert!(slot.fill(pkg).is_ok());
            return Ok(Some(slot.borrow().unwrap()))
        }
        MaybePackage::Download { url, descriptor } => (url, descriptor),
    };

    // Ok we're going to download this crate, so let's set up all our
    // internal state and hand off an `Easy` handle to our libcurl `Multi`
    // handle. This won't actually start the transfer, but later it'll
    // happen during `wait`.
    let token = self.next;
    self.next += 1;
    debug!("downloading {} as {}", id, token);
    assert!(self.pending_ids.insert(id.clone()));

    let mut handle = ops::http_handle(self.set.config)?;
    handle.get(true)?;
    handle.url(&url)?;
    handle.follow_location(true)?; // follow redirects

    // Enable HTTP/2 to be used as it'll allow true multiplexing which makes
    // downloads much faster. Currently Cargo requests the `http2` feature
    // of the `curl` crate which means it should always be built in, so
    // treat it as a fatal error if http/2 support isn't found.
    if self.set.multiplexing {
        handle.http_version(HttpVersion::V2)
            .chain_err(|| "failed to enable HTTP2, is curl not built right?")?;
    }

    // This is an option to `libcurl` which indicates that if there's a
    // bunch of parallel requests to the same host they all wait until the
    // pipelining status of the host is known. This means that we won't
    // initiate dozens of connections to crates.io, but rather only one.
    // Once the main one is opened we realized that pipelining is possible
    // and multiplexing is possible with static.crates.io. All in all this
    // reduces the number of connections done to a more manageable state.
    handle.pipewait(true)?;

    // Route received bytes into this download's buffer. The `Downloads`
    // itself is reached through a thread-local (see `mod tls`) because the
    // callback can't borrow `self`.
    handle.write_function(move |buf| {
        debug!("{} - {} bytes of data", token, buf.len());
        tls::with(|downloads| {
            if let Some(downloads) = downloads {
                downloads.pending[&token].0.data
                    .borrow_mut()
                    .extend_from_slice(buf);
            }
        });
        Ok(buf.len())
    })?;

    // Record byte counts reported by curl and refresh the progress bar;
    // returning `false` from the callback aborts the transfer.
    handle.progress(true)?;
    handle.progress_function(move |dl_total, dl_cur, _, _| {
        tls::with(|downloads| {
            let downloads = match downloads {
                Some(d) => d,
                None => return false,
            };
            let dl = &downloads.pending[&token].0;
            dl.total.set(dl_total as u64);
            dl.current.set(dl_cur as u64);
            downloads.tick(WhyTick::DownloadUpdate).is_ok()
        })
    })?;

    // If the progress bar isn't enabled then it may be awhile before the
    // first crate finishes downloading so we inform immediately that we're
    // downloading crates here.
    if self.downloads_finished == 0 &&
        self.pending.is_empty() &&
        !self.progress.borrow().as_ref().unwrap().is_enabled()
    {
        self.set.config.shell().status("Downloading", "crates ...")?;
    }

    let dl = Download {
        token,
        data: RefCell::new(Vec::new()),
        id: id.clone(),
        url,
        descriptor,
        total: Cell::new(0),
        current: Cell::new(0),
        start: Instant::now(),
    };
    self.enqueue(dl, handle)?;
    self.tick(WhyTick::DownloadStarted)?;

    Ok(None)
}
/// Returns the number of crates that are still downloading.
pub fn remaining(&self) -> usize {
    self.pending.len()
}
/// Blocks the current thread waiting for a package to finish downloading.
///
/// This method will wait for a previously enqueued package to finish
/// downloading and return a reference to it after it's done downloading.
///
/// # Panics
///
/// This function will panic if there are no remaining downloads.
pub fn wait(&mut self) -> CargoResult<&'a Package> {
    let (dl, data) = loop {
        // `pending` and `pending_ids` are maintained in lockstep.
        assert_eq!(self.pending.len(), self.pending_ids.len());
        let (token, result) = self.wait_for_curl()?;
        debug!("{} finished with {:?}", token, result);
        // Pull the finished transfer out of the pending tables, reclaiming
        // its buffered bytes and its easy handle from the `Multi` session.
        let (mut dl, handle) = self.pending.remove(&token)
            .expect("got a token for a non-in-progress transfer");
        let data = mem::replace(&mut *dl.data.borrow_mut(), Vec::new());
        let mut handle = self.set.multi.remove(handle)?;
        self.pending_ids.remove(&dl.id);
        // Check if this was a spurious error. If it was a spurious error
        // then we want to re-enqueue our request for another attempt and
        // then we wait for another request to finish.
        let ret = {
            self.retry.try(|| {
                result?;
                // Response code 0 is accepted for non-HTTP transports
                // (e.g. local file URLs don't report an HTTP status).
                let code = handle.response_code()?;
                if code != 200 && code != 0 {
                    let url = handle.effective_url()?.unwrap_or(&dl.url);
                    return Err(HttpNot200 {
                        code,
                        url: url.to_string(),
                    }.into())
                }
                Ok(())
            }).chain_err(|| {
                format!("failed to download from `{}`", dl.url)
            })?
        };
        // `Some(())` means the transfer succeeded; `None` means the error
        // was spurious, so put the download back and wait for another.
        match ret {
            Some(()) => break (dl, data),
            None => {
                self.pending_ids.insert(dl.id.clone());
                self.enqueue(dl, handle)?
            }
        }
    };

    // If the progress bar isn't enabled then we still want to provide some
    // semblance of progress of how we're downloading crates.
    if !self.progress.borrow().as_ref().unwrap().is_enabled() {
        self.set.config.shell().status("Downloaded", &dl.descriptor)?;
    }
    // Update summary statistics reported when `Downloads` is dropped.
    self.downloads_finished += 1;
    self.downloaded_bytes += dl.total.get();
    if dl.total.get() > self.largest.0 {
        self.largest = (dl.total.get(), dl.id.name().to_string());
    }

    // We're about to synchronously extract the crate below. While we're
    // doing that our download progress won't actually be updated, nor do we
    // have a great view into the progress of the extraction. Let's prepare
    // the user for this CPU-heavy step if it looks like it'll take some
    // time to do so.
    if dl.total.get() < ByteSize::kb(400).0 {
        self.tick(WhyTick::DownloadFinished)?;
    } else {
        self.tick(WhyTick::Extracting(&dl.id.name()))?;
    }

    // Inform the original source that the download is finished which
    // should allow us to actually get the package and fill it in now.
    let mut sources = self.set.sources.borrow_mut();
    let source = sources
        .get_mut(dl.id.source_id())
        .ok_or_else(|| internal(format!("couldn't find source for `{}`", dl.id)))?;
    let pkg = source.finish_download(&dl.id, data)?;
    let slot = &self.set.packages[&dl.id];
    assert!(slot.fill(pkg).is_ok());
    Ok(slot.borrow().unwrap())
}
/// Registers `dl`'s easy handle with the shared `Multi` session and
/// records the transfer as pending under its token.
fn enqueue(&mut self, dl: Download, handle: Easy) -> CargoResult<()> {
    let token = dl.token;
    let mut tracked = self.set.multi.add(handle)?;
    // Tag the handle so curl's completion messages can be routed back to
    // this download.
    tracked.set_token(token)?;
    self.pending.insert(token, (dl, tracked));
    Ok(())
}
/// Drives the libcurl `Multi` session until one transfer finishes, then
/// returns that transfer's `(token, result)` pair.
///
/// Blocks (via `Multi::wait`) whenever no transfer has completed yet.
fn wait_for_curl(&mut self) -> CargoResult<(usize, CargoResult<()>)> {
    // This is the main workhorse loop. We use libcurl's portable `wait`
    // method to actually perform blocking. This isn't necessarily too
    // efficient in terms of fd management, but we should only be juggling
    // a few anyway.
    //
    // Here we start off by asking the `multi` handle to do some work via
    // the `perform` method. This will actually do I/O work (nonblocking)
    // and attempt to make progress. Afterwards we ask about the `messages`
    // contained in the handle which will inform us if anything has finished
    // transferring.
    //
    // If we've got a finished transfer after all that work we break out
    // and process the finished transfer at the end. Otherwise we need to
    // actually block waiting for I/O to happen, which we achieve with the
    // `wait` method on `multi`.
    loop {
        // `perform` is run inside `tls::set` so the write/progress
        // callbacks installed in `start` can reach this `Downloads`.
        let n = tls::set(self, || {
            self.set.multi.perform()
                .chain_err(|| "failed to perform http requests")
        })?;
        debug!("handles remaining: {}", n);
        let results = &mut self.results;
        let pending = &self.pending;
        self.set.multi.messages(|msg| {
            let token = msg.token().expect("failed to read token");
            let handle = &pending[&token].1;
            if let Some(result) = msg.result_for(&handle) {
                results.push((token, result.map_err(|e| e.into())));
            } else {
                debug!("message without a result (?)");
            }
        });

        if let Some(pair) = results.pop() {
            break Ok(pair)
        }
        // Nothing finished yet; there must be in-flight transfers left to
        // wait on, otherwise we'd block forever.
        assert!(!self.pending.is_empty());
        self.set.multi.wait(&mut [], Duration::new(60, 0))
            .chain_err(|| "failed to wait on curl `Multi`")?;
    }
}
/// Refreshes the download progress bar; `why` describes what prompted
/// the refresh and shapes the message.
fn tick(&self, why: WhyTick) -> CargoResult<()> {
    let mut progress = self.progress.borrow_mut();
    let progress = progress.as_mut().unwrap();
    // Byte-level updates arrive very frequently from curl's progress
    // callback, so they are rate-limited by the progress bar itself.
    if let WhyTick::DownloadUpdate = why {
        if !progress.update_allowed() {
            return Ok(())
        }
    }
    let mut msg = format!("{} crates", self.pending.len());
    match why {
        WhyTick::Extracting(krate) => {
            msg.push_str(&format!(", extracting {} ...", krate));
        }
        _ => {
            let mut dur = Duration::new(0, 0);
            let mut remaining = 0;
            for (dl, _) in self.pending.values() {
                dur += dl.start.elapsed();
                // If the total/current look weird just throw out the data
                // point, sounds like curl has more to learn before we have
                // the true information.
                if dl.total.get() >= dl.current.get() {
                    remaining += dl.total.get() - dl.current.get();
                }
            }
            // Only show a byte estimate once downloads have been running
            // long enough (cumulatively) for the numbers to mean something.
            if remaining > 0 && dur > Duration::from_millis(500) {
                msg.push_str(&format!(", remaining bytes: {}", ByteSize(remaining)));
            }
        }
    }
    progress.print_now(&msg)
}
}
/// Reason a progress-bar refresh (`Downloads::tick`) was requested.
enum WhyTick<'a> {
    DownloadStarted,
    DownloadUpdate,
    DownloadFinished,
    /// A large crate is about to be synchronously extracted; carries the
    /// crate name to show in the progress message.
    Extracting(&'a str),
}
impl<'a, 'cfg> Drop for Downloads<'a, 'cfg> {
    fn drop(&mut self) {
        // However we exit, release the parent set's "download in progress"
        // flag so `enable_download` may be called again.
        self.set.downloading.set(false);
        let progress = self.progress.get_mut().take().unwrap();
        // A summary is only worth printing when the progress bar was in use
        // (without it, per-crate `Downloaded` lines were already emitted),
        // at least one crate actually finished, and no error interrupted
        // the session.
        if !progress.is_enabled() || self.downloads_finished == 0 || !self.success {
            return;
        }
        let mut status = format!(
            "{} crates ({}) in {}",
            self.downloads_finished,
            ByteSize(self.downloaded_bytes),
            util::elapsed(self.start.elapsed())
        );
        // Call out an unusually large crate, if any crossed the 1 MB mark.
        if self.largest.0 > ByteSize::mb(1).0 {
            status.push_str(&format!(
                " (largest was `{}` at {})",
                self.largest.1,
                ByteSize(self.largest.0),
            ));
        }
        // Shell errors during drop are deliberately ignored.
        drop(self.set.config.shell().status("Downloaded", status));
    }
}
// Thread-local pointer to the active `Downloads`, letting curl's C
// callbacks (installed in `Downloads::start`) reach it without borrowing
// `self` across the FFI boundary. The pointer is stored as a `usize`,
// with 0 meaning "no active `Downloads`".
mod tls {
    use std::cell::Cell;

    use super::Downloads;

    thread_local!(static PTR: Cell<usize> = Cell::new(0));

    // Runs `f` with the currently installed `Downloads`, or `None` if no
    // `tls::set` call is active on this thread.
    pub(crate) fn with<R>(f: impl FnOnce(Option<&Downloads>) -> R) -> R {
        let ptr = PTR.with(|p| p.get());
        if ptr == 0 {
            f(None)
        } else {
            // SAFETY(review): the pointer was stored by `set` below from a
            // live `&Downloads`, and `set` restores the previous value
            // before that borrow ends — assumes `with` is only called
            // within the dynamic extent of `set` (as the curl callbacks
            // are); TODO confirm no other entry points exist.
            unsafe {
                f(Some(&*(ptr as *const Downloads)))
            }
        }
    }

    // Installs `dl` as the thread's active `Downloads` for the duration of
    // `f`, restoring the previous value afterwards (even on panic, via the
    // `Reset` drop guard).
    pub(crate) fn set<R>(dl: &Downloads, f: impl FnOnce() -> R) -> R {
        // Drop guard restoring the saved pointer when it goes out of scope.
        struct Reset<'a, T: Copy + 'a>(&'a Cell<T>, T);

        impl<'a, T: Copy> Drop for Reset<'a, T> {
            fn drop(&mut self) {
                self.0.set(self.1);
            }
        }

        PTR.with(|p| {
            let _reset = Reset(p, p.get());
            p.set(dl as *const Downloads as usize);
            f()
        })
    }
}

View file

@ -37,6 +37,7 @@ pub trait Registry {
/// a `Source`. Each `Source` in the map has been updated (using network
/// operations if necessary) and is ready to be queried for packages.
pub struct PackageRegistry<'cfg> {
config: &'cfg Config,
sources: SourceMap<'cfg>,
// A list of sources which are considered "overrides" which take precedent
@ -81,6 +82,7 @@ impl<'cfg> PackageRegistry<'cfg> {
pub fn new(config: &'cfg Config) -> CargoResult<PackageRegistry<'cfg>> {
let source_config = SourceConfigMap::new(config)?;
Ok(PackageRegistry {
config,
sources: SourceMap::new(),
source_ids: HashMap::new(),
overrides: Vec::new(),
@ -92,9 +94,9 @@ impl<'cfg> PackageRegistry<'cfg> {
})
}
pub fn get(self, package_ids: &[PackageId]) -> PackageSet<'cfg> {
pub fn get(self, package_ids: &[PackageId]) -> CargoResult<PackageSet<'cfg>> {
trace!("getting packages; sources={}", self.sources.len());
PackageSet::new(package_ids, self.sources)
PackageSet::new(package_ids, self.sources, self.config)
}
fn ensure_loaded(&mut self, namespace: &SourceId, kind: Kind) -> CargoResult<()> {
@ -354,7 +356,7 @@ To change the dependency graph via an override it's recommended to use the
`[replace]` feature of Cargo instead of the path override feature. This is
documented online at the url below for more information.
http://doc.crates.io/specifying-dependencies.html#overriding-dependencies
https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#overriding-dependencies
";
for dep in override_summary.dependencies() {

View file

@ -1,8 +1,8 @@
use std::collections::{HashMap, HashSet};
use core::{Dependency, PackageId};
use core::resolver::Context;
use super::types::ConflictReason;
use core::resolver::Context;
use core::{Dependency, PackageId};
pub(super) struct ConflictCache {
// `con_from_dep` is a cache of the reasons for each time we
@ -77,11 +77,17 @@ impl ConflictCache {
/// `dep` is known to be unresolvable if
/// all the `PackageId` entries are activated
pub fn insert(&mut self, dep: &Dependency, con: &HashMap<PackageId, ConflictReason>) {
let past = self.con_from_dep
let past = self
.con_from_dep
.entry(dep.clone())
.or_insert_with(Vec::new);
if !past.contains(con) {
trace!("{} adding a skip {:?}", dep.package_name(), con);
trace!(
"{} = \"{}\" adding a skip {:?}",
dep.package_name(),
dep.version_req(),
con
);
past.push(con.clone());
for c in con.keys() {
self.dep_from_pid

View file

@ -317,7 +317,12 @@ fn activate_deps_loop(
// It's our job here to backtrack, if possible, and find a
// different candidate to activate. If we can't find any
// candidates whatsoever then it's time to bail entirely.
trace!("{}[{}]>{} -- no candidates", parent.name(), cur, dep.package_name());
trace!(
"{}[{}]>{} -- no candidates",
parent.name(),
cur,
dep.package_name()
);
// Use our list of `conflicting_activations` to add to our
// global list of past conflicting activations, effectively
@ -337,7 +342,12 @@ fn activate_deps_loop(
past_conflicting_activations.insert(&dep, &conflicting_activations);
}
match find_candidate(&mut backtrack_stack, &parent, &conflicting_activations) {
match find_candidate(
&mut backtrack_stack,
&parent,
backtracked,
&conflicting_activations,
) {
Some((candidate, has_another, frame)) => {
// Reset all of our local variables used with the
// contents of `frame` to complete our backtrack.
@ -444,8 +454,7 @@ fn activate_deps_loop(
.clone()
.filter_map(|(_, (ref new_dep, _, _))| {
past_conflicting_activations.conflicting(&cx, new_dep)
})
.next()
}).next()
{
// If one of our deps is known unresolvable
// then we will not succeed.
@ -479,18 +488,14 @@ fn activate_deps_loop(
.iter()
.flat_map(|other| other.flatten())
// for deps related to us
.filter(|&(_, ref other_dep)|
known_related_bad_deps.contains(other_dep))
.filter_map(|(other_parent, other_dep)| {
.filter(|&(_, ref other_dep)| {
known_related_bad_deps.contains(other_dep)
}).filter_map(|(other_parent, other_dep)| {
past_conflicting_activations
.find_conflicting(
&cx,
&other_dep,
|con| con.contains_key(&pid)
)
.map(|con| (other_parent, con))
})
.next()
.find_conflicting(&cx, &other_dep, |con| {
con.contains_key(&pid)
}).map(|con| (other_parent, con))
}).next()
{
let rel = conflict.get(&pid).unwrap().clone();
@ -530,6 +535,7 @@ fn activate_deps_loop(
find_candidate(
&mut backtrack_stack.clone(),
&parent,
backtracked,
&conflicting_activations,
).is_none()
}
@ -821,6 +827,7 @@ fn compatible(a: &semver::Version, b: &semver::Version) -> bool {
fn find_candidate(
backtrack_stack: &mut Vec<BacktrackFrame>,
parent: &Summary,
backtracked: bool,
conflicting_activations: &HashMap<PackageId, ConflictReason>,
) -> Option<(Candidate, bool, BacktrackFrame)> {
while let Some(mut frame) = backtrack_stack.pop() {
@ -842,11 +849,20 @@ fn find_candidate(
// active in this back up we know that we're guaranteed to not actually
// make any progress. As a result if we hit this condition we can
// completely skip this backtrack frame and move on to the next.
if frame
.context_backup
.is_conflicting(Some(parent.package_id()), conflicting_activations)
{
continue;
if !backtracked {
if frame
.context_backup
.is_conflicting(Some(parent.package_id()), conflicting_activations)
{
trace!(
"{} = \"{}\" skip as not solving {}: {:?}",
frame.dep.package_name(),
frame.dep.version_req(),
parent.package_id(),
conflicting_activations
);
continue;
}
}
return Some((candidate, has_another, frame));

View file

@ -227,7 +227,7 @@ unable to verify that `{0}` is the same as when the lockfile was generated
let crate_name = to_target.crate_name();
let mut names = deps.iter()
.map(|d| d.rename().map(|s| s.as_str()).unwrap_or(&crate_name));
.map(|d| d.explicit_name_in_toml().map(|s| s.as_str()).unwrap_or(&crate_name));
let name = names.next().unwrap_or(&crate_name);
for n in names {
if n == name {

View file

@ -359,18 +359,51 @@ mod imp {
extern crate winapi;
use std::mem;
use std::ptr;
use self::winapi::um::fileapi::*;
use self::winapi::um::handleapi::*;
use self::winapi::um::processenv::*;
use self::winapi::um::winbase::*;
use self::winapi::um::wincon::*;
use self::winapi::um::winnt::*;
pub fn stderr_width() -> Option<usize> {
unsafe {
let stdout = GetStdHandle(STD_ERROR_HANDLE);
let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed();
if GetConsoleScreenBufferInfo(stdout, &mut csbi) == 0 {
if GetConsoleScreenBufferInfo(stdout, &mut csbi) != 0 {
return Some((csbi.srWindow.Right - csbi.srWindow.Left) as usize)
}
// On mintty/msys/cygwin based terminals, the above fails with
// INVALID_HANDLE_VALUE. Use an alternate method which works
// in that case as well.
let h = CreateFileA("CONOUT$\0".as_ptr() as *const CHAR,
GENERIC_READ | GENERIC_WRITE,
FILE_SHARE_READ | FILE_SHARE_WRITE,
ptr::null_mut(),
OPEN_EXISTING,
0,
ptr::null_mut()
);
if h == INVALID_HANDLE_VALUE {
return None;
}
Some((csbi.srWindow.Right - csbi.srWindow.Left) as usize)
let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed();
let rc = GetConsoleScreenBufferInfo(h, &mut csbi);
CloseHandle(h);
if rc != 0 {
let width = (csbi.srWindow.Right - csbi.srWindow.Left) as usize;
// Some terminals, such as mintty, always return 79 instead of
// the actual width. In that case, use a conservative value.
if width == 79 {
return Some(60);
} else {
return Some(width);
}
}
return None;
}
}
}

View file

@ -49,7 +49,10 @@ pub trait Source {
/// The download method fetches the full package for each name and
/// version specified.
fn download(&mut self, package: &PackageId) -> CargoResult<Package>;
fn download(&mut self, package: &PackageId) -> CargoResult<MaybePackage>;
fn finish_download(&mut self, package: &PackageId, contents: Vec<u8>)
-> CargoResult<Package>;
/// Generates a unique string which represents the fingerprint of the
/// current state of the source.
@ -74,6 +77,14 @@ pub trait Source {
}
}
pub enum MaybePackage {
Ready(Package),
Download {
url: String,
descriptor: String,
}
}
impl<'a, T: Source + ?Sized + 'a> Source for Box<T> {
/// Forwards to `Source::supports_checksums`
fn supports_checksums(&self) -> bool {
@ -111,10 +122,14 @@ impl<'a, T: Source + ?Sized + 'a> Source for Box<T> {
}
/// Forwards to `Source::download`
fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
(**self).download(id)
}
fn finish_download(&mut self, id: &PackageId, data: Vec<u8>) -> CargoResult<Package> {
(**self).finish_download(id, data)
}
/// Forwards to `Source::fingerprint`
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
(**self).fingerprint(pkg)
@ -126,6 +141,52 @@ impl<'a, T: Source + ?Sized + 'a> Source for Box<T> {
}
}
impl<'a, T: Source + ?Sized + 'a> Source for &'a mut T {
fn supports_checksums(&self) -> bool {
(**self).supports_checksums()
}
fn requires_precise(&self) -> bool {
(**self).requires_precise()
}
fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
(**self).query(dep, f)
}
fn fuzzy_query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> {
(**self).fuzzy_query(dep, f)
}
fn source_id(&self) -> &SourceId {
(**self).source_id()
}
fn replaced_source_id(&self) -> &SourceId {
(**self).replaced_source_id()
}
fn update(&mut self) -> CargoResult<()> {
(**self).update()
}
fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
(**self).download(id)
}
fn finish_download(&mut self, id: &PackageId, data: Vec<u8>) -> CargoResult<Package> {
(**self).finish_download(id, data)
}
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
(**self).fingerprint(pkg)
}
fn verify(&self, pkg: &PackageId) -> CargoResult<()> {
(**self).verify(pkg)
}
}
/// A `HashMap` of `SourceId` -> `Box<Source>`
#[derive(Default)]
pub struct SourceMap<'src> {

View file

@ -210,7 +210,11 @@ impl SourceId {
}
pub fn display_registry(&self) -> String {
format!("registry `{}`", self.url())
if self.is_default_registry() {
"crates.io index".to_string()
} else {
format!("`{}` index", url_display(self.url()))
}
}
/// Is this source from a filesystem path
@ -363,6 +367,18 @@ impl<'de> de::Deserialize<'de> for SourceId {
}
}
fn url_display(url: &Url) -> String {
if url.scheme() == "file" {
if let Ok(path) = url.to_file_path() {
if let Some(path_str) = path.to_str() {
return path_str.to_string();
}
}
}
url.as_str().to_string()
}
impl fmt::Display for SourceId {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match *self.inner {
@ -370,13 +386,15 @@ impl fmt::Display for SourceId {
kind: Kind::Path,
ref url,
..
} => fmt::Display::fmt(url, f),
} => write!(f, "{}", url_display(url)),
SourceIdInner {
kind: Kind::Git(ref reference),
ref url,
ref precise,
..
} => {
// Don't replace the URL display for git references,
// because those are kind of expected to be URLs.
write!(f, "{}", url)?;
if let Some(pretty) = reference.pretty_ref() {
write!(f, "?{}", pretty)?;
@ -397,12 +415,12 @@ impl fmt::Display for SourceId {
kind: Kind::LocalRegistry,
ref url,
..
} => write!(f, "registry `{}`", url),
} => write!(f, "registry `{}`", url_display(url)),
SourceIdInner {
kind: Kind::Directory,
ref url,
..
} => write!(f, "dir {}", url),
} => write!(f, "dir {}", url_display(url)),
}
}
}

View file

@ -15,6 +15,7 @@
#![cfg_attr(feature = "cargo-clippy", allow(wrong_self_convention))] // perhaps Rc should be special cased in Clippy?
extern crate atty;
extern crate bytesize;
extern crate clap;
#[cfg(target_os = "macos")]
extern crate core_foundation;

View file

@ -1,3 +1,4 @@
use std::collections::HashMap;
use std::fs;
use std::path::Path;
@ -51,7 +52,7 @@ pub fn clean(ws: &Workspace, opts: &CleanOptions) -> CargoResult<()> {
for spec in opts.spec.iter() {
// Translate the spec to a Package
let pkgid = resolve.query(spec)?;
let pkg = packages.get(pkgid)?;
let pkg = packages.get_one(pkgid)?;
// Generate all relevant `Unit` targets for this package
for target in pkg.targets() {
@ -97,7 +98,7 @@ pub fn clean(ws: &Workspace, opts: &CleanOptions) -> CargoResult<()> {
opts.config,
&build_config,
profiles,
None,
HashMap::new(),
)?;
let mut cx = Context::new(config, &bcx)?;
cx.prepare_units(None, &units)?;

View file

@ -53,11 +53,13 @@ pub struct CompileOptions<'a> {
/// Filter to apply to the root package to select which targets will be
/// built.
pub filter: CompileFilter,
/// Extra arguments to be passed to rustdoc (for main crate and dependencies)
/// Extra arguments to be passed to rustdoc (single target only)
pub target_rustdoc_args: Option<Vec<String>>,
/// The specified target will be compiled with all the available arguments,
/// note that this only accounts for the *final* invocation of rustc
pub target_rustc_args: Option<Vec<String>>,
/// Extra arguments passed to all selected targets for rustdoc.
pub local_rustdoc_args: Option<Vec<String>>,
/// The directory to copy final artifacts to. Note that even if `out_dir` is
/// set, a copy of artifacts still could be found a `target/(debug\release)`
/// as usual.
@ -80,6 +82,7 @@ impl<'a> CompileOptions<'a> {
},
target_rustdoc_args: None,
target_rustc_args: None,
local_rustdoc_args: None,
export_dir: None,
})
}
@ -219,6 +222,7 @@ pub fn compile_ws<'a>(
ref filter,
ref target_rustdoc_args,
ref target_rustc_args,
ref local_rustdoc_args,
ref export_dir,
} = *options;
@ -239,15 +243,19 @@ pub fn compile_ws<'a>(
let resolve = ops::resolve_ws_with_method(ws, source, method, &specs)?;
let (packages, resolve_with_overrides) = resolve;
let to_builds = specs
.iter()
.map(|p| {
let pkgid = p.query(resolve_with_overrides.iter())?;
let p = packages.get(pkgid)?;
p.manifest().print_teapot(ws.config());
Ok(p)
})
let to_build_ids = specs.iter()
.map(|s| s.query(resolve_with_overrides.iter()))
.collect::<CargoResult<Vec<_>>>()?;
let mut to_builds = packages.get_many(to_build_ids)?;
// The ordering here affects some error messages coming out of cargo, so
// let's be test and CLI friendly by always printing in the same order if
// there's an error.
to_builds.sort_by_key(|p| p.package_id());
for pkg in to_builds.iter() {
pkg.manifest().print_teapot(ws.config());
}
let (extra_args, extra_args_name) = match (target_rustc_args, target_rustdoc_args) {
(&Some(ref args), _) => (Some(args.clone()), "rustc"),
@ -265,8 +273,6 @@ pub fn compile_ws<'a>(
let profiles = ws.profiles();
profiles.validate_packages(&mut config.shell(), &packages)?;
let mut extra_compiler_args = None;
let units = generate_targets(
ws,
profiles,
@ -277,6 +283,7 @@ pub fn compile_ws<'a>(
build_config,
)?;
let mut extra_compiler_args = HashMap::new();
if let Some(args) = extra_args {
if units.len() != 1 {
bail!(
@ -286,7 +293,14 @@ pub fn compile_ws<'a>(
extra_args_name
);
}
extra_compiler_args = Some((units[0], args));
extra_compiler_args.insert(units[0], args);
}
if let Some(args) = local_rustdoc_args {
for unit in &units {
if unit.mode.is_doc() {
extra_compiler_args.insert(*unit, args.clone());
}
}
}
let ret = {
@ -434,6 +448,22 @@ impl CompileFilter {
}
}
/// A proposed target.
///
/// Proposed targets are later filtered into actual Units based on whether or
/// not the target requires its features to be present.
#[derive(Debug)]
struct Proposal<'a> {
pkg: &'a Package,
target: &'a Target,
/// Indicates whether or not all required features *must* be present. If
/// false, and the features are not available, then it will be silently
/// skipped. Generally, targets specified by name (`--bin foo`) are
/// required, all others can be silently skipped if features are missing.
requires_features: bool,
mode: CompileMode,
}
/// Generates all the base targets for the packages the user has requested to
/// compile. Dependencies for these targets are computed later in
/// `unit_dependencies`.
@ -521,13 +551,8 @@ fn generate_targets<'a>(
}
};
// Create a list of proposed targets. The `bool` value indicates
// whether or not all required features *must* be present. If false,
// and the features are not available, then it will be silently
// skipped. Generally, targets specified by name (`--bin foo`) are
// required, all others can be silently skipped if features are
// missing.
let mut proposals: Vec<(&Package, &Target, bool, CompileMode)> = Vec::new();
// Create a list of proposed targets.
let mut proposals: Vec<Proposal> = Vec::new();
match *filter {
CompileFilter::Default {
@ -535,22 +560,24 @@ fn generate_targets<'a>(
} => {
for pkg in packages {
let default = filter_default_targets(pkg.targets(), build_config.mode);
proposals.extend(default.into_iter().map(|target| {
(
*pkg,
target,
!required_features_filterable,
build_config.mode,
)
proposals.extend(default.into_iter().map(|target| Proposal {
pkg,
target,
requires_features: !required_features_filterable,
mode: build_config.mode,
}));
if build_config.mode == CompileMode::Test {
// Include doctest for lib.
if let Some(t) = pkg
.targets()
.iter()
.find(|t| t.is_lib() && t.doctested() && t.doctestable())
{
proposals.push((pkg, t, false, CompileMode::Doctest));
proposals.push(Proposal {
pkg,
target: t,
requires_features: false,
mode: CompileMode::Doctest,
});
}
}
}
@ -576,7 +603,12 @@ fn generate_targets<'a>(
pkg.name()
))?;
} else {
libs.push((*pkg, target, false, build_config.mode));
libs.push(Proposal {
pkg,
target,
requires_features: false,
mode: build_config.mode,
});
}
}
}
@ -590,6 +622,7 @@ fn generate_targets<'a>(
}
proposals.extend(libs);
}
// If --tests was specified, add all targets that would be
// generated by `cargo test`.
let test_filter = match *tests {
@ -647,8 +680,8 @@ fn generate_targets<'a>(
// Only include targets that are libraries or have all required
// features available.
let mut features_map = HashMap::new();
let mut units = Vec::new();
for (pkg, target, required, mode) in proposals {
let mut units = HashSet::new();
for Proposal { pkg, target, requires_features, mode} in proposals {
let unavailable_features = match target.required_features() {
Some(rf) => {
let features = features_map
@ -660,8 +693,8 @@ fn generate_targets<'a>(
};
if target.is_lib() || unavailable_features.is_empty() {
let unit = new_unit(pkg, target, mode);
units.push(unit);
} else if required {
units.insert(unit);
} else if requires_features {
let required_features = target.required_features().unwrap();
let quoted_required_features: Vec<String> = required_features
.iter()
@ -678,7 +711,7 @@ fn generate_targets<'a>(
}
// else, silently skip target.
}
Ok(units)
Ok(units.into_iter().collect())
}
fn resolve_all_features(
@ -736,14 +769,19 @@ fn list_rule_targets<'a>(
target_desc: &'static str,
is_expected_kind: fn(&Target) -> bool,
mode: CompileMode,
) -> CargoResult<Vec<(&'a Package, &'a Target, bool, CompileMode)>> {
) -> CargoResult<Vec<Proposal<'a>>> {
let mut result = Vec::new();
match *rule {
FilterRule::All => {
for pkg in packages {
for target in pkg.targets() {
if is_expected_kind(target) {
result.push((*pkg, target, false, mode));
result.push(Proposal {
pkg,
target,
requires_features: false,
mode,
});
}
}
}
@ -770,12 +808,17 @@ fn find_named_targets<'a>(
target_desc: &'static str,
is_expected_kind: fn(&Target) -> bool,
mode: CompileMode,
) -> CargoResult<Vec<(&'a Package, &'a Target, bool, CompileMode)>> {
) -> CargoResult<Vec<Proposal<'a>>> {
let mut result = Vec::new();
for pkg in packages {
for target in pkg.targets() {
if target.name() == target_name && is_expected_kind(target) {
result.push((*pkg, target, true, mode));
result.push(Proposal {
pkg,
target,
requires_features: true,
mode,
});
}
}
}

View file

@ -31,13 +31,10 @@ pub fn doc(ws: &Workspace, options: &DocOptions) -> CargoResult<()> {
)?;
let (packages, resolve_with_overrides) = resolve;
let pkgs = specs
.iter()
.map(|p| {
let pkgid = p.query(resolve_with_overrides.iter())?;
packages.get(pkgid)
})
let ids = specs.iter()
.map(|s| s.query(resolve_with_overrides.iter()))
.collect::<CargoResult<Vec<_>>>()?;
let pkgs = packages.get_many(ids)?;
let mut lib_names = HashMap::new();
let mut bin_names = HashMap::new();

View file

@ -27,13 +27,14 @@ pub fn fetch<'a>(
{
let mut fetched_packages = HashSet::new();
let mut deps_to_fetch = ws.members().map(|p| p.package_id()).collect::<Vec<_>>();
let mut to_download = Vec::new();
while let Some(id) = deps_to_fetch.pop() {
if !fetched_packages.insert(id) {
continue;
}
packages.get(id)?;
to_download.push(id.clone());
let deps = resolve.deps(id)
.filter(|&(_id, deps)| {
deps.iter()
@ -57,6 +58,7 @@ pub fn fetch<'a>(
.map(|(id, _deps)| id);
deps_to_fetch.extend(deps);
}
packages.get_many(&to_download)?;
}
Ok((resolve, packages))

View file

@ -12,6 +12,8 @@ use toml;
use core::{Dependency, Edition, Package, PackageIdSpec, Source, SourceId};
use core::{PackageId, Workspace};
use core::source::SourceMap;
use core::package::PackageSet;
use core::compiler::{DefaultExecutor, Executor};
use ops::{self, CompileFilter};
use sources::{GitSource, PathSource, SourceConfigMap};
@ -499,22 +501,28 @@ where
source.source_id(),
)?;
let deps = source.query_vec(&dep)?;
match deps.iter().map(|p| p.package_id()).max() {
Some(pkgid) => {
let pkg = source.download(pkgid)?;
Ok((pkg, Box::new(source)))
}
let pkgid = match deps.iter().map(|p| p.package_id()).max() {
Some(pkgid) => pkgid,
None => {
let vers_info = vers.map(|v| format!(" with version `{}`", v))
.unwrap_or_default();
Err(format_err!(
bail!(
"could not find `{}` in {}{}",
name,
source.source_id(),
vers_info
))
)
}
}
};
let pkg = {
let mut map = SourceMap::new();
map.insert(Box::new(&mut source));
PackageSet::new(&[pkgid.clone()], map, config)?
.get_one(&pkgid)?
.clone()
};
Ok((pkg, Box::new(source)))
}
None => {
let candidates = list_all(&mut source)?;

View file

@ -30,6 +30,7 @@ pub struct NewOptions {
/// Absolute path to the directory for the new project
pub path: PathBuf,
pub name: Option<String>,
pub edition: Option<String>,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
@ -65,6 +66,7 @@ struct MkOptions<'a> {
name: &'a str,
source_files: Vec<SourceFileInformation>,
bin: bool,
edition: Option<&'a str>,
}
impl NewOptions {
@ -74,6 +76,7 @@ impl NewOptions {
lib: bool,
path: PathBuf,
name: Option<String>,
edition: Option<String>,
) -> CargoResult<NewOptions> {
let kind = match (bin, lib) {
(true, true) => bail!("can't specify both lib and binary outputs"),
@ -87,6 +90,7 @@ impl NewOptions {
kind,
path,
name,
edition,
};
Ok(opts)
}
@ -321,6 +325,7 @@ pub fn new(opts: &NewOptions, config: &Config) -> CargoResult<()> {
name,
source_files: vec![plan_new_source_file(opts.kind.is_bin(), name.to_string())],
bin: opts.kind.is_bin(),
edition: opts.edition.as_ref().map(|s| &**s),
};
mk(config, &mkopts).chain_err(|| {
@ -397,6 +402,7 @@ pub fn init(opts: &NewOptions, config: &Config) -> CargoResult<()> {
name,
bin: src_paths_types.iter().any(|x| x.bin),
source_files: src_paths_types,
edition: opts.edition.as_ref().map(|s| &**s),
};
mk(config, &mkopts).chain_err(|| {
@ -530,11 +536,16 @@ path = {}
name = "{}"
version = "0.1.0"
authors = [{}]
edition = {}
[dependencies]
{}"#,
name,
toml::Value::String(author),
match opts.edition {
Some(edition) => toml::Value::String(edition.to_string()),
None => toml::Value::String("2018".to_string()),
},
cargotoml_path_specifier
).as_bytes(),
)?;

View file

@ -1,7 +1,9 @@
use std::collections::HashMap;
use serde::ser;
use core::resolver::Resolve;
use core::{Package, PackageId, Workspace, PackageSet};
use core::{Package, PackageId, Workspace};
use ops::{self, Packages};
use util::CargoResult;
@ -18,7 +20,7 @@ pub struct OutputMetadataOptions {
/// Loads the manifest, resolves the dependencies of the project to the concrete
/// used versions - considering overrides - and writes all dependencies in a JSON
/// format to stdout.
pub fn output_metadata<'a>(ws: &'a Workspace, opt: &OutputMetadataOptions) -> CargoResult<ExportInfo<'a>> {
pub fn output_metadata(ws: &Workspace, opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
if opt.version != VERSION {
bail!(
"metadata version {} not supported, only {} is currently supported",
@ -33,7 +35,7 @@ pub fn output_metadata<'a>(ws: &'a Workspace, opt: &OutputMetadataOptions) -> Ca
}
}
fn metadata_no_deps<'a>(ws: &'a Workspace, _opt: &OutputMetadataOptions) -> CargoResult<ExportInfo<'a>> {
fn metadata_no_deps(ws: &Workspace, _opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
Ok(ExportInfo {
packages: ws.members().cloned().collect(),
workspace_members: ws.members().map(|pkg| pkg.package_id().clone()).collect(),
@ -44,9 +46,9 @@ fn metadata_no_deps<'a>(ws: &'a Workspace, _opt: &OutputMetadataOptions) -> Carg
})
}
fn metadata_full<'a>(ws: &'a Workspace, opt: &OutputMetadataOptions) -> CargoResult<ExportInfo<'a>> {
fn metadata_full(ws: &Workspace, opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
let specs = Packages::All.to_package_id_specs(ws)?;
let deps = ops::resolve_ws_precisely(
let (package_set, resolve) = ops::resolve_ws_precisely(
ws,
None,
&opt.features,
@ -54,18 +56,16 @@ fn metadata_full<'a>(ws: &'a Workspace, opt: &OutputMetadataOptions) -> CargoRes
opt.no_default_features,
&specs,
)?;
let (package_set, resolve) = deps;
let packages = package_set
.package_ids()
.map(|i| package_set.get(i).map(|p| p.clone()))
.collect::<CargoResult<Vec<_>>>()?;
let mut packages = HashMap::new();
for pkg in package_set.get_many(package_set.package_ids())? {
packages.insert(pkg.package_id().clone(), pkg.clone());
}
Ok(ExportInfo {
packages,
packages: packages.values().map(|p| (*p).clone()).collect(),
workspace_members: ws.members().map(|pkg| pkg.package_id().clone()).collect(),
resolve: Some(MetadataResolve {
resolve: (package_set, resolve),
resolve: (packages, resolve),
root: ws.current_opt().map(|pkg| pkg.package_id().clone()),
}),
target_directory: ws.target_dir().display().to_string(),
@ -75,10 +75,10 @@ fn metadata_full<'a>(ws: &'a Workspace, opt: &OutputMetadataOptions) -> CargoRes
}
#[derive(Serialize)]
pub struct ExportInfo<'a> {
pub struct ExportInfo {
packages: Vec<Package>,
workspace_members: Vec<PackageId>,
resolve: Option<MetadataResolve<'a>>,
resolve: Option<MetadataResolve>,
target_directory: String,
version: u32,
workspace_root: String,
@ -88,13 +88,13 @@ pub struct ExportInfo<'a> {
/// The one from lockfile does not fit because it uses a non-standard
/// format for `PackageId`s
#[derive(Serialize)]
struct MetadataResolve<'a> {
struct MetadataResolve {
#[serde(rename = "nodes", serialize_with = "serialize_resolve")]
resolve: (PackageSet<'a>, Resolve),
resolve: (HashMap<PackageId, Package>, Resolve),
root: Option<PackageId>,
}
fn serialize_resolve<S>((package_set, resolve): &(PackageSet, Resolve), s: S) -> Result<S::Ok, S::Error>
fn serialize_resolve<S>((packages, resolve): &(HashMap<PackageId, Package>, Resolve), s: S) -> Result<S::Ok, S::Error>
where
S: ser::Serializer,
{
@ -119,7 +119,7 @@ where
dependencies: resolve.deps(id).map(|(pkg, _deps)| pkg).collect(),
deps: resolve.deps(id)
.map(|(pkg, _deps)| {
let name = package_set.get(pkg).ok()
let name = packages.get(pkg)
.and_then(|pkg| pkg.targets().iter().find(|t| t.is_lib()))
.and_then(|lib_target| {
resolve.extern_crate_name(id, pkg, lib_target).ok()

View file

@ -444,6 +444,7 @@ fn run_verify(ws: &Workspace, tar: &FileLock, opts: &PackageOpts) -> CargoResult
},
target_rustdoc_args: None,
target_rustc_args: None,
local_rustdoc_args: None,
export_dir: None,
},
&exec,

View file

@ -10,7 +10,6 @@ pub struct TestOptions<'a> {
pub compile_opts: ops::CompileOptions<'a>,
pub no_run: bool,
pub no_fail_fast: bool,
pub only_doc: bool,
}
pub fn run_tests(
@ -23,27 +22,13 @@ pub fn run_tests(
if options.no_run {
return Ok(None);
}
let (test, mut errors) = if options.only_doc {
assert!(options.compile_opts.filter.is_specific());
run_doc_tests(options, test_args, &compilation)?
} else {
run_unit_tests(options, test_args, &compilation)?
};
let (test, mut errors) = run_unit_tests(options, test_args, &compilation)?;
// If we have an error and want to fail fast, return
if !errors.is_empty() && !options.no_fail_fast {
return Ok(Some(CargoTestError::new(test, errors)));
}
// If a specific test was requested or we're not running any tests at all,
// don't run any doc tests.
if options.compile_opts.filter.is_specific() {
match errors.len() {
0 => return Ok(None),
_ => return Ok(Some(CargoTestError::new(test, errors))),
}
}
let (doctest, docerrors) = run_doc_tests(options, test_args, &compilation)?;
let test = if docerrors.is_empty() { test } else { doctest };
errors.extend(docerrors);

View file

@ -49,6 +49,8 @@ pub fn fix(ws: &Workspace, opts: &mut FixOptions) -> CargoResult<()> {
));
let _started = lock_server.start()?;
opts.compile_opts.build_config.force_rebuild = true;
if opts.broken_code {
let key = BROKEN_CODE_ENV.to_string();
opts.compile_opts.build_config.extra_rustc_env.push((key, "1".to_string()));
@ -231,7 +233,6 @@ fn rustfix_crate(lock_addr: &str, rustc: &Path, filename: &Path, args: &FixArgs)
-> Result<FixedCrate, Error>
{
args.verify_not_preparing_for_enabled_edition()?;
args.warn_if_preparing_probably_inert()?;
// First up we want to make sure that each crate is only checked by one
// process at a time. If two invocations concurrently check a crate then
@ -594,38 +595,6 @@ impl FixArgs {
process::exit(1);
}
/// If we're preparing for an edition and we *don't* find the
/// `rust_2018_preview` feature, for example, in the entry point file then
/// it probably means that the edition isn't actually enabled, so we can't
/// actually fix anything.
///
/// If this is the case, issue a warning.
fn warn_if_preparing_probably_inert(&self) -> CargoResult<()> {
let edition = match self.prepare_for_edition_resolve() {
Some(s) => s,
None => return Ok(()),
};
let path = match &self.file {
Some(s) => s,
None => return Ok(()),
};
let contents = match fs::read_to_string(path) {
Ok(s) => s,
Err(_) => return Ok(())
};
let feature_name = format!("rust_{}_preview", edition);
if contents.contains(&feature_name) {
return Ok(())
}
Message::PreviewNotFound {
file: path.display().to_string(),
edition: edition.to_string(),
}.post()?;
Ok(())
}
fn prepare_for_edition_resolve(&self) -> Option<&str> {
match &self.prepare_for_edition {
PrepareFor::Edition(s) => Some(s),

View file

@ -177,6 +177,7 @@ fn transmit(
Kind::Development => "dev",
}.to_string(),
registry: dep_registry,
explicit_name_in_toml: dep.explicit_name_in_toml().map(|s| s.to_string()),
})
})
.collect::<CargoResult<Vec<NewCrateDependency>>>()?;
@ -367,8 +368,8 @@ pub fn configure_http_handle(config: &Config, handle: &mut Easy) -> CargoResult<
// connect phase as well as a "low speed" timeout so if we don't receive
// many bytes in a large-ish period of time then we time out.
handle.connect_timeout(Duration::new(30, 0))?;
handle.low_speed_limit(10 /* bytes per second */)?;
handle.low_speed_time(Duration::new(30, 0))?;
handle.low_speed_limit(http_low_speed_limit(config)?)?;
if let Some(proxy) = http_proxy(config)? {
handle.proxy(&proxy)?;
}
@ -390,6 +391,14 @@ pub fn configure_http_handle(config: &Config, handle: &mut Easy) -> CargoResult<
Ok(())
}
/// Find an override from config for curl low-speed-limit option, otherwise use default value
fn http_low_speed_limit(config: &Config) -> CargoResult<u32> {
if let Some(s) = config.get::<Option<u32>>("http.low-speed-limit")? {
return Ok(s);
}
Ok(10)
}
/// Find an explicit HTTP proxy if one is available.
///
/// Favor cargo's `http.proxy`, then git's `http.proxy`. Proxies specified

View file

@ -16,7 +16,7 @@ use util::profile;
pub fn resolve_ws<'a>(ws: &Workspace<'a>) -> CargoResult<(PackageSet<'a>, Resolve)> {
let mut registry = PackageRegistry::new(ws.config())?;
let resolve = resolve_with_registry(ws, &mut registry, true)?;
let packages = get_resolved_packages(&resolve, registry);
let packages = get_resolved_packages(&resolve, registry)?;
Ok((packages, resolve))
}
@ -96,7 +96,7 @@ pub fn resolve_ws_with_method<'a>(
true,
)?;
let packages = get_resolved_packages(&resolved_with_overrides, registry);
let packages = get_resolved_packages(&resolved_with_overrides, registry)?;
Ok((packages, resolved_with_overrides))
}
@ -374,7 +374,7 @@ pub fn add_overrides<'a>(
pub fn get_resolved_packages<'a>(
resolve: &Resolve,
registry: PackageRegistry<'a>,
) -> PackageSet<'a> {
) -> CargoResult<PackageSet<'a>> {
let ids: Vec<PackageId> = resolve.iter().cloned().collect();
registry.get(&ids)
}

View file

@ -9,6 +9,7 @@ use hex;
use serde_json;
use core::{Dependency, Package, PackageId, Source, SourceId, Summary};
use core::source::MaybePackage;
use sources::PathSource;
use util::{Config, Sha256};
use util::errors::{CargoResult, CargoResultExt};
@ -150,14 +151,19 @@ impl<'cfg> Source for DirectorySource<'cfg> {
Ok(())
}
fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
self.packages
.get(id)
.map(|p| &p.0)
.cloned()
.map(MaybePackage::Ready)
.ok_or_else(|| format_err!("failed to find package with id: {}", id))
}
fn finish_download(&mut self, _id: &PackageId, _data: Vec<u8>) -> CargoResult<Package> {
panic!("no downloads to do")
}
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
Ok(pkg.package_id().version().to_string())
}

View file

@ -2,7 +2,7 @@ use std::fmt::{self, Debug, Formatter};
use url::Url;
use core::source::{Source, SourceId};
use core::source::{Source, SourceId, MaybePackage};
use core::GitReference;
use core::{Dependency, Package, PackageId, Summary};
use util::Config;
@ -210,7 +210,7 @@ impl<'cfg> Source for GitSource<'cfg> {
self.path_source.as_mut().unwrap().update()
}
fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
trace!(
"getting packages for package id `{}` from `{:?}`",
id,
@ -222,6 +222,10 @@ impl<'cfg> Source for GitSource<'cfg> {
.download(id)
}
fn finish_download(&mut self, _id: &PackageId, _data: Vec<u8>) -> CargoResult<Package> {
panic!("no download should have started")
}
fn fingerprint(&self, _pkg: &Package) -> CargoResult<String> {
Ok(self.rev.as_ref().unwrap().to_string())
}

View file

@ -9,6 +9,7 @@ use ignore::Match;
use ignore::gitignore::GitignoreBuilder;
use core::{Dependency, Package, PackageId, Source, SourceId, Summary};
use core::source::MaybePackage;
use ops;
use util::{self, internal, CargoResult};
use util::paths;
@ -540,14 +541,19 @@ impl<'cfg> Source for PathSource<'cfg> {
Ok(())
}
fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
trace!("getting packages; id={}", id);
let pkg = self.packages.iter().find(|pkg| pkg.package_id() == id);
pkg.cloned()
.map(MaybePackage::Ready)
.ok_or_else(|| internal(format!("failed to find {} in path source", id)))
}
fn finish_download(&mut self, _id: &PackageId, _data: Vec<u8>) -> CargoResult<Package> {
panic!("no download should have started")
}
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
let (max, max_path) = self.last_modified_file(pkg)?;
Ok(format!("{} ({})", max, max_path.display()))

View file

@ -4,10 +4,9 @@ use std::path::Path;
use core::PackageId;
use hex;
use sources::registry::{RegistryConfig, RegistryData};
use util::FileLock;
use sources::registry::{RegistryConfig, RegistryData, MaybeLock};
use util::paths;
use util::{Config, Filesystem, Sha256};
use util::{Config, Filesystem, Sha256, FileLock};
use util::errors::{CargoResult, CargoResultExt};
pub struct LocalRegistry<'cfg> {
@ -70,7 +69,7 @@ impl<'cfg> RegistryData for LocalRegistry<'cfg> {
Ok(())
}
fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<FileLock> {
fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<MaybeLock> {
let crate_file = format!("{}-{}.crate", pkg.name(), pkg.version());
let mut crate_file = self.root.open_ro(&crate_file, self.config, "crate file")?;
@ -78,7 +77,7 @@ impl<'cfg> RegistryData for LocalRegistry<'cfg> {
// checksum below as it is in theory already verified.
let dst = format!("{}-{}", pkg.name(), pkg.version());
if self.src_path.join(dst).into_path_unlocked().exists() {
return Ok(crate_file);
return Ok(MaybeLock::Ready(crate_file));
}
self.config.shell().status("Unpacking", pkg)?;
@ -102,6 +101,12 @@ impl<'cfg> RegistryData for LocalRegistry<'cfg> {
crate_file.seek(SeekFrom::Start(0))?;
Ok(crate_file)
Ok(MaybeLock::Ready(crate_file))
}
fn finish_download(&mut self, _pkg: &PackageId, _checksum: &str, _data: &[u8])
-> CargoResult<FileLock>
{
panic!("this source doesn't download")
}
}

View file

@ -170,6 +170,7 @@ use serde_json;
use tar::Archive;
use core::dependency::{Dependency, Kind};
use core::source::MaybePackage;
use core::{Package, PackageId, Source, SourceId, Summary};
use sources::PathSource;
use util::errors::CargoResultExt;
@ -275,6 +276,7 @@ struct RegistryDependency<'a> {
target: Option<Cow<'a, str>>,
kind: Option<Cow<'a, str>>,
registry: Option<Cow<'a, str>>,
package: Option<Cow<'a, str>>,
}
impl<'a> RegistryDependency<'a> {
@ -289,6 +291,7 @@ impl<'a> RegistryDependency<'a> {
target,
kind,
registry,
package,
} = self;
let id = if let Some(registry) = registry {
@ -297,7 +300,15 @@ impl<'a> RegistryDependency<'a> {
default.clone()
};
let mut dep = Dependency::parse_no_deprecated(&name, Some(&req), &id)?;
let mut dep = Dependency::parse_no_deprecated(
package.as_ref().unwrap_or(&name),
Some(&req),
&id,
)?;
if package.is_some() {
dep.set_explicit_name_in_toml(&name);
}
let kind = match kind.as_ref().map(|s| &s[..]).unwrap_or("") {
"dev" => Kind::Development,
"build" => Kind::Build,
@ -337,13 +348,20 @@ pub trait RegistryData {
) -> CargoResult<()>;
fn config(&mut self) -> CargoResult<Option<RegistryConfig>>;
fn update_index(&mut self) -> CargoResult<()>;
fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<FileLock>;
fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<MaybeLock>;
fn finish_download(&mut self, pkg: &PackageId, checksum: &str, data: &[u8])
-> CargoResult<FileLock>;
fn is_crate_downloaded(&self, _pkg: &PackageId) -> bool {
true
}
}
pub enum MaybeLock {
Ready(FileLock),
Download { url: String, descriptor: String }
}
mod index;
mod local;
mod remote;
@ -452,6 +470,34 @@ impl<'cfg> RegistrySource<'cfg> {
index::RegistryIndex::new(&self.source_id, path, self.config, self.index_locked);
Ok(())
}
/// Unpacks a downloaded crate file and loads it as a `Package`.
///
/// The resulting manifest's summary is replaced with the summary taken
/// from the registry index (see the comment below for why).
fn get_pkg(&mut self, package: &PackageId, path: FileLock) -> CargoResult<Package> {
    // Unpack the archive, attaching the package id to any failure for context.
    let path = self
        .unpack_package(package, &path)
        .chain_err(|| internal(format!("failed to unpack package `{}`", package)))?;
    let mut src = PathSource::new(&path, &self.source_id, self.config);
    src.update()?;
    // A `PathSource` has nothing to fetch over the network, so its
    // `download` is expected to always be `Ready` here.
    let pkg = match src.download(package)? {
        MaybePackage::Ready(pkg) => pkg,
        MaybePackage::Download { .. } => unreachable!(),
    };

    // Unfortunately the index and the actual Cargo.toml in the index can
    // differ due to historical Cargo bugs. To paper over these we trash the
    // *summary* loaded from the Cargo.toml we just downloaded with the one
    // we loaded from the index.
    let summaries = self
        .index
        .summaries(package.name().as_str(), &mut *self.ops)?;
    let summary = summaries
        .iter()
        .map(|s| &s.0)
        .find(|s| s.package_id() == package)
        .expect("summary not found");
    let mut manifest = pkg.manifest().clone();
    manifest.set_summary(summary.clone());
    Ok(Package::new(manifest, pkg.manifest_path()))
}
}
impl<'cfg> Source for RegistrySource<'cfg> {
@ -516,31 +562,24 @@ impl<'cfg> Source for RegistrySource<'cfg> {
Ok(())
}
fn download(&mut self, package: &PackageId) -> CargoResult<Package> {
fn download(&mut self, package: &PackageId) -> CargoResult<MaybePackage> {
let hash = self.index.hash(package, &mut *self.ops)?;
let path = self.ops.download(package, &hash)?;
let path = self
.unpack_package(package, &path)
.chain_err(|| internal(format!("failed to unpack package `{}`", package)))?;
let mut src = PathSource::new(&path, &self.source_id, self.config);
src.update()?;
let pkg = src.download(package)?;
match self.ops.download(package, &hash)? {
MaybeLock::Ready(file) => {
self.get_pkg(package, file).map(MaybePackage::Ready)
}
MaybeLock::Download { url, descriptor } => {
Ok(MaybePackage::Download { url, descriptor })
}
}
}
// Unfortunately the index and the actual Cargo.toml in the index can
// differ due to historical Cargo bugs. To paper over these we trash the
// *summary* loaded from the Cargo.toml we just downloaded with the one
// we loaded from the index.
let summaries = self
.index
.summaries(package.name().as_str(), &mut *self.ops)?;
let summary = summaries
.iter()
.map(|s| &s.0)
.find(|s| s.package_id() == package)
.expect("summary not found");
let mut manifest = pkg.manifest().clone();
manifest.set_summary(summary.clone());
Ok(Package::new(manifest, pkg.manifest_path()))
fn finish_download(&mut self, package: &PackageId, data: Vec<u8>)
-> CargoResult<Package>
{
let hash = self.index.hash(package, &mut *self.ops)?;
let file = self.ops.finish_download(package, &hash, &data)?;
self.get_pkg(package, file)
}
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {

View file

@ -14,10 +14,10 @@ use lazycell::LazyCell;
use core::{PackageId, SourceId};
use sources::git;
use sources::registry::{RegistryConfig, RegistryData, CRATE_TEMPLATE, INDEX_LOCK, VERSION_TEMPLATE};
use util::network;
use sources::registry::MaybeLock;
use util::{FileLock, Filesystem};
use util::{Config, Progress, Sha256, ToUrl};
use util::errors::{CargoResult, CargoResultExt, HttpNot200};
use util::{Config, Sha256};
use util::errors::{CargoResult, CargoResultExt};
pub struct RemoteRegistry<'cfg> {
index_path: Filesystem,
@ -122,6 +122,10 @@ impl<'cfg> RemoteRegistry<'cfg> {
*self.tree.borrow_mut() = Some(tree);
Ok(Ref::map(self.tree.borrow(), |s| s.as_ref().unwrap()))
}
/// File name used for `pkg` in the download cache, e.g. `foo-1.0.0.crate`.
fn filename(&self, pkg: &PackageId) -> String {
    format!("{}-{}.crate", pkg.name(), pkg.version())
}
}
impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
@ -206,9 +210,8 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
Ok(())
}
fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult<FileLock> {
let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
let path = Path::new(&filename);
fn download(&mut self, pkg: &PackageId, _checksum: &str) -> CargoResult<MaybeLock> {
let filename = self.filename(pkg);
// Attempt to open an read-only copy first to avoid an exclusive write
// lock and also work with read-only filesystems. Note that we check the
@ -216,18 +219,12 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
//
// If this fails then we fall through to the exclusive path where we may
// have to redownload the file.
if let Ok(dst) = self.cache_path.open_ro(path, self.config, &filename) {
if let Ok(dst) = self.cache_path.open_ro(&filename, self.config, &filename) {
let meta = dst.file().metadata()?;
if meta.len() > 0 {
return Ok(dst);
return Ok(MaybeLock::Ready(dst));
}
}
let mut dst = self.cache_path.open_rw(path, self.config, &filename)?;
let meta = dst.file().metadata()?;
if meta.len() > 0 {
return Ok(dst);
}
self.config.shell().status("Downloading", pkg)?;
let config = self.config()?.unwrap();
let mut url = config.dl.clone();
@ -235,56 +232,29 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
write!(url, "/{}/{}/download", CRATE_TEMPLATE, VERSION_TEMPLATE).unwrap();
}
let url = url.replace(CRATE_TEMPLATE, &*pkg.name())
.replace(VERSION_TEMPLATE, &pkg.version().to_string())
.to_url()?;
.replace(VERSION_TEMPLATE, &pkg.version().to_string());
// TODO: don't download into memory, but ensure that if we ctrl-c a
// download we should resume either from the start or the middle
// on the next time
let url = url.to_string();
let mut handle = self.config.http()?.borrow_mut();
handle.get(true)?;
handle.url(&url)?;
handle.follow_location(true)?;
let mut state = Sha256::new();
let mut body = Vec::new();
network::with_retry(self.config, || {
state = Sha256::new();
body = Vec::new();
let mut pb = Progress::new("Fetch", self.config);
{
handle.progress(true)?;
let mut handle = handle.transfer();
handle.progress_function(|dl_total, dl_cur, _, _| {
pb.tick(dl_cur as usize, dl_total as usize).is_ok()
})?;
handle.write_function(|buf| {
state.update(buf);
body.extend_from_slice(buf);
Ok(buf.len())
})?;
handle.perform().chain_err(|| {
format!("failed to download from `{}`", url)
})?;
}
let code = handle.response_code()?;
if code != 200 && code != 0 {
let url = handle.effective_url()?.unwrap_or(&url);
Err(HttpNot200 {
code,
url: url.to_string(),
}.into())
} else {
Ok(())
}
})?;
Ok(MaybeLock::Download { url, descriptor: pkg.to_string() })
}
fn finish_download(&mut self, pkg: &PackageId, checksum: &str, data: &[u8])
-> CargoResult<FileLock>
{
// Verify what we just downloaded
let mut state = Sha256::new();
state.update(data);
if hex::encode(state.finish()) != checksum {
bail!("failed to verify the checksum of `{}`", pkg)
}
dst.write_all(&body)?;
let filename = self.filename(pkg);
let mut dst = self.cache_path.open_rw(&filename, self.config, &filename)?;
let meta = dst.file().metadata()?;
if meta.len() > 0 {
return Ok(dst);
}
dst.write_all(data)?;
dst.seek(SeekFrom::Start(0))?;
Ok(dst)
}

View file

@ -1,4 +1,5 @@
use core::{Dependency, Package, PackageId, Source, SourceId, Summary};
use core::source::MaybePackage;
use util::errors::{CargoResult, CargoResultExt};
pub struct ReplacedSource<'cfg> {
@ -71,11 +72,26 @@ impl<'cfg> Source for ReplacedSource<'cfg> {
Ok(())
}
fn download(&mut self, id: &PackageId) -> CargoResult<Package> {
fn download(&mut self, id: &PackageId) -> CargoResult<MaybePackage> {
let id = id.with_source_id(&self.replace_with);
let pkg = self.inner
.download(&id)
.chain_err(|| format!("failed to download replaced source {}", self.to_replace))?;
Ok(match pkg {
MaybePackage::Ready(pkg) => {
MaybePackage::Ready(pkg.map_source(&self.replace_with, &self.to_replace))
}
other @ MaybePackage::Download { .. } => other,
})
}
fn finish_download(&mut self, id: &PackageId, data: Vec<u8>)
-> CargoResult<Package>
{
let id = id.with_source_id(&self.replace_with);
let pkg = self.inner
.finish_download(&id, data)
.chain_err(|| format!("failed to download replaced source {}", self.to_replace))?;
Ok(pkg.map_source(&self.replace_with, &self.to_replace))
}

View file

@ -60,6 +60,17 @@ impl fmt::Display for Cfg {
}
impl CfgExpr {
/// Utility function to check if the key, "cfg(..)" matches the `target_cfg`
pub fn matches_key(key: &str, target_cfg: &[Cfg]) -> bool {
    // Only keys shaped like `cfg(...)` can possibly match.
    if !key.starts_with("cfg(") || !key.ends_with(')') {
        return false;
    }
    // Strip the `cfg(` prefix and the trailing `)` to get the expression.
    let expr = &key[4..key.len() - 1];
    // An expression that fails to parse counts as a non-match rather
    // than an error.
    match CfgExpr::from_str(expr) {
        Ok(parsed) => parsed.matches(target_cfg),
        Err(_) => false,
    }
}
pub fn matches(&self, cfg: &[Cfg]) -> bool {
match *self {
CfgExpr::Not(ref e) => !e.matches(cfg),

View file

@ -40,10 +40,6 @@ pub enum Message {
file: String,
message: String,
},
PreviewNotFound {
file: String,
edition: String,
},
EditionAlreadyEnabled {
file: String,
edition: String,
@ -81,7 +77,6 @@ impl Message {
pub struct DiagnosticPrinter<'a> {
config: &'a Config,
preview_not_found: HashSet<String>,
edition_already_enabled: HashSet<String>,
idiom_mismatch: HashSet<String>,
}
@ -90,7 +85,6 @@ impl<'a> DiagnosticPrinter<'a> {
pub fn new(config: &'a Config) -> DiagnosticPrinter<'a> {
DiagnosticPrinter {
config,
preview_not_found: HashSet::new(),
edition_already_enabled: HashSet::new(),
idiom_mismatch: HashSet::new(),
}
@ -140,21 +134,6 @@ impl<'a> DiagnosticPrinter<'a> {
write!(self.config.shell().err(), "{}", PLEASE_REPORT_THIS_BUG)?;
Ok(())
}
Message::PreviewNotFound { file, edition } => {
// By default we're fixing a lot of things concurrently, don't
// warn about the same file multiple times.
if !self.preview_not_found.insert(file.clone()) {
return Ok(())
}
self.config.shell().warn(&format!(
"failed to find `#![feature(rust_{}_preview)]` in `{}`\n\
this may cause `cargo fix` to not be able to fix all\n\
issues in preparation for the {0} edition",
edition,
file,
))?;
Ok(())
}
Message::EditionAlreadyEnabled { file, edition } => {
// Like above, only warn once per file
if !self.edition_already_enabled.insert(file.clone()) {

View file

@ -1,3 +1,5 @@
use std::time::Duration;
pub use self::cfg::{Cfg, CfgExpr};
pub use self::config::{homedir, Config, ConfigValue};
pub use self::dependency_queue::{DependencyQueue, Dirty, Fresh, Freshness};
@ -46,3 +48,13 @@ mod read2;
mod progress;
mod lockserver;
pub mod diagnostic_server;
/// Renders a `Duration` for human consumption: minutes and whole seconds
/// for durations of a minute or more (e.g. `2m 05s`), otherwise seconds
/// with two decimal places (e.g. `0.35s`).
pub fn elapsed(duration: Duration) -> String {
    let total_secs = duration.as_secs();
    if total_secs < 60 {
        // Sub-minute: seconds plus hundredths derived from the nanoseconds.
        format!("{}.{:02}s", total_secs, duration.subsec_nanos() / 10_000_000)
    } else {
        format!("{}m {:02}s", total_secs / 60, total_secs % 60)
    }
}

View file

@ -6,6 +6,38 @@ use failure::Error;
use util::Config;
use util::errors::{CargoResult, HttpNot200};
pub struct Retry<'a> {
config: &'a Config,
remaining: u32,
}
impl<'a> Retry<'a> {
    /// Creates a retry helper whose budget comes from the `net.retry`
    /// config value, defaulting to 2 retries.
    pub fn new(config: &'a Config) -> CargoResult<Retry<'a>> {
        Ok(Retry {
            config,
            remaining: config.get::<Option<u32>>("net.retry")?.unwrap_or(2),
        })
    }

    /// Runs `f` once. Returns `Ok(Some(value))` on success; `Ok(None)` if
    /// the error looked like a spurious network failure and retries remain
    /// (a warning is printed and the budget decremented so the caller can
    /// loop and try again); otherwise propagates the error.
    pub fn try<T>(&mut self, f: impl FnOnce() -> CargoResult<T>)
        -> CargoResult<Option<T>>
    {
        match f() {
            Err(ref e) if maybe_spurious(e) && self.remaining > 0 => {
                let msg = format!(
                    "spurious network error ({} tries \
                     remaining): {}",
                    self.remaining, e
                );
                self.config.shell().warn(msg)?;
                self.remaining -= 1;
                Ok(None)
            }
            // Successes and non-spurious (or budget-exhausted) errors are
            // passed straight through.
            other => other.map(Some),
        }
    }
}
fn maybe_spurious(err: &Error) -> bool {
for e in err.iter_chain() {
if let Some(git_err) = e.downcast_ref::<git2::Error>() {
@ -48,21 +80,10 @@ pub fn with_retry<T, F>(config: &Config, mut callback: F) -> CargoResult<T>
where
F: FnMut() -> CargoResult<T>,
{
let mut remaining = config.get::<Option<u32>>("net.retry")?.unwrap_or(2);
let mut retry = Retry::new(config)?;
loop {
match callback() {
Ok(ret) => return Ok(ret),
Err(ref e) if maybe_spurious(e) && remaining > 0 => {
let msg = format!(
"spurious network error ({} tries \
remaining): {}",
remaining, e
);
config.shell().warn(msg)?;
remaining -= 1;
}
//todo impl from
Err(e) => return Err(e),
if let Some(ret) = retry.try(&mut callback)? {
return Ok(ret)
}
}
}

View file

@ -150,30 +150,23 @@ impl ProcessBuilder {
}
}
/// On unix, executes the process using the unix syscall `execvp`, which will block this
/// process, and will only return if there is an error. On windows this is a synonym for
/// `exec`.
#[cfg(unix)]
/// Replaces the current process with the target process.
///
/// On Unix, this executes the process using the unix syscall `execvp`, which will block
/// this process, and will only return if there is an error.
///
/// On Windows this isn't technically possible. Instead we emulate it to the best of our
/// ability. One aspect we fix here is that we specify a handler for the ctrl-c handler.
/// In doing so (and by effectively ignoring it) we should emulate proxying ctrl-c
/// handling to the application at hand, which will either terminate or handle it itself.
/// According to microsoft's documentation at:
/// https://docs.microsoft.com/en-us/windows/console/ctrl-c-and-ctrl-break-signals
/// the ctrl-c signal is sent to all processes attached to a terminal, which should
/// include our child process. If the child terminates then we'll reap them in Cargo
/// pretty quickly, and if the child handles the signal then we won't terminate
/// (and we shouldn't!) until the process itself later exits.
pub fn exec_replace(&self) -> CargoResult<()> {
use std::os::unix::process::CommandExt;
let mut command = self.build_command();
let error = command.exec();
Err(::util::CargoError::from(error)
.context(process_error(
&format!("could not execute process {}", self),
None,
None,
))
.into())
}
/// On unix, executes the process using the unix syscall `execvp`, which will block this
/// process, and will only return if there is an error. On windows this is a synonym for
/// `exec`.
#[cfg(windows)]
pub fn exec_replace(&self) -> CargoResult<()> {
self.exec()
imp::exec_replace(self)
}
/// Execute the process, returning the stdio output, or an error if non-zero exit status.
@ -324,3 +317,51 @@ pub fn process<T: AsRef<OsStr>>(cmd: T) -> ProcessBuilder {
jobserver: None,
}
}
#[cfg(unix)]
mod imp {
    use CargoResult;
    use std::os::unix::process::CommandExt;
    use util::{process_error, ProcessBuilder};

    /// Unix implementation of `exec_replace`: replaces the current process
    /// via `Command::exec` (the `execvp` syscall), so this function only
    /// returns if the exec itself failed.
    pub fn exec_replace(process_builder: &ProcessBuilder) -> CargoResult<()> {
        let mut command = process_builder.build_command();
        // `exec` only comes back on error; wrap the error with a
        // description of the process we tried to run.
        let error = command.exec();
        Err(::util::CargoError::from(error)
            .context(process_error(
                &format!("could not execute process {}", process_builder),
                None,
                None,
            ))
            .into())
    }
}
#[cfg(windows)]
mod imp {
    extern crate winapi;

    use CargoResult;
    use util::{process_error, ProcessBuilder};
    use self::winapi::shared::minwindef::{BOOL, DWORD, FALSE, TRUE};
    use self::winapi::um::consoleapi::SetConsoleCtrlHandler;

    // Console Ctrl-C handler that deliberately swallows the event, so the
    // child process (which shares the console) decides how to react.
    unsafe extern "system" fn ctrlc_handler(_: DWORD) -> BOOL {
        // Do nothing. Let the child process handle it.
        TRUE
    }

    /// Windows emulation of `exec_replace`: install the no-op Ctrl-C
    /// handler above, then run the child synchronously via `exec`.
    pub fn exec_replace(process_builder: &ProcessBuilder) -> CargoResult<()> {
        unsafe {
            if SetConsoleCtrlHandler(Some(ctrlc_handler), TRUE) == FALSE {
                return Err(process_error(
                    "Could not set Ctrl-C handler.",
                    None,
                    None).into());
            }
        }

        // Just exec the process as normal.
        process_builder.exec()
    }
}

View file

@ -16,13 +16,17 @@ pub enum ProgressStyle {
Ratio,
}
struct Throttle {
first: bool,
last_update: Instant,
}
struct State<'cfg> {
config: &'cfg Config,
format: Format,
first: bool,
last_update: Instant,
name: String,
done: bool,
throttle: Throttle,
}
struct Format {
@ -50,10 +54,9 @@ impl<'cfg> Progress<'cfg> {
max_width: n,
max_print: 80,
},
first: true,
last_update: Instant::now(),
name: name.to_string(),
done: false,
throttle: Throttle::new(),
}),
}
}
@ -62,36 +65,19 @@ impl<'cfg> Progress<'cfg> {
self.state = None;
}
pub fn is_enabled(&self) -> bool {
self.state.is_some()
}
pub fn new(name: &str, cfg: &'cfg Config) -> Progress<'cfg> {
Self::with_style(name, ProgressStyle::Percentage, cfg)
}
pub fn tick(&mut self, cur: usize, max: usize) -> CargoResult<()> {
match self.state {
Some(ref mut s) => s.tick(cur, max, "", true),
None => Ok(()),
}
}
pub fn clear(&mut self) {
if let Some(ref mut s) = self.state {
s.clear();
}
}
pub fn tick_now(&mut self, cur: usize, max: usize, msg: &str) -> CargoResult<()> {
match self.state {
Some(ref mut s) => s.tick(cur, max, msg, false),
None => Ok(()),
}
}
}
impl<'cfg> State<'cfg> {
fn tick(&mut self, cur: usize, max: usize, msg: &str, throttle: bool) -> CargoResult<()> {
if self.done {
return Ok(());
}
let s = match &mut self.state {
Some(s) => s,
None => return Ok(()),
};
// Don't update too often as it can cause excessive performance loss
// just putting stuff onto the terminal. We also want to avoid
@ -105,36 +91,110 @@ impl<'cfg> State<'cfg> {
// 2. If we've drawn something, then we rate limit ourselves to only
// draw to the console every so often. Currently there's a 100ms
// delay between updates.
if throttle {
if self.first {
let delay = Duration::from_millis(500);
if self.last_update.elapsed() < delay {
return Ok(());
}
self.first = false;
} else {
let interval = Duration::from_millis(100);
if self.last_update.elapsed() < interval {
return Ok(());
}
}
self.last_update = Instant::now();
if !s.throttle.allowed() {
return Ok(())
}
if cur == max {
s.tick(cur, max, "")
}
pub fn tick_now(&mut self, cur: usize, max: usize, msg: &str) -> CargoResult<()> {
match self.state {
Some(ref mut s) => s.tick(cur, max, msg),
None => Ok(()),
}
}
pub fn update_allowed(&mut self) -> bool {
match &mut self.state {
Some(s) => s.throttle.allowed(),
None => false,
}
}
pub fn print_now(&mut self, msg: &str) -> CargoResult<()> {
match &mut self.state {
Some(s) => s.print("", msg),
None => Ok(()),
}
}
pub fn clear(&mut self) {
if let Some(ref mut s) = self.state {
s.clear();
}
}
}
impl Throttle {
    /// A fresh throttle: the first draw is held back longer than later ones.
    fn new() -> Throttle {
        Throttle {
            first: true,
            last_update: Instant::now(),
        }
    }

    /// Returns whether enough time has elapsed since the last update to
    /// draw again, recording the draw if so. The very first draw waits
    /// 500ms (so quick operations never flash a bar); after that draws are
    /// rate limited to one per 100ms.
    fn allowed(&mut self) -> bool {
        let min_gap = if self.first {
            Duration::from_millis(500)
        } else {
            Duration::from_millis(100)
        };
        if self.last_update.elapsed() < min_gap {
            return false;
        }
        self.update();
        true
    }

    /// Records that a draw just happened.
    fn update(&mut self) {
        self.first = false;
        self.last_update = Instant::now();
    }
}
impl<'cfg> State<'cfg> {
fn tick(&mut self, cur: usize, max: usize, msg: &str) -> CargoResult<()> {
if self.done {
return Ok(());
}
if max > 0 && cur == max {
self.done = true;
}
// Write out a pretty header, then the progress bar itself, and then
// return back to the beginning of the line for the next print.
self.try_update_max_width();
if let Some(string) = self.format.progress_status(cur, max, msg) {
self.config.shell().status_header(&self.name)?;
write!(self.config.shell().err(), "{}\r", string)?;
if let Some(pbar) = self.format.progress(cur, max) {
self.print(&pbar, msg)?;
}
Ok(())
}
fn print(&mut self, prefix: &str, msg: &str) -> CargoResult<()> {
self.throttle.update();
self.try_update_max_width();
// make sure we have enough room for the header
if self.format.max_width < 15 {
return Ok(())
}
self.config.shell().status_header(&self.name)?;
let mut line = prefix.to_string();
self.format.render(&mut line, msg);
while line.len() < self.format.max_width - 15 {
line.push(' ');
}
write!(self.config.shell().err(), "{}\r", line)?;
Ok(())
}
fn clear(&mut self) {
self.try_update_max_width();
let blank = " ".repeat(self.format.max_width);
@ -149,7 +209,7 @@ impl<'cfg> State<'cfg> {
}
impl Format {
fn progress_status(&self, cur: usize, max: usize, msg: &str) -> Option<String> {
fn progress(&self, cur: usize, max: usize) -> Option<String> {
// Render the percentage at the far right and then figure how long the
// progress bar is
let pct = (cur as f64) / (max as f64);
@ -188,26 +248,36 @@ impl Format {
string.push_str("]");
string.push_str(&stats);
let mut avail_msg_len = self.max_width - self.width();
Some(string)
}
fn render(&self, string: &mut String, msg: &str) {
let mut avail_msg_len = self.max_width - string.len() - 15;
let mut ellipsis_pos = 0;
if avail_msg_len > 3 {
for c in msg.chars() {
let display_width = c.width().unwrap_or(0);
if avail_msg_len >= display_width {
avail_msg_len -= display_width;
string.push(c);
if avail_msg_len >= 3 {
ellipsis_pos = string.len();
}
} else {
string.truncate(ellipsis_pos);
string.push_str("...");
break;
if avail_msg_len <= 3 {
return
}
for c in msg.chars() {
let display_width = c.width().unwrap_or(0);
if avail_msg_len >= display_width {
avail_msg_len -= display_width;
string.push(c);
if avail_msg_len >= 3 {
ellipsis_pos = string.len();
}
} else {
string.truncate(ellipsis_pos);
string.push_str("...");
break;
}
}
}
Some(string)
#[cfg(test)]
fn progress_status(&self, cur: usize, max: usize, msg: &str) -> Option<String> {
let mut ret = self.progress(cur, max)?;
self.render(&mut ret, msg);
Some(ret)
}
fn width(&self) -> usize {

View file

@ -4,6 +4,7 @@ use std::path::{Path, PathBuf};
use std::hash::{Hash, Hasher, SipHasher};
use std::collections::hash_map::{Entry, HashMap};
use std::sync::Mutex;
use std::process::Stdio;
use std::env;
use serde_json;
@ -66,20 +67,27 @@ impl Rustc {
/// Get a process builder set up to use the found rustc version, with a wrapper if Some
pub fn process(&self) -> ProcessBuilder {
if let Some(ref wrapper) = self.wrapper {
let mut cmd = util::process(wrapper);
{
match self.wrapper {
Some(ref wrapper) if !wrapper.as_os_str().is_empty() => {
let mut cmd = util::process(wrapper);
cmd.arg(&self.path);
cmd
}
cmd
} else {
util::process(&self.path)
_ => self.process_no_wrapper()
}
}
pub fn process_no_wrapper(&self) -> ProcessBuilder {
util::process(&self.path)
}
pub fn cached_output(&self, cmd: &ProcessBuilder) -> CargoResult<(String, String)> {
self.cache.lock().unwrap().cached_output(cmd)
}
pub fn cached_success(&self, cmd: &ProcessBuilder) -> CargoResult<bool> {
self.cache.lock().unwrap().cached_success(cmd)
}
}
/// It is a well known that `rustc` is not the fastest compiler in the world.
@ -101,6 +109,7 @@ struct Cache {
struct CacheData {
rustc_fingerprint: u64,
outputs: HashMap<u64, (String, String)>,
successes: HashMap<u64, bool>,
}
impl Cache {
@ -110,6 +119,7 @@ impl Cache {
let empty = CacheData {
rustc_fingerprint,
outputs: HashMap::new(),
successes: HashMap::new(),
};
let mut dirty = true;
let data = match read(&cache_location) {
@ -174,6 +184,28 @@ impl Cache {
}
}
}
/// Returns whether `cmd` exits successfully, consulting (and populating)
/// the in-memory success cache keyed by the command's fingerprint.
fn cached_success(&mut self, cmd: &ProcessBuilder) -> CargoResult<bool> {
    let key = process_fingerprint(cmd);
    match self.data.successes.entry(key) {
        Entry::Occupied(entry) => {
            info!("rustc info cache hit");
            Ok(*entry.get())
        }
        Entry::Vacant(entry) => {
            info!("rustc info cache miss");
            // Actually run the command, discarding its output; only the
            // exit status matters for this cache.
            let success = cmd
                .build_command()
                .stdout(Stdio::null())
                .stderr(Stdio::null())
                .status()?
                .success();
            entry.insert(success);
            // Mark the cache dirty so the new result gets written back.
            self.dirty = true;
            Ok(success)
        }
    }
}
}
impl Drop for Cache {

View file

@ -810,6 +810,16 @@ impl TomlManifest {
bail!("package name cannot be an empty string")
}
for c in package_name.chars() {
if c.is_alphanumeric() {
continue;
}
if c == '_' || c == '-' {
continue;
}
bail!("Invalid character `{}` in package name: `{}`", c, package_name)
}
let pkgid = project.to_package_id(source_id)?;
let edition = if let Some(ref edition) = project.edition {
@ -1239,7 +1249,7 @@ impl TomlDependency {
impl DetailedTomlDependency {
fn to_dependency(
&self,
name: &str,
name_in_toml: &str,
cx: &mut Context,
kind: Option<Kind>,
) -> CargoResult<Dependency> {
@ -1249,7 +1259,7 @@ impl DetailedTomlDependency {
providing a local path, Git repository, or \
version to use. This will be considered an \
error in future versions",
name
name_in_toml
);
cx.warnings.push(msg);
}
@ -1266,7 +1276,7 @@ impl DetailedTomlDependency {
let msg = format!(
"key `{}` is ignored for dependency ({}). \
This will be considered an error in future versions",
key_name, name
key_name, name_in_toml
);
cx.warnings.push(msg)
}
@ -1290,12 +1300,12 @@ impl DetailedTomlDependency {
(Some(_), _, Some(_), _) | (Some(_), _, _, Some(_)) => bail!(
"dependency ({}) specification is ambiguous. \
Only one of `git` or `registry` is allowed.",
name
name_in_toml
),
(_, _, Some(_), Some(_)) => bail!(
"dependency ({}) specification is ambiguous. \
Only one of `registry` or `registry-index` is allowed.",
name
name_in_toml
),
(Some(git), maybe_path, _, _) => {
if maybe_path.is_some() {
@ -1303,7 +1313,7 @@ impl DetailedTomlDependency {
"dependency ({}) specification is ambiguous. \
Only one of `git` or `path` is allowed. \
This will be considered an error in future versions",
name
name_in_toml
);
cx.warnings.push(msg)
}
@ -1318,7 +1328,7 @@ impl DetailedTomlDependency {
"dependency ({}) specification is ambiguous. \
Only one of `branch`, `tag` or `rev` is allowed. \
This will be considered an error in future versions",
name
name_in_toml
);
cx.warnings.push(msg)
}
@ -1359,15 +1369,15 @@ impl DetailedTomlDependency {
(None, None, None, None) => SourceId::crates_io(cx.config)?,
};
let (pkg_name, rename) = match self.package {
Some(ref s) => (&s[..], Some(name)),
None => (name, None),
let (pkg_name, explicit_name_in_toml) = match self.package {
Some(ref s) => (&s[..], Some(name_in_toml)),
None => (name_in_toml, None),
};
let version = self.version.as_ref().map(|v| &v[..]);
let mut dep = match cx.pkgid {
Some(id) => Dependency::parse(pkg_name, version, &new_source_id, id, cx.config)?,
None => Dependency::parse_no_deprecated(name, version, &new_source_id)?,
None => Dependency::parse_no_deprecated(pkg_name, version, &new_source_id)?,
};
dep.set_features(self.features.iter().flat_map(|x| x))
.set_default_features(
@ -1381,9 +1391,9 @@ impl DetailedTomlDependency {
if let Some(kind) = kind {
dep.set_kind(kind);
}
if let Some(rename) = rename {
if let Some(name_in_toml) = explicit_name_in_toml {
cx.features.require(Feature::rename_dependency())?;
dep.set_rename(rename);
dep.set_explicit_name_in_toml(name_in_toml);
}
Ok(dep)
}

View file

@ -1,6 +1,6 @@
[package]
name = "crates-io"
version = "0.18.0"
version = "0.20.0"
authors = ["Alex Crichton <alex@alexcrichton.com>"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/rust-lang/cargo"

View file

@ -68,7 +68,10 @@ pub struct NewCrateDependency {
pub version_req: String,
pub target: Option<String>,
pub kind: String,
#[serde(skip_serializing_if = "Option::is_none")] pub registry: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub registry: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub explicit_name_in_toml: Option<String>,
}
#[derive(Deserialize)]

View file

@ -46,7 +46,7 @@ their dependencies, compile them all, and update the `Cargo.lock`:
```console
$ cargo build
Updating registry `https://github.com/rust-lang/crates.io-index`
Updating crates.io index
Downloading memchr v0.1.5
Downloading libc v0.1.10
Downloading regex-syntax v0.2.1

View file

@ -83,6 +83,11 @@ rustflags = ["..", ".."]
# are concatenated. The `cfg` syntax only applies to rustflags, and not to
# linker.
rustflags = ["..", ".."]
# Similar for the $triple configuration, but using the `cfg` syntax.
# If one or more `cfg`s and a $triple target are candidates, then the $triple
# will be used.
# If several `cfg`s are candidates, then the build will error.
runner = ".."
# Configuration keys related to the registry
[registry]
@ -92,15 +97,17 @@ token = "..." # Access token (found on the central repos website)
[http]
proxy = "host:port" # HTTP proxy to use for HTTP requests (defaults to none)
# in libcurl format, e.g. "socks5h://host:port"
timeout = 60000 # Timeout for each HTTP request, in milliseconds
timeout = 30 # Timeout for each HTTP request, in seconds
cainfo = "cert.pem" # Path to Certificate Authority (CA) bundle (optional)
check-revoke = true # Indicates whether SSL certs are checked for revocation
low-speed-limit = 5 # Lower threshold for bytes/sec (10 = default, 0 = disabled)
multiplexing = false # whether or not to use HTTP/2 multiplexing where possible
[build]
jobs = 1 # number of parallel jobs, defaults to # of CPUs
rustc = "rustc" # the rust compiler tool
rustdoc = "rustdoc" # the doc generator tool
target = "triple" # build for the target triple
target = "triple" # build for the target triple (ignored by `cargo install`)
target-dir = "target" # path of where to place all generated artifacts
rustflags = ["..", ".."] # custom flags to pass to all compiler invocations
incremental = true # whether or not to enable incremental compilation

View file

@ -26,11 +26,29 @@ Versioning](http://semver.org/), so make sure you follow some basic rules:
structs or variants to enums.
* After 1.0.0, only make breaking changes when you increment the major version.
Don't break the build.
* After 1.0.0, don't add any new public API (no new `pub` anything) in tiny
* After 1.0.0, don't add any new public API (no new `pub` anything) in patch-level
versions. Always increment the minor version if you add any new `pub` structs,
traits, fields, types, functions, methods or anything else.
* Use version numbers with three numeric parts such as 1.0.0 rather than 1.0.
#### The `edition` field (optional)
You can opt in to a specific Rust Edition for your package with the
`edition` key in `Cargo.toml`. If you don't specify the edition, it will
default to 2015.
```toml
[package]
# ...
edition = '2018'
```
The `edition` key affects which edition your package is compiled with. Cargo
will always generate projects via `cargo new` with the `edition` key set to the
latest edition. Setting the `edition` key in `[package]` will affect all
targets/crates in the package, including test suites, benchmarks, binaries,
examples, etc.
#### The `build` field (optional)
This field specifies a file in the project root which is a [build script][1] for
@ -714,6 +732,12 @@ proc-macro = false
# stops it from generating a test harness. This is useful when the binary being
# built manages the test runner itself.
harness = true
# If set then a target can be configured to use a different edition than the
# `[package]` is configured to use, perhaps only compiling a library with the
# 2018 edition or only compiling one unit test with the 2015 edition. By default
# all targets are compiled with the edition specified in `[package]`.
edition = '2015'
```
The `[package]` also includes the optional `autobins`, `autoexamples`,

View file

@ -250,13 +250,13 @@ In any case, typically all you need to do now is:
```console
$ cargo build
Compiling uuid v1.0.0 (file://.../uuid)
Compiling my-library v0.1.0 (file://.../my-library)
Compiling uuid v1.0.0 (.../uuid)
Compiling my-library v0.1.0 (.../my-library)
Finished dev [unoptimized + debuginfo] target(s) in 0.32 secs
```
And that's it! You're now building with the local version of `uuid` (note the
`file://` in the build output). If you don't see the `file://` version getting
path in parentheses in the build output). If you don't see the local path version getting
built then you may need to run `cargo update -p uuid --precise $version` where
`$version` is the version of the locally checked out copy of `uuid`.
@ -376,7 +376,7 @@ my-library = { git = 'https://example.com/git/my-library' }
uuid = "1.0"
[patch.crates-io]
uuid = { git = 'https://github.com/rust-lang-nursery/uuid', version = '2.0.0' }
uuid = { git = 'https://github.com/rust-lang-nursery/uuid', branch = '2.0.0' }
```
Note that this will actually resolve to two versions of the `uuid` crate. The

View file

@ -199,30 +199,6 @@ cargo +nightly build --out-dir=out -Z unstable-options
```
### Edition
* Tracking Issue: [rust-lang/rust#44581](https://github.com/rust-lang/rust/issues/44581)
* RFC: [#2052](https://github.com/rust-lang/rfcs/blob/master/text/2052-epochs.md)
You can opt in to a specific Rust Edition for your package with the `edition`
key in `Cargo.toml`. If you don't specify the edition, it will default to
2015. You need to include the appropriate `cargo-features`.
You can also specify `edition` on a per-target level, where it will otherwise
default to the package `edition`.
```toml
cargo-features = ["edition"]
[package]
...
edition = "2018"
[[bin]]
...
edition = "2015"
```
### Profile Overrides
* Tracking Issue: [rust-lang/rust#48683](https://github.com/rust-lang/rust/issues/48683)
* RFC: [#2282](https://github.com/rust-lang/rfcs/blob/master/text/2282-profile-dependencies.md)
@ -318,22 +294,6 @@ Example:
cargo +nightly build --build-plan -Z unstable-options
```
### Compile progress
* Tracking Issue: [rust-lang/cargo#2536](https://github.com/rust-lang/cargo/issues/2536)
The `-Z compile-progress` flag enables a progress bar while compiling.
```console
$ cargo +nightly build -Z compile-progress
Compiling libc v0.2.41
Compiling void v1.0.2
Compiling lazy_static v1.0.1
Compiling regex v1.0.0
Compiling ucd-util v0.1.1
Compiling utf8-ranges v1.0.0
Building [=======> ] 2/14: libc, regex, uc...
```
### default-run
* Original issue: [#2200](https://github.com/rust-lang/cargo/issues/2200)

View file

@ -125,7 +125,7 @@ __cargo_commands=$(cargo --list 2>/dev/null | tail -n +2)
_locate_manifest(){
local manifest=`cargo locate-project 2>/dev/null`
# regexp-replace manifest '\{"root":"|"\}' ''
echo ${manifest:9:-2}
echo ${manifest:9:${#manifest}-11}
}
# Extracts the values of "name" from the array given in $1 and shows them as
@ -238,6 +238,8 @@ _toolchains(){
result+=("+${BASH_REMATCH[1]}-${BASH_REMATCH[3]}")
fi
result+=("+$line")
else
result+=("+$line")
fi
done <<< "$toolchains"
echo "${result[@]}"

View file

@ -56,13 +56,14 @@ fn depend_on_alt_registry() {
.masquerade_as_nightly_cargo()
.with_stderr(&format!(
"\
[UPDATING] registry `{reg}`
[DOWNLOADING] bar v0.0.1 (registry `file://[..]`)
[COMPILING] bar v0.0.1 (registry `file://[..]`)
[COMPILING] foo v0.0.1 (CWD)
[UPDATING] `{reg}` index
[DOWNLOADING] crates ...
[DOWNLOADED] bar v0.0.1 (registry `[ROOT][..]`)
[COMPILING] bar v0.0.1 (registry `[ROOT][..]`)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
",
reg = registry::alt_registry()
reg = registry::alt_registry_path().to_str().unwrap()
)).run();
p.cargo("clean").masquerade_as_nightly_cargo().run();
@ -72,8 +73,8 @@ fn depend_on_alt_registry() {
.masquerade_as_nightly_cargo()
.with_stderr(
"\
[COMPILING] bar v0.0.1 (registry `file://[..]`)
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] bar v0.0.1 (registry `[ROOT][..]`)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
",
).run();
@ -109,15 +110,16 @@ fn depend_on_alt_registry_depends_on_same_registry_no_index() {
.masquerade_as_nightly_cargo()
.with_stderr(&format!(
"\
[UPDATING] registry `{reg}`
[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`)
[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`)
[COMPILING] baz v0.0.1 (registry `file://[..]`)
[COMPILING] bar v0.0.1 (registry `file://[..]`)
[COMPILING] foo v0.0.1 (CWD)
[UPDATING] `{reg}` index
[DOWNLOADING] crates ...
[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
[COMPILING] baz v0.0.1 (registry `[ROOT][..]`)
[COMPILING] bar v0.0.1 (registry `[ROOT][..]`)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
",
reg = registry::alt_registry()
reg = registry::alt_registry_path().to_str().unwrap()
)).run();
}
@ -151,15 +153,16 @@ fn depend_on_alt_registry_depends_on_same_registry() {
.masquerade_as_nightly_cargo()
.with_stderr(&format!(
"\
[UPDATING] registry `{reg}`
[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`)
[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`)
[COMPILING] baz v0.0.1 (registry `file://[..]`)
[COMPILING] bar v0.0.1 (registry `file://[..]`)
[COMPILING] foo v0.0.1 (CWD)
[UPDATING] `{reg}` index
[DOWNLOADING] crates ...
[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
[COMPILING] baz v0.0.1 (registry `[ROOT][..]`)
[COMPILING] bar v0.0.1 (registry `[ROOT][..]`)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
",
reg = registry::alt_registry()
reg = registry::alt_registry_path().to_str().unwrap()
)).run();
}
@ -193,17 +196,18 @@ fn depend_on_alt_registry_depends_on_crates_io() {
.masquerade_as_nightly_cargo()
.with_stderr(&format!(
"\
[UPDATING] registry `{alt_reg}`
[UPDATING] registry `{reg}`
[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`)
[DOWNLOADING] [..] v0.0.1 (registry `file://[..]`)
[COMPILING] baz v0.0.1 (registry `file://[..]`)
[COMPILING] bar v0.0.1 (registry `file://[..]`)
[COMPILING] foo v0.0.1 (CWD)
[UPDATING] `{alt_reg}` index
[UPDATING] `{reg}` index
[DOWNLOADING] crates ...
[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`)
[COMPILING] baz v0.0.1 (registry `[ROOT][..]`)
[COMPILING] bar v0.0.1 (registry `[ROOT][..]`)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
",
alt_reg = registry::alt_registry(),
reg = registry::registry()
alt_reg = registry::alt_registry_path().to_str().unwrap(),
reg = registry::registry_path().to_str().unwrap()
)).run();
}
@ -235,8 +239,8 @@ fn registry_and_path_dep_works() {
.masquerade_as_nightly_cargo()
.with_stderr(
"\
[COMPILING] bar v0.0.1 (CWD/bar)
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] bar v0.0.1 ([CWD]/bar)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s
",
).run();
@ -358,14 +362,16 @@ fn alt_registry_and_crates_io_deps() {
p.cargo("build")
.masquerade_as_nightly_cargo()
.with_stderr_contains(format!(
"[UPDATING] registry `{}`",
registry::alt_registry()
)).with_stderr_contains(&format!("[UPDATING] registry `{}`", registry::registry()))
.with_stderr_contains("[DOWNLOADING] crates_io_dep v0.0.1 (registry `file://[..]`)")
.with_stderr_contains("[DOWNLOADING] alt_reg_dep v0.1.0 (registry `file://[..]`)")
.with_stderr_contains("[COMPILING] alt_reg_dep v0.1.0 (registry `file://[..]`)")
"[UPDATING] `{}` index",
registry::alt_registry_path().to_str().unwrap()
)).with_stderr_contains(&format!(
"[UPDATING] `{}` index",
registry::registry_path().to_str().unwrap()))
.with_stderr_contains("[DOWNLOADED] crates_io_dep v0.0.1 (registry `[ROOT][..]`)")
.with_stderr_contains("[DOWNLOADED] alt_reg_dep v0.1.0 (registry `[ROOT][..]`)")
.with_stderr_contains("[COMPILING] alt_reg_dep v0.1.0 (registry `[ROOT][..]`)")
.with_stderr_contains("[COMPILING] crates_io_dep v0.0.1")
.with_stderr_contains("[COMPILING] foo v0.0.1 (CWD)")
.with_stderr_contains("[COMPILING] foo v0.0.1 ([CWD])")
.with_stderr_contains("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s")
.run();
}

View file

@ -641,7 +641,7 @@ fn unused_keys() {
.with_stderr(
"\
warning: unused manifest key: target.foo.bar
[COMPILING] foo v0.1.0 (CWD)
[COMPILING] foo v0.1.0 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();

View file

@ -37,12 +37,12 @@ fn cargo_bench_simple() {
p.process(&p.bin("foo")).with_stdout("hello\n").run();
p.cargo("bench")
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] foo v0.5.0 (CWD)
[COMPILING] foo v0.5.0 ([CWD])
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target/release/deps/foo-[..][EXE]",
)).with_stdout_contains("test bench_hello ... bench: [..]")
).with_stdout_contains("test bench_hello ... bench: [..]")
.run();
}
@ -76,14 +76,14 @@ fn bench_bench_implicit() {
).build();
p.cargo("bench --benches")
.with_stderr(format!(
.with_stderr(
"\
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target/release/deps/foo-[..][EXE]
[RUNNING] target/release/deps/mybench-[..][EXE]
",
)).with_stdout_contains("test run2 ... bench: [..]")
).with_stdout_contains("test run2 ... bench: [..]")
.run();
}
@ -117,13 +117,13 @@ fn bench_bin_implicit() {
).build();
p.cargo("bench --bins")
.with_stderr(format!(
.with_stderr(
"\
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target/release/deps/foo-[..][EXE]
",
)).with_stdout_contains("test run1 ... bench: [..]")
).with_stdout_contains("test run1 ... bench: [..]")
.run();
}
@ -149,13 +149,13 @@ fn bench_tarname() {
).build();
p.cargo("bench --bench bin2")
.with_stderr(format!(
.with_stderr(
"\
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target/release/deps/bin2-[..][EXE]
",
)).with_stdout_contains("test run2 ... bench: [..]")
).with_stdout_contains("test run2 ... bench: [..]")
.run();
}
@ -213,13 +213,13 @@ fn cargo_bench_verbose() {
).build();
p.cargo("bench -v hello")
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] foo v0.5.0 (CWD)
[COMPILING] foo v0.5.0 ([CWD])
[RUNNING] `rustc [..] src/main.rs [..]`
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] `[..]target/release/deps/foo-[..][EXE] hello --bench`",
)).with_stdout_contains("test bench_hello ... bench: [..]")
).with_stdout_contains("test bench_hello ... bench: [..]")
.run();
}
@ -303,12 +303,12 @@ fn cargo_bench_failing_test() {
// Force libtest into serial execution so that the test header will be printed.
p.cargo("bench -- --test-threads=1")
.with_stdout_contains("test bench_hello ...[..]")
.with_stderr_contains(format!(
.with_stderr_contains(
"\
[COMPILING] foo v0.5.0 (CWD)[..]
[COMPILING] foo v0.5.0 ([CWD])[..]
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target/release/deps/foo-[..][EXE]",
)).with_either_contains(
).with_either_contains(
"[..]thread '[..]' panicked at 'assertion failed: `(left == right)`[..]",
).with_either_contains("[..]left: `\"hello\"`[..]")
.with_either_contains("[..]right: `\"nope\"`[..]")
@ -370,13 +370,13 @@ fn bench_with_lib_dep() {
).build();
p.cargo("bench")
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target/release/deps/foo-[..][EXE]
[RUNNING] target/release/deps/baz-[..][EXE]",
)).with_stdout_contains("test lib_bench ... bench: [..]")
).with_stdout_contains("test lib_bench ... bench: [..]")
.with_stdout_contains("test bin_bench ... bench: [..]")
.run();
}
@ -430,13 +430,13 @@ fn bench_with_deep_lib_dep() {
).build();
p.cargo("bench")
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] foo v0.0.1 ([..])
[COMPILING] bar v0.0.1 (CWD)
[COMPILING] bar v0.0.1 ([CWD])
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target/release/deps/bar-[..][EXE]",
)).with_stdout_contains("test bar_bench ... bench: [..]")
).with_stdout_contains("test bar_bench ... bench: [..]")
.run();
}
@ -484,13 +484,13 @@ fn external_bench_explicit() {
).build();
p.cargo("bench")
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target/release/deps/foo-[..][EXE]
[RUNNING] target/release/deps/bench-[..][EXE]",
)).with_stdout_contains("test internal_bench ... bench: [..]")
).with_stdout_contains("test internal_bench ... bench: [..]")
.with_stdout_contains("test external_bench ... bench: [..]")
.run();
}
@ -528,13 +528,13 @@ fn external_bench_implicit() {
).build();
p.cargo("bench")
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target/release/deps/foo-[..][EXE]
[RUNNING] target/release/deps/external-[..][EXE]",
)).with_stdout_contains("test internal_bench ... bench: [..]")
).with_stdout_contains("test internal_bench ... bench: [..]")
.with_stdout_contains("test external_bench ... bench: [..]")
.run();
}
@ -549,46 +549,43 @@ fn bench_autodiscover_2015() {
.file(
"Cargo.toml",
r#"
cargo-features = ["edition"]
[project]
name = "foo"
version = "0.0.1"
authors = []
edition = "2015"
[project]
name = "foo"
version = "0.0.1"
authors = []
edition = "2015"
[[bench]]
name = "bench_magic"
required-features = ["magic"]
"#,
[[bench]]
name = "bench_magic"
required-features = ["magic"]
"#,
).file("src/lib.rs", "")
.file(
"benches/bench_basic.rs",
r#"
#![feature(test)]
#[allow(unused_extern_crates)]
extern crate foo;
extern crate test;
#![feature(test)]
#[allow(unused_extern_crates)]
extern crate foo;
extern crate test;
#[bench]
fn bench_basic(_b: &mut test::Bencher) {}
"#,
#[bench]
fn bench_basic(_b: &mut test::Bencher) {}
"#,
).file(
"benches/bench_magic.rs",
r#"
#![feature(test)]
#[allow(unused_extern_crates)]
extern crate foo;
extern crate test;
#![feature(test)]
#[allow(unused_extern_crates)]
extern crate foo;
extern crate test;
#[bench]
fn bench_magic(_b: &mut test::Bencher) {}
"#,
#[bench]
fn bench_magic(_b: &mut test::Bencher) {}
"#,
).build();
p.cargo("bench bench_basic")
.masquerade_as_nightly_cargo()
.with_stderr(&format!(
.with_stderr(
"warning: \
An explicit [[bench]] section is specified in Cargo.toml which currently
disables Cargo from automatically inferring other benchmark targets.
@ -605,11 +602,11 @@ automatically infer them to be a target, such as in subfolders.
For more information on this warning you can consult
https://github.com/rust-lang/cargo/issues/5330
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target/release/deps/foo-[..][EXE]
",
)).run();
).run();
}
#[test]
@ -647,12 +644,12 @@ fn pass_through_command_line() {
).build();
p.cargo("bench bar")
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target/release/deps/foo-[..][EXE]",
)).with_stdout_contains("test bar ... bench: [..]")
).with_stdout_contains("test bar ... bench: [..]")
.run();
p.cargo("bench foo")
@ -734,13 +731,13 @@ fn lib_bin_same_name() {
).build();
p.cargo("bench")
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target/release/deps/foo-[..][EXE]
[RUNNING] target/release/deps/foo-[..][EXE]",
)).with_stdout_contains_n("test [..] ... bench: [..]", 2)
).with_stdout_contains_n("test [..] ... bench: [..]", 2)
.run();
}
@ -780,13 +777,13 @@ fn lib_with_standard_name() {
).build();
p.cargo("bench")
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] syntax v0.0.1 (CWD)
[COMPILING] syntax v0.0.1 ([CWD])
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target/release/deps/syntax-[..][EXE]
[RUNNING] target/release/deps/bench-[..][EXE]",
)).with_stdout_contains("test foo_bench ... bench: [..]")
).with_stdout_contains("test foo_bench ... bench: [..]")
.with_stdout_contains("test bench ... bench: [..]")
.run();
}
@ -829,12 +826,12 @@ fn lib_with_standard_name2() {
).build();
p.cargo("bench")
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] syntax v0.0.1 (CWD)
[COMPILING] syntax v0.0.1 ([CWD])
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target/release/deps/syntax-[..][EXE]",
)).with_stdout_contains("test bench ... bench: [..]")
).with_stdout_contains("test bench ... bench: [..]")
.run();
}
@ -899,30 +896,30 @@ fn bench_dylib() {
.build();
p.cargo("bench -v")
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] bar v0.0.1 (CWD/bar)
[COMPILING] bar v0.0.1 ([CWD]/bar)
[RUNNING] [..] -C opt-level=3 [..]
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[RUNNING] [..] -C opt-level=3 [..]
[RUNNING] [..] -C opt-level=3 [..]
[RUNNING] [..] -C opt-level=3 [..]
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] `[..]target/release/deps/foo-[..][EXE] --bench`
[RUNNING] `[..]target/release/deps/bench-[..][EXE] --bench`",
)).with_stdout_contains_n("test foo ... bench: [..]", 2)
).with_stdout_contains_n("test foo ... bench: [..]", 2)
.run();
p.root().move_into_the_past();
p.cargo("bench -v")
.with_stderr(&format!(
.with_stderr(
"\
[FRESH] bar v0.0.1 (CWD/bar)
[FRESH] foo v0.0.1 (CWD)
[FRESH] bar v0.0.1 ([CWD]/bar)
[FRESH] foo v0.0.1 ([CWD])
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] `[..]target/release/deps/foo-[..][EXE] --bench`
[RUNNING] `[..]target/release/deps/bench-[..][EXE] --bench`",
)).with_stdout_contains_n("test foo ... bench: [..]", 2)
).with_stdout_contains_n("test foo ... bench: [..]", 2)
.run();
}
@ -955,12 +952,12 @@ fn bench_twice_with_build_cmd() {
).build();
p.cargo("bench")
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target/release/deps/foo-[..][EXE]",
)).with_stdout_contains("test foo ... bench: [..]")
).with_stdout_contains("test foo ... bench: [..]")
.run();
p.cargo("bench")
@ -1039,16 +1036,16 @@ fn bench_with_examples() {
).build();
p.cargo("bench -v")
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] foo v6.6.6 (CWD)
[COMPILING] foo v6.6.6 ([CWD])
[RUNNING] `rustc [..]`
[RUNNING] `rustc [..]`
[RUNNING] `rustc [..]`
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] `CWD/target/release/deps/foo-[..][EXE] --bench`
[RUNNING] `CWD/target/release/deps/testb1-[..][EXE] --bench`",
)).with_stdout_contains("test bench_bench1 ... bench: [..]")
[RUNNING] `[CWD]/target/release/deps/foo-[..][EXE] --bench`
[RUNNING] `[CWD]/target/release/deps/testb1-[..][EXE] --bench`",
).with_stdout_contains("test bench_bench1 ... bench: [..]")
.with_stdout_contains("test bench_bench2 ... bench: [..]")
.run();
}

View file

@ -257,7 +257,7 @@ Caused by:
}
#[test]
fn cargo_compile_with_invalid_package_name() {
fn cargo_compile_with_empty_package_name() {
let p = project()
.file("Cargo.toml", &basic_manifest("", "0.0.0"))
.build();
@ -274,6 +274,24 @@ Caused by:
).run();
}
#[test]
fn cargo_compile_with_invalid_package_name() {
let p = project()
.file("Cargo.toml", &basic_manifest("foo::bar", "0.0.0"))
.build();
p.cargo("build")
.with_status(101)
.with_stderr(
"\
[ERROR] failed to parse manifest at `[..]`
Caused by:
Invalid character `:` in package name: `foo::bar`
",
).run();
}
#[test]
fn cargo_compile_with_invalid_bin_target_name() {
let p = project()
@ -806,8 +824,8 @@ fn cargo_compile_with_dep_name_mismatch() {
.with_status(101)
.with_stderr(
r#"error: no matching package named `notquitebar` found
location searched: CWD/bar
required by package `foo v0.0.1 (CWD)`
location searched: [CWD]/bar
required by package `foo v0.0.1 ([CWD])`
"#,
).run();
}
@ -1017,13 +1035,13 @@ fn main(){
p2.cargo("run -Zoffline")
.masquerade_as_nightly_cargo()
.with_stderr(format!(
.with_stderr(
"\
[COMPILING] present_dep v1.2.3
[COMPILING] foo v0.1.0 (CWD)
[COMPILING] foo v0.1.0 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
Running `[..]`",
)).with_stdout("1.2.3")
).with_stdout("1.2.3")
.run();
}
@ -1164,7 +1182,7 @@ fn compile_offline_while_transitive_dep_not_cached() {
error: no matching package named `baz` found
location searched: registry `[..]`
required by package `bar v0.1.0`
... which is depended on by `foo v0.0.1 (CWD)`
... which is depended on by `foo v0.0.1 ([CWD])`
As a reminder, you're using offline mode (-Z offline) \
which can sometimes cause surprising resolution failures, \
if this error is too confusing you may with to retry \
@ -1260,21 +1278,21 @@ fn cargo_default_env_metadata_env_var() {
p.cargo("build -v")
.with_stderr(&format!(
"\
[COMPILING] bar v0.0.1 (CWD/bar)
[RUNNING] `rustc --crate-name bar bar/src/lib.rs --crate-type dylib \
[COMPILING] bar v0.0.1 ([CWD]/bar)
[RUNNING] `rustc --crate-name bar bar/src/lib.rs --color never --crate-type dylib \
--emit=dep-info,link \
-C prefer-dynamic -C debuginfo=2 \
-C metadata=[..] \
--out-dir [..] \
-L dependency=CWD/target/debug/deps`
[COMPILING] foo v0.0.1 (CWD)
[RUNNING] `rustc --crate-name foo src/lib.rs --crate-type lib \
-L dependency=[CWD]/target/debug/deps`
[COMPILING] foo v0.0.1 ([CWD])
[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
--emit=dep-info,link -C debuginfo=2 \
-C metadata=[..] \
-C extra-filename=[..] \
--out-dir [..] \
-L dependency=CWD/target/debug/deps \
--extern bar=CWD/target/debug/deps/{prefix}bar{suffix}`
-L dependency=[CWD]/target/debug/deps \
--extern bar=[CWD]/target/debug/deps/{prefix}bar{suffix}`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
prefix = env::consts::DLL_PREFIX,
suffix = env::consts::DLL_SUFFIX,
@ -1287,21 +1305,21 @@ fn cargo_default_env_metadata_env_var() {
.env("__CARGO_DEFAULT_LIB_METADATA", "stable")
.with_stderr(&format!(
"\
[COMPILING] bar v0.0.1 (CWD/bar)
[RUNNING] `rustc --crate-name bar bar/src/lib.rs --crate-type dylib \
[COMPILING] bar v0.0.1 ([CWD]/bar)
[RUNNING] `rustc --crate-name bar bar/src/lib.rs --color never --crate-type dylib \
--emit=dep-info,link \
-C prefer-dynamic -C debuginfo=2 \
-C metadata=[..] \
--out-dir [..] \
-L dependency=CWD/target/debug/deps`
[COMPILING] foo v0.0.1 (CWD)
[RUNNING] `rustc --crate-name foo src/lib.rs --crate-type lib \
-L dependency=[CWD]/target/debug/deps`
[COMPILING] foo v0.0.1 ([CWD])
[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
--emit=dep-info,link -C debuginfo=2 \
-C metadata=[..] \
-C extra-filename=[..] \
--out-dir [..] \
-L dependency=CWD/target/debug/deps \
--extern bar=CWD/target/debug/deps/{prefix}bar-[..]{suffix}`
-L dependency=[CWD]/target/debug/deps \
--extern bar=[CWD]/target/debug/deps/{prefix}bar-[..]{suffix}`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
prefix = env::consts::DLL_PREFIX,
@ -1370,7 +1388,7 @@ fn crate_env_vars() {
p.cargo("build -v").run();
println!("bin");
p.process(&p.bin("foo")).with_stdout("0-5-1 @ alpha.1 in CWD").run();
p.process(&p.bin("foo")).with_stdout("0-5-1 @ alpha.1 in [CWD]").run();
println!("test");
p.cargo("test -v").run();
@ -1551,8 +1569,8 @@ fn self_dependency() {
.with_status(101)
.with_stderr(
"\
[ERROR] cyclic package dependency: package `test v0.0.0 (CWD)` depends on itself. Cycle:
package `test v0.0.0 (CWD)`",
[ERROR] cyclic package dependency: package `test v0.0.0 ([CWD])` depends on itself. Cycle:
package `test v0.0.0 ([CWD])`",
).run();
}
@ -1613,54 +1631,54 @@ fn lto_build() {
).file("src/main.rs", "fn main() {}")
.build();
p.cargo("build -v --release")
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] test v0.0.0 (CWD)
[RUNNING] `rustc --crate-name test src/main.rs --crate-type bin \
[COMPILING] test v0.0.0 ([CWD])
[RUNNING] `rustc --crate-name test src/main.rs --color never --crate-type bin \
--emit=dep-info,link \
-C opt-level=3 \
-C lto \
-C metadata=[..] \
--out-dir CWD/target/release/deps \
-L dependency=CWD/target/release/deps`
--out-dir [CWD]/target/release/deps \
-L dependency=[CWD]/target/release/deps`
[FINISHED] release [optimized] target(s) in [..]
",
)).run();
).run();
}
#[test]
fn verbose_build() {
let p = project().file("src/lib.rs", "").build();
p.cargo("build -v")
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] foo v0.0.1 (CWD)
[RUNNING] `rustc --crate-name foo src/lib.rs --crate-type lib \
[COMPILING] foo v0.0.1 ([CWD])
[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
--emit=dep-info,link -C debuginfo=2 \
-C metadata=[..] \
--out-dir [..] \
-L dependency=CWD/target/debug/deps`
-L dependency=[CWD]/target/debug/deps`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
}
#[test]
fn verbose_release_build() {
let p = project().file("src/lib.rs", "").build();
p.cargo("build -v --release")
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] foo v0.0.1 (CWD)
[RUNNING] `rustc --crate-name foo src/lib.rs --crate-type lib \
[COMPILING] foo v0.0.1 ([CWD])
[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
--emit=dep-info,link \
-C opt-level=3 \
-C metadata=[..] \
--out-dir [..] \
-L dependency=CWD/target/release/deps`
-L dependency=[CWD]/target/release/deps`
[FINISHED] release [optimized] target(s) in [..]
",
)).run();
).run();
}
#[test]
@ -1697,24 +1715,24 @@ fn verbose_release_build_deps() {
p.cargo("build -v --release")
.with_stderr(&format!(
"\
[COMPILING] foo v0.0.0 (CWD/foo)
[RUNNING] `rustc --crate-name foo foo/src/lib.rs \
[COMPILING] foo v0.0.0 ([CWD]/foo)
[RUNNING] `rustc --crate-name foo foo/src/lib.rs --color never \
--crate-type dylib --crate-type rlib \
--emit=dep-info,link \
-C prefer-dynamic \
-C opt-level=3 \
-C metadata=[..] \
--out-dir [..] \
-L dependency=CWD/target/release/deps`
[COMPILING] test v0.0.0 (CWD)
[RUNNING] `rustc --crate-name test src/lib.rs --crate-type lib \
-L dependency=[CWD]/target/release/deps`
[COMPILING] test v0.0.0 ([CWD])
[RUNNING] `rustc --crate-name test src/lib.rs --color never --crate-type lib \
--emit=dep-info,link \
-C opt-level=3 \
-C metadata=[..] \
--out-dir [..] \
-L dependency=CWD/target/release/deps \
--extern foo=CWD/target/release/deps/{prefix}foo{suffix} \
--extern foo=CWD/target/release/deps/libfoo.rlib`
-L dependency=[CWD]/target/release/deps \
--extern foo=[CWD]/target/release/deps/{prefix}foo{suffix} \
--extern foo=[CWD]/target/release/deps/libfoo.rlib`
[FINISHED] release [optimized] target(s) in [..]
",
prefix = env::consts::DLL_PREFIX,
@ -2030,12 +2048,12 @@ fn lib_with_standard_name() {
).build();
p.cargo("build")
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] syntax v0.0.1 (CWD)
[COMPILING] syntax v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
}
#[test]
@ -2140,12 +2158,12 @@ fn freshness_ignores_excluded() {
foo.root().move_into_the_past();
foo.cargo("build")
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] foo v0.0.0 (CWD)
[COMPILING] foo v0.0.0 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
// Smoke test to make sure it doesn't compile again
println!("first pass");
@ -2191,21 +2209,21 @@ fn rebuild_preserves_out_dir() {
foo.cargo("build")
.env("FIRST", "1")
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] foo v0.0.0 (CWD)
[COMPILING] foo v0.0.0 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
File::create(&foo.root().join("src/bar.rs")).unwrap();
foo.cargo("build")
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] foo v0.0.0 (CWD)
[COMPILING] foo v0.0.0 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
}
#[test]
@ -2601,9 +2619,9 @@ fn cyclic_deps_rejected() {
p.cargo("build -v")
.with_status(101)
.with_stderr(
"[ERROR] cyclic package dependency: package `a v0.0.1 (CWD/a)` depends on itself. Cycle:
package `a v0.0.1 (CWD/a)`
... which is depended on by `foo v0.0.1 (CWD)`",
"[ERROR] cyclic package dependency: package `a v0.0.1 ([CWD]/a)` depends on itself. Cycle:
package `a v0.0.1 ([CWD]/a)`
... which is depended on by `foo v0.0.1 ([CWD])`",
).run();
}
@ -3551,11 +3569,12 @@ fn build_all_member_dependency_same_name() {
p.cargo("build --all")
.with_stderr(
"[..] Updating registry `[..]`\n\
[..] Downloading a v0.1.0 ([..])\n\
[..] Compiling a v0.1.0\n\
[..] Compiling a v0.1.0 ([..])\n\
[..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
"[UPDATING] `[..]` index\n\
[DOWNLOADING] crates ...\n\
[DOWNLOADED] a v0.1.0 ([..])\n\
[COMPILING] a v0.1.0\n\
[COMPILING] a v0.1.0 ([..])\n\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
).run();
}
@ -3920,27 +3939,22 @@ fn inferred_benchmarks() {
#[test]
fn target_edition() {
if !is_nightly() {
// --edition is nightly-only
return;
}
let p = project()
.file(
"Cargo.toml",
r#"
cargo-features = ["edition"]
[package]
name = "foo"
version = "0.0.1"
[package]
name = "foo"
version = "0.0.1"
[lib]
edition = "2018"
"#,
[lib]
edition = "2018"
"#,
).file("src/lib.rs", "")
.build();
p.cargo("build -v")
.masquerade_as_nightly_cargo()
.without_status() // passes on nightly, fails on stable, b/c --edition is nightly-only
.with_stderr_contains(
"\
[COMPILING] foo v0.0.1 ([..])
@ -3955,62 +3969,26 @@ fn target_edition_override() {
.file(
"Cargo.toml",
r#"
cargo-features = ["edition"]
[package]
name = "foo"
version = "0.0.1"
authors = []
edition = "2018"
[package]
name = "foo"
version = "0.0.1"
authors = []
edition = "2018"
[lib]
edition = "2015"
"#,
).file("src/lib.rs", "")
[lib]
edition = "2015"
"#,
).file(
"src/lib.rs",
"
pub fn async() {}
pub fn try() {}
pub fn await() {}
"
)
.build();
p.cargo("build -v")
.masquerade_as_nightly_cargo()
.with_stderr_contains(
"\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..]--edition=2015 [..]
",
).run();
}
#[test]
fn target_edition_feature_gated() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[lib]
edition = "2018"
"#,
).file("src/lib.rs", "")
.build();
p.cargo("build -v")
.masquerade_as_nightly_cargo()
.with_status(101)
.with_stderr(
"\
error: failed to parse manifest at `[..]`
Caused by:
editions are unstable
Caused by:
feature `edition` is required
consider adding `cargo-features = [\"edition\"]` to the manifest
",
).run();
p.cargo("build -v").run();
}
#[test]
@ -4137,11 +4115,11 @@ fn build_filter_infer_profile() {
p.cargo("build -v")
.with_stderr_contains(
"\
[RUNNING] `rustc --crate-name foo src/lib.rs --crate-type lib \
[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
--emit=dep-info,link[..]",
).with_stderr_contains(
"\
[RUNNING] `rustc --crate-name foo src/main.rs --crate-type bin \
[RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \
--emit=dep-info,link[..]",
).run();
@ -4149,13 +4127,13 @@ fn build_filter_infer_profile() {
p.cargo("build -v --test=t1")
.with_stderr_contains(
"\
[RUNNING] `rustc --crate-name foo src/lib.rs --crate-type lib \
[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
--emit=dep-info,link[..]",
).with_stderr_contains(
"[RUNNING] `rustc --crate-name t1 tests/t1.rs --emit=dep-info,link[..]",
"[RUNNING] `rustc --crate-name t1 tests/t1.rs --color never --emit=dep-info,link[..]",
).with_stderr_contains(
"\
[RUNNING] `rustc --crate-name foo src/main.rs --crate-type bin \
[RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \
--emit=dep-info,link[..]",
).run();
@ -4163,15 +4141,15 @@ fn build_filter_infer_profile() {
p.cargo("build -v --bench=b1")
.with_stderr_contains(
"\
[RUNNING] `rustc --crate-name foo src/lib.rs --crate-type lib \
[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
--emit=dep-info,link[..]",
).with_stderr_contains(
"\
[RUNNING] `rustc --crate-name b1 benches/b1.rs --emit=dep-info,link \
[RUNNING] `rustc --crate-name b1 benches/b1.rs --color never --emit=dep-info,link \
-C opt-level=3[..]",
).with_stderr_contains(
"\
[RUNNING] `rustc --crate-name foo src/main.rs --crate-type bin \
[RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \
--emit=dep-info,link[..]",
).run();
}
@ -4182,15 +4160,15 @@ fn targets_selected_default() {
p.cargo("build -v")
// bin
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src/main.rs --crate-type bin \
[RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \
--emit=dep-info,link[..]")
// bench
.with_stderr_does_not_contain("\
[RUNNING] `rustc --crate-name foo src/main.rs --emit=dep-info,link \
[RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=dep-info,link \
-C opt-level=3 --test [..]")
// unit test
.with_stderr_does_not_contain("\
[RUNNING] `rustc --crate-name foo src/main.rs --emit=dep-info,link \
[RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=dep-info,link \
-C debuginfo=2 --test [..]").run();
}
@ -4200,15 +4178,15 @@ fn targets_selected_all() {
p.cargo("build -v --all-targets")
// bin
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src/main.rs --crate-type bin \
[RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \
--emit=dep-info,link[..]")
// bench
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src/main.rs --emit=dep-info,link \
[RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=dep-info,link \
-C opt-level=3 --test [..]")
// unit test
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src/main.rs --emit=dep-info,link \
[RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=dep-info,link \
-C debuginfo=2 --test [..]").run();
}
@ -4218,15 +4196,15 @@ fn all_targets_no_lib() {
p.cargo("build -v --all-targets")
// bin
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src/main.rs --crate-type bin \
[RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \
--emit=dep-info,link[..]")
// bench
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src/main.rs --emit=dep-info,link \
[RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=dep-info,link \
-C opt-level=3 --test [..]")
// unit test
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src/main.rs --emit=dep-info,link \
[RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=dep-info,link \
-C debuginfo=2 --test [..]").run();
}

View file

@ -16,7 +16,7 @@ fn http_auth_offered() {
let addr = server.local_addr().unwrap();
fn headers(rdr: &mut BufRead) -> HashSet<String> {
let valid = ["GET", "Authorization", "Accept", "User-Agent"];
let valid = ["GET", "Authorization", "Accept"];
rdr.lines()
.map(|s| s.unwrap())
.take_while(|s| s.len() > 2)
@ -28,7 +28,6 @@ fn http_auth_offered() {
let t = thread::spawn(move || {
let mut conn = BufStream::new(server.accept().unwrap().0);
let req = headers(&mut conn);
let user_agent = "User-Agent: git/2.0 (libgit2 0.27.0)";
conn.write_all(
b"\
HTTP/1.1 401 Unauthorized\r\n\
@ -41,7 +40,6 @@ fn http_auth_offered() {
vec![
"GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1",
"Accept: */*",
user_agent,
].into_iter()
.map(|s| s.to_string())
.collect()
@ -63,7 +61,6 @@ fn http_auth_offered() {
"GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1",
"Authorization: Basic Zm9vOmJhcg==",
"Accept: */*",
user_agent,
].into_iter()
.map(|s| s.to_string())
.collect()

View file

@ -10,12 +10,12 @@ fn build_lib_only() {
p.cargo("build --lib -v")
.with_stderr(
"\
[COMPILING] foo v0.0.1 (CWD)
[RUNNING] `rustc --crate-name foo src/lib.rs --crate-type lib \
[COMPILING] foo v0.0.1 ([CWD])
[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
--emit=dep-info,link -C debuginfo=2 \
-C metadata=[..] \
--out-dir [..] \
-L dependency=CWD/target/debug/deps`
-L dependency=[CWD]/target/debug/deps`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
).run();
}

View file

@ -29,14 +29,14 @@ fn custom_build_script_failed() {
.build();
p.cargo("build -v")
.with_status(101)
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] foo v0.5.0 (CWD)
[RUNNING] `rustc --crate-name build_script_build build.rs --crate-type bin [..]`
[COMPILING] foo v0.5.0 ([CWD])
[RUNNING] `rustc --crate-name build_script_build build.rs --color never --crate-type bin [..]`
[RUNNING] `[..]/build-script-build`
[ERROR] failed to run custom build command for `foo v0.5.0 (CWD)`
[ERROR] failed to run custom build command for `foo v0.5.0 ([CWD])`
process didn't exit successfully: `[..]/build-script-build` (exit code: 101)",
)).run();
).run();
}
#[test]
@ -179,11 +179,11 @@ fn custom_build_script_wrong_rustc_flags() {
p.cargo("build")
.with_status(101)
.with_stderr_contains(&format!(
.with_stderr_contains(
"\
[ERROR] Only `-l` and `-L` flags are allowed in build script of `foo v0.5.0 (CWD)`: \
[ERROR] Only `-l` and `-L` flags are allowed in build script of `foo v0.5.0 ([CWD])`: \
`-aaa -bbb`",
)).run();
).run();
}
/*
@ -228,14 +228,14 @@ fn custom_build_script_rustc_flags() {
.with_status(101)
.with_stderr(
"\
[COMPILING] bar v0.5.0 (CWD)
[RUNNING] `rustc --crate-name test CWD/src/lib.rs --crate-type lib -C debuginfo=2 \
[COMPILING] bar v0.5.0 ([CWD])
[RUNNING] `rustc --crate-name test [CWD]/src/lib.rs --crate-type lib -C debuginfo=2 \
-C metadata=[..] \
-C extra-filename=-[..] \
--out-dir CWD/target \
--out-dir [CWD]/target \
--emit=dep-info,link \
-L CWD/target \
-L CWD/target/deps`
-L [CWD]/target \
-L [CWD]/target/deps`
",
).run();
}
@ -260,7 +260,7 @@ fn links_no_build_cmd() {
.with_status(101)
.with_stderr(
"\
[ERROR] package `foo v0.5.0 (CWD)` specifies that it links to `a` but does \
[ERROR] package `foo v0.5.0 ([CWD])` specifies that it links to `a` but does \
not have a custom build script
",
).run();
@ -551,7 +551,7 @@ fn only_rerun_build_script() {
p.cargo("build -v")
.with_stderr(
"\
[COMPILING] foo v0.5.0 (CWD)
[COMPILING] foo v0.5.0 ([CWD])
[RUNNING] `[..]/build-script-build`
[RUNNING] `rustc --crate-name foo [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
@ -651,7 +651,7 @@ fn testing_and_such() {
p.cargo("test -vj1")
.with_stderr(
"\
[COMPILING] foo v0.5.0 (CWD)
[COMPILING] foo v0.5.0 ([CWD])
[RUNNING] `[..]/build-script-build`
[RUNNING] `rustc --crate-name foo [..]`
[RUNNING] `rustc --crate-name foo [..]`
@ -666,7 +666,7 @@ fn testing_and_such() {
p.cargo("doc -v")
.with_stderr(
"\
[DOCUMENTING] foo v0.5.0 (CWD)
[DOCUMENTING] foo v0.5.0 ([CWD])
[RUNNING] `rustdoc [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
@ -680,7 +680,7 @@ fn testing_and_such() {
p.cargo("run")
.with_stderr(
"\
[COMPILING] foo v0.5.0 (CWD)
[COMPILING] foo v0.5.0 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `target/debug/foo[EXE]`
",
@ -746,7 +746,7 @@ fn propagation_of_l_flags() {
.with_stderr_contains(
"\
[RUNNING] `rustc --crate-name a [..] -L bar[..]-L foo[..]`
[COMPILING] foo v0.5.0 (CWD)
[COMPILING] foo v0.5.0 ([CWD])
[RUNNING] `rustc --crate-name foo [..] -L bar -L foo`
",
).run();
@ -815,7 +815,7 @@ fn propagation_of_l_flags_new() {
.with_stderr_contains(
"\
[RUNNING] `rustc --crate-name a [..] -L bar[..]-L foo[..]`
[COMPILING] foo v0.5.0 (CWD)
[COMPILING] foo v0.5.0 ([CWD])
[RUNNING] `rustc --crate-name foo [..] -L bar -L foo`
",
).run();
@ -850,9 +850,9 @@ fn build_deps_simple() {
p.cargo("build -v")
.with_stderr(
"\
[COMPILING] a v0.5.0 (CWD/a)
[COMPILING] a v0.5.0 ([CWD]/a)
[RUNNING] `rustc --crate-name a [..]`
[COMPILING] foo v0.5.0 (CWD)
[COMPILING] foo v0.5.0 ([CWD])
[RUNNING] `rustc [..] build.rs [..] --extern a=[..]`
[RUNNING] `[..]/foo-[..]/build-script-build`
[RUNNING] `rustc --crate-name foo [..]`
@ -950,24 +950,24 @@ fn build_cmd_with_a_build_cmd() {
p.cargo("build -v")
.with_stderr(
"\
[COMPILING] b v0.5.0 (CWD/b)
[COMPILING] b v0.5.0 ([CWD]/b)
[RUNNING] `rustc --crate-name b [..]`
[COMPILING] a v0.5.0 (CWD/a)
[COMPILING] a v0.5.0 ([CWD]/a)
[RUNNING] `rustc [..] a/build.rs [..] --extern b=[..]`
[RUNNING] `[..]/a-[..]/build-script-build`
[RUNNING] `rustc --crate-name a [..]lib.rs --crate-type lib \
[RUNNING] `rustc --crate-name a [..]lib.rs --color never --crate-type lib \
--emit=dep-info,link -C debuginfo=2 \
-C metadata=[..] \
--out-dir [..]target/debug/deps \
-L [..]target/debug/deps`
[COMPILING] foo v0.5.0 (CWD)
[RUNNING] `rustc --crate-name build_script_build build.rs --crate-type bin \
[COMPILING] foo v0.5.0 ([CWD])
[RUNNING] `rustc --crate-name build_script_build build.rs --color never --crate-type bin \
--emit=dep-info,link \
-C debuginfo=2 -C metadata=[..] --out-dir [..] \
-L [..]target/debug/deps \
--extern a=[..]liba[..].rlib`
[RUNNING] `[..]/foo-[..]/build-script-build`
[RUNNING] `rustc --crate-name foo [..]lib.rs --crate-type lib \
[RUNNING] `rustc --crate-name foo [..]lib.rs --color never --crate-type lib \
--emit=dep-info,link -C debuginfo=2 \
-C metadata=[..] \
--out-dir [..] \
@ -1057,7 +1057,7 @@ fn output_separate_lines() {
.with_status(101)
.with_stderr_contains(
"\
[COMPILING] foo v0.5.0 (CWD)
[COMPILING] foo v0.5.0 ([CWD])
[RUNNING] `rustc [..] build.rs [..]`
[RUNNING] `[..]/foo-[..]/build-script-build`
[RUNNING] `rustc --crate-name foo [..] -L foo -l static=foo`
@ -1092,7 +1092,7 @@ fn output_separate_lines_new() {
.with_status(101)
.with_stderr_contains(
"\
[COMPILING] foo v0.5.0 (CWD)
[COMPILING] foo v0.5.0 ([CWD])
[RUNNING] `rustc [..] build.rs [..]`
[RUNNING] `[..]/foo-[..]/build-script-build`
[RUNNING] `rustc --crate-name foo [..] -L foo -l static=foo`
@ -1146,7 +1146,7 @@ fn code_generation() {
p.cargo("run")
.with_stderr(
"\
[COMPILING] foo v0.5.0 (CWD)
[COMPILING] foo v0.5.0 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `target/debug/foo`",
).with_stdout("Hello, World!")
@ -1669,9 +1669,9 @@ fn cfg_test() {
).file("tests/test.rs", "#[cfg(foo)] #[test] fn test_bar() {}")
.build();
p.cargo("test -v")
.with_stderr(format!(
.with_stderr(
"\
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[RUNNING] [..] build.rs [..]
[RUNNING] `[..]/build-script-build`
[RUNNING] [..] --cfg foo[..]
@ -1682,7 +1682,7 @@ fn cfg_test() {
[RUNNING] `[..]/test-[..][EXE]`
[DOCTEST] foo
[RUNNING] [..] --cfg foo[..]",
)).with_stdout_contains("test test_foo ... ok")
).with_stdout_contains("test test_foo ... ok")
.with_stdout_contains("test test_bar ... ok")
.with_stdout_contains_n("test [..] ... ok", 3)
.run();
@ -1774,9 +1774,9 @@ fn cfg_override_test() {
).file("tests/test.rs", "#[cfg(foo)] #[test] fn test_bar() {}")
.build();
p.cargo("test -v")
.with_stderr(format!(
.with_stderr(
"\
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[RUNNING] `[..]`
[RUNNING] `[..]`
[RUNNING] `[..]`
@ -1785,7 +1785,7 @@ fn cfg_override_test() {
[RUNNING] `[..]/test-[..][EXE]`
[DOCTEST] foo
[RUNNING] [..] --cfg foo[..]",
)).with_stdout_contains("test test_foo ... ok")
).with_stdout_contains("test test_foo ... ok")
.with_stdout_contains("test test_bar ... ok")
.with_stdout_contains_n("test [..] ... ok", 3)
.run();
@ -1897,9 +1897,9 @@ fn env_test() {
"#,
).build();
p.cargo("test -v")
.with_stderr(format!(
.with_stderr(
"\
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[RUNNING] [..] build.rs [..]
[RUNNING] `[..]/build-script-build`
[RUNNING] [..] --crate-name foo[..]
@ -1910,7 +1910,7 @@ fn env_test() {
[RUNNING] `[..]/test-[..][EXE]`
[DOCTEST] foo
[RUNNING] [..] --crate-name foo[..]",
)).with_stdout_contains_n("running 0 tests", 2)
).with_stdout_contains_n("running 0 tests", 2)
.with_stdout_contains("test test_foo ... ok")
.run();
}
@ -2706,8 +2706,9 @@ fn warnings_hidden_for_upstream() {
p.cargo("build -v")
.with_stderr(
"\
[UPDATING] registry `[..]`
[DOWNLOADING] bar v0.1.0 ([..])
[UPDATING] `[..]` index
[DOWNLOADING] crates ...
[DOWNLOADED] bar v0.1.0 ([..])
[COMPILING] bar v0.1.0
[RUNNING] `rustc [..]`
[RUNNING] `[..]`
@ -2760,8 +2761,9 @@ fn warnings_printed_on_vv() {
p.cargo("build -vv")
.with_stderr(
"\
[UPDATING] registry `[..]`
[DOWNLOADING] bar v0.1.0 ([..])
[UPDATING] `[..]` index
[DOWNLOADING] crates ...
[DOWNLOADED] bar v0.1.0 ([..])
[COMPILING] bar v0.1.0
[RUNNING] `rustc [..]`
[RUNNING] `[..]`

View file

@ -268,7 +268,7 @@ fn cargo_subcommand_args() {
if cfg!(windows) { // weird edge-case w/ CWD & (windows vs unix)
format!(r#"[{:?}, "foo", "bar", "-v", "--help"]"#, cargo_foo_bin)
} else {
r#"["CWD/cargo-foo/target/debug/cargo-foo", "foo", "bar", "-v", "--help"]"#.to_string()
r#"["[CWD]/cargo-foo/target/debug/cargo-foo", "foo", "bar", "-v", "--help"]"#.to_string()
}
).run();
}

View file

@ -222,9 +222,10 @@ fn works_through_the_registry() {
p.cargo("build")
.with_stderr(
"\
[UPDATING] registry [..]
[DOWNLOADING] [..]
[DOWNLOADING] [..]
[UPDATING] [..] index
[DOWNLOADING] crates ...
[DOWNLOADED] [..]
[DOWNLOADED] [..]
[COMPILING] baz v0.1.0
[COMPILING] bar v0.1.0
[COMPILING] foo v0.0.1 ([..])
@ -266,8 +267,9 @@ fn ignore_version_from_other_platform() {
p.cargo("build")
.with_stderr(
"\
[UPDATING] registry [..]
[DOWNLOADING] [..]
[UPDATING] [..] index
[DOWNLOADING] crates ...
[DOWNLOADED] [..]
[COMPILING] bar v0.1.0
[COMPILING] foo v0.0.1 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]

View file

@ -521,7 +521,7 @@ fn check_filters() {
p.root().join("target").rm_rf();
p.cargo("check --tests -v")
.with_stderr_contains("[..] --crate-name foo src/lib.rs [..] --test [..]")
.with_stderr_contains("[..] --crate-name foo src/lib.rs --crate-type lib [..]")
.with_stderr_contains("[..] --crate-name foo src/lib.rs [..] --crate-type lib [..]")
.with_stderr_contains("[..] --crate-name foo src/main.rs [..] --test [..]")
.with_stderr_contains("[..]unused_unit_lib[..]")
.with_stderr_contains("[..]unused_unit_bin[..]")
@ -684,3 +684,9 @@ fn proc_macro() {
).build();
p.cargo("check -v").env("RUST_LOG", "cargo=trace").run();
}
#[test]
fn does_not_use_empty_rustc_wrapper() {
let p = project().file("src/lib.rs", "").build();
p.cargo("check").env("RUSTC_WRAPPER", "").run();
}

View file

@ -357,15 +357,15 @@ fn linker_and_ar() {
.with_status(101)
.with_stderr_contains(&format!(
"\
[COMPILING] foo v0.5.0 (CWD)
[RUNNING] `rustc --crate-name foo src/foo.rs --crate-type bin \
[COMPILING] foo v0.5.0 ([CWD])
[RUNNING] `rustc --crate-name foo src/foo.rs --color never --crate-type bin \
--emit=dep-info,link -C debuginfo=2 \
-C metadata=[..] \
--out-dir CWD/target/{target}/debug/deps \
--out-dir [CWD]/target/{target}/debug/deps \
--target {target} \
-C ar=my-ar-tool -C linker=my-linker-tool \
-L dependency=CWD/target/{target}/debug/deps \
-L dependency=CWD/target/debug/deps`
-L dependency=[CWD]/target/{target}/debug/deps \
-L dependency=[CWD]/target/debug/deps`
",
target = target,
)).run();
@ -504,7 +504,7 @@ fn cross_tests() {
.arg(&target)
.with_stderr(&format!(
"\
[COMPILING] foo v0.0.0 (CWD)
[COMPILING] foo v0.0.0 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target/{triple}/debug/deps/foo-[..][EXE]
[RUNNING] target/{triple}/debug/deps/bar-[..][EXE]",
@ -531,14 +531,13 @@ fn no_cross_doctests() {
"#,
).build();
let host_output = format!(
let host_output =
"\
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target/debug/deps/foo-[..][EXE]
[DOCTEST] foo
",
);
";
println!("a");
p.cargo("test").with_stderr(&host_output).run();
@ -549,7 +548,7 @@ fn no_cross_doctests() {
.arg(&target)
.with_stderr(&format!(
"\
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target/{triple}/debug/deps/foo-[..][EXE]
[DOCTEST] foo
@ -563,7 +562,7 @@ fn no_cross_doctests() {
.arg(&target)
.with_stderr(&format!(
"\
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target/{triple}/debug/deps/foo-[..][EXE]
",
@ -644,9 +643,9 @@ fn cross_with_a_build_script() {
.arg(&target)
.with_stderr(&format!(
"\
[COMPILING] foo v0.0.0 (CWD)
[RUNNING] `rustc [..] build.rs [..] --out-dir CWD/target/debug/build/foo-[..]`
[RUNNING] `CWD/target/debug/build/foo-[..]/build-script-build`
[COMPILING] foo v0.0.0 ([CWD])
[RUNNING] `rustc [..] build.rs [..] --out-dir [CWD]/target/debug/build/foo-[..]`
[RUNNING] `[CWD]/target/debug/build/foo-[..]/build-script-build`
[RUNNING] `rustc [..] src/main.rs [..] --target {target} [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
@ -732,19 +731,19 @@ fn build_script_needed_for_host_and_target() {
p.cargo("build -v --target")
.arg(&target)
.with_stderr_contains(&"[COMPILING] d1 v0.0.0 (CWD/d1)")
.with_stderr_contains(&"[COMPILING] d1 v0.0.0 ([CWD]/d1)")
.with_stderr_contains(
"[RUNNING] `rustc [..] d1/build.rs [..] --out-dir CWD/target/debug/build/d1-[..]`",
"[RUNNING] `rustc [..] d1/build.rs [..] --out-dir [CWD]/target/debug/build/d1-[..]`",
)
.with_stderr_contains("[RUNNING] `CWD/target/debug/build/d1-[..]/build-script-build`")
.with_stderr_contains("[RUNNING] `[CWD]/target/debug/build/d1-[..]/build-script-build`")
.with_stderr_contains("[RUNNING] `rustc [..] d1/src/lib.rs [..]`")
.with_stderr_contains("[COMPILING] d2 v0.0.0 (CWD/d2)")
.with_stderr_contains("[COMPILING] d2 v0.0.0 ([CWD]/d2)")
.with_stderr_contains(&format!(
"[RUNNING] `rustc [..] d2/src/lib.rs [..] -L /path/to/{host}`",
host = host
)).with_stderr_contains("[COMPILING] foo v0.0.0 (CWD)")
)).with_stderr_contains("[COMPILING] foo v0.0.0 ([CWD])")
.with_stderr_contains(&format!(
"[RUNNING] `rustc [..] build.rs [..] --out-dir CWD/target/debug/build/foo-[..] \
"[RUNNING] `rustc [..] build.rs [..] --out-dir [CWD]/target/debug/build/foo-[..] \
-L /path/to/{host}`",
host = host
)).with_stderr_contains(&format!(
@ -940,7 +939,7 @@ fn build_script_with_platform_specific_dependencies() {
[RUNNING] `rustc [..] d1/src/lib.rs [..]`
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..] build.rs [..]`
[RUNNING] `CWD/target/debug/build/foo-[..]/build-script-build`
[RUNNING] `[CWD]/target/debug/build/foo-[..]/build-script-build`
[RUNNING] `rustc [..] src/lib.rs [..] --target {target} [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
@ -1153,8 +1152,8 @@ fn cross_test_dylib() {
.arg(&target)
.with_stderr(&format!(
"\
[COMPILING] bar v0.0.1 (CWD/bar)
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] bar v0.0.1 ([CWD]/bar)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target/{arch}/debug/deps/foo-[..][EXE]
[RUNNING] target/{arch}/debug/deps/test-[..][EXE]",

View file

@ -41,13 +41,13 @@ fn simple_cross_package() {
p.cargo("package --target")
.arg(&target)
.with_stderr(&format!(
" Packaging foo v0.0.0 (CWD)
Verifying foo v0.0.0 (CWD)
Compiling foo v0.0.0 (CWD/target/package/foo-0.0.0)
.with_stderr(
" Packaging foo v0.0.0 ([CWD])
Verifying foo v0.0.0 ([CWD])
Compiling foo v0.0.0 ([CWD]/target/package/foo-0.0.0)
Finished dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
// Check that the tarball contains the files
let f = File::open(&p.root().join("target/package/foo-0.0.0.crate")).unwrap();
@ -104,13 +104,13 @@ fn publish_with_target() {
.arg("--target")
.arg(&target)
.with_stderr(&format!(
" Updating registry `{registry}`
Packaging foo v0.0.0 (CWD)
Verifying foo v0.0.0 (CWD)
Compiling foo v0.0.0 (CWD/target/package/foo-0.0.0)
" Updating `{registry}` index
Packaging foo v0.0.0 ([CWD])
Verifying foo v0.0.0 ([CWD])
Compiling foo v0.0.0 ([CWD]/target/package/foo-0.0.0)
Finished dev [unoptimized + debuginfo] target(s) in [..]
Uploading foo v0.0.0 (CWD)
Uploading foo v0.0.0 ([CWD])
",
registry = publish::registry()
registry = publish::registry_path().to_str().unwrap()
)).run();
}

View file

@ -102,7 +102,7 @@ fn simple() {
.with_stderr(
"\
[COMPILING] bar v0.1.0
[COMPILING] foo v0.1.0 (CWD)
[COMPILING] foo v0.1.0 ([CWD])
[FINISHED] [..]
",
).run();
@ -298,7 +298,7 @@ fn multiple() {
.with_stderr(
"\
[COMPILING] bar v0.1.0
[COMPILING] foo v0.1.0 (CWD)
[COMPILING] foo v0.1.0 ([CWD])
[FINISHED] [..]
",
).run();
@ -330,10 +330,11 @@ fn crates_io_then_directory() {
p.cargo("build")
.with_stderr(
"\
[UPDATING] registry `[..]`
[DOWNLOADING] bar v0.1.0 ([..])
[UPDATING] `[..]` index
[DOWNLOADING] crates ...
[DOWNLOADED] bar v0.1.0 ([..])
[COMPILING] bar v0.1.0
[COMPILING] foo v0.1.0 (CWD)
[COMPILING] foo v0.1.0 ([CWD])
[FINISHED] [..]
",
).run();
@ -350,7 +351,7 @@ fn crates_io_then_directory() {
.with_stderr(
"\
[COMPILING] bar v0.1.0
[COMPILING] foo v0.1.0 (CWD)
[COMPILING] foo v0.1.0 ([CWD])
[FINISHED] [..]
",
).run();

View file

@ -26,13 +26,13 @@ fn simple() {
.build();
p.cargo("doc")
.with_stderr(&format!(
.with_stderr(
"\
[..] foo v0.0.1 (CWD)
[..] foo v0.0.1 (CWD)
[..] foo v0.0.1 ([CWD])
[..] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
assert!(p.root().join("target/doc").is_dir());
assert!(p.root().join("target/doc/foo/index.html").is_file());
}
@ -63,12 +63,12 @@ fn doc_twice() {
let p = project().file("src/lib.rs", "pub fn foo() {}").build();
p.cargo("doc")
.with_stderr(&format!(
.with_stderr(
"\
[DOCUMENTING] foo v0.0.1 (CWD)
[DOCUMENTING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
p.cargo("doc").with_stdout("").run();
}
@ -93,14 +93,14 @@ fn doc_deps() {
.build();
p.cargo("doc")
.with_stderr(&format!(
.with_stderr(
"\
[..] bar v0.0.1 (CWD/bar)
[..] bar v0.0.1 (CWD/bar)
[DOCUMENTING] foo v0.0.1 (CWD)
[..] bar v0.0.1 ([CWD]/bar)
[..] bar v0.0.1 ([CWD]/bar)
[DOCUMENTING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
assert!(p.root().join("target/doc").is_dir());
assert!(p.root().join("target/doc/foo/index.html").is_file());
@ -154,13 +154,13 @@ fn doc_no_deps() {
.build();
p.cargo("doc --no-deps")
.with_stderr(&format!(
.with_stderr(
"\
[CHECKING] bar v0.0.1 (CWD/bar)
[DOCUMENTING] foo v0.0.1 (CWD)
[CHECKING] bar v0.0.1 ([CWD]/bar)
[DOCUMENTING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
assert!(p.root().join("target/doc").is_dir());
assert!(p.root().join("target/doc/foo/index.html").is_file());
@ -265,8 +265,8 @@ fn doc_multiple_targets_same_name() {
.build();
p.cargo("doc --all")
.with_stderr_contains("[DOCUMENTING] foo v0.1.0 (CWD/foo)")
.with_stderr_contains("[DOCUMENTING] bar v0.1.0 (CWD/bar)")
.with_stderr_contains("[DOCUMENTING] foo v0.1.0 ([CWD]/foo)")
.with_stderr_contains("[DOCUMENTING] bar v0.1.0 ([CWD]/bar)")
.with_stderr_contains("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
.run();
assert!(p.root().join("target/doc").is_dir());
@ -369,12 +369,12 @@ fn doc_lib_bin_same_name_documents_lib() {
).build();
p.cargo("doc")
.with_stderr(&format!(
.with_stderr(
"\
[DOCUMENTING] foo v0.0.1 (CWD)
[DOCUMENTING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
assert!(p.root().join("target/doc").is_dir());
let doc_file = p.root().join("target/doc/foo/index.html");
assert!(doc_file.is_file());
@ -408,12 +408,12 @@ fn doc_lib_bin_same_name_documents_lib_when_requested() {
).build();
p.cargo("doc --lib")
.with_stderr(&format!(
.with_stderr(
"\
[DOCUMENTING] foo v0.0.1 (CWD)
[DOCUMENTING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
assert!(p.root().join("target/doc").is_dir());
let doc_file = p.root().join("target/doc/foo/index.html");
assert!(doc_file.is_file());
@ -447,13 +447,13 @@ fn doc_lib_bin_same_name_documents_named_bin_when_requested() {
).build();
p.cargo("doc --bin foo")
.with_stderr(&format!(
.with_stderr(
"\
[CHECKING] foo v0.0.1 (CWD)
[DOCUMENTING] foo v0.0.1 (CWD)
[CHECKING] foo v0.0.1 ([CWD])
[DOCUMENTING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
assert!(p.root().join("target/doc").is_dir());
let doc_file = p.root().join("target/doc/foo/index.html");
assert!(doc_file.is_file());
@ -487,13 +487,13 @@ fn doc_lib_bin_same_name_documents_bins_when_requested() {
).build();
p.cargo("doc --bins")
.with_stderr(&format!(
.with_stderr(
"\
[CHECKING] foo v0.0.1 (CWD)
[DOCUMENTING] foo v0.0.1 (CWD)
[CHECKING] foo v0.0.1 ([CWD])
[DOCUMENTING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
assert!(p.root().join("target/doc").is_dir());
let doc_file = p.root().join("target/doc/foo/index.html");
assert!(doc_file.is_file());
@ -540,9 +540,9 @@ fn doc_dash_p() {
p.cargo("doc -p a")
.with_stderr(
"\
[..] b v0.0.1 (CWD/b)
[..] b v0.0.1 (CWD/b)
[DOCUMENTING] a v0.0.1 (CWD/a)
[..] b v0.0.1 ([CWD]/b)
[..] b v0.0.1 ([CWD]/b)
[DOCUMENTING] a v0.0.1 ([CWD]/a)
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
@ -635,7 +635,7 @@ fn output_not_captured() {
).build();
p.cargo("doc")
.with_status(101)
.without_status()
.with_stderr_contains("1 | ☃")
.with_stderr_contains(r"error: unknown start of token: \u{2603}")
.run();
@ -967,7 +967,7 @@ fn doc_all_member_dependency_same_name() {
Package::new("bar", "0.1.0").publish();
p.cargo("doc --all")
.with_stderr_contains("[..] Updating registry `[..]`")
.with_stderr_contains("[..] Updating `[..]` index")
.with_stderr_contains("[..] Documenting bar v0.1.0 ([..])")
.run();
}
@ -1028,7 +1028,7 @@ fn doc_workspace_open_different_library_and_package_names() {
p.cargo("doc --open")
.env("BROWSER", "echo")
.with_stderr_contains("[..] Documenting foo v0.1.0 ([..])")
.with_stderr_contains("[..] CWD/target/doc/foolib/index.html")
.with_stderr_contains("[..] [CWD]/target/doc/foolib/index.html")
.run();
}
@ -1058,7 +1058,7 @@ fn doc_workspace_open_binary() {
p.cargo("doc --open")
.env("BROWSER", "echo")
.with_stderr_contains("[..] Documenting foo v0.1.0 ([..])")
.with_stderr_contains("[..] Opening CWD/target/doc/foobin/index.html")
.with_stderr_contains("[..] Opening [CWD]/target/doc/foobin/index.html")
.run();
}
@ -1091,7 +1091,7 @@ fn doc_workspace_open_binary_and_library() {
p.cargo("doc --open")
.env("BROWSER", "echo")
.with_stderr_contains("[..] Documenting foo v0.1.0 ([..])")
.with_stderr_contains("[..] Opening CWD/target/doc/foolib/index.html")
.with_stderr_contains("[..] Opening [CWD]/target/doc/foolib/index.html")
.run();
}
@ -1206,6 +1206,31 @@ fn doc_private_items() {
);
}
#[test]
fn doc_private_ws() {
let p = project()
.file(
"Cargo.toml",
r#"
[workspace]
members = ["a", "b"]
"#,
).file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
.file("a/src/lib.rs", "fn p() {}")
.file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
.file("b/src/lib.rs", "fn p2() {}")
.file("b/src/main.rs", "fn main() {}")
.build();
p.cargo("doc --all --bins --lib --document-private-items -v")
.with_stderr_contains(
"[RUNNING] `rustdoc [..] a/src/lib.rs [..]--document-private-items[..]",
).with_stderr_contains(
"[RUNNING] `rustdoc [..] b/src/lib.rs [..]--document-private-items[..]",
).with_stderr_contains(
"[RUNNING] `rustdoc [..] b/src/main.rs [..]--document-private-items[..]",
).run();
}
const BAD_INTRA_LINK_LIB: &str = r#"
#![deny(intra_doc_link_resolution_failure)]

View file

@ -10,7 +10,6 @@ fn edition_works_for_build_script() {
.file(
"Cargo.toml",
r#"
cargo-features = ['edition']
[package]
name = 'foo'
version = '0.1.0'

View file

@ -415,22 +415,22 @@ fn no_feature_doesnt_build() {
.build();
p.cargo("build")
.with_stderr(format!(
.with_stderr(
"\
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
p.process(&p.bin("foo")).with_stdout("").run();
p.cargo("build --features bar")
.with_stderr(format!(
.with_stderr(
"\
[COMPILING] bar v0.0.1 (CWD/bar)
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] bar v0.0.1 ([CWD]/bar)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
p.process(&p.bin("foo")).with_stdout("bar\n").run();
}
@ -467,22 +467,22 @@ fn default_feature_pulled_in() {
.build();
p.cargo("build")
.with_stderr(format!(
.with_stderr(
"\
[COMPILING] bar v0.0.1 (CWD/bar)
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] bar v0.0.1 ([CWD]/bar)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
p.process(&p.bin("foo")).with_stdout("bar\n").run();
p.cargo("build --no-default-features")
.with_stderr(format!(
.with_stderr(
"\
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
p.process(&p.bin("foo")).with_stdout("").run();
}
@ -575,14 +575,14 @@ fn groups_on_groups_on_groups() {
.build();
p.cargo("build")
.with_stderr(format!(
.with_stderr(
"\
[COMPILING] ba[..] v0.0.1 (CWD/ba[..])
[COMPILING] ba[..] v0.0.1 (CWD/ba[..])
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..])
[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..])
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
}
#[test]
@ -621,14 +621,14 @@ fn many_cli_features() {
p.cargo("build --features")
.arg("bar baz")
.with_stderr(format!(
.with_stderr(
"\
[COMPILING] ba[..] v0.0.1 (CWD/ba[..])
[COMPILING] ba[..] v0.0.1 (CWD/ba[..])
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..])
[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..])
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
}
#[test]
@ -698,14 +698,14 @@ fn union_features() {
).build();
p.cargo("build")
.with_stderr(format!(
.with_stderr(
"\
[COMPILING] d2 v0.0.1 (CWD/d2)
[COMPILING] d1 v0.0.1 (CWD/d1)
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] d2 v0.0.1 ([CWD]/d2)
[COMPILING] d1 v0.0.1 ([CWD]/d1)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
}
#[test]
@ -741,13 +741,13 @@ fn many_features_no_rebuilds() {
.build();
p.cargo("build")
.with_stderr(format!(
.with_stderr(
"\
[COMPILING] a v0.1.0 (CWD/a)
[COMPILING] b v0.1.0 (CWD)
[COMPILING] a v0.1.0 ([CWD]/a)
[COMPILING] b v0.1.0 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
p.root().move_into_the_past();
p.cargo("build -v")
@ -1197,14 +1197,14 @@ fn many_cli_features_comma_delimited() {
.build();
p.cargo("build --features bar,baz")
.with_stderr(format!(
.with_stderr(
"\
[COMPILING] ba[..] v0.0.1 (CWD/ba[..])
[COMPILING] ba[..] v0.0.1 (CWD/ba[..])
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..])
[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..])
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
}
#[test]
@ -1259,16 +1259,16 @@ fn many_cli_features_comma_and_space_delimited() {
p.cargo("build --features")
.arg("bar,baz bam bap")
.with_stderr(format!(
.with_stderr(
"\
[COMPILING] ba[..] v0.0.1 (CWD/ba[..])
[COMPILING] ba[..] v0.0.1 (CWD/ba[..])
[COMPILING] ba[..] v0.0.1 (CWD/ba[..])
[COMPILING] ba[..] v0.0.1 (CWD/ba[..])
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..])
[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..])
[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..])
[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..])
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
}
#[test]

View file

@ -53,8 +53,8 @@ fn fetch_all_platform_dependencies_when_no_target_is_given() {
.build();
p.cargo("fetch")
.with_stderr_contains("[..] Downloading d1 v1.2.3 [..]")
.with_stderr_contains("[..] Downloading d2 v0.1.2 [..]")
.with_stderr_contains("[DOWNLOADED] d1 v1.2.3 [..]")
.with_stderr_contains("[DOWNLOADED] d2 v0.1.2 [..]")
.run();
}
@ -100,13 +100,13 @@ fn fetch_platform_specific_dependencies() {
p.cargo("fetch --target")
.arg(&host)
.with_stderr_contains("[..] Downloading d1 v1.2.3 [..]")
.with_stderr_does_not_contain("[..] Downloading d2 v0.1.2 [..]")
.with_stderr_contains("[DOWNLOADED] d1 v1.2.3 [..]")
.with_stderr_does_not_contain("[DOWNLOADED] d2 v0.1.2 [..]")
.run();
p.cargo("fetch --target")
.arg(&target)
.with_stderr_contains("[..] Downloading d2 v0.1.2[..]")
.with_stderr_does_not_contain("[..] Downloading d1 v1.2.3 [..]")
.with_stderr_contains("[DOWNLOADED] d2 v0.1.2[..]")
.with_stderr_does_not_contain("[DOWNLOADED] d1 v1.2.3 [..]")
.run();
}

View file

@ -335,9 +335,6 @@ fn local_paths_no_fix() {
let stderr = "\
[CHECKING] foo v0.0.1 ([..])
warning: failed to find `#![feature(rust_2018_preview)]` in `src/lib.rs`
this may cause `cargo fix` to not be able to fix all
issues in preparation for the 2018 edition
[FINISHED] [..]
";
p.cargo("fix --edition --allow-no-vcs")
@ -355,8 +352,6 @@ fn upgrade_extern_crate() {
.file(
"Cargo.toml",
r#"
cargo-features = ["edition"]
[package]
name = "foo"
version = "0.1.0"
@ -392,7 +387,6 @@ fn upgrade_extern_crate() {
";
p.cargo("fix --allow-no-vcs")
.env("__CARGO_FIX_YOLO", "1")
.masquerade_as_nightly_cargo()
.with_stderr(stderr)
.with_stdout("")
.run();
@ -830,8 +824,6 @@ fn prepare_for_and_enable() {
.file(
"Cargo.toml",
r#"
cargo-features = ['edition']
[package]
name = 'foo'
version = '0.1.0'
@ -853,32 +845,11 @@ information about transitioning to the 2018 edition see:
";
p.cargo("fix --edition --allow-no-vcs")
.masquerade_as_nightly_cargo()
.with_stderr_contains(stderr)
.with_status(101)
.run();
}
#[test]
fn prepare_for_without_feature_issues_warning() {
if !is_nightly() {
return;
}
let p = project().file("src/lib.rs", "").build();
let stderr = "\
[CHECKING] foo v0.0.1 ([..])
warning: failed to find `#![feature(rust_2018_preview)]` in `src/lib.rs`
this may cause `cargo fix` to not be able to fix all
issues in preparation for the 2018 edition
[FINISHED] [..]
";
p.cargo("fix --edition --allow-no-vcs")
.masquerade_as_nightly_cargo()
.with_stderr(stderr)
.run();
}
#[test]
fn fix_overlapping() {
if !is_nightly() {
@ -925,7 +896,6 @@ fn fix_idioms() {
.file(
"Cargo.toml",
r#"
cargo-features = ['edition']
[package]
name = 'foo'
version = '0.1.0'
@ -947,7 +917,6 @@ fn fix_idioms() {
[FINISHED] [..]
";
p.cargo("fix --edition-idioms --allow-no-vcs")
.masquerade_as_nightly_cargo()
.with_stderr(stderr)
.with_status(0)
.run();
@ -986,3 +955,157 @@ For more information try --help
.with_stderr(stderr)
.run();
}
#[test]
fn shows_warnings_on_second_run_without_changes() {
let p = project()
.file(
"src/lib.rs",
r#"
use std::default::Default;
pub fn foo() {
}
"#,
)
.build();
p.cargo("fix --allow-no-vcs")
.with_stderr_contains("[..]warning: unused import[..]")
.run();
p.cargo("fix --allow-no-vcs")
.with_stderr_contains("[..]warning: unused import[..]")
.run();
}
// Same as `shows_warnings_on_second_run_without_changes`, but across every
// target kind (lib, bin, two integration tests, and an example): each target
// has its own unused import, and all five diagnostics must be replayed on a
// second, unchanged `cargo fix --all-targets` run.
#[test]
fn shows_warnings_on_second_run_without_changes_on_multiple_targets() {
let p = project()
.file(
"src/lib.rs",
r#"
use std::default::Default;
pub fn a() -> u32 { 3 }
"#,
)
.file(
"src/main.rs",
r#"
use std::default::Default;
fn main() { println!("3"); }
"#,
)
.file(
"tests/foo.rs",
r#"
use std::default::Default;
#[test]
fn foo_test() {
println!("3");
}
"#,
)
.file(
"tests/bar.rs",
r#"
use std::default::Default;
#[test]
fn foo_test() {
println!("3");
}
"#,
)
.file(
"examples/fooxample.rs",
r#"
use std::default::Default;
fn main() {
println!("3");
}
"#,
)
.build();
// First run: a diagnostic location is expected from every target.
p.cargo("fix --allow-no-vcs --all-targets")
.with_stderr_contains(" --> examples/fooxample.rs:2:21")
.with_stderr_contains(" --> src/lib.rs:2:21")
.with_stderr_contains(" --> src/main.rs:2:21")
.with_stderr_contains(" --> tests/bar.rs:2:21")
.with_stderr_contains(" --> tests/foo.rs:2:21")
.run();
// Second run without changes: all five diagnostics must reappear.
p.cargo("fix --allow-no-vcs --all-targets")
.with_stderr_contains(" --> examples/fooxample.rs:2:21")
.with_stderr_contains(" --> src/lib.rs:2:21")
.with_stderr_contains(" --> src/main.rs:2:21")
.with_stderr_contains(" --> tests/bar.rs:2:21")
.with_stderr_contains(" --> tests/foo.rs:2:21")
.run();
}
// `cargo fix -p foo` should only re-check the selected package on repeated
// runs: the path dependency `bar` is checked once, and the second invocation's
// stderr must show `foo` alone (no `[CHECKING] bar` line).
#[test]
fn doesnt_rebuild_dependencies() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.1.0"
[dependencies]
bar = { path = 'bar' }
[workspace]
"#,
).file("src/lib.rs", "extern crate bar;")
.file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
.file("bar/src/lib.rs", "")
.build();
// First run checks both the dependency and the package itself.
p.cargo("fix --allow-no-vcs -p foo")
.env("__CARGO_FIX_YOLO", "1")
.with_stdout("")
.with_stderr("\
[CHECKING] bar v0.1.0 ([..])
[CHECKING] foo v0.1.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
")
.run();
// Second run: `bar` is fresh, so only `foo` appears in the output.
p.cargo("fix --allow-no-vcs -p foo")
.env("__CARGO_FIX_YOLO", "1")
.with_stdout("")
.with_stderr("\
[CHECKING] foo v0.1.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
")
.run();
}
// Regression check: `cargo fix` must work when RUSTC_WRAPPER is set.
// `/usr/bin/env` is used as a pass-through wrapper; the test only asserts a
// clean run, not any particular output.
#[test]
fn does_not_crash_with_rustc_wrapper() {
// We don't have /usr/bin/env on Windows.
if cfg!(windows) {
return;
}
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.1.0"
"#,
)
.file("src/lib.rs", "")
.build();
p.cargo("fix --allow-no-vcs")
.env("RUSTC_WRAPPER", "/usr/bin/env")
.run();
}

View file

@ -14,12 +14,12 @@ fn modifying_and_moving() {
.build();
p.cargo("build")
.with_stderr(format!(
.with_stderr(
"\
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
p.cargo("build").with_stdout("").run();
p.root().move_into_the_past();
@ -30,12 +30,12 @@ fn modifying_and_moving() {
.write_all(b"#[allow(unused)]fn main() {}")
.unwrap();
p.cargo("build")
.with_stderr(format!(
.with_stderr(
"\
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
fs::rename(&p.root().join("src/a.rs"), &p.root().join("src/b.rs")).unwrap();
p.cargo("build").with_status(101).run();
@ -52,12 +52,12 @@ fn modify_only_some_files() {
.build();
p.cargo("build")
.with_stderr(format!(
.with_stderr(
"\
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
p.cargo("test").run();
sleep_ms(1000);
@ -78,12 +78,12 @@ fn modify_only_some_files() {
// Make sure the binary is rebuilt, not the lib
p.cargo("build")
.with_stderr(format!(
.with_stderr(
"\
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
assert!(p.bin("foo").is_file());
}
@ -644,19 +644,19 @@ fn same_build_dir_cached_packages() {
[COMPILING] d v0.0.1 ({dir}/d)
[COMPILING] c v0.0.1 ({dir}/c)
[COMPILING] b v0.0.1 ({dir}/b)
[COMPILING] a1 v0.0.1 (CWD)
[COMPILING] a1 v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
dir = p.url()
dir = p.url().to_file_path().unwrap().to_str().unwrap()
)).run();
p.cargo("build")
.cwd(p.root().join("a2"))
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] a2 v0.0.1 (CWD)
[COMPILING] a2 v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
}
#[test]
@ -746,13 +746,13 @@ fn rebuild_if_environment_changes() {
p.cargo("run")
.with_stdout("old desc")
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `target/debug/foo[EXE]`
",
)).run();
).run();
File::create(&p.root().join("Cargo.toml"))
.unwrap()
@ -768,13 +768,13 @@ fn rebuild_if_environment_changes() {
p.cargo("run")
.with_stdout("new desc")
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] `target/debug/foo[EXE]`
",
)).run();
).run();
}
#[test]

View file

@ -82,7 +82,7 @@ fn no_index_update() {
.build();
p.cargo("generate-lockfile")
.with_stderr("[UPDATING] registry `[..]`")
.with_stderr("[UPDATING] `[..]` index")
.run();
p.cargo("generate-lockfile -Zno-index-update")

View file

@ -1,4 +1,5 @@
use git2;
use std::env;
use std::fs::{self, File};
use std::io::prelude::*;
use std::net::{TcpListener, TcpStream};
@ -56,7 +57,7 @@ fn cargo_compile_simple_git_dep() {
.with_stderr(&format!(
"[UPDATING] git repository `{}`\n\
[COMPILING] dep1 v0.5.0 ({}#[..])\n\
[COMPILING] foo v0.5.0 (CWD)\n\
[COMPILING] foo v0.5.0 ([CWD])\n\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
path2url(&git_root),
path2url(&git_root),
@ -192,7 +193,7 @@ fn cargo_compile_offline_with_cached_git_dep() {
.with_stderr(format!(
"\
[COMPILING] dep1 v0.5.0 ({}#[..])
[COMPILING] foo v0.5.0 (CWD)
[COMPILING] foo v0.5.0 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
path2url(git_root),
)).run();
@ -280,7 +281,7 @@ fn cargo_compile_git_dep_branch() {
.with_stderr(&format!(
"[UPDATING] git repository `{}`\n\
[COMPILING] dep1 v0.5.0 ({}?branch=branchy#[..])\n\
[COMPILING] foo v0.5.0 (CWD)\n\
[COMPILING] foo v0.5.0 ([CWD])\n\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
path2url(&git_root),
path2url(&git_root),
@ -351,7 +352,7 @@ fn cargo_compile_git_dep_tag() {
.with_stderr(&format!(
"[UPDATING] git repository `{}`\n\
[COMPILING] dep1 v0.5.0 ({}?tag=v0.1.0#[..])\n\
[COMPILING] foo v0.5.0 (CWD)\n\
[COMPILING] foo v0.5.0 ([CWD])\n\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
path2url(&git_root),
path2url(&git_root),
@ -721,7 +722,7 @@ fn recompilation() {
.with_stderr(&format!(
"[UPDATING] git repository `{}`\n\
[COMPILING] bar v0.5.0 ({}#[..])\n\
[COMPILING] foo v0.5.0 (CWD)\n\
[COMPILING] foo v0.5.0 ([CWD])\n\
[FINISHED] dev [unoptimized + debuginfo] target(s) \
in [..]\n",
git_project.url(),
@ -769,7 +770,7 @@ fn recompilation() {
p.cargo("build")
.with_stderr(&format!(
"[COMPILING] bar v0.5.0 ({}#[..])\n\
[COMPILING] foo v0.5.0 (CWD)\n\
[COMPILING] foo v0.5.0 ([CWD])\n\
[FINISHED] dev [unoptimized + debuginfo] target(s) \
in [..]\n",
git_project.url(),
@ -779,7 +780,7 @@ fn recompilation() {
p.cargo("clean -p foo").with_stdout("").run();
p.cargo("build")
.with_stderr(
"[COMPILING] foo v0.5.0 (CWD)\n\
"[COMPILING] foo v0.5.0 ([CWD])\n\
[FINISHED] dev [unoptimized + debuginfo] target(s) \
in [..]"
).run();
@ -858,7 +859,7 @@ fn update_with_shared_deps() {
[COMPILING] bar v0.5.0 ({git}#[..])
[COMPILING] [..] v0.5.0 ([..])
[COMPILING] [..] v0.5.0 ([..])
[COMPILING] foo v0.5.0 (CWD)
[COMPILING] foo v0.5.0 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
git = git_project.url(),
)).run();
@ -917,9 +918,9 @@ Caused by:
.with_stderr(&format!(
"\
[COMPILING] bar v0.5.0 ({git}#[..])
[COMPILING] [..] v0.5.0 (CWD[..]dep[..])
[COMPILING] [..] v0.5.0 (CWD[..]dep[..])
[COMPILING] foo v0.5.0 (CWD)
[COMPILING] [..] v0.5.0 ([CWD][..]dep[..])
[COMPILING] [..] v0.5.0 ([CWD][..]dep[..])
[COMPILING] foo v0.5.0 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
git = git_project.url(),
)).run();
@ -1095,7 +1096,7 @@ fn two_deps_only_update_one() {
[UPDATING] git repository `[..]`\n\
[COMPILING] [..] v0.5.0 ([..])\n\
[COMPILING] [..] v0.5.0 ([..])\n\
[COMPILING] foo v0.5.0 (CWD)\n\
[COMPILING] foo v0.5.0 ([CWD])\n\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
).run();
@ -1195,7 +1196,7 @@ fn stale_cached_version() {
"\
[UPDATING] git repository `{bar}`
[COMPILING] bar v0.0.0 ({bar}#[..])
[COMPILING] foo v0.0.0 (CWD)
[COMPILING] foo v0.0.0 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
bar = bar.url(),
@ -1358,7 +1359,7 @@ fn dev_deps_with_testing() {
.with_stderr(&format!(
"\
[UPDATING] git repository `{bar}`
[COMPILING] foo v0.5.0 (CWD)
[COMPILING] foo v0.5.0 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
bar = p2.url()
@ -1399,12 +1400,12 @@ fn git_build_cmd_freshness() {
sleep_ms(1000);
foo.cargo("build")
.with_stderr(&format!(
.with_stderr(
"\
[COMPILING] foo v0.0.0 (CWD)
[COMPILING] foo v0.0.0 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
// Smoke test to make sure it doesn't compile again
println!("first pass");
@ -1458,7 +1459,7 @@ fn git_name_not_always_needed() {
.with_stderr(&format!(
"\
[UPDATING] git repository `{bar}`
[COMPILING] foo v0.5.0 (CWD)
[COMPILING] foo v0.5.0 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
bar = p2.url()
@ -1680,7 +1681,7 @@ fn warnings_in_git_dep() {
.with_stderr(&format!(
"[UPDATING] git repository `{}`\n\
[COMPILING] bar v0.5.0 ({}#[..])\n\
[COMPILING] foo v0.5.0 (CWD)\n\
[COMPILING] foo v0.5.0 ([CWD])\n\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
bar.url(),
bar.url(),
@ -2357,9 +2358,9 @@ fn include_overrides_gitignore() {
p.cargo("build -v")
.with_stderr(
"\
[UPDATING] registry `[..]`
[DOWNLOADING] filetime [..]
[DOWNLOADING] libc [..]
[UPDATING] `[..]` index
[DOWNLOADED] filetime [..]
[DOWNLOADED] libc [..]
[COMPILING] libc [..]
[RUNNING] `rustc --crate-name libc [..]`
[COMPILING] filetime [..]
@ -2575,6 +2576,12 @@ fn failed_submodule_checkout() {
#[test]
fn use_the_cli() {
if env::var("CARGO_TEST_DISABLE_GIT_CLI") == Ok("1".to_string()) {
// mingw git on Windows does not support Windows-style file URIs.
// Appveyor in the rust repo has that git up front in the PATH instead
// of Git-for-Windows, which causes this to fail.
return;
}
let project = project();
let git_project = git::new("dep1", |project| {
project

View file

@ -13,7 +13,7 @@ fn cargo_process(s: &str) -> Execs {
#[test]
fn simple_lib() {
cargo_process("init --lib --vcs none")
cargo_process("init --lib --vcs none --edition 2015")
.env("USER", "foo")
.with_stderr("[CREATED] library project")
.run();
@ -29,7 +29,7 @@ fn simple_lib() {
fn simple_bin() {
let path = paths::root().join("foo");
fs::create_dir(&path).unwrap();
cargo_process("init --bin --vcs none")
cargo_process("init --bin --vcs none --edition 2015")
.env("USER", "foo")
.cwd(&path)
.with_stderr("[CREATED] binary (application) project")

View file

@ -26,19 +26,20 @@ fn simple() {
cargo_process("install foo")
.with_stderr(
"\
[UPDATING] registry `[..]`
[DOWNLOADING] foo v0.0.1 (registry [..])
[UPDATING] `[..]` index
[DOWNLOADING] crates ...
[DOWNLOADED] foo v0.0.1 (registry [..])
[INSTALLING] foo v0.0.1
[COMPILING] foo v0.0.1
[FINISHED] release [optimized] target(s) in [..]
[INSTALLING] CWD/home/.cargo/bin/foo[EXE]
[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
",
).run();
assert_has_installed_exe(cargo_home(), "foo");
cargo_process("uninstall foo")
.with_stderr("[REMOVING] CWD/home/.cargo/bin/foo[EXE]")
.with_stderr("[REMOVING] [CWD]/home/.cargo/bin/foo[EXE]")
.run();
assert_has_not_installed_exe(cargo_home(), "foo");
}
@ -52,17 +53,19 @@ fn multiple_pkgs() {
.with_status(101)
.with_stderr(
"\
[UPDATING] registry `[..]`
[DOWNLOADING] foo v0.0.1 (registry `CWD/registry`)
[UPDATING] `[..]` index
[DOWNLOADING] crates ...
[DOWNLOADED] foo v0.0.1 (registry `[CWD]/registry`)
[INSTALLING] foo v0.0.1
[COMPILING] foo v0.0.1
[FINISHED] release [optimized] target(s) in [..]
[INSTALLING] CWD/home/.cargo/bin/foo[EXE]
[DOWNLOADING] bar v0.0.2 (registry `CWD/registry`)
[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
[DOWNLOADING] crates ...
[DOWNLOADED] bar v0.0.2 (registry `[CWD]/registry`)
[INSTALLING] bar v0.0.2
[COMPILING] bar v0.0.2
[FINISHED] release [optimized] target(s) in [..]
[INSTALLING] CWD/home/.cargo/bin/bar[EXE]
[INSTALLING] [CWD]/home/.cargo/bin/bar[EXE]
error: could not find `baz` in registry `[..]`
[SUMMARY] Successfully installed foo, bar! Failed to install baz (see error(s) above).
warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
@ -75,8 +78,8 @@ error: some crates failed to install
cargo_process("uninstall foo bar")
.with_stderr(
"\
[REMOVING] CWD/home/.cargo/bin/foo[EXE]
[REMOVING] CWD/home/.cargo/bin/bar[EXE]
[REMOVING] [CWD]/home/.cargo/bin/foo[EXE]
[REMOVING] [CWD]/home/.cargo/bin/bar[EXE]
[SUMMARY] Successfully uninstalled foo, bar!
",
).run();
@ -96,12 +99,13 @@ fn pick_max_version() {
cargo_process("install foo")
.with_stderr(
"\
[UPDATING] registry `[..]`
[DOWNLOADING] foo v0.2.1 (registry [..])
[UPDATING] `[..]` index
[DOWNLOADING] crates ...
[DOWNLOADED] foo v0.2.1 (registry [..])
[INSTALLING] foo v0.2.1
[COMPILING] foo v0.2.1
[FINISHED] release [optimized] target(s) in [..]
[INSTALLING] CWD/home/.cargo/bin/foo[EXE]
[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
",
).run();
@ -129,7 +133,7 @@ fn missing() {
.with_status(101)
.with_stderr(
"\
[UPDATING] registry [..]
[UPDATING] [..] index
[ERROR] could not find `bar` in registry `[..]`
",
).run();
@ -142,7 +146,7 @@ fn bad_version() {
.with_status(101)
.with_stderr(
"\
[UPDATING] registry [..]
[UPDATING] [..] index
[ERROR] could not find `foo` in registry `[..]` with version `=0.2.0`
",
).run();
@ -225,8 +229,7 @@ fn install_path() {
cargo_process("install --path").arg(p.root()).run();
assert_has_installed_exe(cargo_home(), "foo");
cargo_process("install --path .")
.cwd(p.root())
p.cargo("install --path .")
.with_status(101)
.with_stderr(
"\
@ -435,7 +438,7 @@ fn install_force() {
[INSTALLING] foo v0.2.0 ([..])
[COMPILING] foo v0.2.0 ([..])
[FINISHED] release [optimized] target(s) in [..]
[REPLACING] CWD/home/.cargo/bin/foo[EXE]
[REPLACING] [CWD]/home/.cargo/bin/foo[EXE]
warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
",
).run();
@ -472,8 +475,8 @@ fn install_force_partial_overlap() {
[INSTALLING] foo v0.2.0 ([..])
[COMPILING] foo v0.2.0 ([..])
[FINISHED] release [optimized] target(s) in [..]
[INSTALLING] CWD/home/.cargo/bin/foo-bin3[EXE]
[REPLACING] CWD/home/.cargo/bin/foo-bin2[EXE]
[INSTALLING] [CWD]/home/.cargo/bin/foo-bin3[EXE]
[REPLACING] [CWD]/home/.cargo/bin/foo-bin2[EXE]
warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
",
).run();
@ -513,7 +516,7 @@ fn install_force_bin() {
[INSTALLING] foo v0.2.0 ([..])
[COMPILING] foo v0.2.0 ([..])
[FINISHED] release [optimized] target(s) in [..]
[REPLACING] CWD/home/.cargo/bin/foo-bin2[EXE]
[REPLACING] [CWD]/home/.cargo/bin/foo-bin2[EXE]
warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
",
).run();
@ -565,7 +568,7 @@ fn git_repo() {
[INSTALLING] foo v0.1.0 ([..])
[COMPILING] foo v0.1.0 ([..])
[FINISHED] release [optimized] target(s) in [..]
[INSTALLING] CWD/home/.cargo/bin/foo[EXE]
[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
warning: be sure to add `[..]` to your PATH to be able to run the installed binaries
",
).run();
@ -692,8 +695,7 @@ fn subcommand_works_out_of_the_box() {
fn installs_from_cwd_by_default() {
let p = project().file("src/main.rs", "fn main() {}").build();
cargo_process("install")
.cwd(p.root())
p.cargo("install")
.with_stderr_contains(
"warning: Using `cargo install` to install the binaries for the \
project in current working directory is deprecated, \
@ -725,8 +727,7 @@ fn installs_from_cwd_with_2018_warnings() {
).file("src/main.rs", "fn main() {}")
.build();
cargo_process("install")
.cwd(p.root())
p.cargo("install")
.masquerade_as_nightly_cargo()
.with_status(101)
.with_stderr_contains(
@ -744,8 +745,8 @@ fn uninstall_cwd() {
p.cargo("install --path .")
.with_stderr(&format!(
"\
[INSTALLING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 (CWD)
[INSTALLING] foo v0.0.1 ([CWD])
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] release [optimized] target(s) in [..]
[INSTALLING] {home}/bin/foo[EXE]
warning: be sure to add `{home}/bin` to your PATH to be able to run the installed binaries",
@ -769,10 +770,10 @@ fn uninstall_cwd_not_installed() {
p.cargo("uninstall")
.with_status(101)
.with_stdout("")
.with_stderr(format!(
.with_stderr(
"\
error: package `foo v0.0.1 (CWD)` is not installed",
)).run();
error: package `foo v0.0.1 ([CWD])` is not installed",
).run();
}
#[test]
@ -787,7 +788,7 @@ fn uninstall_cwd_no_project() {
.with_stdout("")
.with_stderr(format!(
"\
[ERROR] failed to read `CWD/Cargo.toml`
[ERROR] failed to read `[CWD]/Cargo.toml`
Caused by:
{err_msg} (os error 2)",
@ -1007,7 +1008,7 @@ fn vers_precise() {
pkg("foo", "0.1.2");
cargo_process("install foo --vers 0.1.1")
.with_stderr_contains("[DOWNLOADING] foo v0.1.1 (registry [..])")
.with_stderr_contains("[DOWNLOADED] foo v0.1.1 (registry [..])")
.run();
}
@ -1017,7 +1018,7 @@ fn version_too() {
pkg("foo", "0.1.2");
cargo_process("install foo --version 0.1.1")
.with_stderr_contains("[DOWNLOADING] foo v0.1.1 (registry [..])")
.with_stderr_contains("[DOWNLOADED] foo v0.1.1 (registry [..])")
.run();
}
@ -1071,7 +1072,7 @@ fn uninstall_multiple_and_some_pkg_does_not_exist() {
.with_status(101)
.with_stderr(
"\
[REMOVING] CWD/home/.cargo/bin/foo[EXE]
[REMOVING] [CWD]/home/.cargo/bin/foo[EXE]
error: package id specification `bar` matched no packages
[SUMMARY] Successfully uninstalled foo! Failed to uninstall bar (see error(s) above).
error: some packages failed to uninstall
@ -1237,3 +1238,22 @@ warning: be sure to add `[..]` to your PATH to be able to run the installed bina
",
).run();
}
// `cargo install` must not pick up the invoking project's `.cargo/config`:
// the config requests a non-existent build target, and the install of the
// registry package `bar` must still succeed.
#[test]
fn install_ignores_cargo_config() {
pkg("bar", "0.0.1");
let p = project()
.file(
".cargo/config",
r#"
[build]
target = "non-existing-target"
"#,
)
.file("src/main.rs", "fn main() {}")
.build();
// If the config were honored, building for "non-existing-target" would fail.
p.cargo("install bar").run();
assert_has_installed_exe(cargo_home(), "bar");
}

View file

@ -46,14 +46,14 @@ fn simple() {
).build();
p.cargo("build")
.with_stderr(&format!(
.with_stderr(
"\
[UNPACKING] bar v0.0.1 ([..])
[COMPILING] bar v0.0.1
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] [..]
",
)).run();
).run();
p.cargo("build").with_stderr("[FINISHED] [..]").run();
p.cargo("test").run();
}
@ -85,14 +85,14 @@ fn multiple_versions() {
).build();
p.cargo("build")
.with_stderr(&format!(
.with_stderr(
"\
[UNPACKING] bar v0.1.0 ([..])
[COMPILING] bar v0.1.0
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] [..]
",
)).run();
).run();
Package::new("bar", "0.2.0")
.local(true)
@ -142,16 +142,16 @@ fn multiple_names() {
).build();
p.cargo("build")
.with_stderr(&format!(
.with_stderr(
"\
[UNPACKING] [..]
[UNPACKING] [..]
[COMPILING] [..]
[COMPILING] [..]
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] [..]
",
)).run();
).run();
}
#[test]
@ -193,16 +193,16 @@ fn interdependent() {
).build();
p.cargo("build")
.with_stderr(&format!(
.with_stderr(
"\
[UNPACKING] [..]
[UNPACKING] [..]
[COMPILING] bar v0.0.1
[COMPILING] baz v0.1.0
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] [..]
",
)).run();
).run();
}
#[test]
@ -257,16 +257,16 @@ fn path_dep_rewritten() {
).build();
p.cargo("build")
.with_stderr(&format!(
.with_stderr(
"\
[UNPACKING] [..]
[UNPACKING] [..]
[COMPILING] bar v0.0.1
[COMPILING] baz v0.1.0
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] [..]
",
)).run();
).run();
}
#[test]
@ -409,14 +409,14 @@ fn crates_io_registry_url_is_optional() {
).build();
p.cargo("build")
.with_stderr(&format!(
.with_stderr(
"\
[UNPACKING] bar v0.0.1 ([..])
[COMPILING] bar v0.0.1
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] [..]
",
)).run();
).run();
p.cargo("build").with_stderr("[FINISHED] [..]").run();
p.cargo("test").run();
}

View file

@ -224,7 +224,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
.with_status(101)
.with_stderr(
"\
[UPDATING] registry `[..]`
[UPDATING] `[..]` index
error: checksum for `bar v0.1.0` changed between lock files
this could be indicative of a few possible errors:
@ -284,7 +284,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
.with_status(101)
.with_stderr(
"\
[UPDATING] registry `[..]`
[UPDATING] `[..]` index
error: checksum for `bar v0.1.0` was not previously calculated, but a checksum \
could now be calculated
@ -479,7 +479,7 @@ fn locked_correct_error() {
.with_status(101)
.with_stderr(
"\
[UPDATING] registry `[..]`
[UPDATING] `[..]` index
error: the lock file needs to be updated but --locked was passed to prevent this
",
).run();

View file

@ -103,3 +103,9 @@ mod verify_project;
mod version;
mod warn_on_failure;
mod workspaces;
// The `aaa_` prefix makes this test sort first alphabetically so the check
// fires before other tests — do not rename it.
#[test]
fn aaa_trigger_cross_compile_disabled_check() {
// This triggers the cross compile disabled check to run ASAP, see #5141
support::cross_compile::disabled();
}

View file

@ -903,7 +903,6 @@ fn cargo_metadata_no_deps_cwd() {
.build();
p.cargo("metadata --no-deps")
.cwd(p.root())
.with_json(MANIFEST_OUTPUT)
.run();
}
@ -916,7 +915,6 @@ fn cargo_metadata_bad_version() {
.build();
p.cargo("metadata --no-deps --format-version 2")
.cwd(p.root())
.with_status(1)
.with_stderr_contains(
"\

View file

@ -14,7 +14,7 @@ fn create_empty_gitconfig() {
#[test]
fn simple_lib() {
cargo_process("new --lib foo --vcs none")
cargo_process("new --lib foo --vcs none --edition 2015")
.env("USER", "foo")
.with_stderr("[CREATED] library `foo` project")
.run();
@ -47,7 +47,7 @@ mod tests {
#[test]
fn simple_bin() {
cargo_process("new --bin foo")
cargo_process("new --bin foo --edition 2015")
.env("USER", "foo")
.with_stderr("[CREATED] binary (application) `foo` project")
.run();
@ -75,7 +75,7 @@ fn both_lib_and_bin() {
#[test]
fn simple_git() {
cargo_process("new --lib foo").env("USER", "foo").run();
cargo_process("new --lib foo --edition 2015").env("USER", "foo").run();
assert!(paths::root().is_dir());
assert!(paths::root().join("foo/Cargo.toml").is_file());
@ -105,7 +105,7 @@ fn existing() {
cargo_process("new foo")
.with_status(101)
.with_stderr(
"[ERROR] destination `CWD/foo` already exists\n\n\
"[ERROR] destination `[CWD]/foo` already exists\n\n\
Use `cargo init` to initialize the directory",
).run();
}
@ -455,3 +455,39 @@ fn explicit_project_name() {
.with_stderr("[CREATED] library `bar` project")
.run();
}
#[test]
fn new_with_edition_2015() {
    // `cargo new --edition 2015` must record the requested edition in the
    // generated manifest.
    cargo_process("new --edition 2015 foo")
        .env("USER", "foo")
        .run();
    let manifest_path = paths::root().join("foo/Cargo.toml");
    let manifest_text = fs::read_to_string(&manifest_path).unwrap();
    assert!(manifest_text.contains("edition = \"2015\""));
}
#[test]
fn new_with_edition_2018() {
    // `cargo new --edition 2018` must record the requested edition in the
    // generated manifest.
    cargo_process("new --edition 2018 foo")
        .env("USER", "foo")
        .run();
    let manifest_path = paths::root().join("foo/Cargo.toml");
    let manifest_text = fs::read_to_string(&manifest_path).unwrap();
    assert!(manifest_text.contains("edition = \"2018\""));
}
#[test]
fn new_default_edition() {
    // Without an --edition flag, `cargo new` writes the 2018 edition into the
    // manifest by default.
    cargo_process("new foo").env("USER", "foo").run();
    let manifest_path = paths::root().join("foo/Cargo.toml");
    let manifest_text = fs::read_to_string(&manifest_path).unwrap();
    assert!(manifest_text.contains("edition = \"2018\""));
}
#[test]
fn new_with_bad_edition() {
    // An unrecognized --edition value is rejected by argument parsing with
    // exit status 1 and an "isn't a valid value" message on stderr.
    let expected = "error: 'something_else' isn't a valid value[..]";
    cargo_process("new --edition something_else foo")
        .env("USER", "foo")
        .with_status(1)
        .with_stderr_contains(expected)
        .run();
}

View file

@ -38,10 +38,10 @@ fn override_simple() {
p.cargo("build")
.with_stderr(
"\
[UPDATING] registry `file://[..]`
[UPDATING] `[ROOT][..]` index
[UPDATING] git repository `[..]`
[COMPILING] bar v0.1.0 (file://[..])
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
@ -183,12 +183,13 @@ fn transitive() {
p.cargo("build")
.with_stderr(
"\
[UPDATING] registry `file://[..]`
[UPDATING] `[ROOT][..]` index
[UPDATING] git repository `[..]`
[DOWNLOADING] baz v0.2.0 (registry [..])
[DOWNLOADING] crates ...
[DOWNLOADED] baz v0.2.0 (registry [..])
[COMPILING] bar v0.1.0 (file://[..])
[COMPILING] baz v0.2.0
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
@ -231,10 +232,10 @@ fn persists_across_rebuilds() {
p.cargo("build")
.with_stderr(
"\
[UPDATING] registry `file://[..]`
[UPDATING] `[ROOT][..]` index
[UPDATING] git repository `file://[..]`
[COMPILING] bar v0.1.0 (file://[..])
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
@ -275,9 +276,9 @@ fn replace_registry_with_path() {
p.cargo("build")
.with_stderr(
"\
[UPDATING] registry `file://[..]`
[COMPILING] bar v0.1.0 (file://[..])
[COMPILING] foo v0.0.1 (CWD)
[UPDATING] `[ROOT][..]` index
[COMPILING] bar v0.1.0 ([ROOT][..])
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
@ -336,14 +337,15 @@ fn use_a_spec_to_select() {
p.cargo("build")
.with_stderr(
"\
[UPDATING] registry `file://[..]`
[UPDATING] `[ROOT][..]` index
[UPDATING] git repository `[..]`
[DOWNLOADING] [..]
[DOWNLOADING] [..]
[DOWNLOADING] crates ...
[DOWNLOADED] [..]
[DOWNLOADED] [..]
[COMPILING] [..]
[COMPILING] [..]
[COMPILING] [..]
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
@ -393,12 +395,13 @@ fn override_adds_some_deps() {
p.cargo("build")
.with_stderr(
"\
[UPDATING] registry `file://[..]`
[UPDATING] `[ROOT][..]` index
[UPDATING] git repository `[..]`
[DOWNLOADING] baz v0.1.1 (registry [..])
[DOWNLOADING] crates ...
[DOWNLOADED] baz v0.1.1 (registry [..])
[COMPILING] baz v0.1.1
[COMPILING] bar v0.1.0 ([..])
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
@ -411,13 +414,13 @@ fn override_adds_some_deps() {
.with_stderr(
"\
[UPDATING] git repository `file://[..]`
[UPDATING] registry `file://[..]`
[UPDATING] `[ROOT][..]` index
",
).run();
p.cargo("update -p https://github.com/rust-lang/crates.io-index#bar")
.with_stderr(
"\
[UPDATING] registry `file://[..]`
[UPDATING] `[ROOT][..]` index
",
).run();
@ -508,7 +511,7 @@ fn override_wrong_name() {
.with_status(101)
.with_stderr(
"\
[UPDATING] registry [..]
[UPDATING] [..] index
[UPDATING] git repository [..]
error: no matching package for override `[..]baz:0.1.0` found
location searched: file://[..]
@ -550,7 +553,7 @@ fn override_with_nothing() {
.with_status(101)
.with_stderr(
"\
[UPDATING] registry [..]
[UPDATING] [..] index
[UPDATING] git repository [..]
[ERROR] failed to load source for a dependency on `bar`
@ -629,7 +632,7 @@ fn multiple_specs() {
.with_status(101)
.with_stderr(
"\
[UPDATING] registry [..]
[UPDATING] [..] index
[UPDATING] git repository [..]
error: overlapping replacement specifications found:
@ -717,7 +720,7 @@ fn update() {
p.cargo("update")
.with_stderr(
"\
[UPDATING] registry `[..]`
[UPDATING] `[..]` index
[UPDATING] git repository `[..]`
",
).run();
@ -830,9 +833,10 @@ To change the dependency graph via an override it's recommended to use the
`[replace]` feature of Cargo instead of the path override feature. This is
documented online at the url below for more information.
http://doc.crates.io/specifying-dependencies.html#overriding-dependencies
https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#overriding-dependencies
[DOWNLOADING] [..]
[DOWNLOADING] crates ...
[DOWNLOADED] [..]
[COMPILING] [..]
[COMPILING] [..]
[COMPILING] [..]
@ -1039,7 +1043,7 @@ fn no_warnings_when_replace_is_used_in_another_workspace_member() {
.with_stdout("")
.with_stderr(
"\
[UPDATING] registry `[..]`
[UPDATING] `[..]` index
[COMPILING] bar v0.1.0 ([..])
[COMPILING] first_crate v0.1.0 ([..])
[FINISHED] [..]",

View file

@ -27,16 +27,16 @@ fn simple() {
.build();
p.cargo("package")
.with_stderr(&format!(
.with_stderr(
"\
[WARNING] manifest has no documentation[..]
See [..]
[PACKAGING] foo v0.0.1 (CWD)
[VERIFYING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 (CWD[..])
[PACKAGING] foo v0.0.1 ([CWD])
[VERIFYING] foo v0.0.1 ([CWD])
[COMPILING] foo v0.0.1 ([CWD][..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
assert!(p.root().join("target/package/foo-0.0.1.crate").is_file());
p.cargo("package -l")
.with_stdout(
@ -70,17 +70,17 @@ src/main.rs
fn metadata_warning() {
let p = project().file("src/main.rs", "fn main() {}").build();
p.cargo("package")
.with_stderr(&format!(
.with_stderr(
"\
warning: manifest has no description, license, license-file, documentation, \
homepage or repository.
See http://doc.crates.io/manifest.html#package-metadata for more info.
[PACKAGING] foo v0.0.1 (CWD)
[VERIFYING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 (CWD[..])
[PACKAGING] foo v0.0.1 ([CWD])
[VERIFYING] foo v0.0.1 ([CWD])
[COMPILING] foo v0.0.1 ([CWD][..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
let p = project()
.file(
@ -95,16 +95,16 @@ See http://doc.crates.io/manifest.html#package-metadata for more info.
).file("src/main.rs", "fn main() {}")
.build();
p.cargo("package")
.with_stderr(&format!(
.with_stderr(
"\
warning: manifest has no description, documentation, homepage or repository.
See http://doc.crates.io/manifest.html#package-metadata for more info.
[PACKAGING] foo v0.0.1 (CWD)
[VERIFYING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 (CWD[..])
[PACKAGING] foo v0.0.1 ([CWD])
[VERIFYING] foo v0.0.1 ([CWD])
[COMPILING] foo v0.0.1 ([CWD][..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
let p = project()
.file(
@ -121,14 +121,14 @@ See http://doc.crates.io/manifest.html#package-metadata for more info.
).file("src/main.rs", "fn main() {}")
.build();
p.cargo("package")
.with_stderr(&format!(
.with_stderr(
"\
[PACKAGING] foo v0.0.1 (CWD)
[VERIFYING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 (CWD[..])
[PACKAGING] foo v0.0.1 ([CWD])
[VERIFYING] foo v0.0.1 ([CWD])
[COMPILING] foo v0.0.1 ([CWD][..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
}
#[test]
@ -202,16 +202,16 @@ fn package_verification() {
let p = project().file("src/main.rs", "fn main() {}").build();
p.cargo("build").run();
p.cargo("package")
.with_stderr(&format!(
.with_stderr(
"\
[WARNING] manifest has no description[..]
See http://doc.crates.io/manifest.html#package-metadata for more info.
[PACKAGING] foo v0.0.1 (CWD)
[VERIFYING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 (CWD[..])
[PACKAGING] foo v0.0.1 ([CWD])
[VERIFYING] foo v0.0.1 ([CWD])
[COMPILING] foo v0.0.1 ([CWD][..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
}
#[test]
@ -242,12 +242,12 @@ fn vcs_file_collision() {
p.cargo("package")
.arg("--no-verify")
.with_status(101)
.with_stderr(&format!(
.with_stderr(
"\
[ERROR] Invalid inclusion of reserved file name .cargo_vcs_info.json \
in package source
",
)).run();
).run();
}
#[test]
@ -490,8 +490,7 @@ fn package_git_submodule() {
None,
).unwrap();
cargo_process("package --no-verify -v")
.cwd(project.root())
project.cargo("package --no-verify -v")
.with_stderr_contains("[ARCHIVING] bar/Makefile")
.run();
}
@ -541,16 +540,16 @@ fn ignore_nested() {
.build();
p.cargo("package")
.with_stderr(&format!(
.with_stderr(
"\
[WARNING] manifest has no documentation[..]
See http://doc.crates.io/manifest.html#package-metadata for more info.
[PACKAGING] foo v0.0.1 (CWD)
[VERIFYING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 (CWD[..])
[PACKAGING] foo v0.0.1 ([CWD])
[VERIFYING] foo v0.0.1 ([CWD])
[COMPILING] foo v0.0.1 ([CWD][..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
assert!(p.root().join("target/package/foo-0.0.1.crate").is_file());
p.cargo("package -l")
.with_stdout(
@ -625,18 +624,17 @@ fn repackage_on_source_change() {
std::mem::drop(file);
// Check that cargo rebuilds the tarball
cargo_process("package")
.cwd(p.root())
.with_stderr(&format!(
p.cargo("package")
.with_stderr(
"\
[WARNING] [..]
See [..]
[PACKAGING] foo v0.0.1 (CWD)
[VERIFYING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 (CWD[..])
[PACKAGING] foo v0.0.1 ([CWD])
[VERIFYING] foo v0.0.1 ([CWD])
[COMPILING] foo v0.0.1 ([CWD][..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
// Check that the tarball contains the added file
let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
@ -934,10 +932,6 @@ fn package_two_kinds_of_deps() {
#[test]
fn test_edition() {
if !is_nightly() {
// --edition is nightly-only
return;
}
let p = project()
.file(
"Cargo.toml",
@ -953,10 +947,11 @@ fn test_edition() {
.build();
p.cargo("build -v").masquerade_as_nightly_cargo()
// --edition is still in flux and we're not passing -Zunstable-options
// from Cargo so it will probably error. Only partially match the output
// until stuff stabilizes
.with_stderr_contains("\
.without_status() // passes on nightly, fails on stable, b/c --edition is nightly-only
// --edition is still in flux and we're not passing -Zunstable-options
// from Cargo so it will probably error. Only partially match the output
// until stuff stabilizes
.with_stderr_contains("\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..]--edition=2018 [..]
").run();
@ -973,45 +968,19 @@ fn edition_with_metadata() {
.file(
"Cargo.toml",
r#"
cargo-features = ["edition"]
[package]
name = "foo"
version = "0.0.1"
authors = []
edition = "2018"
[package.metadata.docs.rs]
features = ["foobar"]
"#,
[package]
name = "foo"
version = "0.0.1"
authors = []
edition = "2018"
[package.metadata.docs.rs]
features = ["foobar"]
"#,
).file("src/lib.rs", "")
.build();
p.cargo("package").masquerade_as_nightly_cargo().run();
}
#[test]
fn test_edition_missing() {
// no edition = 2015
let p = project()
.file(
"Cargo.toml",
r#"
cargo-features = ["edition"]
[package]
name = "foo"
version = "0.0.1"
authors = []
"#,
).file("src/lib.rs", r#" "#)
.build();
p.cargo("build -v").masquerade_as_nightly_cargo()
// --edition is still in flux and we're not passing -Zunstable-options
// from Cargo so it will probably error. Only partially match the output
// until stuff stabilizes
.with_stderr_contains("\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..]--edition=2015 [..]
").run();
p.cargo("package").run();
}
#[test]
@ -1020,18 +989,16 @@ fn test_edition_malformed() {
.file(
"Cargo.toml",
r#"
cargo-features = ["edition"]
[package]
name = "foo"
version = "0.0.1"
authors = []
edition = "chicken"
"#,
[package]
name = "foo"
version = "0.0.1"
authors = []
edition = "chicken"
"#,
).file("src/lib.rs", r#" "#)
.build();
p.cargo("build -v")
.masquerade_as_nightly_cargo()
.with_status(101)
.with_stderr(
"\
@ -1046,39 +1013,6 @@ Caused by:
).run();
}
#[test]
fn test_edition_nightly() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
edition = "2015"
"#,
).file("src/lib.rs", r#" "#)
.build();
p.cargo("build -v")
.masquerade_as_nightly_cargo()
.with_status(101)
.with_stderr(
"\
error: failed to parse manifest at `[..]`
Caused by:
editions are unstable
Caused by:
feature `edition` is required
consider adding `cargo-features = [\"edition\"]` to the manifest
",
).run();
}
#[test]
fn package_lockfile() {
let p = project()
@ -1100,16 +1034,16 @@ fn package_lockfile() {
p.cargo("package")
.masquerade_as_nightly_cargo()
.with_stderr(&format!(
.with_stderr(
"\
[WARNING] manifest has no documentation[..]
See [..]
[PACKAGING] foo v0.0.1 (CWD)
[VERIFYING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 (CWD[..])
[PACKAGING] foo v0.0.1 ([CWD])
[VERIFYING] foo v0.0.1 ([CWD])
[COMPILING] foo v0.0.1 ([CWD][..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
)).run();
).run();
assert!(p.root().join("target/package/foo-0.0.1.crate").is_file());
p.cargo("package -l")
.masquerade_as_nightly_cargo()

View file

@ -50,11 +50,12 @@ fn replace() {
p.cargo("build")
.with_stderr(
"\
[UPDATING] registry `file://[..]`
[DOWNLOADING] baz v0.1.0 ([..])
[COMPILING] bar v0.1.0 (CWD/bar)
[UPDATING] `[ROOT][..]` index
[DOWNLOADING] crates ...
[DOWNLOADED] baz v0.1.0 ([..])
[COMPILING] bar v0.1.0 ([CWD]/bar)
[COMPILING] baz v0.1.0
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
@ -91,9 +92,9 @@ fn nonexistent() {
p.cargo("build")
.with_stderr(
"\
[UPDATING] registry `file://[..]`
[COMPILING] bar v0.1.0 (CWD/bar)
[COMPILING] foo v0.0.1 (CWD)
[UPDATING] `[ROOT][..]` index
[COMPILING] bar v0.1.0 ([CWD]/bar)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
@ -136,8 +137,8 @@ fn patch_git() {
.with_stderr(
"\
[UPDATING] git repository `file://[..]`
[COMPILING] bar v0.1.0 (CWD/bar)
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] bar v0.1.0 ([CWD]/bar)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
@ -180,9 +181,9 @@ fn patch_to_git() {
.with_stderr(
"\
[UPDATING] git repository `file://[..]`
[UPDATING] registry `file://[..]`
[UPDATING] `[ROOT][..]` index
[COMPILING] bar v0.1.0 (file://[..])
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
@ -216,10 +217,11 @@ fn unused() {
p.cargo("build")
.with_stderr(
"\
[UPDATING] registry `file://[..]`
[DOWNLOADING] bar v0.1.0 [..]
[UPDATING] `[ROOT][..]` index
[DOWNLOADING] crates ...
[DOWNLOADED] bar v0.1.0 [..]
[COMPILING] bar v0.1.0
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
@ -274,10 +276,11 @@ fn unused_git() {
.with_stderr(
"\
[UPDATING] git repository `file://[..]`
[UPDATING] registry `file://[..]`
[DOWNLOADING] bar v0.1.0 [..]
[UPDATING] `[ROOT][..]` index
[DOWNLOADING] crates ...
[DOWNLOADED] bar v0.1.0 [..]
[COMPILING] bar v0.1.0
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
@ -308,10 +311,11 @@ fn add_patch() {
p.cargo("build")
.with_stderr(
"\
[UPDATING] registry `file://[..]`
[DOWNLOADING] bar v0.1.0 [..]
[UPDATING] `[ROOT][..]` index
[DOWNLOADING] crates ...
[DOWNLOADED] bar v0.1.0 [..]
[COMPILING] bar v0.1.0
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
@ -335,8 +339,8 @@ fn add_patch() {
p.cargo("build")
.with_stderr(
"\
[COMPILING] bar v0.1.0 (CWD/bar)
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] bar v0.1.0 ([CWD]/bar)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
@ -367,10 +371,11 @@ fn add_ignored_patch() {
p.cargo("build")
.with_stderr(
"\
[UPDATING] registry `file://[..]`
[DOWNLOADING] bar v0.1.0 [..]
[UPDATING] `[ROOT][..]` index
[DOWNLOADING] crates ...
[DOWNLOADED] bar v0.1.0 [..]
[COMPILING] bar v0.1.0
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
@ -424,9 +429,9 @@ fn new_minor() {
p.cargo("build")
.with_stderr(
"\
[UPDATING] registry `file://[..]`
[UPDATING] `[ROOT][..]` index
[COMPILING] bar v0.1.1 [..]
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
@ -471,10 +476,10 @@ fn transitive_new_minor() {
p.cargo("build")
.with_stderr(
"\
[UPDATING] registry `file://[..]`
[UPDATING] `[ROOT][..]` index
[COMPILING] baz v0.1.1 [..]
[COMPILING] bar v0.1.0 [..]
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
@ -507,9 +512,9 @@ fn new_major() {
p.cargo("build")
.with_stderr(
"\
[UPDATING] registry `file://[..]`
[UPDATING] `[ROOT][..]` index
[COMPILING] bar v0.2.0 [..]
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
@ -534,10 +539,11 @@ fn new_major() {
p.cargo("build")
.with_stderr(
"\
[UPDATING] registry `file://[..]`
[DOWNLOADING] bar v0.2.0 [..]
[UPDATING] `[ROOT][..]` index
[DOWNLOADING] crates ...
[DOWNLOADED] bar v0.2.0 [..]
[COMPILING] bar v0.2.0
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
@ -582,10 +588,10 @@ fn transitive_new_major() {
p.cargo("build")
.with_stderr(
"\
[UPDATING] registry `file://[..]`
[UPDATING] `[ROOT][..]` index
[COMPILING] baz v0.2.0 [..]
[COMPILING] bar v0.1.0 [..]
[COMPILING] foo v0.0.1 (CWD)
[COMPILING] foo v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();

Some files were not shown because too many files have changed in this diff Show more