Implement a `cargo fetch` command

This command is used to download all dependencies of a package ahead of time to
ensure that no more network communication will be necessary as part of a build.

cc #358
This commit is contained in:
Alex Crichton 2014-09-11 11:50:57 -07:00
parent 494f7d5a78
commit 2dff1ed610
9 changed files with 205 additions and 87 deletions

View file

@ -55,6 +55,7 @@ macro_rules! each_subcommand( ($macro:ident) => ({
$macro!(config_for_key)
$macro!(config_list)
$macro!(doc)
$macro!(fetch)
$macro!(generate_lockfile)
$macro!(git_checkout)
$macro!(locate_project)

38
src/bin/fetch.rs Normal file
View file

@ -0,0 +1,38 @@
use docopt;
use cargo::ops;
use cargo::core::{MultiShell};
use cargo::util::{CliResult, CliError};
use cargo::util::important_paths::find_root_manifest_for_cwd;
docopt!(Options, "
Fetch dependencies of a package from the network.
Usage:
cargo fetch [options]
Options:
-h, --help Print this message
--manifest-path PATH Path to the manifest to fetch dependencies for
-v, --verbose Use verbose output
If a lockfile is available, this command will ensure that all of the git
dependencies and/or registries dependencies are downloaded and locally
available. The network is never touched after a `cargo fetch` unless
the lockfile changes.
If the lockfile is not available, then this is the equivalent of
`cargo generate-lockfile`. A lockfile is generated and dependencies are also
all updated.
", flag_manifest_path: Option<String>)
/// Entry point for the `cargo fetch` subcommand.
///
/// Resolves the root manifest for the current working directory (or the one
/// given via `--manifest-path`) and delegates to `ops::fetch` to download all
/// dependencies. Any error from the fetch is reported with exit code 101.
pub fn execute(options: Options, shell: &mut MultiShell) -> CliResult<Option<()>> {
// Propagate -v/--verbose to the shell before doing any work.
shell.set_verbose(options.flag_verbose);
// Locate the Cargo.toml to operate on; honors --manifest-path if supplied.
let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
// Surface any fetch failure as a CLI error with exit code 101.
try!(ops::fetch(&root, shell).map_err(|e| {
CliError::from_boxed(e, 101)
}));
Ok(None)
}

View file

@ -23,11 +23,11 @@
//!
use std::os;
use std::collections::{HashMap, HashSet};
use std::collections::HashMap;
use core::registry::PackageRegistry;
use core::{MultiShell, Source, SourceId, PackageSet, Target, PackageId};
use core::{Package, Summary, Resolve, resolver};
use core::resolver;
use ops;
use sources::{PathSource};
use util::config::{Config, ConfigValue};
@ -72,26 +72,21 @@ pub fn compile(manifest_path: &Path,
manifest_path.dir_path()));
let (packages, resolve_with_overrides, sources) = {
let _p = profile::start("resolving...");
let lockfile = manifest_path.dir_path().join("Cargo.lock");
let source_id = package.get_package_id().get_source_id();
let mut config = try!(Config::new(*shell, jobs, target.clone()));
let mut registry = PackageRegistry::new(&mut config);
// First, resolve the package's *listed* dependencies, as well as
// downloading and updating all remotes and such.
try!(ops::resolve_and_fetch(&mut registry, &package));
// Second, resolve with precisely what we're doing. Filter out
// transitive dependencies if necessary, specify features, handle
// overrides, etc.
let _p = profile::start("resolving w/ overrides...");
let dependencies = package.get_dependencies().iter().filter(|dep| {
dep.is_transitive() || dev_deps
}).map(|d| d.clone()).collect::<Vec<_>>();
match try!(ops::load_lockfile(&lockfile, source_id)) {
Some(r) => try!(add_lockfile_sources(&mut registry, &package, &r)),
None => try!(registry.add_sources(package.get_source_ids())),
}
let resolved = try!(resolver::resolve(package.get_package_id(),
package.get_dependencies(),
&mut registry));
try!(ops::write_resolve(&package, &resolved));
try!(registry.add_overrides(override_ids));
let resolved_with_overrides =
try!(resolver::resolve(package.get_package_id(),
@ -196,63 +191,3 @@ fn scrape_target_config(config: &mut Config,
Ok(())
}
/// When a lockfile is present, we want to keep as many dependencies at their
/// original revision as possible. We need to account, however, for
/// modifications to the manifest in terms of modifying, adding, or deleting
/// dependencies.
///
/// This method will add any appropriate sources from the lockfile into the
/// registry, and add all other sources from the root package to the registry.
/// Any dependency which has not been modified has its source added to the
/// registry (to retain the precise field if possible). Any dependency which
/// *has* changed has its source id listed in the manifest added and all of its
/// transitive dependencies are blacklisted to not be added from the lockfile.
///
/// TODO: this won't work too well for registry-based packages, but we don't
/// have many of those anyway so we should be ok for now.
fn add_lockfile_sources(registry: &mut PackageRegistry,
root: &Package,
resolve: &Resolve) -> CargoResult<()> {
// Index the lockfile's record of the root's direct dependencies by name so
// each manifest dependency can be compared against its locked counterpart.
let deps = resolve.deps(root.get_package_id()).move_iter().flat_map(|deps| {
deps.map(|d| (d.get_name(), d))
}).collect::<HashMap<_, _>>();
// The root package's own source is always registered.
let mut sources = vec![root.get_package_id().get_source_id().clone()];
// Locked package ids whose sources must NOT be reused (dependency changed).
let mut to_avoid = HashSet::new();
// Locked package ids whose (precise) sources we would like to keep using.
let mut to_add = HashSet::new();
for dep in root.get_dependencies().iter() {
match deps.find(&dep.get_name()) {
Some(&lockfile_dep) => {
// Build a bare summary (no features) purely to test whether the
// manifest's requirement still matches the locked package id.
let summary = Summary::new(lockfile_dep, []);
if dep.matches(&summary) {
// Unchanged: keep this dependency and its whole transitive
// closure locked at the lockfile revisions.
fill_with_deps(resolve, lockfile_dep, &mut to_add);
} else {
// Changed in the manifest: blacklist the old locked closure
// and use the source id the manifest now declares instead.
fill_with_deps(resolve, lockfile_dep, &mut to_avoid);
sources.push(dep.get_source_id().clone());
}
}
// Newly added dependency (absent from the lockfile): take the
// manifest's source id directly.
None => sources.push(dep.get_source_id().clone()),
}
}
// Only afterward once we know the entire blacklist are the lockfile
// sources added.
for addition in to_add.iter() {
if !to_avoid.contains(addition) {
sources.push(addition.get_source_id().clone());
}
}
return registry.add_sources(sources);
// Recursively collect `dep` and its transitive dependencies into `set`.
// The early return on a failed insert both deduplicates work and prevents
// infinite recursion on dependency cycles.
fn fill_with_deps<'a>(resolve: &'a Resolve, dep: &'a PackageId,
set: &mut HashSet<&'a PackageId>) {
if !set.insert(dep) { return }
for mut deps in resolve.deps(dep).move_iter() {
for dep in deps {
fill_with_deps(resolve, dep, set);
}
}
}
}

View file

@ -0,0 +1,100 @@
use std::collections::{HashSet, HashMap};
use core::{MultiShell, Package, PackageId, Summary};
use core::registry::PackageRegistry;
use core::resolver::{mod, Resolve};
use core::source::Source;
use ops;
use sources::PathSource;
use util::{CargoResult, Config};
use util::profile;
pub fn fetch(manifest_path: &Path,
shell: &mut MultiShell) -> CargoResult<()> {
let mut source = try!(PathSource::for_path(&manifest_path.dir_path()));
try!(source.update());
let package = try!(source.get_root_package());
let mut config = try!(Config::new(shell, None, None));
let mut registry = PackageRegistry::new(&mut config);
try!(resolve_and_fetch(&mut registry, &package));
Ok(())
}
/// Resolves `package`'s dependency graph through `registry`, preferring the
/// revisions recorded in an existing `Cargo.lock` where the manifest still
/// allows them, then writes the resulting resolution back out as the new
/// lockfile and returns it.
pub fn resolve_and_fetch(registry: &mut PackageRegistry, package: &Package)
-> CargoResult<Resolve> {
let _p = profile::start("resolve and fetch...");
let lockfile = package.get_manifest_path().dir_path().join("Cargo.lock");
let source_id = package.get_package_id().get_source_id();
match try!(ops::load_lockfile(&lockfile, source_id)) {
// A lockfile exists: seed the registry with locked sources for every
// dependency the manifest has not modified (see add_lockfile_sources).
Some(r) => try!(add_lockfile_sources(registry, package, &r)),
// No lockfile: register every source the manifest mentions.
None => try!(registry.add_sources(package.get_source_ids())),
}
let resolved = try!(resolver::resolve(package.get_package_id(),
package.get_dependencies(),
registry));
// Persist the resolution as Cargo.lock so later builds need no network.
try!(ops::write_resolve(package, &resolved));
Ok(resolved)
}
/// When a lockfile is present, we want to keep as many dependencies at their
/// original revision as possible. We need to account, however, for
/// modifications to the manifest in terms of modifying, adding, or deleting
/// dependencies.
///
/// This method will add any appropriate sources from the lockfile into the
/// registry, and add all other sources from the root package to the registry.
/// Any dependency which has not been modified has its source added to the
/// registry (to retain the precise field if possible). Any dependency which
/// *has* changed has its source id listed in the manifest added and all of its
/// transitive dependencies are blacklisted to not be added from the lockfile.
///
/// TODO: this won't work too well for registry-based packages, but we don't
/// have many of those anyway so we should be ok for now.
fn add_lockfile_sources(registry: &mut PackageRegistry,
root: &Package,
resolve: &Resolve) -> CargoResult<()> {
// Index the lockfile's view of the root's direct dependencies by name for
// comparison against the manifest's current dependency list.
let deps = resolve.deps(root.get_package_id()).move_iter().flat_map(|deps| {
deps.map(|d| (d.get_name(), d))
}).collect::<HashMap<_, _>>();
// The root package's own source is always registered.
let mut sources = vec![root.get_package_id().get_source_id().clone()];
// Locked package ids whose sources must NOT be reused (dependency changed).
let mut to_avoid = HashSet::new();
// Locked package ids whose (precise) sources we would like to keep using.
let mut to_add = HashSet::new();
for dep in root.get_dependencies().iter() {
match deps.find(&dep.get_name()) {
Some(&lockfile_dep) => {
// A bare summary (no features) is enough to check whether the
// manifest's version requirement still matches the locked id.
let summary = Summary::new(lockfile_dep, []);
if dep.matches(&summary) {
// Unchanged: keep this dependency's entire transitive
// closure locked at the lockfile revisions.
fill_with_deps(resolve, lockfile_dep, &mut to_add);
} else {
// Changed in the manifest: blacklist the old locked closure
// and use the manifest's declared source id instead.
fill_with_deps(resolve, lockfile_dep, &mut to_avoid);
sources.push(dep.get_source_id().clone());
}
}
// Newly added dependency (not in the lockfile): take the source id
// from the manifest.
None => sources.push(dep.get_source_id().clone()),
}
}
// Only afterward once we know the entire blacklist are the lockfile
// sources added.
for addition in to_add.iter() {
if !to_avoid.contains(addition) {
sources.push(addition.get_source_id().clone());
}
}
return registry.add_sources(sources);
// Recursively collect `dep` plus all of its transitive dependencies into
// `set`. Returning when the insert fails both deduplicates work and
// guards against infinite recursion on dependency cycles.
fn fill_with_deps<'a>(resolve: &'a Resolve, dep: &'a PackageId,
set: &mut HashSet<&'a PackageId>) {
if !set.insert(dep) { return }
for mut deps in resolve.deps(dep).move_iter() {
for dep in deps {
fill_with_deps(resolve, dep, set);
}
}
}
}

View file

@ -16,28 +16,19 @@ use util::toml as cargo_toml;
pub fn generate_lockfile(manifest_path: &Path,
shell: &mut MultiShell)
-> CargoResult<()> {
log!(4, "compile; manifest-path={}", manifest_path.display());
let mut source = try!(PathSource::for_path(&manifest_path.dir_path()));
try!(source.update());
// TODO: Move this into PathSource
let package = try!(source.get_root_package());
debug!("loaded package; package={}", package);
let source_ids = package.get_source_ids();
let mut config = try!(Config::new(shell, None, None));
let resolve = {
let mut config = try!(Config::new(shell, None, None));
let mut registry = PackageRegistry::new(&mut config);
try!(registry.add_sources(source_ids));
try!(resolver::resolve(package.get_package_id(),
package.get_dependencies(),
&mut registry))
};
try!(write_resolve(&package, &resolve));
Ok(())
}

View file

@ -11,6 +11,7 @@ pub use self::cargo_test::{run_tests, run_benches, TestOptions};
pub use self::cargo_package::package;
pub use self::cargo_upload::{upload, upload_configuration, UploadConfig};
pub use self::cargo_upload::{upload_login, http_proxy, http_handle};
pub use self::cargo_fetch::{fetch, resolve_and_fetch};
mod cargo_clean;
mod cargo_compile;
@ -23,3 +24,4 @@ mod cargo_generate_lockfile;
mod cargo_test;
mod cargo_package;
mod cargo_upload;
mod cargo_fetch;

View file

@ -1244,3 +1244,32 @@ test!(git_dep_build_cmd {
execs().with_stdout("1\n"));
})
// `cargo fetch` on a project with a git dependency downloads the repository
// on the first run, and on a second run produces no output (i.e. touches
// nothing further — the dependency is already locally available).
test!(fetch_downloads {
// A minimal git-hosted library package to depend on.
let bar = git_repo("bar", |project| {
project.file("Cargo.toml", r#"
[package]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
"#)
.file("src/lib.rs", "pub fn bar() -> int { 1 }")
}).assert();
// A binary package depending on `bar` via its git URL.
let p = project("p1")
.file("Cargo.toml", format!(r#"
[project]
name = "p1"
version = "0.5.0"
authors = []
[dependencies.bar]
git = '{}'
"#, bar.url()).as_slice())
.file("src/main.rs", "fn main() {}");
// First fetch: expect the "Updating git repository" status line.
assert_that(p.cargo_process("fetch"),
execs().with_status(0).with_stdout(format!("\
{updating} git repository `{url}`
", updating = UPDATING, url = bar.url())));
// Second fetch: everything is cached, so no output is expected.
assert_that(p.process(cargo_dir().join("cargo")).arg("fetch"),
execs().with_status(0).with_stdout(""));
})

21
tests/test_cargo_fetch.rs Normal file
View file

@ -0,0 +1,21 @@
use support::{project, execs};
use hamcrest::assert_that;
// NOTE(review): appears to be the per-suite setup hook expected by the
// `test!` harness — nothing is needed for these tests. Confirm against the
// harness macro definition.
fn setup() {}
// `cargo fetch` on a package with no external dependencies succeeds and
// prints nothing.
test!(no_deps {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.1"
"#)
.file("src/main.rs", r#"
mod a; fn main() {}
"#)
.file("src/a.rs", "");
// No network sources are involved, so stdout should be empty.
assert_that(p.cargo_process("fetch"),
execs().with_status(0).with_stdout(""));
})

View file

@ -46,3 +46,4 @@ mod test_cargo_package;
mod test_cargo_build_auth;
mod test_cargo_registry;
mod test_cargo_upload;
mod test_cargo_fetch;