Move duplicate pkgs check to resolve builder.

This commit is contained in:
boxdot 2018-06-03 22:55:50 +02:00
parent f801672cf9
commit b04c79a0e5
3 changed files with 40 additions and 34 deletions

View file

@ -65,7 +65,7 @@ use self::context::{Activations, Context};
use self::types::{ActivateError, ActivateResult, Candidate, ConflictReason, DepsFrame, GraphNode};
use self::types::{RcVecIter, RegistryQueryer};
pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve, encodable_package_id};
pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve};
pub use self::encode::{Metadata, WorkspaceResolve};
pub use self::resolve::{Deps, DepsNotReplaced, Resolve};
pub use self::types::Method;
@ -140,6 +140,7 @@ pub fn resolve(
);
check_cycles(&resolve, &cx.activations)?;
check_duplicate_pkgs(&resolve)?;
trace!("resolved: {:?}", resolve);
// If we have a shell, emit warnings about required deps used as feature.
@ -1098,3 +1099,28 @@ fn check_cycles(resolve: &Resolve, activations: &Activations) -> CargoResult<()>
Ok(())
}
}
/// Returns the names of packages that appear more than once in `resolve`
/// under the same encodable package id (same name/version namespace from
/// the same source).
///
/// The returned list contains each duplicated package name once, in
/// arbitrary order (it is collected from a `HashSet`).
fn get_duplicate_pkgs(resolve: &Resolve) -> Vec<&'static str> {
    let mut unique_pkg_ids = HashSet::new();
    let mut result = HashSet::new();
    for pkg_id in resolve.iter() {
        // `insert` returns false when the encodable id was already seen,
        // i.e. this package id collides with an earlier one.
        // (Fixed: the binding was previously a typo'd `let mut encodable_pkd_id`
        // that was never mutated.)
        if !unique_pkg_ids.insert(encode::encodable_package_id(pkg_id)) {
            result.insert(pkg_id.name().as_str());
        }
    }
    result.into_iter().collect()
}
/// Fails resolution with an error if `resolve` contains duplicate packages
/// in the same namespace from the same source; otherwise returns `Ok(())`.
fn check_duplicate_pkgs(resolve: &Resolve) -> CargoResult<()> {
    let duplicates = get_duplicate_pkgs(resolve);
    // Guard clause: bail out early when any duplicates were detected.
    if !duplicates.is_empty() {
        bail!(
            "dependencies contain duplicate package(s) in the \
             same namespace from the same source: {}",
            duplicates.join(", ")
        );
    }
    Ok(())
}

View file

@ -1,13 +1,12 @@
use std::collections::HashSet;
use std::io::prelude::*;
use toml;
use core::resolver::WorkspaceResolve;
use core::{resolver, Resolve, Workspace};
use util::errors::{CargoResult, CargoResultExt, Internal};
use util::toml as cargo_toml;
use core::resolver::WorkspaceResolve;
use util::Filesystem;
use util::errors::{CargoResult, CargoResultExt};
use util::toml as cargo_toml;
pub fn load_pkg_lockfile(ws: &Workspace) -> CargoResult<Option<Resolve>> {
if !ws.root().join("Cargo.lock").exists() {
@ -26,34 +25,11 @@ pub fn load_pkg_lockfile(ws: &Workspace) -> CargoResult<Option<Resolve>> {
let resolve: toml::Value = cargo_toml::parse(&s, f.path(), ws.config())?;
let v: resolver::EncodableResolve = resolve.try_into()?;
Ok(Some(v.into_resolve(ws)?))
})().chain_err(|| format!("failed to parse lock file at: {}", f.path().display()))?;
})()
.chain_err(|| format!("failed to parse lock file at: {}", f.path().display()))?;
Ok(resolve)
}
/// Collects the names of packages whose encodable package id occurs more
/// than once in `resolve`.
///
/// Each duplicated name is returned once, in arbitrary order (deduplicated
/// through a `HashSet`).
fn duplicate_pkgs(resolve: &Resolve) -> Vec<&'static str> {
    let mut unique_names = HashSet::new();
    let mut result = HashSet::new();
    for pkg_id in resolve.iter() {
        // `insert` returns false on a repeated encodable id, meaning this
        // package duplicates an earlier entry.
        // (Fixed: the binding was previously a typo'd `let mut encodable_pkd_id`
        // that was never mutated.)
        if !unique_names.insert(resolver::encodable_package_id(pkg_id)) {
            result.insert(pkg_id.name().as_str());
        }
    }
    result.into_iter().collect()
}
/// Returns an `Internal` error when `resolve` contains duplicate packages;
/// succeeds with `Ok(())` otherwise.
fn check_duplicate_pkgs(resolve: &Resolve) -> Result<(), Internal> {
    let duplicates = duplicate_pkgs(resolve);
    // Early return on the error path; the happy path falls through below.
    if !duplicates.is_empty() {
        return Err(Internal::new(format_err!(
            "dependencies contain duplicate package(s): {}",
            duplicates.join(", ")
        )));
    }
    Ok(())
}
pub fn write_pkg_lockfile(ws: &Workspace, resolve: &Resolve) -> CargoResult<()> {
// Load the original lockfile if it exists.
let ws_root = Filesystem::new(ws.root().to_path_buf());
@ -64,8 +40,6 @@ pub fn write_pkg_lockfile(ws: &Workspace, resolve: &Resolve) -> CargoResult<()>
Ok(s)
});
check_duplicate_pkgs(resolve).chain_err(|| format!("failed to generate lock file"))?;
let toml = toml::Value::try_from(WorkspaceResolve { ws, resolve }).unwrap();
let mut out = String::new();

View file

@ -2,7 +2,7 @@ use std::fs::{self, File};
use std::io::prelude::*;
use cargotest::support::registry::Package;
use cargotest::support::{execs, project, ProjectBuilder, paths};
use cargotest::support::{execs, paths, project, ProjectBuilder};
use cargotest::ChannelChanger;
use hamcrest::{assert_that, existing_file, is_not};
@ -304,5 +304,11 @@ fn duplicate_entries_in_lockfile() {
.build();
// should fail due to a duplicate package `common` in the lockfile
assert_that(b.cargo("build"), execs().with_status(101));
assert_that(
b.cargo("build"),
execs().with_status(101).with_stderr_contains(
"[..]dependencies contain duplicate package(s) in the \
same namespace from the same source: common",
),
);
}