diff --git a/Cargo.toml b/Cargo.toml
index 840e3c597..088ed28bd 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -22,7 +22,7 @@ path = "src/cargo/lib.rs"
 atty = "0.2"
 bytesize = "1.0"
 cargo-platform = { path = "crates/cargo-platform", version = "0.1.1" }
-crates-io = { path = "crates/crates-io", version = "0.32" }
+crates-io = { path = "crates/crates-io", version = "0.31" }
 crossbeam-utils = "0.7"
 crypto-hash = "0.3.1"
 curl = { version = "0.4.23", features = ["http2"] }
diff --git a/crates/cargo-test-support/src/registry.rs b/crates/cargo-test-support/src/registry.rs
index ff3697b06..bfa16de03 100644
--- a/crates/cargo-test-support/src/registry.rs
+++ b/crates/cargo-test-support/src/registry.rs
@@ -420,7 +420,6 @@ impl Package {
             "cksum": cksum,
             "features": self.features,
             "yanked": self.yanked,
-            "pm": self.proc_macro,
         })
         .to_string();
 
diff --git a/crates/crates-io/Cargo.toml b/crates/crates-io/Cargo.toml
index 4f56d92d8..a4f93c8c0 100644
--- a/crates/crates-io/Cargo.toml
+++ b/crates/crates-io/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "crates-io"
-version = "0.32.0"
+version = "0.31.0"
 edition = "2018"
 authors = ["Alex Crichton <alex@alexcrichton.com>"]
 license = "MIT OR Apache-2.0"
diff --git a/crates/crates-io/lib.rs b/crates/crates-io/lib.rs
index b31aeb203..29bbd5546 100644
--- a/crates/crates-io/lib.rs
+++ b/crates/crates-io/lib.rs
@@ -55,7 +55,6 @@ pub struct NewCrate {
     pub repository: Option<String>,
     pub badges: BTreeMap<String, BTreeMap<String, String>>,
     pub links: Option<String>,
-    pub proc_macro: bool,
 }
 
 #[derive(Serialize)]
diff --git a/crates/resolver-tests/src/lib.rs b/crates/resolver-tests/src/lib.rs
index 1f37b18da..ef47f11ff 100644
--- a/crates/resolver-tests/src/lib.rs
+++ b/crates/resolver-tests/src/lib.rs
@@ -176,7 +176,6 @@ pub fn resolve_with_config_raw(
         &BTreeMap::<String, Vec<String>>::new(),
         None::<&String>,
         false,
-        false,
     )
     .unwrap();
     let opts = ResolveOpts::everything();
@@ -578,7 +577,6 @@ pub fn pkg_dep<T: ToPkgId>(name: T, dep: Vec<Dependency>) -> Summary {
         &BTreeMap::<String, Vec<String>>::new(),
         link,
         false,
-        false,
    )
    .unwrap()
}
@@ -607,7 +605,6 @@ pub fn pkg_loc(name: &str, loc: &str) -> Summary {
        &BTreeMap::<String, Vec<String>>::new(),
        link,
        false,
-       false,
    )
    .unwrap()
}
@@ -622,7 +619,6 @@ pub fn remove_dep(sum: &Summary, ind: usize) -> Summary {
        &BTreeMap::<String, Vec<String>>::new(),
        sum.links().map(|a| a.as_str()),
        sum.namespaced_features(),
-       sum.proc_macro(),
    )
    .unwrap()
}
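The hunks above revert the `pm` ("proc-macro") flag everywhere package metadata is serialized: the test-support registry, the `crates-io` client's `NewCrate` publish payload, and the resolver-tests helpers that construct `Summary` values. For orientation, here is a minimal sketch (not cargo's actual test-support code; it assumes the `serde_json` crate) of the index-line shape after the revert, with the `pm` key simply absent:

    use serde_json::json;

    // Hypothetical helper mirroring roughly what the test-support registry
    // writes for one published version: no "pm" key at all.
    fn index_line(name: &str, vers: &str, cksum: &str) -> String {
        json!({
            "name": name,
            "vers": vers,
            "deps": [],
            "cksum": cksum,
            "features": {},
            "yanked": false,
        })
        .to_string()
    }

Old and new cargos parse the same shape, since the field was optional to begin with.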
diff --git a/src/cargo/core/compiler/unit_dependencies.rs b/src/cargo/core/compiler/unit_dependencies.rs
index 48ca740d4..b58a0ea33 100644
--- a/src/cargo/core/compiler/unit_dependencies.rs
+++ b/src/cargo/core/compiler/unit_dependencies.rs
@@ -19,7 +19,6 @@ use crate::core::compiler::unit_graph::{UnitDep, UnitGraph};
 use crate::core::compiler::Unit;
 use crate::core::compiler::{BuildContext, CompileKind, CompileMode};
 use crate::core::dependency::DepKind;
-use crate::core::package::Downloads;
 use crate::core::profiles::{Profile, UnitFor};
 use crate::core::resolver::features::{FeaturesFor, ResolvedFeatures};
 use crate::core::resolver::Resolve;
@@ -31,10 +30,7 @@ use std::collections::{HashMap, HashSet};
 /// Collection of stuff used while creating the `UnitGraph`.
 struct State<'a, 'cfg> {
     bcx: &'a BuildContext<'a, 'cfg>,
-    waiting_on_download: HashSet<PackageId>,
-    downloads: Downloads<'a, 'cfg>,
     unit_dependencies: UnitGraph<'a>,
-    package_cache: HashMap<PackageId, &'a Package>,
     usr_resolve: &'a Resolve,
     usr_features: &'a ResolvedFeatures,
     std_resolve: Option<&'a Resolve>,
@@ -58,10 +54,7 @@ pub fn build_unit_dependencies<'a, 'cfg>(
     };
     let mut state = State {
         bcx,
-        downloads: bcx.packages.enable_download()?,
-        waiting_on_download: HashSet::new(),
         unit_dependencies: HashMap::new(),
-        package_cache: HashMap::new(),
         usr_resolve: resolve,
         usr_features: features,
         std_resolve,
@@ -141,44 +134,32 @@ fn attach_std_deps<'a, 'cfg>(
 /// Compute all the dependencies of the given root units.
 /// The result is stored in state.unit_dependencies.
 fn deps_of_roots<'a, 'cfg>(roots: &[Unit<'a>], mut state: &mut State<'a, 'cfg>) -> CargoResult<()> {
-    // Loop because we are downloading while building the dependency graph.
-    // The partially-built unit graph is discarded through each pass of the
-    // loop because it is incomplete: not all required Packages have been
-    // downloaded yet.
-    loop {
-        for unit in roots.iter() {
-            state.get(unit.pkg.package_id())?;
+    for unit in roots.iter() {
+        state.get(unit.pkg.package_id());
 
-            // Dependencies of tests/benches should not have `panic` set.
-            // We check the global test mode to see if we are running in `cargo
-            // test` in which case we ensure all dependencies have `panic`
-            // cleared, and avoid building the lib thrice (once with `panic`, once
-            // without, once for `--test`). In particular, the lib included for
-            // Doc tests and examples are `Build` mode here.
-            let unit_for = if unit.mode.is_any_test() || state.bcx.build_config.test() {
-                UnitFor::new_test(state.bcx.config)
-            } else if unit.target.is_custom_build() {
-                // This normally doesn't happen, except `clean` aggressively
-                // generates all units.
-                UnitFor::new_host(false)
-            } else if unit.target.proc_macro() {
-                UnitFor::new_host(true)
-            } else if unit.target.for_host() {
-                // Plugin should never have panic set.
-                UnitFor::new_compiler()
-            } else {
-                UnitFor::new_normal()
-            };
-            deps_of(unit, &mut state, unit_for)?;
-        }
-
-        if !state.waiting_on_download.is_empty() {
-            state.finish_some_downloads()?;
-            state.unit_dependencies.clear();
+        // Dependencies of tests/benches should not have `panic` set.
+        // We check the global test mode to see if we are running in `cargo
+        // test` in which case we ensure all dependencies have `panic`
+        // cleared, and avoid building the lib thrice (once with `panic`, once
+        // without, once for `--test`). In particular, the lib included for
+        // Doc tests and examples are `Build` mode here.
+        let unit_for = if unit.mode.is_any_test() || state.bcx.build_config.test() {
+            UnitFor::new_test(state.bcx.config)
+        } else if unit.target.is_custom_build() {
+            // This normally doesn't happen, except `clean` aggressively
+            // generates all units.
+            UnitFor::new_host(false)
+        } else if unit.target.proc_macro() {
+            UnitFor::new_host(true)
+        } else if unit.target.for_host() {
+            // Plugin should never have panic set.
+            UnitFor::new_compiler()
         } else {
-            break;
-        }
+            UnitFor::new_normal()
+        };
+        deps_of(unit, &mut state, unit_for)?;
     }
+
     Ok(())
 }
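The rewrite of `deps_of_roots` above deletes a fixed-point loop: previously the unit graph was built, thrown away whenever a needed `Package` had not finished downloading, and rebuilt from scratch once more downloads completed. Because `download_accessible` (added in `package.rs` below) now fetches everything reachable from the roots before graph construction starts, a single pass suffices. A compilable toy model of the retry pattern being removed (all names illustrative, nothing here is cargo's API):

    // Toy model: rebuild a graph until no downloads are pending. Each pass
    // that finds a missing package discards the partial graph, which is what
    // made the old approach cost O(passes x graph size).
    fn main() {
        let mut pending: Vec<u32> = vec![3, 2, 1]; // packages still downloading
        let mut graph: Vec<(u32, u32)> = Vec::new();
        loop {
            graph.push((0, 1)); // stand-in for one graph-building pass
            if let Some(pkg) = pending.pop() {
                println!("finished downloading {}, rebuilding", pkg);
                graph.clear(); // partial graph is incomplete; start over
            } else {
                break;
            }
        }
        assert!(!graph.is_empty());
    }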
@@ -269,10 +250,7 @@ fn compute_deps<'a, 'cfg>(
 
     let mut ret = Vec::new();
     for (id, _) in filtered_deps {
-        let pkg = match state.get(id)? {
-            Some(pkg) => pkg,
-            None => continue,
-        };
+        let pkg = state.get(id);
         let lib = match pkg.targets().iter().find(|t| t.is_lib()) {
             Some(t) => t,
             None => continue,
@@ -419,10 +397,7 @@ fn compute_deps_doc<'a, 'cfg>(
     // the documentation of the library being built.
     let mut ret = Vec::new();
     for (id, _deps) in deps {
-        let dep = match state.get(id)? {
-            Some(dep) => dep,
-            None => continue,
-        };
+        let dep = state.get(id);
         let lib = match dep.targets().iter().find(|t| t.is_lib()) {
             Some(lib) => lib,
             None => continue,
@@ -730,44 +705,10 @@ impl<'a, 'cfg> State<'a, 'cfg> {
         features.activated_features(pkg_id, features_for)
     }
 
-    fn get(&mut self, id: PackageId) -> CargoResult<Option<&'a Package>> {
-        if let Some(pkg) = self.package_cache.get(&id) {
-            return Ok(Some(pkg));
-        }
-        if !self.waiting_on_download.insert(id) {
-            return Ok(None);
-        }
-        if let Some(pkg) = self.downloads.start(id)? {
-            self.package_cache.insert(id, pkg);
-            self.waiting_on_download.remove(&id);
-            return Ok(Some(pkg));
-        }
-        Ok(None)
-    }
-
-    /// Completes at least one downloading, maybe waiting for more to complete.
-    ///
-    /// This function will block the current thread waiting for at least one
-    /// crate to finish downloading. The function may continue to download more
-    /// crates if it looks like there's a long enough queue of crates to keep
-    /// downloading. When only a handful of packages remain this function
-    /// returns, and it's hoped that by returning we'll be able to push more
-    /// packages to download into the queue.
-    fn finish_some_downloads(&mut self) -> CargoResult<()> {
-        assert!(self.downloads.remaining() > 0);
-        loop {
-            let pkg = self.downloads.wait()?;
-            self.waiting_on_download.remove(&pkg.package_id());
-            self.package_cache.insert(pkg.package_id(), pkg);
-
-            // Arbitrarily choose that 5 or more packages concurrently download
-            // is a good enough number to "fill the network pipe". If we have
-            // less than this let's recompute the whole unit dependency graph
-            // again and try to find some more packages to download.
-            if self.downloads.remaining() < 5 {
-                break;
-            }
-        }
-        Ok(())
+    fn get(&self, id: PackageId) -> &'a Package {
+        self.bcx
+            .packages
+            .get_one(id)
+            .unwrap_or_else(|_| panic!("expected {} to be downloaded", id))
     }
 }
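With downloads finished before `build_unit_dependencies` runs, `State::get` shrinks from a cache-plus-download state machine to an infallible lookup: a miss now indicates a cargo bug rather than a pending download, hence the panic instead of an error. A hedged sketch of that invariant-style lookup (simplified types, not cargo's API):

    use std::collections::HashMap;

    // If callers guarantee every id was inserted up front, a failed lookup is
    // a logic error, so panicking with context beats threading a Result
    // through every caller.
    fn get<'a>(packages: &'a HashMap<u32, &'a str>, id: u32) -> &'a str {
        packages
            .get(&id)
            .copied()
            .unwrap_or_else(|| panic!("expected {} to be downloaded", id))
    }

    fn main() {
        let mut packages = HashMap::new();
        packages.insert(1, "bar v0.1.0");
        assert_eq!(get(&packages, 1), "bar v0.1.0");
    }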
diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs
index 77910c979..b5d0df5a9 100644
--- a/src/cargo/core/package.rs
+++ b/src/cargo/core/package.rs
@@ -1,6 +1,6 @@
 use std::cell::{Cell, Ref, RefCell, RefMut};
 use std::cmp::Ordering;
-use std::collections::{HashMap, HashSet};
+use std::collections::{BTreeSet, HashMap, HashSet};
 use std::fmt;
 use std::hash;
 use std::mem;
@@ -17,7 +17,10 @@ use semver::Version;
 use serde::ser;
 use serde::Serialize;
 
+use crate::core::compiler::{CompileKind, RustcTargetData};
+use crate::core::dependency::DepKind;
 use crate::core::interning::InternedString;
+use crate::core::resolver::{HasDevUnits, Resolve};
 use crate::core::source::MaybePackage;
 use crate::core::{Dependency, Manifest, PackageId, SourceId, Target};
 use crate::core::{FeatureMap, SourceMap, Summary};
@@ -189,6 +192,10 @@ impl Package {
     pub fn publish(&self) -> &Option<Vec<String>> {
         self.manifest.publish()
     }
+    /// Returns `true` if this package is a proc-macro.
+    pub fn proc_macro(&self) -> bool {
+        self.targets().iter().any(|target| target.proc_macro())
+    }
 
     /// Returns `true` if the package uses a custom build script for any target.
     pub fn has_custom_build(&self) -> bool {
@@ -432,6 +439,9 @@ impl<'cfg> PackageSet<'cfg> {
     }
 
     pub fn get_one(&self, id: PackageId) -> CargoResult<&Package> {
+        if let Some(pkg) = self.packages.get(&id).and_then(|slot| slot.borrow()) {
+            return Ok(pkg);
+        }
         Ok(self.get_many(Some(id))?.remove(0))
     }
 
@@ -448,6 +458,75 @@ impl<'cfg> PackageSet<'cfg> {
         Ok(pkgs)
     }
 
+    /// Downloads any packages accessible from the given root ids.
+    pub fn download_accessible(
+        &self,
+        resolve: &Resolve,
+        root_ids: &[PackageId],
+        has_dev_units: HasDevUnits,
+        requested_kind: CompileKind,
+        target_data: &RustcTargetData,
+    ) -> CargoResult<()> {
+        fn collect_used_deps(
+            used: &mut BTreeSet<PackageId>,
+            resolve: &Resolve,
+            pkg_id: PackageId,
+            has_dev_units: HasDevUnits,
+            requested_kind: CompileKind,
+            target_data: &RustcTargetData,
+        ) -> CargoResult<()> {
+            if !used.insert(pkg_id) {
+                return Ok(());
+            }
+            let filtered_deps = resolve.deps(pkg_id).filter(|&(_id, deps)| {
+                deps.iter().any(|dep| {
+                    if dep.kind() == DepKind::Development && has_dev_units == HasDevUnits::No {
+                        return false;
+                    }
+                    // This is overly broad, since not all target-specific
+                    // dependencies are used both for target and host. To tighten this
+                    // up, this function would need to track "for_host" similar to how
+                    // unit dependencies handles it.
+                    if !target_data.dep_platform_activated(dep, requested_kind)
+                        && !target_data.dep_platform_activated(dep, CompileKind::Host)
+                    {
+                        return false;
+                    }
+                    true
+                })
+            });
+            for (dep_id, _deps) in filtered_deps {
+                collect_used_deps(
+                    used,
+                    resolve,
+                    dep_id,
+                    has_dev_units,
+                    requested_kind,
+                    target_data,
+                )?;
+            }
+            Ok(())
+        }
+
+        // This is sorted by PackageId to get consistent behavior and error
+        // messages for Cargo's testsuite. Perhaps there is a better ordering
+        // that optimizes download time?
+        let mut to_download = BTreeSet::new();
+
+        for id in root_ids {
+            collect_used_deps(
+                &mut to_download,
+                resolve,
+                *id,
+                has_dev_units,
+                requested_kind,
+                target_data,
+            )?;
+        }
+        self.get_many(to_download.into_iter())?;
+        Ok(())
+    }
+
     pub fn sources(&self) -> Ref<'_, SourceMap<'cfg>> {
         self.sources.borrow()
     }
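`download_accessible` is the heart of the change: starting from the workspace members it walks the resolve graph, skipping dev-dependency edges when they are not wanted and target-specific edges that are active for neither the requested target nor the host, then hands the whole reachable set to `get_many` in one batch. A self-contained toy of the same reachability walk (plain integers instead of `PackageId`, and no edge filtering):

    use std::collections::{BTreeMap, BTreeSet};

    // BTreeSet keeps the result deterministically ordered, mirroring the
    // "consistent behavior and error messages" comment above.
    fn collect_used(used: &mut BTreeSet<u32>, deps: &BTreeMap<u32, Vec<u32>>, id: u32) {
        if !used.insert(id) {
            return; // already collected; shared subtrees are visited once
        }
        for &dep in deps.get(&id).into_iter().flatten() {
            collect_used(used, deps, dep);
        }
    }

    fn main() {
        let mut deps = BTreeMap::new();
        deps.insert(1, vec![2, 3]);
        deps.insert(2, vec![3]);
        let mut used = BTreeSet::new();
        collect_used(&mut used, &deps, 1);
        assert_eq!(used.into_iter().collect::<Vec<_>>(), vec![1, 2, 3]);
    }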
diff --git a/src/cargo/core/resolver/features.rs b/src/cargo/core/resolver/features.rs
index 155db581e..9dd514339 100644
--- a/src/cargo/core/resolver/features.rs
+++ b/src/cargo/core/resolver/features.rs
@@ -42,7 +42,7 @@ use crate::core::compiler::{CompileKind, RustcTargetData};
 use crate::core::dependency::{DepKind, Dependency};
 use crate::core::resolver::types::FeaturesSet;
 use crate::core::resolver::Resolve;
-use crate::core::{FeatureValue, InternedString, PackageId, PackageIdSpec, Workspace};
+use crate::core::{FeatureValue, InternedString, PackageId, PackageIdSpec, PackageSet, Workspace};
 use crate::util::{CargoResult, Config};
 use std::collections::{BTreeSet, HashMap, HashSet};
 use std::rc::Rc;
@@ -85,6 +85,7 @@ struct FeatureOpts {
 /// dependencies are computed, and can result in longer build times with
 /// `cargo test` because the lib may need to be built 3 times instead of
 /// twice.
+#[derive(Copy, Clone, PartialEq)]
 pub enum HasDevUnits {
     Yes,
     No,
@@ -231,6 +232,7 @@ pub struct FeatureResolver<'a, 'cfg> {
     /// The platform to build for, requested by the user.
     requested_target: CompileKind,
     resolve: &'a Resolve,
+    package_set: &'a PackageSet<'cfg>,
     /// Options that change how the feature resolver operates.
     opts: FeatureOpts,
     /// Map of features activated for each package.
@@ -247,6 +249,7 @@ impl<'a, 'cfg> FeatureResolver<'a, 'cfg> {
         ws: &Workspace<'cfg>,
         target_data: &RustcTargetData,
         resolve: &Resolve,
+        package_set: &'a PackageSet<'cfg>,
         requested_features: &RequestedFeatures,
         specs: &[PackageIdSpec],
         requested_target: CompileKind,
@@ -269,6 +272,7 @@ impl<'a, 'cfg> FeatureResolver<'a, 'cfg> {
             target_data,
             requested_target,
             resolve,
+            package_set,
             opts,
             activated_features: HashMap::new(),
             processed_deps: HashSet::new(),
@@ -294,8 +298,7 @@ impl<'a, 'cfg> FeatureResolver<'a, 'cfg> {
         let member_features = self.ws.members_with_features(specs, requested_features)?;
         for (member, requested_features) in &member_features {
             let fvs = self.fvs_from_requested(member.package_id(), requested_features);
-            let for_host = self.opts.decouple_host_deps
-                && self.resolve.summary(member.package_id()).proc_macro();
+            let for_host = self.opts.decouple_host_deps && self.is_proc_macro(member.package_id());
             self.activate_pkg(member.package_id(), &fvs, for_host)?;
             if for_host {
                 // Also activate without for_host. This is needed if the
@@ -522,7 +525,7 @@ impl<'a, 'cfg> FeatureResolver<'a, 'cfg> {
         self.resolve
             .deps(pkg_id)
             .map(|(dep_id, deps)| {
-                let is_proc_macro = self.resolve.summary(dep_id).proc_macro();
+                let is_proc_macro = self.is_proc_macro(dep_id);
                 let deps = deps
                     .iter()
                     .filter(|dep| {
@@ -566,4 +569,11 @@ impl<'a, 'cfg> FeatureResolver<'a, 'cfg> {
             panic!("feature mismatch");
         }
     }
+
+    fn is_proc_macro(&self, package_id: PackageId) -> bool {
+        self.package_set
+            .get_one(package_id)
+            .expect("packages downloaded")
+            .proc_macro()
+    }
 }
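The feature resolver now answers "is this a proc-macro?" by inspecting the downloaded package's targets (via `Package::proc_macro`, added in `package.rs` earlier) instead of a flag parsed from the index `Summary`; the `expect("packages downloaded")` leans on the guarantee that `download_accessible` already ran. The detection itself is just a scan over targets; an illustrative reduction with simplified structs (not cargo's types):

    struct Target { proc_macro: bool }
    struct Package { targets: Vec<Target> }

    impl Package {
        // True if any compilation target is a proc-macro library.
        fn proc_macro(&self) -> bool {
            self.targets.iter().any(|t| t.proc_macro)
        }
    }

    fn main() {
        let pkg = Package {
            targets: vec![Target { proc_macro: false }, Target { proc_macro: true }],
        };
        assert!(pkg.proc_macro());
    }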
diff --git a/src/cargo/core/summary.rs b/src/cargo/core/summary.rs
index 07b09e94f..d502467e2 100644
--- a/src/cargo/core/summary.rs
+++ b/src/cargo/core/summary.rs
@@ -30,11 +30,6 @@ struct Inner {
     checksum: Option<String>,
     links: Option<InternedString>,
     namespaced_features: bool,
-    /// Whether or not this package is a proc-macro library.
-    ///
-    /// This was added in 2020. Packages published before this will always be
-    /// `false`.
-    proc_macro: bool,
 }
 
 impl Summary {
@@ -44,7 +39,6 @@ impl Summary {
         features: &BTreeMap<K, Vec<impl AsRef<str>>>,
         links: Option<impl Into<InternedString>>,
         namespaced_features: bool,
-        proc_macro: bool,
     ) -> CargoResult<Summary>
     where
         K: Borrow<str> + Ord + Display,
@@ -74,7 +68,6 @@ impl Summary {
                 checksum: None,
                 links: links.map(|l| l.into()),
                 namespaced_features,
-                proc_macro,
             }),
         })
     }
@@ -106,9 +99,6 @@ impl Summary {
     pub fn namespaced_features(&self) -> bool {
         self.inner.namespaced_features
     }
-    pub fn proc_macro(&self) -> bool {
-        self.inner.proc_macro
-    }
 
     pub fn override_id(mut self, id: PackageId) -> Summary {
         Rc::make_mut(&mut self.inner).package_id = id;
diff --git a/src/cargo/ops/cargo_clean.rs b/src/cargo/ops/cargo_clean.rs
index 1e13d5a2f..b548ad5ae 100644
--- a/src/cargo/ops/cargo_clean.rs
+++ b/src/cargo/ops/cargo_clean.rs
@@ -7,9 +7,11 @@ use crate::core::compiler::unit_dependencies;
 use crate::core::compiler::{BuildConfig, BuildContext, CompileKind, CompileMode, Context};
 use crate::core::compiler::{RustcTargetData, UnitInterner};
 use crate::core::profiles::{Profiles, UnitFor};
-use crate::core::resolver::features::{FeatureResolver, HasDevUnits, RequestedFeatures};
+use crate::core::resolver::features::HasDevUnits;
+use crate::core::resolver::ResolveOpts;
 use crate::core::{PackageIdSpec, Workspace};
 use crate::ops;
+use crate::ops::resolve::WorkspaceResolve;
 use crate::util::errors::{CargoResult, CargoResultExt};
 use crate::util::paths;
 use crate::util::Config;
@@ -57,15 +59,34 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {
     if opts.spec.is_empty() {
         return rm_rf(&target_dir.into_path_unlocked(), config);
     }
-    let (packages, resolve) = ops::resolve_ws(ws)?;
-
-    let interner = UnitInterner::new();
     let mut build_config = BuildConfig::new(config, Some(1), &opts.target, CompileMode::Build)?;
     build_config.requested_profile = opts.requested_profile;
     let target_data = RustcTargetData::new(ws, build_config.requested_kind)?;
+    let resolve_opts = ResolveOpts::everything();
+    let specs = opts
+        .spec
+        .iter()
+        .map(|spec| PackageIdSpec::parse(spec))
+        .collect::<CargoResult<Vec<_>>>()?;
+    let ws_resolve = ops::resolve_ws_with_opts(
+        ws,
+        &target_data,
+        build_config.requested_kind,
+        &resolve_opts,
+        &specs,
+        HasDevUnits::Yes,
+    )?;
+    let WorkspaceResolve {
+        pkg_set,
+        targeted_resolve: resolve,
+        resolved_features: features,
+        ..
+    } = ws_resolve;
+
+    let interner = UnitInterner::new();
     let bcx = BuildContext::new(
         ws,
-        &packages,
+        &pkg_set,
         opts.config,
         &build_config,
         profiles,
@@ -73,27 +94,12 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {
         HashMap::new(),
         target_data,
     )?;
-    let requested_features = RequestedFeatures::new_all(true);
-    let specs = opts
-        .spec
-        .iter()
-        .map(|spec| PackageIdSpec::parse(spec))
-        .collect::<CargoResult<Vec<_>>>()?;
-    let features = FeatureResolver::resolve(
-        ws,
-        &bcx.target_data,
-        &resolve,
-        &requested_features,
-        &specs,
-        bcx.build_config.requested_kind,
-        HasDevUnits::Yes,
-    )?;
 
     let mut units = Vec::new();
 
     for spec in opts.spec.iter() {
         // Translate the spec to a Package
         let pkgid = resolve.query(spec)?;
-        let pkg = packages.get_one(pkgid)?;
+        let pkg = pkg_set.get_one(pkgid)?;
 
         // Generate all relevant `Unit` targets for this package
         for target in pkg.targets() {
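`cargo clean` now goes through `resolve_ws_with_opts` (with `HasDevUnits::Yes`, since clean must cover test and bench units too) and destructures the resulting `WorkspaceResolve` instead of running a standalone `FeatureResolver`. The spec handling above uses the usual fail-fast collect into a `Result`; a minimal standalone illustration of that idiom (hypothetical `parse` function, not cargo's):

    // Collecting an iterator of Results into Result<Vec<_>, _> stops at the
    // first error, which is how bad `-p SPEC` arguments surface immediately.
    fn parse(spec: &str) -> Result<String, String> {
        if spec.contains(char::is_whitespace) {
            Err(format!("invalid spec: {}", spec))
        } else {
            Ok(spec.to_string())
        }
    }

    fn main() {
        let specs = ["foo", "bar:1.0.0"]
            .iter()
            .map(|s| parse(s))
            .collect::<Result<Vec<_>, String>>();
        assert_eq!(specs.unwrap().len(), 2);
    }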
diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs
index f1b0190c2..6b0f4fa3d 100644
--- a/src/cargo/ops/cargo_compile.rs
+++ b/src/cargo/ops/cargo_compile.rs
@@ -356,6 +356,7 @@ pub fn compile_ws<'a>(
         .iter()
         .map(|s| s.query(resolve.iter()))
         .collect::<CargoResult<Vec<_>>>()?;
+
     // Now get the `Package` for each `PackageId`. This may trigger a download
     // if the user specified `-p` for a dependency that is not downloaded.
     // Dependencies will be downloaded during build_unit_dependencies.
@@ -909,7 +910,12 @@ fn generate_targets<'a>(
             let unavailable_features = match target.required_features() {
                 Some(rf) => {
                     let features = features_map.entry(pkg).or_insert_with(|| {
-                        resolve_all_features(resolve, resolved_features, pkg.package_id())
+                        resolve_all_features(
+                            resolve,
+                            resolved_features,
+                            &bcx.packages,
+                            pkg.package_id(),
+                        )
                     });
                     rf.iter().filter(|f| !features.contains(*f)).collect()
                 }
@@ -946,6 +952,7 @@ fn generate_targets<'a>(
 fn resolve_all_features(
     resolve_with_overrides: &Resolve,
     resolved_features: &features::ResolvedFeatures,
+    package_set: &PackageSet<'_>,
     package_id: PackageId,
 ) -> HashSet<InternedString> {
     let mut features: HashSet<InternedString> = resolved_features
@@ -957,7 +964,10 @@ fn resolve_all_features(
     // Include features enabled for use by dependencies so targets can also use them with the
     // required-features field when deciding whether to be built or skipped.
     for (dep_id, deps) in resolve_with_overrides.deps(package_id) {
-        let is_proc_macro = resolve_with_overrides.summary(dep_id).proc_macro();
+        let is_proc_macro = package_set
+            .get_one(dep_id)
+            .expect("packages downloaded")
+            .proc_macro();
         for dep in deps {
             let features_for = if is_proc_macro || dep.is_build() {
                 FeaturesFor::HostDep
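`resolve_all_features` gains a `PackageSet` parameter for the same reason as the feature resolver: proc-macro status now lives on the downloaded `Package`. The decision it feeds is small enough to state directly; here is a hedged reduction of the branch above (enum simplified from cargo's `FeaturesFor`):

    #[derive(Debug, PartialEq)]
    enum FeaturesFor {
        NormalOrDev, // features as seen on the target platform
        HostDep,     // features as seen by build scripts and proc-macros
    }

    fn features_for(is_proc_macro: bool, is_build: bool) -> FeaturesFor {
        if is_proc_macro || is_build {
            FeaturesFor::HostDep
        } else {
            FeaturesFor::NormalOrDev
        }
    }

    fn main() {
        assert_eq!(features_for(true, false), FeaturesFor::HostDep);
        assert_eq!(features_for(false, true), FeaturesFor::HostDep);
        assert_eq!(features_for(false, false), FeaturesFor::NormalOrDev);
    }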
diff --git a/src/cargo/ops/cargo_install.rs b/src/cargo/ops/cargo_install.rs
index ba8ab12fa..5587625e4 100644
--- a/src/cargo/ops/cargo_install.rs
+++ b/src/cargo/ops/cargo_install.rs
@@ -7,9 +7,8 @@ use anyhow::{bail, format_err};
 use tempfile::Builder as TempFileBuilder;
 
 use crate::core::compiler::Freshness;
-use crate::core::compiler::{CompileKind, DefaultExecutor, Executor, RustcTargetData};
-use crate::core::resolver::{HasDevUnits, ResolveOpts};
-use crate::core::{Edition, Package, PackageId, PackageIdSpec, Source, SourceId, Workspace};
+use crate::core::compiler::{CompileKind, DefaultExecutor, Executor};
+use crate::core::{Edition, Package, PackageId, Source, SourceId, Workspace};
 use crate::ops;
 use crate::ops::common_for_install_and_uninstall::*;
 use crate::sources::{GitSource, SourceConfigMap};
@@ -493,30 +492,17 @@ fn check_yanked_install(ws: &Workspace<'_>) -> CargoResult<()> {
     if ws.ignore_lock() || !ws.root().join("Cargo.lock").exists() {
         return Ok(());
     }
-    let specs = vec![PackageIdSpec::from_package_id(ws.current()?.package_id())];
-    // CompileKind here doesn't really matter, it's only needed for features.
-    let target_data = RustcTargetData::new(ws, CompileKind::Host)?;
     // It would be best if `source` could be passed in here to avoid a
     // duplicate "Updating", but since `source` is taken by value, then it
     // wouldn't be available for `compile_ws`.
-    // TODO: It would be easier to use resolve_ws, but it does not honor
-    // require_optional_deps to avoid writing the lock file. It might be good
-    // to try to fix that.
-    let ws_resolve = ops::resolve_ws_with_opts(
-        ws,
-        &target_data,
-        CompileKind::Host,
-        &ResolveOpts::everything(),
-        &specs,
-        HasDevUnits::No,
-    )?;
-    let mut sources = ws_resolve.pkg_set.sources_mut();
+    let (pkg_set, resolve) = ops::resolve_ws(ws)?;
+    let mut sources = pkg_set.sources_mut();
 
     // Checking the yanked status involves taking a look at the registry and
     // maybe updating files, so be sure to lock it here.
     let _lock = ws.config().acquire_package_cache_lock()?;
 
-    for pkg_id in ws_resolve.targeted_resolve.iter() {
+    for pkg_id in resolve.iter() {
         if let Some(source) = sources.get_mut(pkg_id.source_id()) {
             if source.is_yanked(pkg_id)? {
                 ws.config().shell().warn(format!(
diff --git a/src/cargo/ops/registry.rs b/src/cargo/ops/registry.rs
index cd1c3a0f8..18c24a5df 100644
--- a/src/cargo/ops/registry.rs
+++ b/src/cargo/ops/registry.rs
@@ -255,7 +255,6 @@ fn transmit(
             )
         })
         .collect::<BTreeMap<String, Vec<String>>>();
-    let proc_macro = pkg.targets().iter().any(|target| target.proc_macro());
 
     let publish = registry.publish(
         &NewCrate {
@@ -276,7 +275,6 @@ fn transmit(
             license_file: license_file.clone(),
             badges: badges.clone(),
             links: links.clone(),
-            proc_macro,
         },
         tarball,
     );
diff --git a/src/cargo/ops/resolve.rs b/src/cargo/ops/resolve.rs
index 72e9c1b91..dd32b9367 100644
--- a/src/cargo/ops/resolve.rs
+++ b/src/cargo/ops/resolve.rs
@@ -126,10 +126,24 @@ pub fn resolve_ws_with_opts<'cfg>(
 
     let pkg_set = get_resolved_packages(&resolved_with_overrides, registry)?;
 
+    let member_ids = ws
+        .members_with_features(&specs, &opts.features)?
+        .into_iter()
+        .map(|(p, _fts)| p.package_id())
+        .collect::<Vec<_>>();
+    pkg_set.download_accessible(
+        &resolved_with_overrides,
+        &member_ids,
+        has_dev_units,
+        requested_target,
+        &target_data,
+    )?;
+
     let resolved_features = FeatureResolver::resolve(
         ws,
         target_data,
         &resolved_with_overrides,
+        &pkg_set,
         &opts.features,
         specs,
         requested_target,
@@ -159,7 +173,7 @@ fn resolve_with_registry<'cfg>(
         true,
     )?;
 
-    if !ws.is_ephemeral() {
+    if !ws.is_ephemeral() && ws.require_optional_deps() {
         ops::write_pkg_lockfile(ws, &resolve)?;
     }
     Ok(resolve)
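Two consequences ripple out here. First, `resolve_ws_with_opts` now calls `download_accessible` with the workspace members as roots, so everything reachable is fetched before feature resolution begins. Second, `cargo install`'s yanked check can use plain `resolve_ws`, because the lockfile-writing concern it used to dodge is handled at the source: `resolve_with_registry` only persists `Cargo.lock` when optional dependencies were resolved, so a partial resolve is never written out. As a truth table (booleans stand in for the workspace queries; a sketch, not cargo's code):

    // Hedged sketch of the guard added in resolve_with_registry above.
    fn should_write_lockfile(is_ephemeral: bool, require_optional_deps: bool) -> bool {
        !is_ephemeral && require_optional_deps
    }

    fn main() {
        assert!(should_write_lockfile(false, true)); // normal build: write it
        assert!(!should_write_lockfile(true, true)); // ephemeral ws: never write
        assert!(!should_write_lockfile(false, false)); // partial resolve: skip
    }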
diff --git a/src/cargo/sources/registry/index.rs b/src/cargo/sources/registry/index.rs
index 19f53be30..9743ccfe3 100644
--- a/src/cargo/sources/registry/index.rs
+++ b/src/cargo/sources/registry/index.rs
@@ -715,7 +715,6 @@ impl IndexSummary {
             features,
             yanked,
             links,
-            pm,
         } = serde_json::from_slice(line)?;
         log::trace!("json parsed registry {}/{}", name, vers);
         let pkgid = PackageId::new(name, &vers, source_id)?;
@@ -724,14 +723,7 @@
             .map(|dep| dep.into_dep(source_id))
             .collect::<CargoResult<Vec<_>>>()?;
         let namespaced_features = false;
-        let mut summary = Summary::new(
-            pkgid,
-            deps,
-            &features,
-            links,
-            namespaced_features,
-            pm.unwrap_or(false),
-        )?;
+        let mut summary = Summary::new(pkgid, deps, &features, links, namespaced_features)?;
         summary.set_checksum(cksum);
         Ok(IndexSummary {
             summary,
diff --git a/src/cargo/sources/registry/mod.rs b/src/cargo/sources/registry/mod.rs
index 27c18a725..9a181b907 100644
--- a/src/cargo/sources/registry/mod.rs
+++ b/src/cargo/sources/registry/mod.rs
@@ -236,11 +236,6 @@ pub struct RegistryPackage<'a> {
     /// Added early 2018 (see https://github.com/rust-lang/cargo/pull/4978),
     /// can be `None` if published before then.
     links: Option<InternedString>,
-    /// Whether or not this package is a proc-macro library.
-    ///
-    /// If `None`, then the status is unknown (crate was published before this
-    /// field was added), and generally should be treated as `false`.
-    pm: Option<bool>,
 }
 
 #[test]
diff --git a/src/cargo/util/toml/mod.rs b/src/cargo/util/toml/mod.rs
index 13adf093f..7a2214d6a 100644
--- a/src/cargo/util/toml/mod.rs
+++ b/src/cargo/util/toml/mod.rs
@@ -1155,14 +1155,12 @@ impl TomlManifest {
                 .collect()
             })
             .unwrap_or_else(BTreeMap::new);
-        let proc_macro = targets.iter().any(|target| target.proc_macro());
         let summary = Summary::new(
             pkgid,
             deps,
             &summary_features,
             project.links.as_deref(),
             project.namespaced_features.unwrap_or(false),
-            proc_macro,
         )?;
         let metadata = ManifestMetadata {
             description: project.description.clone(),
diff --git a/src/doc/src/reference/registries.md b/src/doc/src/reference/registries.md
index c3f043eb5..e40432786 100644
--- a/src/doc/src/reference/registries.md
+++ b/src/doc/src/reference/registries.md
@@ -239,11 +239,7 @@ explaining the format of the entry.
     "yanked": false,
     // The `links` string value from the package's manifest, or null if not
     // specified. This field is optional and defaults to null.
-    "links": null,
-    // This is `true` if the package is a proc-macro.
-    // Note: This field was added in Rust 1.44. Packages published with
-    // earlier versions will not set this field.
-    "pm": false,
+    "links": null
 }
 ```
@@ -407,11 +403,7 @@ considered as an exhaustive list of restrictions [crates.io] imposes.
     },
     // The `links` string value from the package's manifest, or null if not
     // specified. This field is optional and defaults to null.
-    "links": null,
-    // This is `true` if the package is a proc-macro.
-    // Note: This field was added in Rust 1.44. Packages published with
-    // earlier versions will not set this field.
-    "proc_macro": false,
+    "links": null
 }
 ```
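Dropping `pm` from `RegistryPackage` is backward-compatible in both directions: serde ignores unknown JSON keys by default, so index lines published while the flag existed still parse, and lines without it never needed the field anyway (it was `Option<bool>`). A small demonstration of that tolerance (assumes the `serde` derive feature and `serde_json`; struct trimmed to a few fields):

    use serde::Deserialize;

    #[derive(Deserialize)]
    struct RegistryPackage {
        name: String,
        vers: String,
        #[serde(default)]
        links: Option<String>,
        // `pm: Option<bool>` used to live here; entries that still carry a
        // "pm" key are now silently ignored during deserialization.
    }

    fn main() {
        let line = r#"{"name":"foo","vers":"1.0.0","pm":true}"#;
        let pkg: RegistryPackage = serde_json::from_str(line).unwrap();
        assert_eq!(pkg.name, "foo");
        assert_eq!(pkg.vers, "1.0.0");
        assert!(pkg.links.is_none());
    }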
diff --git a/tests/testsuite/alt_registry.rs b/tests/testsuite/alt_registry.rs
index 972f81ce7..cf45400b2 100644
--- a/tests/testsuite/alt_registry.rs
+++ b/tests/testsuite/alt_registry.rs
@@ -357,7 +357,6 @@ fn publish_with_registry_dependency() {
           "license_file": null,
           "links": null,
          "name": "foo",
-          "proc_macro": false,
           "readme": null,
           "readme_file": null,
           "repository": null,
@@ -457,7 +456,6 @@ fn publish_to_alt_registry() {
           "license_file": null,
           "links": null,
           "name": "foo",
-          "proc_macro": false,
           "readme": null,
           "readme_file": null,
           "repository": null,
@@ -521,7 +519,6 @@ fn publish_with_crates_io_dep() {
           "license_file": null,
           "links": null,
           "name": "foo",
-          "proc_macro": false,
           "readme": null,
           "readme_file": null,
           "repository": null,
diff --git a/tests/testsuite/offline.rs b/tests/testsuite/offline.rs
index 62d2e891f..5917b41b4 100644
--- a/tests/testsuite/offline.rs
+++ b/tests/testsuite/offline.rs
@@ -322,7 +322,7 @@ fn compile_offline_while_transitive_dep_not_cached() {
         .with_status(101)
         .with_stderr(
             "\
-[ERROR] failed to download `baz v1.0.0`
+[ERROR] failed to download `bar v0.1.0`
 
 Caused by:
     can't make HTTP request in the offline mode
diff --git a/tests/testsuite/publish.rs b/tests/testsuite/publish.rs
index 05c82df27..345f26976 100644
--- a/tests/testsuite/publish.rs
+++ b/tests/testsuite/publish.rs
@@ -23,7 +23,6 @@ const CLEAN_FOO_JSON: &str = r#"
         "license_file": null,
         "links": null,
        "name": "foo",
-        "proc_macro": false,
        "readme": null,
        "readme_file": null,
        "repository": "foo",
@@ -48,7 +47,6 @@ fn validate_upload_foo() {
         "license_file": null,
         "links": null,
         "name": "foo",
-        "proc_macro": false,
         "readme": null,
         "readme_file": null,
         "repository": null,
@@ -981,7 +979,6 @@ fn publish_with_patch() {
         "license_file": null,
         "links": null,
         "name": "foo",
-        "proc_macro": false,
         "readme": null,
         "readme_file": null,
         "repository": null,
@@ -1151,7 +1148,6 @@ fn publish_git_with_version() {
         "license_file": null,
         "links": null,
         "name": "foo",
-        "proc_macro": false,
         "readme": null,
         "readme_file": null,
         "repository": null,
@@ -1239,7 +1235,6 @@ fn publish_dev_dep_no_version() {
         "license_file": null,
         "links": null,
         "name": "foo",
-        "proc_macro": false,
         "readme": null,
         "readme_file": null,
         "repository": "foo",
@@ -1300,66 +1295,3 @@ fn credentials_ambiguous_filename() {
     validate_upload_foo();
 }
-
-#[cargo_test]
-fn publish_proc_macro() {
-    registry::init();
-
-    let p = project()
-        .file(
-            "Cargo.toml",
-            r#"
-            [project]
-            name = "foo"
-            version = "0.0.1"
-            authors = []
-            license = "MIT"
-            description = "foo"
-            edition = "2018"
-            homepage = "https://example.com"
-
-            [lib]
-            proc-macro = true
-            "#,
-        )
-        .file("src/lib.rs", "")
-        .build();
-
-    p.cargo("publish --no-verify --index")
-        .arg(registry_url().to_string())
-        .with_stderr(
-            "\
-[UPDATING] [..]
-[PACKAGING] foo v0.0.1 ([CWD])
-[UPLOADING] foo v0.0.1 ([CWD])
-",
-        )
-        .run();
-
-    publish::validate_upload(
-        r#"
-        {
-          "authors": [],
-          "badges": {},
-          "categories": [],
-          "deps": [],
-          "description": "foo",
-          "documentation": null,
-          "features": {},
-          "homepage": "https://example.com",
-          "keywords": [],
-          "license": "MIT",
-          "license_file": null,
-          "links": null,
-          "name": "foo",
-          "proc_macro": true,
-          "readme": null,
-          "readme_file": null,
-          "repository": null,
-          "vers": "0.0.1"
-        }
-        "#,
-        "foo-0.0.1.crate",
-        &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"],
-    );
-}