From 8a504a852f987833ac4a442b61d5ab5e2ba48f21 Mon Sep 17 00:00:00 2001 From: Melody Horn Date: Wed, 1 Jan 2020 13:58:50 -0700 Subject: [PATCH 1/2] write failing test to confirm that an issue exists --- tests/testsuite/build_script.rs | 77 +++++++++++++++++++++++++++++++++ 1 file changed, 77 insertions(+) diff --git a/tests/testsuite/build_script.rs b/tests/testsuite/build_script.rs index 9faeefdfccd..cc712862d8d 100644 --- a/tests/testsuite/build_script.rs +++ b/tests/testsuite/build_script.rs @@ -3958,3 +3958,80 @@ Caused by: .run(); fs::set_permissions(&path, fs::Permissions::from_mode(0o755)).unwrap(); } + +#[cargo_test] +fn links_duplicated_across_build_deps() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies.a] + path = "a" + [dependencies.b] + path = "b" + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + + [build-dependencies.bar-sys] + path = "../bar-sys/1" + "#, + ) + .file("a/src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + + [build-dependencies.bar-sys] + path = "../bar-sys/2" + "#, + ) + .file("b/src/lib.rs", "") + .file( + "bar-sys/1/Cargo.toml", + r#" + [project] + name = "bar-sys" + version = "0.1.0" + authors = [] + links = "bar" + build = "build.rs" + "#, + ) + .file("bar-sys/1/src/lib.rs", "") + .file("bar-sys/1/build.rs", "") + .file( + "bar-sys/2/Cargo.toml", + r#" + [project] + name = "bar-sys" + version = "0.2.0" + authors = [] + links = "bar" + build = "build.rs" + "#, + ) + .file("bar-sys/2/src/lib.rs", "") + .file("bar-sys/2/build.rs", "") + .build(); + + p.cargo("build") + .env("CARGO_LOG", "trace") + .run(); +} From 7288d6cf0a99b2e1b7a2d040e233e669e8f26179 Mon Sep 17 00:00:00 2001 From: Melody Horn Date: Wed, 1 Jan 2020 18:43:34 -0700 Subject: [PATCH 2/2] resolve build deps independently this whole approach probably isn't great, and it breaks a few dozen tests for reasons i don't understand, but the code is more complicated than i probably realize --- src/cargo/core/compiler/unit_dependencies.rs | 7 +++- src/cargo/core/manifest.rs | 6 +++ src/cargo/core/package.rs | 10 ++++- src/cargo/core/resolver/context.rs | 8 ++++ src/cargo/core/resolver/dep_cache.rs | 14 ++++++- src/cargo/core/resolver/encode.rs | 1 + src/cargo/core/resolver/mod.rs | 40 +++++++++++++++++--- src/cargo/core/resolver/resolve.rs | 11 +++++- src/cargo/core/summary.rs | 6 +++ tests/testsuite/build_script.rs | 1 - 10 files changed, 92 insertions(+), 12 deletions(-) diff --git a/src/cargo/core/compiler/unit_dependencies.rs b/src/cargo/core/compiler/unit_dependencies.rs index a57fecf5f17..8c86625e358 100644 --- a/src/cargo/core/compiler/unit_dependencies.rs +++ b/src/cargo/core/compiler/unit_dependencies.rs @@ -228,7 +228,12 @@ fn compute_deps<'a, 'cfg>( let bcx = state.bcx; let id = unit.pkg.package_id(); - let deps = state.resolve().deps(id).filter(|&(_id, deps)| { + let deps = if unit.target.is_custom_build() { + state.resolve().build_graph(id).map(|x| x.deps(id)).into_iter().flatten() + } else { + Some(state.resolve().deps(id)).into_iter().flatten() + }; + let deps = deps.filter(|&(_id, deps)| { assert!(!deps.is_empty()); deps.iter().any(|dep| { // If this target is a build command, then we only want build diff --git a/src/cargo/core/manifest.rs b/src/cargo/core/manifest.rs index 96561d2cc9b..7116bc0cb52 100644 --- a/src/cargo/core/manifest.rs +++ b/src/cargo/core/manifest.rs 
@@ -436,6 +436,12 @@ impl Manifest { } } + pub fn run_dependencies(&self) -> impl Iterator { + self.summary.run_dependencies() + } + pub fn build_dependencies(&self) -> impl Iterator { + self.summary.build_dependencies() + } pub fn dependencies(&self) -> &[Dependency] { self.summary.dependencies() } diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs index 8a3035c8f18..6b7c850d216 100644 --- a/src/cargo/core/package.rs +++ b/src/cargo/core/package.rs @@ -141,7 +141,15 @@ impl Package { } } - /// Gets the manifest dependencies. + /// Gets the manifest runtime (i.e. normal and development) dependencies. + pub fn run_dependencies(&self) -> impl Iterator { + self.manifest.run_dependencies() + } + /// Gets the manifest build dependencies. + pub fn build_dependencies(&self) -> impl Iterator { + self.manifest.build_dependencies() + } + /// Gets all dependencies. pub fn dependencies(&self) -> &[Dependency] { self.manifest.dependencies() } diff --git a/src/cargo/core/resolver/context.rs b/src/cargo/core/resolver/context.rs index 376d9b7751c..0c01724b0ff 100644 --- a/src/cargo/core/resolver/context.rs +++ b/src/cargo/core/resolver/context.rs @@ -36,6 +36,9 @@ pub struct Context { /// a way to look up for a package in activations what packages required it /// and all of the exact deps that it fulfilled. pub parents: Graph>>, + + /// full resolution for build dependencies + pub build_graphs: im_rc::HashMap, } /// When backtracking it can be useful to know how far back to go. @@ -93,6 +96,7 @@ impl Context { }, parents: Graph::new(), activations: im_rc::HashMap::new(), + build_graphs: im_rc::HashMap::new(), } } @@ -269,6 +273,10 @@ impl Context { } graph } + + pub fn build_graphs(&self) -> HashMap { + self.build_graphs.iter().cloned().collect() + } } impl Graph>> { diff --git a/src/cargo/core/resolver/dep_cache.rs b/src/cargo/core/resolver/dep_cache.rs index 75b10935ad6..982a9a12ebc 100644 --- a/src/cargo/core/resolver/dep_cache.rs +++ b/src/cargo/core/resolver/dep_cache.rs @@ -200,6 +200,7 @@ impl<'a> RegistryQueryer<'a> { parent: Option, candidate: &Summary, opts: &ResolveOpts, + is_build: bool, ) -> ActivateResult, Rc>)>> { // if we have calculated a result before, then we can just return it, // as it is a "pure" query of its arguments. @@ -213,7 +214,7 @@ impl<'a> RegistryQueryer<'a> { // First, figure out our set of dependencies based on the requested set // of features. This also calculates what features we're going to enable // for our own dependencies. - let (used_features, deps) = resolve_features(parent, candidate, opts)?; + let (used_features, deps) = resolve_features(parent, candidate, opts, is_build)?; // Next, transform all dependencies into a list of possible candidates // which can satisfy that dependency. @@ -248,10 +249,19 @@ pub fn resolve_features<'b>( parent: Option, s: &'b Summary, opts: &'b ResolveOpts, + is_build: bool, ) -> ActivateResult<(HashSet, Vec<(Dependency, FeaturesSet)>)> { // First, filter by dev-dependencies. 
let deps = s.dependencies(); - let deps = deps.iter().filter(|d| d.is_transitive() || opts.dev_deps); + let deps = deps.iter().filter(|d| { + use crate::core::dependency::Kind; + match (d.kind(), is_build) { + (Kind::Build, _) => is_build, + (_, true) => false, + (Kind::Normal, false) => true, + (Kind::Development, false) => opts.dev_deps, + } + }); let reqs = build_requirements(s, opts)?; let mut ret = Vec::new(); diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs index 6375a9dffc1..7b660c54832 100644 --- a/src/cargo/core/resolver/encode.rs +++ b/src/cargo/core/resolver/encode.rs @@ -364,6 +364,7 @@ impl EncodableResolve { checksums, metadata, unused_patches, + HashMap::new(), // TODO how to serialize?? version, )) } diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index 86947ca8efd..1bf8cf6b3e2 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -127,14 +127,27 @@ pub fn resolve( config: Option<&Config>, check_public_visible_dependencies: bool, ) -> CargoResult { - let cx = Context::new(check_public_visible_dependencies); let _p = profile::start("resolving"); let minimal_versions = match config { Some(config) => config.cli_unstable().minimal_versions, None => false, }; let mut registry = RegistryQueryer::new(registry, replacements, try_to_use, minimal_versions); - let cx = activate_deps_loop(cx, &mut registry, summaries, config)?; + let resolve = resolve_deps(summaries, &mut registry, config, check_public_visible_dependencies, false)?; + trace!("resolved: {:?}", resolve); + + Ok(resolve) +} + +fn resolve_deps( + summaries: &[(Summary, ResolveOpts)], + registry: &mut RegistryQueryer<'_>, + config: Option<&Config>, + check_public_visible_dependencies: bool, + is_build: bool, +) -> CargoResult { + let cx = Context::new(check_public_visible_dependencies); + let cx = activate_deps_loop(cx, registry, summaries, config, is_build)?; let mut cksums = HashMap::new(); for (summary, _) in cx.activations.values() { @@ -151,12 +164,12 @@ pub fn resolve( cksums, BTreeMap::new(), Vec::new(), + cx.build_graphs(), ResolveVersion::default_for_new_lockfiles(), ); check_cycles(&resolve)?; check_duplicate_pkgs_in_lockfile(&resolve)?; - trace!("resolved: {:?}", resolve); Ok(resolve) } @@ -171,6 +184,7 @@ fn activate_deps_loop( registry: &mut RegistryQueryer<'_>, summaries: &[(Summary, ResolveOpts)], config: Option<&Config>, + is_build: bool, ) -> CargoResult { let mut backtrack_stack = Vec::new(); let mut remaining_deps = RemainingDeps::new(); @@ -182,7 +196,7 @@ fn activate_deps_loop( // Activate all the initial summaries to kick off some work. for &(ref summary, ref opts) in summaries { debug!("initial activation: {}", summary.package_id()); - let res = activate(&mut cx, registry, None, summary.clone(), opts.clone()); + let res = activate(&mut cx, registry, None, summary.clone(), opts.clone(), config.clone(), is_build); match res { Ok(Some((frame, _))) => remaining_deps.push(frame), Ok(None) => (), @@ -379,7 +393,7 @@ fn activate_deps_loop( dep.package_name(), candidate.version() ); - let res = activate(&mut cx, registry, Some((&parent, &dep)), candidate, opts); + let res = activate(&mut cx, registry, Some((&parent, &dep)), candidate, opts, config, false); let successfully_activated = match res { // Success! 
We've now activated our `candidate` in our context @@ -592,6 +606,8 @@ fn activate( parent: Option<(&Summary, &Dependency)>, candidate: Summary, opts: ResolveOpts, + config: Option<&Config>, + is_build: bool, ) -> ActivateResult> { let candidate_pid = candidate.package_id(); cx.age += 1; @@ -616,6 +632,18 @@ fn activate( let activated = cx.flag_activated(&candidate, &opts, parent)?; + if !is_build && candidate.build_dependencies().count() > 0 { + // resolve build dependencies + let build_graph = resolve_deps( + &[(candidate.clone(), opts.clone())], + registry, + config, + cx.public_dependency.is_some(), + true, + )?; + cx.build_graphs.insert(candidate_pid, build_graph); + } + let candidate = match registry.replacement_summary(candidate_pid) { Some(replace) => { // Note the `None` for parent here since `[replace]` is a bit wonky @@ -644,7 +672,7 @@ fn activate( let now = Instant::now(); let (used_features, deps) = - &*registry.build_deps(parent.map(|p| p.0.package_id()), &candidate, &opts)?; + &*registry.build_deps(parent.map(|p| p.0.package_id()), &candidate, &opts, is_build)?; // Record what list of features is active for this package. if !used_features.is_empty() { diff --git a/src/cargo/core/resolver/resolve.rs b/src/cargo/core/resolver/resolve.rs index 430ad39b85b..619a139e91d 100644 --- a/src/cargo/core/resolver/resolve.rs +++ b/src/cargo/core/resolver/resolve.rs @@ -16,6 +16,7 @@ use super::encode::Metadata; /// /// Each instance of `Resolve` also understands the full set of features used /// for each package. +#[derive(Clone)] pub struct Resolve { /// A graph, whose vertices are packages and edges are dependency specifications /// from `Cargo.toml`. We need a `Vec` here because the same package @@ -47,6 +48,8 @@ pub struct Resolve { unused_patches: Vec, /// A map from packages to a set of their public dependencies public_dependencies: HashMap>, + /// A map from packages to their build dependency graphs + build_graphs: HashMap, /// Version of the `Cargo.lock` format, see /// `cargo::core::resolver::encode` for more. version: ResolveVersion, @@ -77,6 +80,7 @@ impl Resolve { checksums: HashMap>, metadata: Metadata, unused_patches: Vec, + build_graphs: HashMap, version: ResolveVersion, ) -> Resolve { let reverse_replacements = replacements.iter().map(|(&p, &r)| (r, p)).collect(); @@ -106,6 +110,7 @@ impl Resolve { empty_features: HashSet::new(), reverse_replacements, public_dependencies, + build_graphs, version, } } @@ -277,6 +282,10 @@ unable to verify that `{0}` is the same as when the lockfile was generated .map(|(id, deps)| (*id, deps.as_slice())) } + pub fn build_graph(&self, pkg: PackageId) -> Option<&Resolve> { + self.build_graphs.get(&pkg) + } + pub fn replacement(&self, pkg: PackageId) -> Option { self.replacements.get(&pkg).cloned() } @@ -388,7 +397,7 @@ impl PartialEq for Resolve { compare! 
{ // fields to compare graph replacements reverse_replacements empty_features features - checksums metadata unused_patches public_dependencies + checksums metadata unused_patches public_dependencies build_graphs | // fields to ignore version diff --git a/src/cargo/core/summary.rs b/src/cargo/core/summary.rs index d6732ec8997..f348865d3e0 100644 --- a/src/cargo/core/summary.rs +++ b/src/cargo/core/summary.rs @@ -84,6 +84,12 @@ impl Summary { pub fn source_id(&self) -> SourceId { self.package_id().source_id() } + pub fn run_dependencies(&self) -> impl Iterator { + self.inner.dependencies.iter().filter(|dep| !dep.is_build()) + } + pub fn build_dependencies(&self) -> impl Iterator { + self.inner.dependencies.iter().filter(|dep| dep.is_build()) + } pub fn dependencies(&self) -> &[Dependency] { &self.inner.dependencies } diff --git a/tests/testsuite/build_script.rs b/tests/testsuite/build_script.rs index cc712862d8d..619883f863f 100644 --- a/tests/testsuite/build_script.rs +++ b/tests/testsuite/build_script.rs @@ -4032,6 +4032,5 @@ fn links_duplicated_across_build_deps() { .build(); p.cargo("build") - .env("CARGO_LOG", "trace") .run(); }
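---

The approach the series takes, restated as a minimal self-contained sketch. Everything below uses hypothetical, heavily simplified stand-ins for Cargo's Summary, PackageId and Resolve types (this is not the real cargo API): the keep() function mirrors the dependency-kind match added to resolve_features, and resolve_deps mirrors how activate now resolves a package's [build-dependencies] into a separate nested Resolve, stored in a build_graphs map keyed by package id, which compute_deps later consults for build-script units instead of the main graph.

use std::collections::HashMap;

// Hypothetical, simplified stand-ins for Cargo's internal types,
// used only to illustrate the shape of the change.
#[derive(Clone, Copy, PartialEq, Eq)]
enum DepKind {
    Normal,
    Development,
    Build,
}

#[derive(Clone, PartialEq, Eq, Hash)]
struct PackageId(&'static str);

#[derive(Clone)]
struct Dep {
    id: PackageId,
    kind: DepKind,
}

#[derive(Clone)]
struct Summary {
    id: PackageId,
    deps: Vec<Dep>,
}

#[derive(Default)]
struct Resolve {
    // dependency edges seen by this resolution pass
    graph: HashMap<PackageId, Vec<PackageId>>,
    // one independent Resolve per package that declared [build-dependencies]
    build_graphs: HashMap<PackageId, Resolve>,
}

// Mirrors the filter added to resolve_features: a "build pass" keeps only
// build-dependencies, the normal pass keeps everything else.
fn keep(kind: DepKind, is_build: bool, dev_deps: bool) -> bool {
    match (kind, is_build) {
        (DepKind::Build, _) => is_build,
        (_, true) => false,
        (DepKind::Normal, false) => true,
        (DepKind::Development, false) => dev_deps,
    }
}

fn resolve_deps(
    roots: &[Summary],
    registry: &HashMap<PackageId, Summary>,
    is_build: bool,
) -> Resolve {
    let mut out = Resolve::default();
    let mut stack: Vec<Summary> = roots.to_vec();
    while let Some(pkg) = stack.pop() {
        if out.graph.contains_key(&pkg.id) {
            continue; // already visited; avoid cycles and duplicate work
        }
        let edges: Vec<PackageId> = pkg
            .deps
            .iter()
            .filter(|d| keep(d.kind, is_build, false))
            .map(|d| d.id.clone())
            .collect();
        for dep in &edges {
            if let Some(s) = registry.get(dep) {
                stack.push(s.clone());
            }
        }
        // The core idea of the patch: a package with build-dependencies gets
        // its own nested resolution, so two different versions of a
        // `links`-bearing -sys crate live in separate graphs instead of
        // colliding in the main one.
        if !is_build && pkg.deps.iter().any(|d| d.kind == DepKind::Build) {
            let nested = resolve_deps(&[pkg.clone()], registry, true);
            out.build_graphs.insert(pkg.id.clone(), nested);
        }
        out.graph.insert(pkg.id.clone(), edges);
    }
    out
}

fn main() {
    // Usage shaped like the new links_duplicated_across_build_deps test:
    // foo -> { a, b }, where a and b each build-depend on a different
    // version of bar-sys.
    let pkgs = [
        Summary { id: PackageId("foo"), deps: vec![
            Dep { id: PackageId("a"), kind: DepKind::Normal },
            Dep { id: PackageId("b"), kind: DepKind::Normal },
        ]},
        Summary { id: PackageId("a"), deps: vec![
            Dep { id: PackageId("bar-sys 0.1.0"), kind: DepKind::Build },
        ]},
        Summary { id: PackageId("b"), deps: vec![
            Dep { id: PackageId("bar-sys 0.2.0"), kind: DepKind::Build },
        ]},
        Summary { id: PackageId("bar-sys 0.1.0"), deps: vec![] },
        Summary { id: PackageId("bar-sys 0.2.0"), deps: vec![] },
    ];
    let registry: HashMap<_, _> = pkgs.iter().map(|s| (s.id.clone(), s.clone())).collect();
    let resolve = resolve_deps(&[registry[&PackageId("foo")].clone()], &registry, false);
    // The two bar-sys versions end up in separate nested graphs, one per
    // package that build-depends on them, rather than in the main resolve.
    assert!(resolve.build_graphs.contains_key(&PackageId("a")));
    assert!(resolve.build_graphs.contains_key(&PackageId("b")));
}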