Avoid updating registry when adding existing deps
Cargo previously erroneously updated the registry whenever a new dependency was
added to a crate which already exists in the DAG. This commit fixes that
behavior by ensuring that if the new dependency matches a previously locked
version, the locked version is used instead.

This commit also involved a bit of refactoring around this logic to make it
clearer how the locking and "falling back to the registry" happen.

Closes rust-lang#2895
Closes rust-lang#2931
alexcrichton committed Sep 30, 2016
1 parent d8936af commit c0306a8
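
To make the scenario concrete: the fix boils down to a semver check. If a
requirement newly added to the root manifest is already satisfied by a version
locked elsewhere in the dependency graph, that locked version can be reused and
no registry update is needed. The sketch below uses the semver crate purely for
illustration; it is not Cargo's internal code path.

// Illustration only: the matching rule behind the fix, not Cargo's real types.
extern crate semver;

use semver::{Version, VersionReq};

fn main() {
    // A path dependency (`baz`) already pulled in `remote`, and the lockfile
    // pinned it to 0.3.4.
    let locked = Version::parse("0.3.4").unwrap();

    // The root manifest now adds `remote = "0.3"` as a direct dependency.
    let added_req = VersionReq::parse("0.3").unwrap();

    // The new requirement is satisfied by the locked version, so the resolver
    // can lock to it instead of hitting the registry index.
    assert!(added_req.matches(&locked));
}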
Showing 3 changed files with 180 additions and 113 deletions.
112 changes: 53 additions & 59 deletions src/cargo/core/registry.rs
@@ -201,22 +201,24 @@ impl<'cfg> PackageRegistry<'cfg> {
Ok(ret)
}

// This function is used to transform a summary to another locked summary if
// possible. This is where the concept of a lockfile comes into play.
//
// If a summary points at a package id which was previously locked, then we
// override the summary's id itself, as well as all dependencies, to be
// rewritten to the locked versions. This will transform the summary's
// source to a precise source (listed in the locked version) as well as
// transforming all of the dependencies from range requirements on imprecise
// sources to exact requirements on precise sources.
//
// If a summary does not point at a package id which was previously locked,
// we still want to avoid updating as many dependencies as possible to keep
// the graph stable. In this case we map all of the summary's dependencies
// to be rewritten to a locked version wherever possible. If we're unable to
// map a dependency though, we just pass it on through.
fn lock(&self, summary: Summary) -> Summary {
/// This function is used to transform a summary to another locked summary
/// if possible. This is where the concept of a lockfile comes into play.
///
/// If a summary points at a package id which was previously locked, then we
/// override the summary's id itself, as well as all dependencies, to be
/// rewritten to the locked versions. This will transform the summary's
/// source to a precise source (listed in the locked version) as well as
/// transforming all of the dependencies from range requirements on
/// imprecise sources to exact requirements on precise sources.
///
/// If a summary does not point at a package id which was previously locked,
/// or if any dependencies were added and don't have a previously listed
/// version, we still want to avoid updating as many dependencies as
/// possible to keep the graph stable. In this case we map all of the
/// summary's dependencies to be rewritten to a locked version wherever
/// possible. If we're unable to map a dependency though, we just pass it on
/// through.
pub fn lock(&self, summary: Summary) -> Summary {
let pair = self.locked.get(summary.source_id()).and_then(|map| {
map.get(summary.name())
}).and_then(|vec| {
@@ -229,51 +231,43 @@ impl<'cfg> PackageRegistry<'cfg> {
None => summary,
};
summary.map_dependencies(|dep| {
match pair {
// If we've got a known set of overrides for this summary, then
// one of a few cases can arise:
//
// 1. We have a lock entry for this dependency from the same
// source as it's listed as coming from. In this case we make
// sure to lock to precisely the given package id.
//
// 2. We have a lock entry for this dependency, but it's from a
// different source than what's listed, or the version
// requirement has changed. In this case we must discard the
// locked version because the dependency needs to be
// re-resolved.
//
// 3. We don't have a lock entry for this dependency, in which
// case it was likely an optional dependency which wasn't
// included previously so we just pass it through anyway.
Some(&(_, ref deps)) => {
match deps.iter().find(|d| d.name() == dep.name()) {
Some(lock) => {
if dep.matches_id(lock) {
dep.lock_to(lock)
} else {
dep
}
}
None => dep,
}
// If we've got a known set of overrides for this summary, then
// one of a few cases can arise:
//
// 1. We have a lock entry for this dependency from the same
// source as it's listed as coming from. In this case we make
// sure to lock to precisely the given package id.
//
// 2. We have a lock entry for this dependency, but it's from a
// different source than what's listed, or the version
// requirement has changed. In this case we must discard the
// locked version because the dependency needs to be
// re-resolved.
//
// 3. We don't have a lock entry for this dependency, in which
// case it was likely an optional dependency which wasn't
// included previously so we just pass it through anyway.
//
// Cases 1/2 are handled by `matches_id` and case 3 is handled by
// falling through to the logic below.
if let Some(&(_, ref locked_deps)) = pair {
let locked = locked_deps.iter().find(|id| dep.matches_id(id));
if let Some(locked) = locked {
return dep.lock_to(locked)
}
}

// If this summary did not have a locked version, then we query
// all known locked packages to see if they match this
// dependency. If anything does then we lock it to that and move
// on.
None => {
let v = self.locked.get(dep.source_id()).and_then(|map| {
map.get(dep.name())
}).and_then(|vec| {
vec.iter().find(|&&(ref id, _)| dep.matches_id(id))
});
match v {
Some(&(ref id, _)) => dep.lock_to(id),
None => dep
}
}
// If this dependency did not have a locked version, then we query
// all known locked packages to see if they match this dependency.
// If anything does then we lock it to that and move on.
let v = self.locked.get(dep.source_id()).and_then(|map| {
map.get(dep.name())
}).and_then(|vec| {
vec.iter().find(|&&(ref id, _)| dep.matches_id(id))
});
match v {
Some(&(ref id, _)) => dep.lock_to(id),
None => dep
}
})
}
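
As a rough illustration of the lookup order in `lock` above, here is a sketch
with hypothetical, simplified stand-ins for Cargo's `Summary`, `Dependency`, and
`PackageId` types: a still-matching lock entry recorded for this summary wins,
the global table of previously locked packages is the fallback, and otherwise
the dependency passes through unchanged.

use std::collections::HashMap;

// Hypothetical stand-in for a PackageId; illustration only.
#[derive(Clone, Debug, PartialEq)]
struct Pkg { name: String, version: String }

// Crude stand-in for `Dependency::matches_id`.
fn matches(dep_name: &str, dep_req: &str, pkg: &Pkg) -> bool {
    pkg.name == dep_name && pkg.version.starts_with(dep_req)
}

// Returns the package the dependency should be locked to, if any.
fn lock_dep<'a>(dep_name: &str,
                dep_req: &str,
                summary_locked_deps: Option<&'a [Pkg]>,
                all_locked: &'a HashMap<String, Vec<Pkg>>) -> Option<&'a Pkg> {
    // Cases 1/2: prefer a lock entry recorded for this summary that still matches.
    if let Some(deps) = summary_locked_deps {
        if let Some(locked) = deps.iter().find(|p| matches(dep_name, dep_req, p)) {
            return Some(locked);
        }
    }
    // Case 3: fall back to anything locked by the previous resolution.
    all_locked.get(dep_name)
              .and_then(|vec| vec.iter().find(|p| matches(dep_name, dep_req, p)))
}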
86 changes: 32 additions & 54 deletions src/cargo/ops/resolve.rs
@@ -1,4 +1,4 @@
use std::collections::{HashMap, HashSet};
use std::collections::HashSet;

use core::{PackageId, PackageIdSpec, SourceId, Workspace};
use core::registry::PackageRegistry;
@@ -55,6 +55,36 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
.filter(|s| !s.is_registry()));
}

// In the case where a previous instance of resolve is available, we
// want to lock as many packages as possible to the previous version
// without disturbing the graph structure. To this end we perform
// two actions here:
//
// 1. We inform the package registry of all locked packages. This
// involves informing it of both the locked package's id as well
// as the versions of all locked dependencies. The registry will
// then take this information into account when it is queried.
//
// 2. The specified package's summary will have its dependencies
// modified to their precise variants. This will instruct the
// first step of the resolution process to not query for ranges
// but rather for precise dependency versions.
//
// This process must handle altered dependencies, however, as
// it's possible for a manifest to change over time to have
// dependencies added, removed, or modified to different version
// ranges. To deal with this, we only actually lock a dependency
// to the previously resolved version if the dependency listed
// still matches the locked version.
if let Some(r) = previous {
for node in r.iter().filter(|p| keep(p, to_avoid, &to_avoid_sources)) {
let deps = r.deps_not_replaced(node)
.filter(|p| keep(p, to_avoid, &to_avoid_sources))
.cloned().collect();
registry.register_lock(node.clone(), deps);
}
}

let mut summaries = Vec::new();
for member in ws.members() {
try!(registry.add_sources(&[member.package_id().source_id()
@@ -75,59 +105,7 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
}
}

// If we don't have a previous instance of resolve then we just need to
// resolve our entire summary (method should be Everything) and we just
// move along to the next member.
let r = match previous {
Some(r) => r,
None => {
summaries.push((member.summary().clone(), method));
continue
}
};

// In the case where a previous instance of resolve is available, we
// want to lock as many packages as possible to the previous version
// without disturbing the graph structure. To this end we perform
// two actions here:
//
// 1. We inform the package registry of all locked packages. This
// involves informing it of both the locked package's id as well
// as the versions of all locked dependencies. The registry will
// then takes this information into account when it is queried.
//
// 2. The specified package's summary will have its dependencies
// modified to their precise variants. This will instruct the
// first step of the resolution process to not query for ranges
// but rather for precise dependency versions.
//
// This process must handle altered dependencies, however, as
// it's possible for a manifest to change over time to have
// dependencies added, removed, or modified to different version
// ranges. To deal with this, we only actually lock a dependency
// to the previously resolved version if the dependency listed
// still matches the locked version.
for node in r.iter().filter(|p| keep(p, to_avoid, &to_avoid_sources)) {
let deps = r.deps_not_replaced(node)
.filter(|p| keep(p, to_avoid, &to_avoid_sources))
.cloned().collect();
registry.register_lock(node.clone(), deps);
}

let summary = {
let map = r.deps_not_replaced(member.package_id()).filter(|p| {
keep(p, to_avoid, &to_avoid_sources)
}).map(|d| {
(d.name(), d)
}).collect::<HashMap<_, _>>();

member.summary().clone().map_dependencies(|dep| {
match map.get(dep.name()) {
Some(&lock) if dep.matches_id(lock) => dep.lock_to(lock),
_ => dep,
}
})
};
let summary = registry.lock(member.summary().clone());
summaries.push((summary, method));
}

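The comment in resolve.rs above describes two actions; the second one now
happens via `registry.lock`, which rewrites still-matching range requirements to
the precise locked version. A rough sketch of that pinning rule, again using the
semver crate purely for illustration (Cargo's `Dependency::lock_to` does the
real work):

extern crate semver;

use semver::{Version, VersionReq};

// Illustration only: pin a range requirement to the exact locked version when
// that version still satisfies it; otherwise leave the range alone so the
// dependency gets re-resolved.
fn pin_to_lock(req: VersionReq, locked: &Version) -> VersionReq {
    if req.matches(locked) {
        VersionReq::parse(&format!("={}", locked)).unwrap()
    } else {
        req
    }
}

fn main() {
    let locked = Version::parse("0.3.4").unwrap();

    // Requirement unchanged (or still compatible): pinned to the locked version.
    let pinned = pin_to_lock(VersionReq::parse("0.3").unwrap(), &locked);
    assert_eq!(pinned, VersionReq::parse("=0.3.4").unwrap());

    // Requirement bumped past the lock: left as a range, so it re-resolves.
    let rerange = pin_to_lock(VersionReq::parse("0.4").unwrap(), &locked);
    assert_eq!(rerange, VersionReq::parse("0.4").unwrap());
}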
95 changes: 95 additions & 0 deletions tests/registry.rs
@@ -1147,3 +1147,98 @@ Caused by:
attempting to make an HTTP request, but --frozen was specified
"));
}

#[test]
fn add_dep_dont_update_registry() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "bar"
version = "0.5.0"
authors = []
[dependencies]
baz = { path = "baz" }
"#)
.file("src/main.rs", "fn main() {}")
.file("baz/Cargo.toml", r#"
[project]
name = "baz"
version = "0.5.0"
authors = []
[dependencies]
remote = "0.3"
"#)
.file("baz/src/lib.rs", "");
p.build();

Package::new("remote", "0.3.4").publish();

assert_that(p.cargo("build"), execs().with_status(0));

t!(t!(File::create(p.root().join("Cargo.toml"))).write_all(br#"
[project]
name = "bar"
version = "0.5.0"
authors = []
[dependencies]
baz = { path = "baz" }
remote = "0.3"
"#));

assert_that(p.cargo("build"),
execs().with_status(0)
.with_stderr("\
[COMPILING] bar v0.5.0 ([..])
[FINISHED] [..]
"));
}

#[test]
fn bump_version_dont_update_registry() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "bar"
version = "0.5.0"
authors = []
[dependencies]
baz = { path = "baz" }
"#)
.file("src/main.rs", "fn main() {}")
.file("baz/Cargo.toml", r#"
[project]
name = "baz"
version = "0.5.0"
authors = []
[dependencies]
remote = "0.3"
"#)
.file("baz/src/lib.rs", "");
p.build();

Package::new("remote", "0.3.4").publish();

assert_that(p.cargo("build"), execs().with_status(0));

t!(t!(File::create(p.root().join("Cargo.toml"))).write_all(br#"
[project]
name = "bar"
version = "0.6.0"
authors = []
[dependencies]
baz = { path = "baz" }
"#));

assert_that(p.cargo("build"),
execs().with_status(0)
.with_stderr("\
[COMPILING] bar v0.6.0 ([..])
[FINISHED] [..]
"));
}
