Skip to content

Commit

Permalink
fix: prefer using patched dependencies in lockfile
Browse files Browse the repository at this point in the history
We've tracked patched dependencies in Cargo.lock with the `patched+`
protocol. However, we don't really make use of it when rebuilding from a
lockfile. This PR fixes that. You can see the test cases now won't
update the registry because it starts reusing locked patch dependencies.
  • Loading branch information
weihanglo committed Jul 5, 2024
1 parent b1e7be6 commit f8cbe46
Show file tree
Hide file tree
Showing 4 changed files with 135 additions and 14 deletions.
4 changes: 4 additions & 0 deletions crates/cargo-util-schemas/src/core/source_kind.rs
Original file line number Diff line number Diff line change
Expand Up @@ -288,6 +288,10 @@ impl PatchInfo {
PatchInfo::Deferred { patches } => patches.as_slice(),
}
}

/// Returns `true` if this is the [`PatchInfo::Resolved`] variant.
pub fn is_resolved(&self) -> bool {
    matches!(self, PatchInfo::Resolved { .. })
}
}

/// A [`PatchInfo`] that can be `Display`ed as URL query string.
Expand Down
1 change: 1 addition & 0 deletions src/cargo/core/registry.rs
Original file line number Diff line number Diff line change
Expand Up @@ -180,6 +180,7 @@ pub type PatchDependency<'a> = (&'a Dependency, Option<LockedPatchDependency>);

/// Argument to [`PackageRegistry::patch`] which is information about a `[patch]`
/// directive that we found in a lockfile, if present.
#[derive(Debug)]
pub struct LockedPatchDependency {
/// The original `Dependency` directive, except "locked" so its version
/// requirement is locked to `foo` and its `SourceId` has a "precise" listed.
Expand Down
142 changes: 130 additions & 12 deletions src/cargo/ops/resolve.rs
Original file line number Diff line number Diff line change
Expand Up @@ -337,7 +337,7 @@ pub fn resolve_with_previous<'gctx>(
register_patches: bool,
) -> CargoResult<Resolve> {
let mut patches = if register_patches {
Some(PatchEntries::from_workspace(ws)?)
Some(PatchEntries::from_workspace(ws, previous)?)
} else {
None
};
Expand All @@ -364,7 +364,7 @@ pub fn resolve_with_previous<'gctx>(

if let Some(patches) = patches.as_ref() {
emit_warnings_of_unused_patches(ws, &resolved, registry)?;
emit_warnings_of_unresolved_patches(ws, patches)?;
emit_warnings_of_unresolved_patches(ws, &resolved, patches)?;
}

return Ok(resolved);
Expand Down Expand Up @@ -843,16 +843,45 @@ fn emit_warnings_of_unused_patches(
/// [`emit_warnings_of_unused_patches`] which requires summaries.
fn emit_warnings_of_unresolved_patches(
ws: &Workspace<'_>,
resolve: &Resolve,
patches: &PatchEntries,
) -> CargoResult<()> {
let resolved_patched_pkg_ids: Vec<_> = resolve
.iter()
.filter(|pkg_id| pkg_id.source_id().is_patched())
.collect();
for (url, patches) in &patches.unresolved {
let canonical_url = CanonicalUrl::new(url)?;
for patch in patches {
let maybe_already_used = resolved_patched_pkg_ids
.iter()
.filter(|id| {
let source_id = id.source_id();
let url = source_id.canonical_url().raw_canonicalized_url();
// To check if the patch is for the same source,
// we need strip `patched` prefix here,
// as CanonicalUrl preserves that.
let url = url.as_str().strip_prefix("patched+").unwrap();
url == canonical_url.raw_canonicalized_url().as_str()
})
.any(|id| patch.matches_ignoring_source(*id));

if maybe_already_used {
continue;
}

let url = if url.as_str() == CRATES_IO_INDEX {
"crates-io"
} else {
url.as_str()
};
let msg = format!("unused patch for `{}` in `{url}`", patch.name_in_toml());

let msg = format!(
"unused patch `{}@{}` {} in `{url}`",
patch.name_in_toml(),
patch.version_req(),
patch.source_id(),
);
ws.gctx().shell().warn(msg)?;
}
}
Expand Down Expand Up @@ -880,8 +909,8 @@ fn register_patch_entries(

registry.clear_patch();

for (url, patches) in patches {
for patch in patches {
for (url, patches) in patches.iter() {
for patch in patches.iter() {
version_prefs.prefer_dependency(patch.clone());
}
let Some(previous) = previous else {
Expand All @@ -900,7 +929,7 @@ fn register_patch_entries(
// previous resolve graph, which is primarily what's done here to
// build the `registrations` list.
let mut registrations = Vec::new();
for dep in patches {
for dep in patches.iter() {
let candidates = || {
previous
.iter()
Expand Down Expand Up @@ -1029,14 +1058,16 @@ struct PatchEntries {
/// resolved dependency graph.
unresolved: Vec<(Url, Vec<Dependency>)>,

from_previous: HashMap<Url, Vec<LockedPatchDependency>>,

/// How many times [`PatchEntries::requires_reresolve`] has been called.
resolve_times: u32,
}

impl PatchEntries {
const RESOLVE_TIMES_LIMIT: u32 = 20;

fn from_workspace(ws: &Workspace<'_>) -> CargoResult<PatchEntries> {
fn from_workspace(ws: &Workspace<'_>, previous: Option<&Resolve>) -> CargoResult<PatchEntries> {
let mut ready_patches = HashMap::new();
let mut unresolved = Vec::new();
for (url, patches) in ws.root_patch()?.into_iter() {
Expand All @@ -1050,9 +1081,60 @@ impl PatchEntries {
unresolved.push((url, file_patches));
}
}

let mut from_previous = HashMap::<Url, Vec<_>>::new();
let resolved_patched_pkg_ids: Vec<_> = previous
.iter()
.flat_map(|previous| {
previous
.iter()
.chain(previous.unused_patches().iter().cloned())
})
.filter(|pkg_id| {
pkg_id
.source_id()
.patch_info()
.map(|info| info.is_resolved())
.unwrap_or_default()
})
.collect();
for (url, patches) in &unresolved {
let mut ready_patches = Vec::new();
let canonical_url = CanonicalUrl::new(url)?;
for patch in patches {
for pkg_id in resolved_patched_pkg_ids.iter().filter(|id| {
let source_id = id.source_id();
let url = source_id.canonical_url().raw_canonicalized_url();
// To check if the patch is for the same source,
// we need strip `patched` prefix here,
// as CanonicalUrl preserves that.
let url = url.as_str().strip_prefix("patched+").unwrap();
url == canonical_url.raw_canonicalized_url().as_str()
}) {
if patch.matches_ignoring_source(*pkg_id) {
let mut patch = patch.clone();
patch.set_source_id(pkg_id.source_id());
patch.lock_to(*pkg_id);
ready_patches.push(LockedPatchDependency {
dependency: patch,
package_id: *pkg_id,
alt_package_id: None,
});
}
}
}
if !ready_patches.is_empty() {
from_previous
.entry(url.clone())
.or_default()
.extend(ready_patches);
}
}

Ok(PatchEntries {
ready_patches,
unresolved,
from_previous,
resolve_times: 0,
})
}
Expand Down Expand Up @@ -1102,6 +1184,16 @@ impl PatchEntries {
*unresolved = pending_patches;

if !ready_patches.is_empty() {
if let Some(from_previous) = self.from_previous.get_mut(url) {
for patch in &ready_patches {
if let Some(index) = from_previous
.iter()
.position(|locked| patch.matches_ignoring_source(locked.package_id))
{
from_previous.swap_remove(index);
}
}
}
self.ready_patches
.entry(url.clone())
.or_default()
Expand All @@ -1112,13 +1204,39 @@ impl PatchEntries {
self.resolve_times += 1;
Ok(has_new_patches)
}

/// Iterates over all patch entries to register, pairing each `[patch]`
/// source URL with the dependencies ([`Deps`]) to apply for it.
///
/// If no patch has been freshly resolved in this session
/// (`ready_patches` is empty), only the locked patch dependencies
/// carried over from the previous resolve (`from_previous`) are yielded.
/// Otherwise, iteration is driven by `ready_patches`, attaching any
/// matching locked entries for the same URL alongside the fresh ones.
///
/// NOTE(review): in the non-empty branch, URLs present only in
/// `from_previous` (and not in `ready_patches`) are not yielded at all —
/// presumably intentional, since `requires_reresolve` removes superseded
/// locked entries, but worth confirming.
fn iter(&self) -> Box<dyn Iterator<Item = (&Url, Deps<'_>)> + '_> {
    if self.ready_patches.is_empty() {
        // Nothing newly resolved: replay only the locked patches from the
        // previous lockfile so the resolver prefers what was locked.
        Box::new(self.from_previous.iter().map(|(url, deps)| {
            let deps = Deps {
                deps: None,
                locked_deps: Some(deps),
            };
            (url, deps)
        }))
    } else {
        // Freshly resolved patches take the lead; locked entries for the
        // same URL (if any) are surfaced next to them.
        Box::new(self.ready_patches.iter().map(|(url, deps)| {
            let deps = Deps {
                deps: Some(deps),
                locked_deps: self.from_previous.get(url),
            };
            (url, deps)
        }))
    }
}
}

impl<'a> IntoIterator for &'a PatchEntries {
type Item = (&'a Url, &'a Vec<Dependency>);
type IntoIter = std::collections::hash_map::Iter<'a, Url, Vec<Dependency>>;
struct Deps<'a> {
deps: Option<&'a Vec<Dependency>>,
locked_deps: Option<&'a Vec<LockedPatchDependency>>,
}

fn into_iter(self) -> Self::IntoIter {
self.ready_patches.iter()
impl<'a> Deps<'a> {
fn iter(&self) -> impl Iterator<Item = &'a Dependency> {
let locked_deps = self
.locked_deps
.into_iter()
.flat_map(|deps| deps.into_iter().map(|locked| &locked.dependency));
self.deps.into_iter().flatten().chain(locked_deps)
}
}
2 changes: 0 additions & 2 deletions tests/testsuite/patch_files.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1124,7 +1124,6 @@ Hello, patched!
p.cargo("run -v")
.masquerade_as_nightly_cargo(&["patch-files"])
.with_stderr_data(str![[r#"
[UPDATING] `dummy-registry` index
[FRESH] bar v1.0.0 (bar@1.0.0 with 1 patch file)
[FRESH] foo v0.0.0 ([ROOT]/foo)
[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s
Expand Down Expand Up @@ -1177,7 +1176,6 @@ Hello, patched!
p.cargo("run")
.masquerade_as_nightly_cargo(&["patch-files"])
.with_stderr_data(str![[r#"
[UPDATING] `dummy-registry` index
[PATCHING] bar v1.0.0
[COMPILING] bar v1.0.0 (bar@1.0.0 with 1 patch file)
[COMPILING] foo v0.0.0 ([ROOT]/foo)
Expand Down

0 comments on commit f8cbe46

Please sign in to comment.