cargo/ops/resolve.rs
//! High-level APIs for executing the resolver.
//!
//! This module provides functions for running the resolver given a workspace, including loading
//! the `Cargo.lock` file and checking if it needs updating.
//!
//! There are roughly 3 main functions:
//!
//! - [`resolve_ws`]: A simple, high-level function with no options.
//! - [`resolve_ws_with_opts`]: A medium-level function with options like
//!   user-provided features. This is the most appropriate function to use in
//!   most cases.
//! - [`resolve_with_previous`]: A low-level function for running the resolver,
//!   providing the most power and flexibility.
//!
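//! For example, a command that only needs the full workspace resolve can drive the
//! high-level entry point roughly like this. This is a minimal sketch: it assumes a
//! `Workspace` has already been constructed (e.g. via
//! [`crate::util::command_prelude::ArgMatchesExt::workspace`]) and that the caller
//! propagates errors.
//!
//! ```ignore
//! use crate::core::Workspace;
//! use crate::ops::resolve_ws;
//! use crate::util::errors::CargoResult;
//!
//! fn list_locked_packages(ws: &Workspace<'_>) -> CargoResult<()> {
//!     // Resolve the whole workspace, updating `Cargo.lock` if needed.
//!     let (_pkg_set, resolve) = resolve_ws(ws, /* dry_run */ false)?;
//!     for id in resolve.iter() {
//!         println!("{id}");
//!     }
//!     Ok(())
//! }
//! ```
//!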
//! ### Data Structures
//!
//! - [`Workspace`]:
//!   Usually created by [`crate::util::command_prelude::ArgMatchesExt::workspace`] which discovers the root of the
//!   workspace, and loads all the workspace members as a [`Package`] object
//!   - [`Package`]:
//!     Corresponds with a `Cargo.toml` manifest (deserialized as [`Manifest`]) and its associated files.
//!     - [`Target`]s are crates such as the library, binaries, integration tests, or examples.
//!       They are what is actually compiled by `rustc`.
//!       Each `Target` defines a crate root, like `src/lib.rs` or `examples/foo.rs`.
//!     - [`PackageId`] --- A unique identifier for a package.
//! - [`PackageRegistry`]:
//!   The primary interface for how the dependency
//!   resolver finds packages. It contains the `SourceMap`, and handles things
//!   like the `[patch]` table. The dependency resolver
//!   sends a query to the `PackageRegistry` to "get me all packages that match
//!   this dependency declaration". The `Registry` trait provides a generic interface
//!   to the `PackageRegistry`, but this is only used for providing an alternate
//!   implementation of the `PackageRegistry` for testing.
//!   - [`SourceMap`]: Map of all available sources.
//!     - [`Source`]: An abstraction for something that can fetch packages (a remote
//!       registry, a git repo, the local filesystem, etc.). Check out the [source
//!       implementations] for all the details about registries, indexes, git
//!       dependencies, etc.
//!       - [`SourceId`]: A unique identifier for a source.
//! - [`Summary`]: A subset of a [`Manifest`], and is essentially
//!   the information that can be found in a registry index. Queries against the
//!   `PackageRegistry` yield a `Summary`. The resolver uses the summary
//!   information to build the dependency graph.
//! - [`PackageSet`] --- Contains all of the `Package` objects. This works with the
//!   [`Downloads`] struct to coordinate downloading packages. It has a reference
//!   to the `SourceMap` to get the `Source` objects which tell the `Downloads`
//!   struct which URLs to fetch.
//!
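//! For a sense of how these types fit together, the way a `Resolve` (the resolved
//! dependency graph) becomes a `PackageSet` mirrors what [`get_resolved_packages`]
//! does; a minimal sketch, assuming a `registry` and a `resolve` are already in scope:
//!
//! ```ignore
//! // A `Resolve` enumerates `PackageId`s; the `PackageRegistry` turns them into a
//! // `PackageSet`, which can download the corresponding `Package`s on demand.
//! let ids: Vec<PackageId> = resolve.iter().collect();
//! let pkg_set = registry.get(&ids)?;
//! ```
//!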
//! [`Package`]: crate::core::package
//! [`Target`]: crate::core::Target
//! [`Manifest`]: crate::core::Manifest
//! [`Source`]: crate::sources::source::Source
//! [`SourceMap`]: crate::sources::source::SourceMap
//! [`PackageRegistry`]: crate::core::registry::PackageRegistry
//! [source implementations]: crate::sources
//! [`Downloads`]: crate::core::package::Downloads

use crate::core::compiler::{CompileKind, RustcTargetData};
use crate::core::registry::{LockedPatchDependency, PackageRegistry};
use crate::core::resolver::features::{
    CliFeatures, FeatureOpts, FeatureResolver, ForceAllTargets, RequestedFeatures, ResolvedFeatures,
};
use crate::core::resolver::{
    self, HasDevUnits, Resolve, ResolveOpts, ResolveVersion, VersionOrdering, VersionPreferences,
};
use crate::core::summary::Summary;
use crate::core::Dependency;
use crate::core::GitReference;
use crate::core::PackageId;
use crate::core::PackageIdSpec;
use crate::core::PackageIdSpecQuery;
use crate::core::PackageSet;
use crate::core::SourceId;
use crate::core::Workspace;
use crate::ops;
use crate::sources::RecursivePathSource;
use crate::util::cache_lock::CacheLockMode;
use crate::util::context::FeatureUnification;
use crate::util::errors::CargoResult;
use crate::util::CanonicalUrl;
use anyhow::Context as _;
use cargo_util::paths;
use cargo_util_schemas::core::PartialVersion;
use std::collections::{HashMap, HashSet};
use tracing::{debug, trace};

/// Filter that decides whether to keep using a package ID from the previous lockfile.
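///
/// For instance, an update-style flow might pass a closure like the following
/// hedged sketch (the package name `"serde"` is purely illustrative):
///
/// ```ignore
/// // Keep everything locked except the package being updated.
/// let keep: Keep<'_> = &|id| id.name().as_str() != "serde";
/// ```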
type Keep<'a> = &'a dyn Fn(&PackageId) -> bool;

/// Result for `resolve_ws_with_opts`.
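///
/// Callers typically destructure it; a minimal sketch, assuming `ws_resolve` was
/// returned by [`resolve_ws_with_opts`]:
///
/// ```ignore
/// let WorkspaceResolve {
///     pkg_set,
///     workspace_resolve,
///     targeted_resolve,
///     resolved_features,
/// } = ws_resolve;
/// ```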
pub struct WorkspaceResolve<'gctx> {
    /// Packages to be downloaded.
    pub pkg_set: PackageSet<'gctx>,
    /// The resolve for the entire workspace.
    ///
    /// This may be `None` for things like `cargo install` and `-Zavoid-dev-deps`.
    /// This does not include `paths` overrides.
    pub workspace_resolve: Option<Resolve>,
    /// The narrowed resolve, with the specific features enabled, and only the
    /// given package specs requested.
    pub targeted_resolve: Resolve,
    /// The features activated per package.
    pub resolved_features: ResolvedFeatures,
}

const UNUSED_PATCH_WARNING: &str = "\
Check that the patched package version and available features are compatible
with the dependency requirements. If the patch has a different version from
what is locked in the Cargo.lock file, run `cargo update` to use the new
version. This may also occur with an optional dependency that is not enabled.";

/// Resolves all dependencies for the workspace using the previous
/// lock file as a guide if present.
///
/// This function will also write the result of resolution as a new lock file
/// (unless it is an ephemeral workspace such as `cargo install` or `cargo
/// package`).
///
/// This is a simple interface used by commands like `clean`, `fetch`, and
/// `package`, which don't specify any options or features.
pub fn resolve_ws<'a>(ws: &Workspace<'a>, dry_run: bool) -> CargoResult<(PackageSet<'a>, Resolve)> {
    let mut registry = ws.package_registry()?;
    let resolve = resolve_with_registry(ws, &mut registry, dry_run)?;
    let packages = get_resolved_packages(&resolve, registry)?;
    Ok((packages, resolve))
}

/// Resolves dependencies for some packages of the workspace,
/// taking into account `paths` overrides and activated features.
///
/// This function will also write the result of resolution as a new lock file
/// (unless `Workspace::require_optional_deps` is false, such as `cargo
/// install` or `-Z avoid-dev-deps`), or it is an ephemeral workspace (`cargo
/// install` or `cargo package`).
///
/// `specs` may be empty, which indicates it should resolve all workspace
/// members. In this case, `opts.all_features` must be `true`.
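///
/// A hedged sketch of a typical call is shown below. `RustcTargetData::new` and
/// `ForceAllTargets::No` are assumed here for illustration (they are not defined in
/// this module); check their definitions before relying on this exact shape.
///
/// ```ignore
/// // Resolve every workspace member with no extra CLI features.
/// let specs = ops::Packages::All(Vec::new()).to_package_id_specs(ws)?;
/// let requested_targets = &[CompileKind::Host];
/// let mut target_data = RustcTargetData::new(ws, requested_targets)?; // assumed constructor
/// let ws_resolve = resolve_ws_with_opts(
///     ws,
///     &mut target_data,
///     requested_targets,
///     &CliFeatures::new_all(false),
///     &specs,
///     HasDevUnits::Yes,
///     ForceAllTargets::No, // assumed variant
///     /* dry_run */ false,
/// )?;
/// ```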
pub fn resolve_ws_with_opts<'gctx>(
    ws: &Workspace<'gctx>,
    target_data: &mut RustcTargetData<'gctx>,
    requested_targets: &[CompileKind],
    cli_features: &CliFeatures,
    specs: &[PackageIdSpec],
    has_dev_units: HasDevUnits,
    force_all_targets: ForceAllTargets,
    dry_run: bool,
) -> CargoResult<WorkspaceResolve<'gctx>> {
    let specs = match ws.resolve_feature_unification() {
        FeatureUnification::Selected => specs,
        FeatureUnification::Workspace => &ops::Packages::All(Vec::new()).to_package_id_specs(ws)?,
    };
    let mut registry = ws.package_registry()?;
    let (resolve, resolved_with_overrides) = if ws.ignore_lock() {
        let add_patches = true;
        let resolve = None;
        let resolved_with_overrides = resolve_with_previous(
            &mut registry,
            ws,
            cli_features,
            has_dev_units,
            resolve.as_ref(),
            None,
            specs,
            add_patches,
        )?;
        ops::print_lockfile_changes(ws, None, &resolved_with_overrides, &mut registry)?;
        (resolve, resolved_with_overrides)
    } else if ws.require_optional_deps() {
        // First, resolve the root_package's *listed* dependencies, as well as
        // downloading and updating all remotes and such.
        let resolve = resolve_with_registry(ws, &mut registry, dry_run)?;
        // No need to add patches again, `resolve_with_registry` has done it.
        let add_patches = false;

        // Second, resolve with precisely what we're doing. Filter out
        // transitive dependencies if necessary, specify features, handle
        // overrides, etc.
        add_overrides(&mut registry, ws)?;

        for (replace_spec, dep) in ws.root_replace() {
            if !resolve
                .iter()
                .any(|r| replace_spec.matches(r) && !dep.matches_id(r))
            {
                ws.gctx()
                    .shell()
                    .warn(format!("package replacement is not used: {}", replace_spec))?
            }

            if dep.features().len() != 0 || !dep.uses_default_features() {
                ws.gctx()
                    .shell()
                    .warn(format!(
                        "replacement for `{}` uses the features mechanism. \
                         default-features and features will not take effect because the replacement dependency does not support this mechanism",
                        dep.package_name()
                    ))?
            }
        }

        let resolved_with_overrides = resolve_with_previous(
            &mut registry,
            ws,
            cli_features,
            has_dev_units,
            Some(&resolve),
            None,
            specs,
            add_patches,
        )?;
        (Some(resolve), resolved_with_overrides)
    } else {
        let add_patches = true;
        let resolve = ops::load_pkg_lockfile(ws)?;
        let resolved_with_overrides = resolve_with_previous(
            &mut registry,
            ws,
            cli_features,
            has_dev_units,
            resolve.as_ref(),
            None,
            specs,
            add_patches,
        )?;
        // Skipping `print_lockfile_changes` as there are cases where this prints irrelevant
        // information
        (resolve, resolved_with_overrides)
    };

    let pkg_set = get_resolved_packages(&resolved_with_overrides, registry)?;

    let member_ids = ws
        .members_with_features(specs, cli_features)?
        .into_iter()
        .map(|(p, _fts)| p.package_id())
        .collect::<Vec<_>>();
    pkg_set.download_accessible(
        &resolved_with_overrides,
        &member_ids,
        has_dev_units,
        requested_targets,
        target_data,
        force_all_targets,
    )?;

    let feature_opts = FeatureOpts::new(ws, has_dev_units, force_all_targets)?;
    let resolved_features = FeatureResolver::resolve(
        ws,
        target_data,
        &resolved_with_overrides,
        &pkg_set,
        cli_features,
        specs,
        requested_targets,
        feature_opts,
    )?;

    pkg_set.warn_no_lib_packages_and_artifact_libs_overlapping_deps(
        ws,
        &resolved_with_overrides,
        &member_ids,
        has_dev_units,
        requested_targets,
        target_data,
        force_all_targets,
    )?;

    Ok(WorkspaceResolve {
        pkg_set,
        workspace_resolve: resolve,
        targeted_resolve: resolved_with_overrides,
        resolved_features,
    })
}

#[tracing::instrument(skip_all)]
fn resolve_with_registry<'gctx>(
    ws: &Workspace<'gctx>,
    registry: &mut PackageRegistry<'gctx>,
    dry_run: bool,
) -> CargoResult<Resolve> {
    let prev = ops::load_pkg_lockfile(ws)?;
    let mut resolve = resolve_with_previous(
        registry,
        ws,
        &CliFeatures::new_all(true),
        HasDevUnits::Yes,
        prev.as_ref(),
        None,
        &[],
        true,
    )?;

    let print = if !ws.is_ephemeral() && ws.require_optional_deps() {
        if !dry_run {
            ops::write_pkg_lockfile(ws, &mut resolve)?
        } else {
            true
        }
    } else {
        // This mostly represents
        // - `cargo install --locked`, where the only change is that the package is no
        //   longer local but comes from the registry, which is noise
        // - publishing of libraries
        false
    };
    if print {
        ops::print_lockfile_changes(ws, prev.as_ref(), &resolve, registry)?;
    }
    Ok(resolve)
}

/// Resolves all dependencies for a package using an optional previous instance
/// of resolve to guide the resolution process.
///
/// This also takes an optional filter `keep_previous`, which informs the `registry`
/// which package IDs should be locked to the previous instance of resolve
/// (often used in conjunction with updates). See comments in [`register_previous_locks`]
/// for scenarios that might override this.
///
/// The previous resolve normally comes from a lock file. This function does not
/// read or write lock files from the filesystem.
///
/// `specs` may be empty, which indicates it should resolve all workspace
/// members. In this case, `opts.all_features` must be `true`.
///
/// If `register_patches` is true, then entries from the `[patch]` table in
/// the manifest will be added to the given `PackageRegistry`.
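///
/// A minimal sketch of a direct call, mirroring what [`resolve_ws`] ends up doing
/// through its helpers (all argument values shown are illustrative):
///
/// ```ignore
/// let mut registry = ws.package_registry()?;
/// let prev = ops::load_pkg_lockfile(ws)?;
/// let resolve = resolve_with_previous(
///     &mut registry,
///     ws,
///     &CliFeatures::new_all(true),
///     HasDevUnits::Yes,
///     prev.as_ref(),
///     None, // keep everything from the previous resolve
///     &[],  // empty specs: resolve all workspace members
///     true, // register `[patch]` entries
/// )?;
/// ```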
#[tracing::instrument(skip_all)]
pub fn resolve_with_previous<'gctx>(
    registry: &mut PackageRegistry<'gctx>,
    ws: &Workspace<'gctx>,
    cli_features: &CliFeatures,
    has_dev_units: HasDevUnits,
    previous: Option<&Resolve>,
    keep_previous: Option<Keep<'_>>,
    specs: &[PackageIdSpec],
    register_patches: bool,
) -> CargoResult<Resolve> {
    // We only want one Cargo at a time resolving a crate graph since this can
    // involve a lot of frobbing of the global caches.
    let _lock = ws
        .gctx()
        .acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?;

    // Some packages are already loaded when setting up a workspace. This
    // makes it so anything that was already loaded will not be loaded again.
    // Without this there were cases where members would be parsed multiple times
    ws.preload(registry);

    // In case any members were not already loaded or the Workspace is_ephemeral.
    for member in ws.members() {
        registry.add_sources(Some(member.package_id().source_id()))?;
    }

    // Try to keep all from previous resolve if no instruction given.
    let keep_previous = keep_previous.unwrap_or(&|_| true);

    // While registering patches, we will record preferences for particular versions
    // of various packages.
    let mut version_prefs = VersionPreferences::default();
    if ws.gctx().cli_unstable().minimal_versions {
        version_prefs.version_ordering(VersionOrdering::MinimumVersionsFirst)
    }
    if ws.resolve_honors_rust_version() {
        let mut rust_versions: Vec<_> = ws
            .members()
            .filter_map(|p| p.rust_version().map(|rv| rv.as_partial().clone()))
            .collect();
        if rust_versions.is_empty() {
            let rustc = ws.gctx().load_global_rustc(Some(ws))?;
            let rust_version: PartialVersion = rustc.version.clone().into();
            rust_versions.push(rust_version);
        }
        version_prefs.rust_versions(rust_versions);
    }

    let avoid_patch_ids = if register_patches {
        register_patch_entries(registry, ws, previous, &mut version_prefs, keep_previous)?
    } else {
        HashSet::new()
    };

    // Refine `keep` with patches that should avoid locking.
    let keep = |p: &PackageId| keep_previous(p) && !avoid_patch_ids.contains(p);

    let dev_deps = ws.require_optional_deps() || has_dev_units == HasDevUnits::Yes;

    if let Some(r) = previous {
        trace!("previous: {:?}", r);

        // In the case where a previous instance of resolve is available, we
        // want to lock as many packages as possible to the previous version
        // without disturbing the graph structure.
        register_previous_locks(ws, registry, r, &keep, dev_deps);

        // Prefer to use anything in the previous lock file, aka we want to have conservative updates.
        let _span = tracing::span!(tracing::Level::TRACE, "prefer_package_id").entered();
        for id in r.iter().filter(keep) {
            debug!("attempting to prefer {}", id);
            version_prefs.prefer_package_id(id);
        }
    }

    if register_patches {
        registry.lock_patches();
    }

    let summaries: Vec<(Summary, ResolveOpts)> = {
        let _span = tracing::span!(tracing::Level::TRACE, "registry.lock").entered();
        ws.members_with_features(specs, cli_features)?
            .into_iter()
            .map(|(member, features)| {
                let summary = registry.lock(member.summary().clone());
                (
                    summary,
                    ResolveOpts {
                        dev_deps,
                        features: RequestedFeatures::CliFeatures(features),
                    },
                )
            })
            .collect()
    };

    let replace = lock_replacements(ws, previous, &keep);

    let mut resolved = resolver::resolve(
        &summaries,
        &replace,
        registry,
        &version_prefs,
        ResolveVersion::with_rust_version(ws.lowest_rust_version()),
        Some(ws.gctx()),
    )?;

    let patches = registry.patches().values().flat_map(|v| v.iter());
    resolved.register_used_patches(patches);

    if register_patches && !resolved.unused_patches().is_empty() {
        emit_warnings_of_unused_patches(ws, &resolved, registry)?;
    }

    if let Some(previous) = previous {
        resolved.merge_from(previous)?;
    }
    let gctx = ws.gctx();
    let mut deferred = gctx.deferred_global_last_use()?;
    deferred.save_no_error(gctx);
    Ok(resolved)
}

/// Read the `paths` configuration variable to discover all path overrides that
/// have been configured.
#[tracing::instrument(skip_all)]
pub fn add_overrides<'a>(
    registry: &mut PackageRegistry<'a>,
    ws: &Workspace<'a>,
) -> CargoResult<()> {
    let gctx = ws.gctx();
    let Some(paths) = gctx.get_list("paths")? else {
        return Ok(());
    };

    let paths = paths.val.iter().map(|(s, def)| {
        // The path listed next to the string is the config file in which the
        // key was located, so we want to pop off the `.cargo/config` component
        // to get the directory containing the `.cargo` folder.
        (paths::normalize_path(&def.root(gctx).join(s)), def)
    });

    for (path, definition) in paths {
        let id = SourceId::for_path(&path)?;
        let mut source = RecursivePathSource::new(&path, id, ws.gctx());
        source.load().with_context(|| {
            format!(
                "failed to update path override `{}` \
                 (defined in `{}`)",
                path.display(),
                definition
            )
        })?;
        registry.add_override(Box::new(source));
    }
    Ok(())
}

pub fn get_resolved_packages<'gctx>(
    resolve: &Resolve,
    registry: PackageRegistry<'gctx>,
) -> CargoResult<PackageSet<'gctx>> {
    let ids: Vec<PackageId> = resolve.iter().collect();
    registry.get(&ids)
}

/// In this function we're responsible for informing the `registry` of all
/// locked dependencies from the previous lock file we had, `resolve`.
///
/// This gets particularly tricky for a couple of reasons. The first is that we
/// want all updates to be conservative, so we actually want to take the
/// `resolve` into account (and avoid unnecessary registry updates and such).
/// The second, however, is that we want to be resilient to updates of
/// manifests. For example if a dependency is added or a version is changed we
/// want to make sure that we properly re-resolve (conservatively) instead of
/// providing an opaque error.
///
/// The logic here is somewhat subtle, but there should be more comments below to
/// clarify things.
///
/// Note that this function, at the time of this writing, is basically the
/// entire fix for issue #4127.
#[tracing::instrument(skip_all)]
fn register_previous_locks(
    ws: &Workspace<'_>,
    registry: &mut PackageRegistry<'_>,
    resolve: &Resolve,
    keep: Keep<'_>,
    dev_deps: bool,
) {
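    // Helper: given a `SourceId`, load the corresponding `Package` if the source is
    // a path pointing at a readable `Cargo.toml`; returns `None` otherwise.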
    let path_pkg = |id: SourceId| {
        if !id.is_path() {
            return None;
        }
        if let Ok(path) = id.url().to_file_path() {
            if let Ok(pkg) = ws.load(&path.join("Cargo.toml")) {
                return Some(pkg);
            }
        }
        None
    };

    // Ok so we've been passed in a `keep` function which basically says "if I
    // return `true` then this package wasn't listed for an update on the command
    // line". That is, if we run `cargo update foo` then `keep(bar)` will return
    // `true`, whereas `keep(foo)` will return `false` (roughly speaking).
    //
    // This isn't actually quite what we want, however. Instead we want to
    // further refine this `keep` function with *all transitive dependencies* of
    // the packages we're not keeping. For example, consider a case like this:
    //
    // * There's a crate `log`.
    // * There's a crate `serde` which depends on `log`.
    //
    // Let's say we then run `cargo update serde`. This may *also* want to
    // update the `log` dependency as our newer version of `serde` may have a
    // new minimum version required for `log`. Now this isn't always guaranteed
    // to work. What'll happen here is we *won't* lock the `log` dependency nor
    // the `log` crate itself, but we will inform the registry "please prefer
    // this version of `log`". That way if our newer version of serde works with
    // the older version of `log`, we conservatively won't update `log`. If,
    // however, nothing else in the dependency graph depends on `log` and the
    // newer version of `serde` requires a new version of `log` it'll get pulled
    // in (as we didn't accidentally lock it to an old version).
    let mut avoid_locking = HashSet::new();
    registry.add_to_yanked_whitelist(resolve.iter().filter(keep));
    for node in resolve.iter() {
        if !keep(&node) {
            add_deps(resolve, node, &mut avoid_locking);
        }
    }

    // Ok, but the above loop isn't the entire story! Updates to the dependency
    // graph can come from two locations, the `cargo update` command or
    // manifests themselves. For example a manifest on the filesystem may
    // have been updated to have an updated version requirement on `serde`. In
    // this case both `keep(serde)` and `keep(log)` return `true` (the `keep`
    // that's an argument to this function). We, however, don't want to keep
    // either of those! Otherwise we'll get obscure resolve errors about locked
    // versions.
    //
    // To solve this problem we iterate over all packages with path sources
    // (aka ones with manifests that are changing) and take a look at all of
    // their dependencies. If any dependency does not match something in the
    // previous lock file, then we're guaranteed that the main resolver will
    // update the source of this dependency no matter what. Knowing this we
    // poison all packages from the same source, forcing them all to get
    // updated.
    //
    // This may seem like a heavy hammer, and it is! It means that if you change
    // anything from crates.io then all of crates.io becomes unlocked. Note,
    // however, that we still want conservative updates. This currently happens
    // because the first candidate the resolver picks is the previously locked
    // version, and only if that fails to activate do we move on and try
    // a different version (giving the guise of conservative updates).
    //
    // For example let's say we had `serde = "0.1"` written in our lock file.
    // When we later edit this to `serde = "0.1.3"` we don't want to lock serde
    // at its old version, 0.1.1. Instead we want to allow it to update to
    // `0.1.3` and update its own dependencies (like above). To do this *all
    // crates from crates.io* are not locked (aka added to `avoid_locking`).
    // For dependencies like `log` their previous version in the lock file will
    // come up first, before newer versions, if newer versions are available.
    {
        let _span = tracing::span!(tracing::Level::TRACE, "poison").entered();
        let mut path_deps = ws.members().cloned().collect::<Vec<_>>();
        let mut visited = HashSet::new();
        while let Some(member) = path_deps.pop() {
            if !visited.insert(member.package_id()) {
                continue;
            }
            let is_ws_member = ws.is_member(&member);
            for dep in member.dependencies() {
                // If this dependency didn't match anything special then we may want
                // to poison the source as it may have been added. If this path
                // dependency is **not** a workspace member, however, and it's an
                // optional/non-transitive dependency, then it won't necessarily
                // be in our lock file. If this shows up then we avoid poisoning
                // this source as otherwise we'd repeatedly update the registry.
                //
                // TODO: this breaks adding an optional dependency in a
                // non-workspace member and then simultaneously editing the
                // dependency on that crate to enable the feature. For now,
                // this bug is better than the always-updating registry though.
                if !is_ws_member && (dep.is_optional() || !dep.is_transitive()) {
                    continue;
                }

                // If dev-dependencies aren't being resolved, skip them.
                if !dep.is_transitive() && !dev_deps {
                    continue;
                }

                // If this is a path dependency, then try to push it onto our
                // worklist.
                if let Some(pkg) = path_pkg(dep.source_id()) {
                    path_deps.push(pkg);
                    continue;
                }

                // If we match *anything* in the dependency graph then we consider
                // ourselves all ok, and assume that we'll resolve to that.
                if resolve.iter().any(|id| dep.matches_ignoring_source(id)) {
                    continue;
                }

                // Ok if nothing matches, then we poison the source of these
                // dependencies and the previous lock file.
                debug!(
                    "poisoning {} because {} looks like it changed {}",
                    dep.source_id(),
                    member.package_id(),
                    dep.package_name()
                );
                for id in resolve
                    .iter()
                    .filter(|id| id.source_id() == dep.source_id())
                {
                    add_deps(resolve, id, &mut avoid_locking);
                }
            }
        }
    }

    // Additionally, here we process all path dependencies listed in the previous
    // resolve. Not only can their dependencies change, but the version of the
    // package itself can change as well. If this ends up happening
    // then we want to make sure we don't lock a package ID node that doesn't
    // actually exist. Note that we don't do transitive visits of all the
    // package's dependencies here as that'll be covered below to poison those
    // if they changed.
    //
    // This must come after all other `add_deps` calls to ensure it recursively walks the tree when
    // called.
    for node in resolve.iter() {
        if let Some(pkg) = path_pkg(node.source_id()) {
            if pkg.package_id() != node {
                avoid_locking.insert(node);
            }
        }
    }

    // Alright now that we've got our new, fresh, shiny, and refined `keep`
    // function let's put it to action. Take a look at the previous lock file,
    // filter everything by this callback, and then shove everything else into
    // the registry as a locked dependency.
    let keep = |id: &PackageId| keep(id) && !avoid_locking.contains(id);

    registry.clear_lock();
    {
        let _span = tracing::span!(tracing::Level::TRACE, "register_lock").entered();
        for node in resolve.iter().filter(keep) {
            let deps = resolve
                .deps_not_replaced(node)
                .map(|p| p.0)
                .filter(keep)
                .collect::<Vec<_>>();

            // In the v2 lockfile format and prior the `branch=master` dependency
            // directive was serialized the same way as the no-branch-listed
            // directive. Nowadays in Cargo, however, these two directives are
            // considered distinct and are no longer represented the same way. To
            // maintain compatibility with older lock files we register locked nodes
            // for *both* the master branch and the default branch.
            //
            // Note that this is only applicable for loading older resolves now at
            // this point. All new lock files are encoded as v3-or-later, so this is
            // just compat for loading an old lock file successfully.
            if let Some(node) = master_branch_git_source(node, resolve) {
                registry.register_lock(node, deps.clone());
            }

            registry.register_lock(node, deps);
        }
    }

    /// Recursively add `node` and all its transitive dependencies to `set`.
    fn add_deps(resolve: &Resolve, node: PackageId, set: &mut HashSet<PackageId>) {
        if !set.insert(node) {
            return;
        }
        debug!("ignoring any lock pointing directly at {}", node);
        for (dep, _) in resolve.deps_not_replaced(node) {
            add_deps(resolve, dep, set);
        }
    }
}

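/// Maps a package ID whose git source uses `GitReference::DefaultBranch` to an
/// equivalent ID pinned to `branch = "master"`, but only for v2-or-older resolves
/// where the two forms were serialized identically. Returns `None` otherwise.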
fn master_branch_git_source(id: PackageId, resolve: &Resolve) -> Option<PackageId> {
    if resolve.version() <= ResolveVersion::V2 {
        let source = id.source_id();
        if let Some(GitReference::DefaultBranch) = source.git_reference() {
            let new_source =
                SourceId::for_git(source.url(), GitReference::Branch("master".to_string()))
                    .unwrap()
                    .with_precise_from(source);
            return Some(id.with_source_id(new_source));
        }
    }
    None
}

/// Emits warnings of unused patches case by case.
///
/// This function does its best to provide more targeted and helpful messages
/// (such as showing close candidates that failed to match). However, that's
/// not terribly easy to do, so just show a general help message if we cannot.
fn emit_warnings_of_unused_patches(
    ws: &Workspace<'_>,
    resolve: &Resolve,
    registry: &PackageRegistry<'_>,
) -> CargoResult<()> {
    const MESSAGE: &str = "was not used in the crate graph.";

    // Patch package IDs mapped to the source URLs being patched.
    let mut patch_pkgid_to_urls = HashMap::new();
    for (url, summaries) in registry.patches().iter() {
        for summary in summaries.iter() {
            patch_pkgid_to_urls
                .entry(summary.package_id())
                .or_insert_with(HashSet::new)
                .insert(url);
        }
    }

    // pkg name -> all source IDs under the same pkg name
    let mut source_ids_grouped_by_pkg_name = HashMap::new();
    for pkgid in resolve.iter() {
        source_ids_grouped_by_pkg_name
            .entry(pkgid.name())
            .or_insert_with(HashSet::new)
            .insert(pkgid.source_id());
    }

    let mut unemitted_unused_patches = Vec::new();
    for unused in resolve.unused_patches().iter() {
        // Show alternative source URLs if the source URLs being patched
        // cannot be found in the crate graph.
        match (
            source_ids_grouped_by_pkg_name.get(&unused.name()),
            patch_pkgid_to_urls.get(unused),
        ) {
            (Some(ids), Some(patched_urls))
                if ids
                    .iter()
                    .all(|id| !patched_urls.contains(id.canonical_url())) =>
            {
                use std::fmt::Write;
                let mut msg = String::new();
                writeln!(msg, "Patch `{}` {}", unused, MESSAGE)?;
                write!(
                    msg,
                    "Perhaps you misspelled the source URL being patched.\n\
                     Possible URLs for `[patch.<URL>]`:",
                )?;
                for id in ids.iter() {
                    write!(msg, "\n {}", id.display_registry_name())?;
                }
                ws.gctx().shell().warn(msg)?;
            }
            _ => unemitted_unused_patches.push(unused),
        }
    }

    // Show general help message.
    if !unemitted_unused_patches.is_empty() {
        let warnings: Vec<_> = unemitted_unused_patches
            .iter()
            .map(|pkgid| format!("Patch `{}` {}", pkgid, MESSAGE))
            .collect();
        ws.gctx()
            .shell()
            .warn(format!("{}\n{}", warnings.join("\n"), UNUSED_PATCH_WARNING))?;
    }

    return Ok(());
}

/// Informs `registry` and `version_prefs` that `[patch]` entries are available
/// and preferable for the dependency resolution.
///
/// This returns a set of `PackageId`s of `[patch]` entries, and some related
/// locked `PackageId`s, for which locking should be avoided (but which will be
/// preferred when searching dependencies, via [`VersionPreferences::prefer_dependency`]).
#[tracing::instrument(level = "debug", skip_all, ret)]
fn register_patch_entries(
    registry: &mut PackageRegistry<'_>,
    ws: &Workspace<'_>,
    previous: Option<&Resolve>,
    version_prefs: &mut VersionPreferences,
    keep_previous: Keep<'_>,
) -> CargoResult<HashSet<PackageId>> {
    let mut avoid_patch_ids = HashSet::new();
    for (url, patches) in ws.root_patch()?.iter() {
        for patch in patches {
            version_prefs.prefer_dependency(patch.clone());
        }
        let Some(previous) = previous else {
            let patches: Vec<_> = patches.iter().map(|p| (p, None)).collect();
            let unlock_ids = registry.patch(url, &patches)?;
            // Since nothing is locked, this shouldn't possibly return anything.
            assert!(unlock_ids.is_empty());
            continue;
        };

        // This is a list of pairs where the first element of the pair is
        // the raw `Dependency` which matches what's listed in `Cargo.toml`.
        // The second element is, if present, the "locked" version of
        // the `Dependency` as well as the `PackageId` that it previously
        // resolved to. This second element is calculated by looking at the
        // previous resolve graph, which is primarily what's done here to
        // build the `registrations` list.
        let mut registrations = Vec::new();
        for dep in patches {
            let candidates = || {
                previous
                    .iter()
                    .chain(previous.unused_patches().iter().cloned())
                    .filter(&keep_previous)
            };

            let lock = match candidates().find(|id| dep.matches_id(*id)) {
                // If we found an exactly matching candidate in our list of
                // candidates, then that's the one to use.
                Some(package_id) => {
                    let mut locked_dep = dep.clone();
                    locked_dep.lock_to(package_id);
                    Some(LockedPatchDependency {
                        dependency: locked_dep,
                        package_id,
                        alt_package_id: None,
                    })
                }
                None => {
                    // If the candidate does not have a matching source id
                    // then we may still have a lock candidate. If we're
                    // loading a v2-encoded resolve graph and `dep` is a
                    // git dep with `branch = 'master'`, then this should
                    // also match candidates without `branch = 'master'`
                    // (which is now treated separately in Cargo).
                    //
                    // In this scenario we try to convert candidates located
                    // in the resolve graph to explicitly having the
                    // `master` branch (if they otherwise point to
                    // `DefaultBranch`). If this works and our `dep`
                    // matches that then this is something we'll lock to.
                    match candidates().find(|&id| match master_branch_git_source(id, previous) {
                        Some(id) => dep.matches_id(id),
                        None => false,
                    }) {
                        Some(id_using_default) => {
                            let id_using_master = id_using_default.with_source_id(
                                dep.source_id()
                                    .with_precise_from(id_using_default.source_id()),
                            );

                            let mut locked_dep = dep.clone();
                            locked_dep.lock_to(id_using_master);
                            Some(LockedPatchDependency {
                                dependency: locked_dep,
                                package_id: id_using_master,
                                // Note that this is where the magic
                                // happens, where the resolve graph
                                // probably has locks pointing to
                                // DefaultBranch sources, and by including
                                // this here those will get transparently
                                // rewritten to Branch("master") which we
                                // have a lock entry for.
                                alt_package_id: Some(id_using_default),
                            })
                        }

                        // No locked candidate was found
                        None => None,
                    }
                }
            };

            registrations.push((dep, lock));
        }

        let canonical = CanonicalUrl::new(url)?;
        for (orig_patch, unlock_id) in registry.patch(url, &registrations)? {
            // Avoid the locked patch ID.
            avoid_patch_ids.insert(unlock_id);
            // Also avoid the thing it is patching.
            avoid_patch_ids.extend(previous.iter().filter(|id| {
                orig_patch.matches_ignoring_source(*id)
                    && *id.source_id().canonical_url() == canonical
            }));
        }
    }

    Ok(avoid_patch_ids)
}

/// Locks each `[replace]` entry to a specific Package ID
/// if the lockfile contains any corresponding previous replacement.
fn lock_replacements(
    ws: &Workspace<'_>,
    previous: Option<&Resolve>,
    keep: Keep<'_>,
) -> Vec<(PackageIdSpec, Dependency)> {
    let root_replace = ws.root_replace();
    let replace = match previous {
        Some(r) => root_replace
            .iter()
            .map(|(spec, dep)| {
                for (&key, &val) in r.replacements().iter() {
                    if spec.matches(key) && dep.matches_id(val) && keep(&val) {
                        let mut dep = dep.clone();
                        dep.lock_to(val);
                        return (spec.clone(), dep);
                    }
                }
                (spec.clone(), dep.clone())
            })
            .collect::<Vec<_>>(),
        None => root_replace.to_vec(),
    };
    replace
}