cargo/ops/cargo_compile/mod.rs

//! The entry point for starting the compilation process for commands like
//! `build`, `test`, `doc`, `rustc`, etc.
//!
//! The [`compile`] function will do all the work to compile a workspace. A
//! rough outline is:
//!
//! 1. Resolve the dependency graph (see [`ops::resolve`]).
//! 2. Download any packages needed (see [`PackageSet`]).
//! 3. Generate a list of top-level "units" of work for the targets the user
//!   requested on the command-line. Each [`Unit`] corresponds to a compiler
//!   invocation. This is done in this module ([`UnitGenerator::generate_root_units`]).
//! 4. Starting from the root [`Unit`]s, generate the [`UnitGraph`] by walking the dependency graph
//!   from the resolver. See also [`unit_dependencies`].
//! 5. Construct the [`BuildContext`] with all of the information collected so
//!   far. This is the end of the "front end" of compilation.
//! 6. Create a [`BuildRunner`] which coordinates the compilation process
//!   and will perform the following steps:
//!     1. Prepare the `target` directory (see [`Layout`]).
//!     2. Create a [`JobQueue`]. The queue checks the
//!       fingerprint of each `Unit` to determine if it should run or be
//!       skipped.
//!     3. Execute the queue via [`drain_the_queue`]. Each leaf in the queue's dependency graph is
//!        executed, and then removed from the graph when finished. This repeats until the queue is
//!        empty. Note that this is the only point in cargo that currently uses threads.
//! 7. The result of the compilation is stored in the [`Compilation`] struct. This can be used for
//!    various things, such as running tests after the compilation has finished.
//!
//! **Note**: "target" inside this module generally refers to ["Cargo Target"],
//! which corresponds to an artifact that will be built in a package. Not to be
//! confused with a target-triple or target architecture.
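//!
//! As a minimal sketch (not taken from a real caller: the manifest path is a
//! placeholder, error propagation is elided, and exact import paths may vary
//! between Cargo versions), the front end described above is typically driven
//! through [`compile`] roughly like this:
//!
//! ```ignore
//! use std::path::Path;
//!
//! use cargo::core::compiler::CompileMode;
//! use cargo::core::Workspace;
//! use cargo::ops::{self, CompileOptions};
//! use cargo::util::context::GlobalContext;
//!
//! let gctx = GlobalContext::default()?;
//! let ws = Workspace::new(Path::new("/path/to/Cargo.toml"), &gctx)?;
//! let options = CompileOptions::new(&gctx, CompileMode::Build)?;
//! // Resolves dependencies, downloads packages, builds the unit graph, and
//! // drives rustc through the job queue.
//! let compilation = ops::compile(&ws, &options)?;
//! ```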
//!
//! [`unit_dependencies`]: crate::core::compiler::unit_dependencies
//! [`Layout`]: crate::core::compiler::Layout
//! [`JobQueue`]: crate::core::compiler::job_queue
//! [`drain_the_queue`]: crate::core::compiler::job_queue
//! ["Cargo Target"]: https://doc.rust-lang.org/nightly/cargo/reference/cargo-targets.html

use std::collections::{HashMap, HashSet};
use std::hash::{Hash, Hasher};
use std::sync::Arc;

use crate::core::compiler::unit_dependencies::build_unit_dependencies;
use crate::core::compiler::unit_graph::{self, UnitDep, UnitGraph};
use crate::core::compiler::{apply_env_config, standard_lib, CrateType, TargetInfo};
use crate::core::compiler::{BuildConfig, BuildContext, BuildRunner, Compilation};
use crate::core::compiler::{CompileKind, CompileMode, CompileTarget, RustcTargetData, Unit};
use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner};
use crate::core::profiles::Profiles;
use crate::core::resolver::features::{self, CliFeatures, FeaturesFor};
use crate::core::resolver::{HasDevUnits, Resolve};
use crate::core::{PackageId, PackageSet, SourceId, TargetKind, Workspace};
use crate::drop_println;
use crate::ops;
use crate::ops::resolve::WorkspaceResolve;
use crate::util::context::{GlobalContext, WarningHandling};
use crate::util::interning::InternedString;
use crate::util::{CargoResult, StableHasher};

mod compile_filter;
pub use compile_filter::{CompileFilter, FilterRule, LibRule};

mod unit_generator;
use unit_generator::UnitGenerator;

mod packages;

pub use packages::Packages;

/// Contains information about how a package should be compiled.
///
/// Note on the distinction between `CompileOptions` and [`BuildConfig`]:
/// `BuildConfig` contains the values that need to be retained after the
/// [`BuildContext`] is created; the other fields here are no longer needed
/// once it exists. Think of `CompileOptions` as the high-level settings
/// requested on the command-line, and `BuildConfig` as the low-level settings
/// that actually drive `rustc`.
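///
/// A minimal sketch (the package name, feature name, and `gctx` binding are
/// illustrative assumptions; check the current `CliFeatures` API before
/// relying on the exact constructor) of building options for something like
/// `cargo build -p foo --features bar`:
///
/// ```ignore
/// use cargo::core::compiler::CompileMode;
/// use cargo::core::resolver::features::CliFeatures;
/// use cargo::ops::{CompileOptions, Packages};
///
/// let mut options = CompileOptions::new(&gctx, CompileMode::Build)?;
/// options.spec = Packages::Packages(vec!["foo".to_string()]);
/// options.cli_features =
///     CliFeatures::from_command_line(&["bar".to_string()], false, true)?;
/// ```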
#[derive(Debug, Clone)]
pub struct CompileOptions {
    /// Configuration information for a rustc build
    pub build_config: BuildConfig,
    /// Feature flags requested by the user.
    pub cli_features: CliFeatures,
    /// A set of packages to build.
    pub spec: Packages,
    /// Filter to apply to the root package to select which targets will be
    /// built.
    pub filter: CompileFilter,
    /// Extra arguments to be passed to rustdoc (single target only)
    pub target_rustdoc_args: Option<Vec<String>>,
    /// The specified target will be compiled with all the available arguments;
    /// note that this only accounts for the *final* invocation of rustc
    pub target_rustc_args: Option<Vec<String>>,
    /// Crate types to be passed to rustc (single target only)
    pub target_rustc_crate_types: Option<Vec<String>>,
    /// Whether the `--document-private-items` flag was specified and should
    /// be forwarded to `rustdoc`.
    pub rustdoc_document_private_items: bool,
    /// Whether the build process should check the minimum Rust version
    /// defined in the cargo metadata for a crate.
    pub honor_rust_version: Option<bool>,
}

impl CompileOptions {
    pub fn new(gctx: &GlobalContext, mode: CompileMode) -> CargoResult<CompileOptions> {
        let jobs = None;
        let keep_going = false;
        Ok(CompileOptions {
            build_config: BuildConfig::new(gctx, jobs, keep_going, &[], mode)?,
            cli_features: CliFeatures::new_all(false),
            spec: ops::Packages::Packages(Vec::new()),
            filter: CompileFilter::Default {
                required_features_filterable: false,
            },
            target_rustdoc_args: None,
            target_rustc_args: None,
            target_rustc_crate_types: None,
            rustdoc_document_private_items: false,
            honor_rust_version: None,
        })
    }
}

/// Compiles!
///
/// This uses the [`DefaultExecutor`]. To use a custom [`Executor`], see [`compile_with_exec`].
pub fn compile<'a>(ws: &Workspace<'a>, options: &CompileOptions) -> CargoResult<Compilation<'a>> {
    let exec: Arc<dyn Executor> = Arc::new(DefaultExecutor);
    compile_with_exec(ws, options, &exec)
}

/// Like [`compile`] but allows specifying a custom [`Executor`]
/// that will be able to intercept build calls and add custom logic.
///
/// [`compile`] uses [`DefaultExecutor`] which just passes calls through.
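///
/// A rough sketch of plugging in a custom executor (the `LoggingExecutor` type
/// and its [`Executor`] implementation are assumed to be defined elsewhere;
/// only the call shape is shown):
///
/// ```ignore
/// use std::sync::Arc;
///
/// use cargo::core::compiler::Executor;
/// use cargo::ops;
///
/// let exec: Arc<dyn Executor> = Arc::new(LoggingExecutor::default());
/// let compilation = ops::compile_with_exec(&ws, &options, &exec)?;
/// ```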
pub fn compile_with_exec<'a>(
    ws: &Workspace<'a>,
    options: &CompileOptions,
    exec: &Arc<dyn Executor>,
) -> CargoResult<Compilation<'a>> {
    ws.emit_warnings()?;
    let compilation = compile_ws(ws, options, exec)?;
    if ws.gctx().warning_handling()? == WarningHandling::Deny && compilation.warning_count > 0 {
        anyhow::bail!("warnings are denied by `build.warnings` configuration")
    }
    Ok(compilation)
}

/// Like [`compile_with_exec`] but without warnings from manifest parsing.
#[tracing::instrument(skip_all)]
pub fn compile_ws<'a>(
    ws: &Workspace<'a>,
    options: &CompileOptions,
    exec: &Arc<dyn Executor>,
) -> CargoResult<Compilation<'a>> {
    let interner = UnitInterner::new();
    let bcx = create_bcx(ws, options, &interner)?;
    if options.build_config.unit_graph {
        unit_graph::emit_serialized_unit_graph(&bcx.roots, &bcx.unit_graph, ws.gctx())?;
        return Compilation::new(&bcx);
    }
    crate::core::gc::auto_gc(bcx.gctx);
    let build_runner = BuildRunner::new(&bcx)?;
    if options.build_config.dry_run {
        build_runner.dry_run()
    } else {
        build_runner.compile(exec)
    }
}

/// Executes `rustc --print <VALUE>`.
///
/// * `print_opt_value` is the VALUE passed through.
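///
/// A minimal sketch of how this is reached for something like
/// `cargo rustc --print cfg` (in practice `options` comes from the parsed
/// command line):
///
/// ```ignore
/// ops::print(&ws, &options, "cfg")?;
/// ```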
pub fn print<'a>(
    ws: &Workspace<'a>,
    options: &CompileOptions,
    print_opt_value: &str,
) -> CargoResult<()> {
    let CompileOptions {
        ref build_config,
        ref target_rustc_args,
        ..
    } = *options;
    let gctx = ws.gctx();
    let rustc = gctx.load_global_rustc(Some(ws))?;
    for (index, kind) in build_config.requested_kinds.iter().enumerate() {
        if index != 0 {
            drop_println!(gctx);
        }
        let target_info = TargetInfo::new(gctx, &build_config.requested_kinds, &rustc, *kind)?;
        let mut process = rustc.process();
        apply_env_config(gctx, &mut process)?;
        process.args(&target_info.rustflags);
        if let Some(args) = target_rustc_args {
            process.args(args);
        }
        if let CompileKind::Target(t) = kind {
            process.arg("--target").arg(t.rustc_target());
        }
        process.arg("--print").arg(print_opt_value);
        process.exec()?;
    }
    Ok(())
}

/// Prepares all required information for the actual compilation.
///
/// For how it works and what data it collects,
/// please see the [module-level documentation](self).
#[tracing::instrument(skip_all)]
pub fn create_bcx<'a, 'gctx>(
    ws: &'a Workspace<'gctx>,
    options: &'a CompileOptions,
    interner: &'a UnitInterner,
) -> CargoResult<BuildContext<'a, 'gctx>> {
    let CompileOptions {
        ref build_config,
        ref spec,
        ref cli_features,
        ref filter,
        ref target_rustdoc_args,
        ref target_rustc_args,
        ref target_rustc_crate_types,
        rustdoc_document_private_items,
        honor_rust_version,
    } = *options;
    let gctx = ws.gctx();

    // Perform some pre-flight validation.
    match build_config.mode {
        CompileMode::Test
        | CompileMode::Build
        | CompileMode::Check { .. }
        | CompileMode::Bench
        | CompileMode::RunCustomBuild => {
            if ws.gctx().get_env("RUST_FLAGS").is_ok() {
                gctx.shell().warn(
                    "Cargo does not read `RUST_FLAGS` environment variable. Did you mean `RUSTFLAGS`?",
                )?;
            }
        }
        CompileMode::Doc { .. } | CompileMode::Doctest | CompileMode::Docscrape => {
            if ws.gctx().get_env("RUSTDOC_FLAGS").is_ok() {
                gctx.shell().warn(
                    "Cargo does not read `RUSTDOC_FLAGS` environment variable. Did you mean `RUSTDOCFLAGS`?"
                )?;
            }
        }
    }
    gctx.validate_term_config()?;

    let mut target_data = RustcTargetData::new(ws, &build_config.requested_kinds)?;

    let specs = spec.to_package_id_specs(ws)?;
    let has_dev_units = {
        // Rustdoc itself doesn't need dev-dependencies. But to scrape examples from packages in the
        // workspace, if any of those packages need dev-dependencies, then we need to include
        // dev-dependencies in order to scrape those packages.
        let any_pkg_has_scrape_enabled = ws
            .members_with_features(&specs, cli_features)?
            .iter()
            .any(|(pkg, _)| {
                pkg.targets()
                    .iter()
                    .any(|target| target.is_example() && target.doc_scrape_examples().is_enabled())
            });

        if filter.need_dev_deps(build_config.mode)
            || (build_config.mode.is_doc() && any_pkg_has_scrape_enabled)
        {
            HasDevUnits::Yes
        } else {
            HasDevUnits::No
        }
    };
    let dry_run = false;
    let resolve = ops::resolve_ws_with_opts(
        ws,
        &mut target_data,
        &build_config.requested_kinds,
        cli_features,
        &specs,
        has_dev_units,
        crate::core::resolver::features::ForceAllTargets::No,
        dry_run,
    )?;
    let WorkspaceResolve {
        mut pkg_set,
        workspace_resolve,
        targeted_resolve: resolve,
        resolved_features,
    } = resolve;

    let std_resolve_features = if let Some(crates) = &gctx.cli_unstable().build_std {
        let (std_package_set, std_resolve, std_features) = standard_lib::resolve_std(
            ws,
            &mut target_data,
            &build_config,
            crates,
            &build_config.requested_kinds,
        )?;
        pkg_set.add_set(std_package_set);
        Some((std_resolve, std_features))
    } else {
        None
    };

    // Find the packages in the resolver that the user wants to build (those
    // passed in with `-p` or the defaults from the workspace), and convert
    // Vec<PackageIdSpec> to a Vec<PackageId>.
    let to_build_ids = resolve.specs_to_ids(&specs)?;
    // Now get the `Package` for each `PackageId`. This may trigger a download
    // if the user specified `-p` for a dependency that is not downloaded.
    // Dependencies will be downloaded during build_unit_dependencies.
    let mut to_builds = pkg_set.get_many(to_build_ids)?;

    // The ordering here affects some error messages coming out of cargo, so
    // let's be test and CLI friendly by always printing in the same order if
    // there's an error.
    to_builds.sort_by_key(|p| p.package_id());

    for pkg in to_builds.iter() {
        pkg.manifest().print_teapot(gctx);

        if build_config.mode.is_any_test()
            && !ws.is_member(pkg)
            && pkg.dependencies().iter().any(|dep| !dep.is_transitive())
        {
            anyhow::bail!(
                "package `{}` cannot be tested because it requires dev-dependencies \
                 and is not a member of the workspace",
                pkg.name()
            );
        }
    }

    let (extra_args, extra_args_name) = match (target_rustc_args, target_rustdoc_args) {
        (Some(args), _) => (Some(args.clone()), "rustc"),
        (_, Some(args)) => (Some(args.clone()), "rustdoc"),
        _ => (None, ""),
    };

    if extra_args.is_some() && to_builds.len() != 1 {
        panic!(
            "`{}` should not accept multiple `-p` flags",
            extra_args_name
        );
    }

    let profiles = Profiles::new(ws, build_config.requested_profile)?;
    profiles.validate_packages(
        ws.profiles(),
        &mut gctx.shell(),
        workspace_resolve.as_ref().unwrap_or(&resolve),
    )?;

    // If `--target` has not been specified, then the unit graph is built
    // assuming `--target $HOST` was specified. See
    // `rebuild_unit_graph_shared` for more on why this is done.
    let explicit_host_kind = CompileKind::Target(CompileTarget::new(&target_data.rustc.host)?);
    let explicit_host_kinds: Vec<_> = build_config
        .requested_kinds
        .iter()
        .map(|kind| match kind {
            CompileKind::Host => explicit_host_kind,
            CompileKind::Target(t) => CompileKind::Target(*t),
        })
        .collect();

    // Passing `build_config.requested_kinds` instead of
    // `explicit_host_kinds` here so that `generate_root_units` can do
    // its own special handling of `CompileKind::Host`. It will
    // internally replace the host kind by the `explicit_host_kind`
    // before setting as a unit.
    let generator = UnitGenerator {
        ws,
        packages: &to_builds,
        spec,
        target_data: &target_data,
        filter,
        requested_kinds: &build_config.requested_kinds,
        explicit_host_kind,
        mode: build_config.mode,
        resolve: &resolve,
        workspace_resolve: &workspace_resolve,
        resolved_features: &resolved_features,
        package_set: &pkg_set,
        profiles: &profiles,
        interner,
        has_dev_units,
    };
    let mut units = generator.generate_root_units()?;

    if let Some(args) = target_rustc_crate_types {
        override_rustc_crate_types(&mut units, args, interner)?;
    }

    let should_scrape = build_config.mode.is_doc() && gctx.cli_unstable().rustdoc_scrape_examples;
    let mut scrape_units = if should_scrape {
        UnitGenerator {
            mode: CompileMode::Docscrape,
            ..generator
        }
        .generate_scrape_units(&units)?
    } else {
        Vec::new()
    };

    let std_roots = if let Some(crates) = gctx.cli_unstable().build_std.as_ref() {
        let (std_resolve, std_features) = std_resolve_features.as_ref().unwrap();
        standard_lib::generate_std_roots(
            &crates,
            &units,
            std_resolve,
            std_features,
            &explicit_host_kinds,
            &pkg_set,
            interner,
            &profiles,
            &target_data,
        )?
    } else {
        Default::default()
    };

    let mut unit_graph = build_unit_dependencies(
        ws,
        &pkg_set,
        &resolve,
        &resolved_features,
        std_resolve_features.as_ref(),
        &units,
        &scrape_units,
        &std_roots,
        build_config.mode,
        &target_data,
        &profiles,
        interner,
    )?;

    // TODO: In theory, Cargo should also dedupe the roots, but I'm uncertain
    // what heuristics to use in that case.
    if matches!(build_config.mode, CompileMode::Doc { deps: true, .. }) {
        remove_duplicate_doc(build_config, &units, &mut unit_graph);
    }

    let host_kind_requested = build_config
        .requested_kinds
        .iter()
        .any(CompileKind::is_host);
    // Rebuild the unit graph, replacing the explicit host targets with
    // CompileKind::Host, removing `artifact_target_for_features` and merging any dependencies
    // shared with build and artifact dependencies.
    (units, scrape_units, unit_graph) = rebuild_unit_graph_shared(
        interner,
        unit_graph,
        &units,
        &scrape_units,
        host_kind_requested.then_some(explicit_host_kind),
    );

    let mut extra_compiler_args = HashMap::new();
    if let Some(args) = extra_args {
        if units.len() != 1 {
            anyhow::bail!(
                "extra arguments to `{}` can only be passed to one \
                 target, consider filtering\nthe package by passing, \
                 e.g., `--lib` or `--bin NAME` to specify a single target",
                extra_args_name
            );
        }
        extra_compiler_args.insert(units[0].clone(), args);
    }

    for unit in units
        .iter()
        .filter(|unit| unit.mode.is_doc() || unit.mode.is_doc_test())
        .filter(|unit| rustdoc_document_private_items || unit.target.is_bin())
    {
        // Add `--document-private-items` rustdoc flag if requested or if
        // the target is a binary. Binary crates get their private items
        // documented by default.
        let mut args = vec!["--document-private-items".into()];
        if unit.target.is_bin() {
            // This warning only makes sense if it's possible to document private items
            // sometimes and ignore them at other times. But cargo consistently passes
            // `--document-private-items`, so the warning isn't useful.
            args.push("-Arustdoc::private-intra-doc-links".into());
        }
        extra_compiler_args
            .entry(unit.clone())
            .or_default()
            .extend(args);
    }

    if honor_rust_version.unwrap_or(true) {
        let rustc_version = target_data.rustc.version.clone().into();

        let mut incompatible = Vec::new();
        let mut local_incompatible = false;
        for unit in unit_graph.keys() {
            let Some(pkg_msrv) = unit.pkg.rust_version() else {
                continue;
            };

            if pkg_msrv.is_compatible_with(&rustc_version) {
                continue;
            }

            local_incompatible |= unit.is_local();
            incompatible.push((unit, pkg_msrv));
        }
        if !incompatible.is_empty() {
            use std::fmt::Write as _;

            let plural = if incompatible.len() == 1 { "" } else { "s" };
            let mut message = format!(
                "rustc {rustc_version} is not supported by the following package{plural}:\n"
            );
            incompatible.sort_by_key(|(unit, _)| (unit.pkg.name(), unit.pkg.version()));
            for (unit, msrv) in incompatible {
                let name = &unit.pkg.name();
                let version = &unit.pkg.version();
                writeln!(&mut message, "  {name}@{version} requires rustc {msrv}").unwrap();
            }
            if ws.is_ephemeral() {
                if ws.ignore_lock() {
                    writeln!(
                        &mut message,
                        "Try re-running `cargo install` with `--locked`"
                    )
                    .unwrap();
                }
            } else if !local_incompatible {
                writeln!(
                    &mut message,
                    "Either upgrade rustc or select compatible dependency versions with
`cargo update <name>@<current-ver> --precise <compatible-ver>`
where `<compatible-ver>` is the latest version supporting rustc {rustc_version}",
                )
                .unwrap();
            }
            return Err(anyhow::Error::msg(message));
        }
    }

    let bcx = BuildContext::new(
        ws,
        pkg_set,
        build_config,
        profiles,
        extra_compiler_args,
        target_data,
        units,
        unit_graph,
        scrape_units,
    )?;

    Ok(bcx)
}

/// This is used to rebuild the unit graph, sharing host dependencies if possible,
/// and applying other unit adjustments based on the whole graph.
///
/// This will translate any unit's `CompileKind::Target(host)` to
/// `CompileKind::Host` if `to_host` is not `None` and the kind is equal to `to_host`.
/// This also handles generating the unit `dep_hash`, and merging shared units if possible.
///
/// This is necessary because if normal dependencies used `CompileKind::Host`,
/// there would be no way to distinguish those units from build-dependency
/// units or artifact dependency units.
/// This can cause a problem if a shared normal/build/artifact dependency needs
/// to link to another dependency whose features differ based on whether or
/// not it is a normal, build or artifact dependency. If all units used
/// `CompileKind::Host`, then they would end up being identical, causing a
/// collision in the `UnitGraph`, and Cargo would end up randomly choosing one
/// value or the other.
///
/// The solution is to keep normal, build and artifact dependencies separate when
/// building the unit graph, and then run this second pass which will try to
/// combine shared dependencies safely. By adding a hash of the dependencies
/// to the `Unit`, this allows the `CompileKind` to be changed back to `Host`
/// and `artifact_target_for_features` to be removed without fear of an unwanted
/// collision for build or artifact dependencies.
///
/// This is also responsible for adjusting the `strip` profile option to
/// opportunistically strip if debug is 0 for all dependencies. This helps
/// remove debuginfo added by the standard library.
///
/// This is also responsible for adjusting the `debug` setting for host
/// dependencies, turning off debug if the user has not explicitly enabled it,
/// and the unit is not shared with a target unit.
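///
/// As a concrete (hypothetical) illustration: with no `--target` flag, a crate
/// such as `syn` may appear both as a normal dependency compiled for the host
/// triple (`CompileKind::Target(host)`) and as a build dependency
/// (`CompileKind::Host`). If the two units end up with the same profile,
/// features, and `dep_hash`, this pass canonicalizes the first kind to
/// `CompileKind::Host`, both intern to the same [`Unit`], and `syn` is
/// compiled only once.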
fn rebuild_unit_graph_shared(
    interner: &UnitInterner,
    unit_graph: UnitGraph,
    roots: &[Unit],
    scrape_units: &[Unit],
    to_host: Option<CompileKind>,
) -> (Vec<Unit>, Vec<Unit>, UnitGraph) {
    let mut result = UnitGraph::new();
    // Map of the old unit to the new unit, used to avoid recursing into units
    // that have already been computed to improve performance.
    let mut memo = HashMap::new();
    let new_roots = roots
        .iter()
        .map(|root| {
            traverse_and_share(
                interner,
                &mut memo,
                &mut result,
                &unit_graph,
                root,
                false,
                to_host,
            )
        })
        .collect();
    // If no unit in the unit graph ended up having scrape units attached as dependencies,
    // then they won't have been discovered in traverse_and_share and hence won't be in
    // memo. So we filter out missing scrape units.
    let new_scrape_units = scrape_units
        .iter()
        .map(|unit| memo.get(unit).unwrap().clone())
        .collect();
    (new_roots, new_scrape_units, result)
}

/// Recursive function for rebuilding the graph.
///
/// This walks `unit_graph`, starting at the given `unit`. It inserts the new
/// units into `new_graph`, and returns a new updated version of the given
/// unit (`dep_hash` is filled in, and `kind` switched if necessary).
fn traverse_and_share(
    interner: &UnitInterner,
    memo: &mut HashMap<Unit, Unit>,
    new_graph: &mut UnitGraph,
    unit_graph: &UnitGraph,
    unit: &Unit,
    unit_is_for_host: bool,
    to_host: Option<CompileKind>,
) -> Unit {
    if let Some(new_unit) = memo.get(unit) {
        // Already computed, no need to recompute.
        return new_unit.clone();
    }
    let mut dep_hash = StableHasher::new();
    let new_deps: Vec<_> = unit_graph[unit]
        .iter()
        .map(|dep| {
            let new_dep_unit = traverse_and_share(
                interner,
                memo,
                new_graph,
                unit_graph,
                &dep.unit,
                dep.unit_for.is_for_host(),
                to_host,
            );
            new_dep_unit.hash(&mut dep_hash);
            UnitDep {
                unit: new_dep_unit,
                ..dep.clone()
            }
        })
        .collect();
    // Here, we have recursively traversed this unit's dependencies, and hashed them: we can
    // finalize the dep hash.
    let new_dep_hash = Hasher::finish(&dep_hash);

    // This is the key part of the sharing process: if the unit is a runtime dependency whose
    // target is the same as the host, we canonicalize the compile kind to `CompileKind::Host`.
    // A possible host dependency counterpart to this unit would have that kind, and if such a unit
    // exists in the current `unit_graph`, they will unify in the new unit graph map `new_graph`.
    // The resulting unit graph is smaller, with fewer units, thanks to sharing these host
    // dependencies.
    let canonical_kind = match to_host {
        Some(to_host) if to_host == unit.kind => CompileKind::Host,
        _ => unit.kind,
    };

    let mut profile = unit.profile.clone();
    if profile.strip.is_deferred() {
        // If strip was not manually set, and all dependencies of this unit together
        // with this unit have debuginfo turned off, we enable debuginfo stripping.
        // This will remove pre-existing debug symbols coming from the standard library.
        if !profile.debuginfo.is_turned_on()
            && new_deps
                .iter()
                .all(|dep| !dep.unit.profile.debuginfo.is_turned_on())
        {
            profile.strip = profile.strip.strip_debuginfo();
        }
    }

    // If this is a build dependency, and it's not shared with runtime dependencies, we can weaken
    // its debuginfo level to optimize build times. We do nothing if it's an artifact dependency,
    // as it and its debuginfo may end up embedded in the main program.
    if unit_is_for_host
        && to_host.is_some()
        && profile.debuginfo.is_deferred()
        && !unit.artifact.is_true()
    {
        // We create a "probe" test to see if a unit with the same explicit debuginfo level exists
        // in the graph. This is the level we'd expect if it was set manually or the default value
        // set by a profile for a runtime dependency: its canonical value.
        let canonical_debuginfo = profile.debuginfo.finalize();
        let mut canonical_profile = profile.clone();
        canonical_profile.debuginfo = canonical_debuginfo;
        let unit_probe = interner.intern(
            &unit.pkg,
            &unit.target,
            canonical_profile,
            to_host.unwrap(),
            unit.mode,
            unit.features.clone(),
            unit.rustflags.clone(),
            unit.rustdocflags.clone(),
            unit.links_overrides.clone(),
            unit.is_std,
            unit.dep_hash,
            unit.artifact,
            unit.artifact_target_for_features,
        );

        // We can now turn the deferred value into its actual final value.
        profile.debuginfo = if unit_graph.contains_key(&unit_probe) {
            // The unit is present in both build time and runtime subgraphs: we canonicalize its
            // level to the other unit's, thus ensuring reuse between the two to optimize build times.
            canonical_debuginfo
        } else {
            // The unit is only present in the build time subgraph, we can weaken its debuginfo
            // level to optimize build times.
            canonical_debuginfo.weaken()
        }
    }

    let new_unit = interner.intern(
        &unit.pkg,
        &unit.target,
        profile,
        canonical_kind,
        unit.mode,
        unit.features.clone(),
        unit.rustflags.clone(),
        unit.rustdocflags.clone(),
        unit.links_overrides.clone(),
        unit.is_std,
        new_dep_hash,
        unit.artifact,
        // Since `dep_hash` is now filled in, there's no need to specify the artifact target
        // for target-dependent feature resolution
        None,
    );
    assert!(memo.insert(unit.clone(), new_unit.clone()).is_none());
    new_graph.entry(new_unit.clone()).or_insert(new_deps);
    new_unit
}

/// Removes duplicate `CompileMode::Doc` units that would cause problems with
/// filename collisions.
///
/// Rustdoc only separates units by crate name in the file directory
/// structure. If any two units with the same crate name exist, this would
/// cause a filename collision, causing different rustdoc invocations to stomp
/// on one another's files.
///
/// Unfortunately this does not remove all duplicates, as some of them are
/// either user error, or difficult to remove. Cases that I can think of:
///
/// - Same target name in different packages. See the `collision_doc` test.
/// - Different sources. See `collision_doc_sources` test.
///
/// Ideally this would not be necessary.
fn remove_duplicate_doc(
    build_config: &BuildConfig,
    root_units: &[Unit],
    unit_graph: &mut UnitGraph,
) {
    // First, create a mapping of crate_name -> Unit so we can see where the
    // duplicates are.
    let mut all_docs: HashMap<String, Vec<Unit>> = HashMap::new();
    for unit in unit_graph.keys() {
        if unit.mode.is_doc() {
            all_docs
                .entry(unit.target.crate_name())
                .or_default()
                .push(unit.clone());
        }
    }
    // Keep track of units to remove so that they can be efficiently removed
    // from the unit_deps.
    let mut removed_units: HashSet<Unit> = HashSet::new();
    let mut remove = |units: Vec<Unit>, reason: &str, cb: &dyn Fn(&Unit) -> bool| -> Vec<Unit> {
        let (to_remove, remaining_units): (Vec<Unit>, Vec<Unit>) = units
            .into_iter()
            .partition(|unit| cb(unit) && !root_units.contains(unit));
        for unit in to_remove {
            tracing::debug!(
                "removing duplicate doc due to {} for package {} target `{}`",
                reason,
                unit.pkg,
                unit.target.name()
            );
            unit_graph.remove(&unit);
            removed_units.insert(unit);
        }
        remaining_units
    };
    // Iterate over the duplicates and try to remove them from unit_graph.
    for (_crate_name, mut units) in all_docs {
        if units.len() == 1 {
            continue;
        }
        // Prefer target over host if --target was not specified.
        if build_config
            .requested_kinds
            .iter()
            .all(CompileKind::is_host)
        {
            // Note these duplicates may not be real duplicates, since they
            // might get merged in rebuild_unit_graph_shared. Either way, it
            // shouldn't hurt to remove them early (although the report in the
            // log might be confusing).
            units = remove(units, "host/target merger", &|unit| unit.kind.is_host());
            if units.len() == 1 {
                continue;
            }
        }
        // Prefer newer versions over older.
        let mut source_map: HashMap<(InternedString, SourceId, CompileKind), Vec<Unit>> =
            HashMap::new();
        for unit in units {
            let pkg_id = unit.pkg.package_id();
            // Note, this does not detect duplicates from different sources.
            source_map
                .entry((pkg_id.name(), pkg_id.source_id(), unit.kind))
                .or_default()
                .push(unit);
        }
        let mut remaining_units = Vec::new();
        for (_key, mut units) in source_map {
            if units.len() > 1 {
                units.sort_by(|a, b| a.pkg.version().partial_cmp(b.pkg.version()).unwrap());
                // Remove any entries with version < newest.
                let newest_version = units.last().unwrap().pkg.version().clone();
                let keep_units = remove(units, "older version", &|unit| {
                    unit.pkg.version() < &newest_version
                });
                remaining_units.extend(keep_units);
            } else {
                remaining_units.extend(units);
            }
        }
        if remaining_units.len() == 1 {
            continue;
        }
        // Are there other heuristics to remove duplicates that would make
        // sense? Maybe prefer path sources over all others?
    }
    // Also remove units from the unit_deps so there aren't any dangling edges.
    for unit_deps in unit_graph.values_mut() {
        unit_deps.retain(|unit_dep| !removed_units.contains(&unit_dep.unit));
    }
    // Remove any orphan units that were detached from the graph.
    let mut visited = HashSet::new();
    fn visit(unit: &Unit, graph: &UnitGraph, visited: &mut HashSet<Unit>) {
        if !visited.insert(unit.clone()) {
            return;
        }
        for dep in &graph[unit] {
            visit(&dep.unit, graph, visited);
        }
    }
    for unit in root_units {
        visit(unit, unit_graph, &mut visited);
    }
    unit_graph.retain(|unit, _| visited.contains(unit));
}

/// Override crate types for given units.
///
/// This is primarily used by `cargo rustc --crate-type`.
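///
/// For example, `cargo rustc --lib --crate-type cdylib,rlib` reaches this
/// function with the requested crate types (e.g., `["cdylib", "rlib"]`) and
/// rewrites the selected library unit's [`TargetKind`] accordingly before
/// unit dependencies are computed.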
fn override_rustc_crate_types(
    units: &mut [Unit],
    args: &[String],
    interner: &UnitInterner,
) -> CargoResult<()> {
    if units.len() != 1 {
        anyhow::bail!(
            "crate types to rustc can only be passed to one \
            target, consider filtering\nthe package by passing, \
            e.g., `--lib` or `--example` to specify a single target"
        );
    }

    let unit = &units[0];
    let override_unit = |f: fn(Vec<CrateType>) -> TargetKind| {
        let crate_types = args.iter().map(|s| s.into()).collect();
        let mut target = unit.target.clone();
        target.set_kind(f(crate_types));
        interner.intern(
            &unit.pkg,
            &target,
            unit.profile.clone(),
            unit.kind,
            unit.mode,
            unit.features.clone(),
            unit.rustflags.clone(),
            unit.rustdocflags.clone(),
            unit.links_overrides.clone(),
            unit.is_std,
            unit.dep_hash,
            unit.artifact,
            unit.artifact_target_for_features,
        )
    };
    units[0] = match unit.target.kind() {
        TargetKind::Lib(_) => override_unit(TargetKind::Lib),
        TargetKind::ExampleLib(_) => override_unit(TargetKind::ExampleLib),
        _ => {
            anyhow::bail!(
                "crate types can only be specified for libraries and example libraries.\n\
                Binaries, tests, and benchmarks are always the `bin` crate type"
            );
        }
    };

    Ok(())
}

/// Gets all of the features enabled for a package, plus its dependencies'
/// features.
///
/// Dependencies are added as `dep_name/feat_name` because `required-features`
/// wants to support that syntax.
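///
/// For example (hypothetical packages and features): if `foo` activates its
/// own `std` feature and its dependency `serde` is built with the `derive`
/// feature, the returned set contains both `"std"` and `"serde/derive"`.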
pub fn resolve_all_features(
    resolve_with_overrides: &Resolve,
    resolved_features: &features::ResolvedFeatures,
    package_set: &PackageSet<'_>,
    package_id: PackageId,
) -> HashSet<String> {
    let mut features: HashSet<String> = resolved_features
        .activated_features(package_id, FeaturesFor::NormalOrDev)
        .iter()
        .map(|s| s.to_string())
        .collect();

    // Include features enabled for use by dependencies so targets can also use them with the
    // required-features field when deciding whether to be built or skipped.
    for (dep_id, deps) in resolve_with_overrides.deps(package_id) {
        let is_proc_macro = package_set
            .get_one(dep_id)
            .expect("packages downloaded")
            .proc_macro();
        for dep in deps {
            let features_for = FeaturesFor::from_for_host(is_proc_macro || dep.is_build());
            for feature in resolved_features
                .activated_features_unverified(dep_id, features_for)
                .unwrap_or_default()
            {
                features.insert(format!("{}/{}", dep.name_in_toml(), feature));
            }
        }
    }

    features
}