// cargo/ops/cargo_compile/mod.rs
//! The entry point for starting the compilation process for commands like
//! `build`, `test`, `doc`, `rustc`, etc.
//!
//! The [`compile`] function will do all the work to compile a workspace. A
//! rough outline is:
//!
//! 1. Resolve the dependency graph (see [`ops::resolve`]).
//! 2. Download any packages needed (see [`PackageSet`]).
//! 3. Generate a list of top-level "units" of work for the targets the user
//!   requested on the command-line. Each [`Unit`] corresponds to a compiler
//!   invocation. This is done in this module ([`UnitGenerator::generate_root_units`]).
//! 4. Starting from the root [`Unit`]s, generate the [`UnitGraph`] by walking the dependency graph
//!   from the resolver.  See also [`unit_dependencies`].
//! 5. Construct the [`BuildContext`] with all of the information collected so
//!   far. This is the end of the "front end" of compilation.
//! 6. Create a [`BuildRunner`] which coordinates the compilation process
//!   and will perform the following steps:
//!     1. Prepare the `target` directory (see [`Layout`]).
//!     2. Create a [`JobQueue`]. The queue checks the
//!       fingerprint of each `Unit` to determine if it should run or be
//!       skipped.
//!     3. Execute the queue via [`drain_the_queue`]. Each leaf in the queue's dependency graph is
//!        executed, and then removed from the graph when finished. This repeats until the queue is
//!        empty.  Note that this is the only point in cargo that currently uses threads.
//! 7. The result of the compilation is stored in the [`Compilation`] struct. This can be used for
//!    various things, such as running tests after the compilation has finished.
//!
//! **Note**: "target" inside this module generally refers to ["Cargo Target"],
//! which corresponds to an artifact that will be built in a package. Not to be
//! confused with target-triple or target architecture.
//!
//! [`unit_dependencies`]: crate::core::compiler::unit_dependencies
//! [`Layout`]: crate::core::compiler::Layout
//! [`JobQueue`]: crate::core::compiler::job_queue
//! [`drain_the_queue`]: crate::core::compiler::job_queue
//! ["Cargo Target"]: https://doc.rust-lang.org/nightly/cargo/reference/cargo-targets.html
37
38use std::collections::{HashMap, HashSet};
39use std::hash::{Hash, Hasher};
40use std::sync::Arc;
41
42use crate::core::compiler::UserIntent;
43use crate::core::compiler::unit_dependencies::build_unit_dependencies;
44use crate::core::compiler::unit_graph::{self, UnitDep, UnitGraph};
45use crate::core::compiler::{BuildConfig, BuildContext, BuildRunner, Compilation};
46use crate::core::compiler::{CompileKind, CompileTarget, RustcTargetData, Unit};
47use crate::core::compiler::{CrateType, TargetInfo, apply_env_config, standard_lib};
48use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner};
49use crate::core::compiler::{DepKindSet, UnitIndex};
50use crate::core::profiles::Profiles;
51use crate::core::resolver::features::{self, CliFeatures, FeaturesFor};
52use crate::core::resolver::{ForceAllTargets, HasDevUnits, Resolve};
53use crate::core::{PackageId, PackageSet, SourceId, TargetKind, Workspace};
54use crate::drop_println;
55use crate::ops;
56use crate::ops::resolve::{SpecsAndResolvedFeatures, WorkspaceResolve};
57use crate::util::BuildLogger;
58use crate::util::context::{GlobalContext, WarningHandling};
59use crate::util::interning::InternedString;
60use crate::util::log_message::LogMessage;
61use crate::util::{CargoResult, StableHasher};
62
63mod compile_filter;
64use cargo_util_terminal::report::{Group, Level, Origin};
65pub use compile_filter::{CompileFilter, FilterRule, LibRule};
66
67pub(super) mod unit_generator;
68use itertools::Itertools as _;
69use unit_generator::UnitGenerator;
70
71mod packages;
72
73pub use packages::Packages;
74
/// Contains information about how a package should be compiled.
///
/// Note on distinction between `CompileOptions` and [`BuildConfig`]:
/// `BuildConfig` contains values that need to be retained after
/// [`BuildContext`] is created. The other fields are no longer necessary. Think
/// of it as `CompileOptions` are high-level settings requested on the
/// command-line, and `BuildConfig` are low-level settings for actually
/// driving `rustc`.
#[derive(Debug, Clone)]
pub struct CompileOptions {
    /// Configuration information for a rustc build.
    pub build_config: BuildConfig,
    /// Feature flags requested by the user.
    pub cli_features: CliFeatures,
    /// A set of packages to build.
    pub spec: Packages,
    /// Filter to apply to the root package to select which targets will be
    /// built.
    pub filter: CompileFilter,
    /// Extra arguments to be passed to rustdoc (single target only).
    pub target_rustdoc_args: Option<Vec<String>>,
    /// The specified target will be compiled with all the available arguments;
    /// note that this only accounts for the *final* invocation of rustc.
    pub target_rustc_args: Option<Vec<String>>,
    /// Crate types to be passed to rustc (single target only).
    pub target_rustc_crate_types: Option<Vec<String>>,
    /// Whether the `--document-private-items` flag was specified and should
    /// be forwarded to `rustdoc`.
    pub rustdoc_document_private_items: bool,
    /// Whether the build process should check the minimum Rust version
    /// defined in the cargo metadata for a crate.
    pub honor_rust_version: Option<bool>,
}
108
109impl CompileOptions {
110    pub fn new(gctx: &GlobalContext, intent: UserIntent) -> CargoResult<CompileOptions> {
111        let jobs = None;
112        let keep_going = false;
113        Ok(CompileOptions {
114            build_config: BuildConfig::new(gctx, jobs, keep_going, &[], intent)?,
115            cli_features: CliFeatures::new_all(false),
116            spec: ops::Packages::Packages(Vec::new()),
117            filter: CompileFilter::Default {
118                required_features_filterable: false,
119            },
120            target_rustdoc_args: None,
121            target_rustc_args: None,
122            target_rustc_crate_types: None,
123            rustdoc_document_private_items: false,
124            honor_rust_version: None,
125        })
126    }
127}
128
129/// Compiles!
130///
131/// This uses the [`DefaultExecutor`]. To use a custom [`Executor`], see [`compile_with_exec`].
132pub fn compile<'a>(ws: &Workspace<'a>, options: &CompileOptions) -> CargoResult<Compilation<'a>> {
133    let exec: Arc<dyn Executor> = Arc::new(DefaultExecutor);
134    compile_with_exec(ws, options, &exec)
135}
136
137/// Like [`compile`] but allows specifying a custom [`Executor`]
138/// that will be able to intercept build calls and add custom logic.
139///
140/// [`compile`] uses [`DefaultExecutor`] which just passes calls through.
141pub fn compile_with_exec<'a>(
142    ws: &Workspace<'a>,
143    options: &CompileOptions,
144    exec: &Arc<dyn Executor>,
145) -> CargoResult<Compilation<'a>> {
146    ws.emit_warnings()?;
147    let compilation = compile_ws(ws, options, exec)?;
148    if ws.gctx().warning_handling()? == WarningHandling::Deny && compilation.lint_warning_count > 0
149    {
150        anyhow::bail!("warnings are denied by `build.warnings` configuration")
151    }
152    Ok(compilation)
153}
154
/// Like [`compile_with_exec`] but without warnings from manifest parsing.
#[tracing::instrument(skip_all)]
pub fn compile_ws<'a>(
    ws: &Workspace<'a>,
    options: &CompileOptions,
    exec: &Arc<dyn Executor>,
) -> CargoResult<Compilation<'a>> {
    let interner = UnitInterner::new();
    let logger = BuildLogger::maybe_new(ws, &options.build_config)?;

    // If structured build logging is enabled, record session metadata
    // (invocation, environment, toolchain) before any work begins.
    if let Some(ref logger) = logger {
        let rustc = ws.gctx().load_global_rustc(Some(ws))?;
        // `available_parallelism` can fail (e.g. unsupported platform);
        // log `None` in that case rather than erroring.
        let num_cpus = std::thread::available_parallelism()
            .ok()
            .map(|x| x.get() as u64);
        logger.log(LogMessage::BuildStarted {
            command: std::env::args_os()
                .map(|arg| arg.to_string_lossy().into_owned())
                .collect(),
            cwd: ws.gctx().cwd().to_path_buf(),
            host: rustc.host.to_string(),
            jobs: options.build_config.jobs,
            num_cpus,
            profile: options.build_config.requested_profile.to_string(),
            rustc_version: rustc.version.to_string(),
            rustc_version_verbose: rustc.verbose_version.clone(),
            target_dir: ws.target_dir().as_path_unlocked().to_path_buf(),
            workspace_root: ws.root().to_path_buf(),
        });
    }

    // Run the whole "front end": resolve, download, and build the unit graph.
    let bcx = create_bcx(ws, options, &interner, logger.as_ref())?;

    // `--unit-graph` only emits the serialized graph; nothing is compiled.
    if options.build_config.unit_graph {
        unit_graph::emit_serialized_unit_graph(&bcx.roots, &bcx.unit_graph, ws.gctx())?;
        return Compilation::new(&bcx);
    }
    // Trigger automatic garbage collection of cache data (see `core::gc`).
    crate::core::gc::auto_gc(bcx.gctx);
    let build_runner = BuildRunner::new(&bcx)?;
    if options.build_config.dry_run {
        build_runner.dry_run()
    } else {
        build_runner.compile(exec)
    }
}
200
201/// Executes `rustc --print <VALUE>`.
202///
203/// * `print_opt_value` is the VALUE passed through.
204pub fn print<'a>(
205    ws: &Workspace<'a>,
206    options: &CompileOptions,
207    print_opt_value: &str,
208) -> CargoResult<()> {
209    let CompileOptions {
210        ref build_config,
211        ref target_rustc_args,
212        ..
213    } = *options;
214    let gctx = ws.gctx();
215    let rustc = gctx.load_global_rustc(Some(ws))?;
216    for (index, kind) in build_config.requested_kinds.iter().enumerate() {
217        if index != 0 {
218            drop_println!(gctx);
219        }
220        let target_info = TargetInfo::new(gctx, &build_config.requested_kinds, &rustc, *kind)?;
221        let mut process = rustc.process();
222        apply_env_config(gctx, &mut process)?;
223        process.args(&target_info.rustflags);
224        if let Some(args) = target_rustc_args {
225            process.args(args);
226        }
227        kind.add_target_arg(&mut process);
228        process.arg("--print").arg(print_opt_value);
229        process.exec()?;
230    }
231    Ok(())
232}
233
/// Prepares all required information for the actual compilation.
///
/// For how it works and what data it collects,
/// please see the [module-level documentation](self).
#[tracing::instrument(skip_all)]
pub fn create_bcx<'a, 'gctx>(
    ws: &'a Workspace<'gctx>,
    options: &'a CompileOptions,
    interner: &'a UnitInterner,
    logger: Option<&'a BuildLogger>,
) -> CargoResult<BuildContext<'a, 'gctx>> {
    let CompileOptions {
        ref build_config,
        ref spec,
        ref cli_features,
        ref filter,
        ref target_rustdoc_args,
        ref target_rustc_args,
        ref target_rustc_crate_types,
        rustdoc_document_private_items,
        honor_rust_version,
    } = *options;
    let gctx = ws.gctx();

    // Perform some pre-flight validation.
    // Warn about commonly-misspelled flag environment variables that cargo
    // does not read, pointing the user at the real variable names.
    match build_config.intent {
        UserIntent::Test | UserIntent::Build | UserIntent::Check { .. } | UserIntent::Bench => {
            if ws.gctx().get_env("RUST_FLAGS").is_ok() {
                gctx.shell().print_report(
                    &[Level::WARNING
                        .secondary_title("ignoring environment variable `RUST_FLAGS`")
                        .element(Level::HELP.message("rust flags are passed via `RUSTFLAGS`"))],
                    false,
                )?;
            }
        }
        UserIntent::Doc { .. } | UserIntent::Doctest => {
            if ws.gctx().get_env("RUSTDOC_FLAGS").is_ok() {
                gctx.shell().print_report(
                    &[Level::WARNING
                        .secondary_title("ignoring environment variable `RUSTDOC_FLAGS`")
                        .element(
                            Level::HELP.message("rustdoc flags are passed via `RUSTDOCFLAGS`"),
                        )],
                    false,
                )?;
            }
        }
    }
    gctx.validate_term_config()?;

    let mut target_data = RustcTargetData::new(ws, &build_config.requested_kinds)?;

    let specs = spec.to_package_id_specs(ws)?;
    let has_dev_units = {
        // Rustdoc itself doesn't need dev-dependencies. But to scrape examples from packages in the
        // workspace, if any of those packages need dev-dependencies, then we need include dev-dependencies
        // to scrape those packages.
        let any_pkg_has_scrape_enabled = ws
            .members_with_features(&specs, cli_features)?
            .iter()
            .any(|(pkg, _)| {
                pkg.targets()
                    .iter()
                    .any(|target| target.is_example() && target.doc_scrape_examples().is_enabled())
            });

        if filter.need_dev_deps(build_config.intent)
            || (build_config.intent.is_doc() && any_pkg_has_scrape_enabled)
        {
            HasDevUnits::Yes
        } else {
            HasDevUnits::No
        }
    };
    let dry_run = false;

    // Bracket dependency resolution with log events so timing is observable.
    if let Some(logger) = logger {
        let elapsed = ws.gctx().creation_time().elapsed().as_secs_f64();
        logger.log(LogMessage::ResolutionStarted { elapsed });
    }

    let resolve = ops::resolve_ws_with_opts(
        ws,
        &mut target_data,
        &build_config.requested_kinds,
        cli_features,
        &specs,
        has_dev_units,
        ForceAllTargets::No,
        dry_run,
    )?;
    let WorkspaceResolve {
        mut pkg_set,
        workspace_resolve,
        targeted_resolve: resolve,
        specs_and_features,
    } = resolve;

    if let Some(logger) = logger {
        let elapsed = ws.gctx().creation_time().elapsed().as_secs_f64();
        logger.log(LogMessage::ResolutionFinished { elapsed });
    }

    // With `-Zbuild-std`, resolve the standard library separately and merge
    // its packages into the main package set.
    let std_resolve_features = if let Some(crates) = &gctx.cli_unstable().build_std {
        let (std_package_set, std_resolve, std_features) = standard_lib::resolve_std(
            ws,
            &mut target_data,
            &build_config,
            crates,
            &build_config.requested_kinds,
        )?;
        pkg_set.add_set(std_package_set);
        Some((std_resolve, std_features))
    } else {
        None
    };

    // Find the packages in the resolver that the user wants to build (those
    // passed in with `-p` or the defaults from the workspace), and convert
    // Vec<PackageIdSpec> to a Vec<PackageId>.
    let to_build_ids = resolve.specs_to_ids(&specs)?;
    // Now get the `Package` for each `PackageId`. This may trigger a download
    // if the user specified `-p` for a dependency that is not downloaded.
    // Dependencies will be downloaded during build_unit_dependencies.
    let mut to_builds = pkg_set.get_many(to_build_ids)?;

    // The ordering here affects some error messages coming out of cargo, so
    // let's be test and CLI friendly by always printing in the same order if
    // there's an error.
    to_builds.sort_by_key(|p| p.package_id());

    for pkg in to_builds.iter() {
        pkg.manifest().print_teapot(gctx);

        // Testing a non-member package with dev-dependencies is not supported
        // because its dev-dependencies were not resolved.
        if build_config.intent.is_any_test()
            && !ws.is_member(pkg)
            && pkg.dependencies().iter().any(|dep| !dep.is_transitive())
        {
            anyhow::bail!(
                "package `{}` cannot be tested because it requires dev-dependencies \
                 and is not a member of the workspace",
                pkg.name()
            );
        }
    }

    // `cargo rustc`/`cargo rustdoc` extra args; only one of the two can be set.
    let (extra_args, extra_args_name) = match (target_rustc_args, target_rustdoc_args) {
        (Some(args), _) => (Some(args.clone()), "rustc"),
        (_, Some(args)) => (Some(args.clone()), "rustdoc"),
        _ => (None, ""),
    };

    // The CLI layer is expected to reject multiple `-p` flags for these
    // commands, so reaching this state is a cargo bug.
    if extra_args.is_some() && to_builds.len() != 1 {
        panic!(
            "`{}` should not accept multiple `-p` flags",
            extra_args_name
        );
    }

    let profiles = Profiles::new(ws, build_config.requested_profile)?;
    profiles.validate_packages(
        ws.profiles(),
        &mut gctx.shell(),
        workspace_resolve.as_ref().unwrap_or(&resolve),
    )?;

    // If `--target` has not been specified, then the unit graph is built
    // assuming `--target $HOST` was specified. See
    // `rebuild_unit_graph_shared` for more on why this is done.
    let explicit_host_kind = CompileKind::Target(CompileTarget::new(
        &target_data.rustc.host,
        gctx.cli_unstable().json_target_spec,
    )?);
    let explicit_host_kinds: Vec<_> = build_config
        .requested_kinds
        .iter()
        .map(|kind| match kind {
            CompileKind::Host => explicit_host_kind,
            CompileKind::Target(t) => CompileKind::Target(*t),
        })
        .collect();

    let mut root_units = Vec::new();
    let mut unit_graph = HashMap::new();
    let mut scrape_units = Vec::new();

    if let Some(logger) = logger {
        let elapsed = ws.gctx().creation_time().elapsed().as_secs_f64();
        logger.log(LogMessage::UnitGraphStarted { elapsed });
    }

    // Generate root units and their dependency subgraphs, one iteration per
    // spec/feature set, accumulating everything into `unit_graph`.
    let mut selected_dep_kinds = DepKindSet::default();
    for SpecsAndResolvedFeatures {
        specs,
        resolved_features,
    } in &specs_and_features
    {
        // Passing `build_config.requested_kinds` instead of
        // `explicit_host_kinds` here so that `generate_root_units` can do
        // its own special handling of `CompileKind::Host`. It will
        // internally replace the host kind by the `explicit_host_kind`
        // before setting as a unit.
        let spec_names = specs.iter().map(|spec| spec.name()).collect::<Vec<_>>();
        let packages = to_builds
            .iter()
            .filter(|package| spec_names.contains(&package.name().as_str()))
            .cloned()
            .collect::<Vec<_>>();
        let generator = UnitGenerator {
            ws,
            packages: &packages,
            spec,
            target_data: &target_data,
            filter,
            requested_kinds: &build_config.requested_kinds,
            explicit_host_kind,
            intent: build_config.intent,
            resolve: &resolve,
            workspace_resolve: &workspace_resolve,
            resolved_features: &resolved_features,
            package_set: &pkg_set,
            profiles: &profiles,
            interner,
            has_dev_units,
        };
        let (mut targeted_root_units, curr_selected_dep_kinds) = generator.generate_root_units()?;
        // Should be fine as the loop iterate is independent of target selection
        selected_dep_kinds = curr_selected_dep_kinds;

        if let Some(args) = target_rustc_crate_types {
            override_rustc_crate_types(&mut targeted_root_units, args, interner)?;
        }

        let should_scrape =
            build_config.intent.is_doc() && gctx.cli_unstable().rustdoc_scrape_examples;
        let targeted_scrape_units = if should_scrape {
            generator.generate_scrape_units(&targeted_root_units)?
        } else {
            Vec::new()
        };

        // With `-Zbuild-std`, add root units for the standard library crates.
        let std_roots = if let Some(crates) = gctx.cli_unstable().build_std.as_ref() {
            let (std_resolve, std_features) = std_resolve_features.as_ref().unwrap();
            standard_lib::generate_std_roots(
                &crates,
                &targeted_root_units,
                std_resolve,
                std_features,
                &explicit_host_kinds,
                &pkg_set,
                interner,
                &profiles,
                &target_data,
            )?
        } else {
            Default::default()
        };

        unit_graph.extend(build_unit_dependencies(
            ws,
            &pkg_set,
            &resolve,
            &resolved_features,
            std_resolve_features.as_ref(),
            &targeted_root_units,
            &targeted_scrape_units,
            &std_roots,
            build_config.intent,
            &target_data,
            &profiles,
            interner,
        )?);
        root_units.extend(targeted_root_units);
        scrape_units.extend(targeted_scrape_units);
    }

    // TODO: In theory, Cargo should also dedupe the roots, but I'm uncertain
    // what heuristics to use in that case.
    if build_config.intent.wants_deps_docs() {
        remove_duplicate_doc(build_config, &root_units, &mut unit_graph);
    }

    let host_kind_requested = build_config
        .requested_kinds
        .iter()
        .any(CompileKind::is_host);
    // Rebuild the unit graph, replacing the explicit host targets with
    // CompileKind::Host, removing `artifact_target_for_features` and merging any dependencies
    // shared with build and artifact dependencies.
    //
    // NOTE: after this point, all units and the unit graph must be immutable.
    let (root_units, scrape_units, unit_graph) = rebuild_unit_graph_shared(
        interner,
        unit_graph,
        &root_units,
        &scrape_units,
        host_kind_requested.then_some(explicit_host_kind),
        build_config.compile_time_deps_only,
    );

    // Assign each unit a stable index based on sorted order; the index is
    // used for cross-referencing in build logs and the `BuildContext`.
    let units: Vec<_> = unit_graph.keys().sorted().collect();
    let unit_to_index: HashMap<_, _> = units
        .iter()
        .enumerate()
        .map(|(i, &unit)| (unit.clone(), UnitIndex(i as u64)))
        .collect();

    if let Some(logger) = logger {
        let root_unit_indexes: HashSet<_> =
            root_units.iter().map(|unit| unit_to_index[&unit]).collect();

        for (index, unit) in units.into_iter().enumerate() {
            let index = UnitIndex(index as u64);
            let dependencies = unit_graph
                .get(unit)
                .map(|deps| {
                    deps.iter()
                        .filter_map(|dep| unit_to_index.get(&dep.unit).copied())
                        .collect()
                })
                .unwrap_or_default();
            logger.log(LogMessage::UnitRegistered {
                package_id: unit.pkg.package_id().to_spec(),
                target: (&unit.target).into(),
                mode: unit.mode,
                platform: target_data.short_name(&unit.kind).to_owned(),
                index,
                features: unit
                    .features
                    .iter()
                    .map(|s| s.as_str().to_owned())
                    .collect(),
                requested: root_unit_indexes.contains(&index),
                dependencies,
            });
        }
        let elapsed = ws.gctx().creation_time().elapsed().as_secs_f64();
        logger.log(LogMessage::UnitGraphFinished { elapsed });
    }

    // Extra `cargo rustc`/`cargo rustdoc` args only make sense for exactly
    // one root unit; otherwise it is ambiguous which invocation gets them.
    let mut extra_compiler_args = HashMap::new();
    if let Some(args) = extra_args {
        if root_units.len() != 1 {
            anyhow::bail!(
                "extra arguments to `{}` can only be passed to one \
                 target, consider filtering\nthe package by passing, \
                 e.g., `--lib` or `--bin NAME` to specify a single target",
                extra_args_name
            );
        }
        extra_compiler_args.insert(root_units[0].clone(), args);
    }

    for unit in root_units
        .iter()
        .filter(|unit| unit.mode.is_doc() || unit.mode.is_doc_test())
        .filter(|unit| rustdoc_document_private_items || unit.target.is_bin())
    {
        // Add `--document-private-items` rustdoc flag if requested or if
        // the target is a binary. Binary crates get their private items
        // documented by default.
        let mut args = vec!["--document-private-items".into()];
        if unit.target.is_bin() {
            // This warning only makes sense if it's possible to document private items
            // sometimes and ignore them at other times. But cargo consistently passes
            // `--document-private-items`, so the warning isn't useful.
            args.push("-Arustdoc::private-intra-doc-links".into());
        }
        extra_compiler_args
            .entry(unit.clone())
            .or_default()
            .extend(args);
    }

    // Validate target src path for each root unit. Errors are aggregated (via
    // `error_count`) so all problems are reported before bailing.
    let mut error_count: usize = 0;
    for unit in &root_units {
        if let Some(target_src_path) = unit.target.src_path().path() {
            validate_target_path_as_source_file(
                gctx,
                target_src_path,
                unit.target.name(),
                unit.target.kind(),
                unit.pkg.manifest_path(),
                &mut error_count,
            )?
        }
    }
    if error_count > 0 {
        let plural: &str = if error_count > 1 { "s" } else { "" };
        anyhow::bail!(
            "could not compile due to {error_count} previous target resolution error{plural}"
        );
    }

    // MSRV enforcement: unless disabled, refuse to build if any unit's
    // package declares a `rust-version` incompatible with the current rustc.
    if honor_rust_version.unwrap_or(true) {
        let rustc_version = target_data.rustc.version.clone().into();

        let mut incompatible = Vec::new();
        let mut local_incompatible = false;
        for unit in unit_graph.keys() {
            let Some(pkg_msrv) = unit.pkg.rust_version() else {
                continue;
            };

            if pkg_msrv.is_compatible_with(&rustc_version) {
                continue;
            }

            local_incompatible |= unit.is_local();
            incompatible.push((unit, pkg_msrv));
        }
        if !incompatible.is_empty() {
            use std::fmt::Write as _;

            let plural = if incompatible.len() == 1 { "" } else { "s" };
            let mut message = format!(
                "rustc {rustc_version} is not supported by the following package{plural}:\n"
            );
            // Sort for deterministic, test-friendly output.
            incompatible.sort_by_key(|(unit, _)| (unit.pkg.name(), unit.pkg.version()));
            for (unit, msrv) in incompatible {
                let name = &unit.pkg.name();
                let version = &unit.pkg.version();
                writeln!(&mut message, "  {name}@{version} requires rustc {msrv}").unwrap();
            }
            if ws.is_ephemeral() {
                if ws.ignore_lock() {
                    writeln!(
                        &mut message,
                        "Try re-running `cargo install` with `--locked`"
                    )
                    .unwrap();
                }
            } else if !local_incompatible {
                writeln!(
                    &mut message,
                    "Either upgrade rustc or select compatible dependency versions with
`cargo update <name>@<current-ver> --precise <compatible-ver>`
where `<compatible-ver>` is the latest version supporting rustc {rustc_version}",
                )
                .unwrap();
            }
            return Err(anyhow::Error::msg(message));
        }
    }

    let bcx = BuildContext::new(
        ws,
        logger,
        pkg_set,
        build_config,
        selected_dep_kinds,
        profiles,
        extra_compiler_args,
        target_data,
        root_units,
        unit_graph,
        unit_to_index,
        scrape_units,
    )?;

    Ok(bcx)
}
698
699// Checks if a target path exists and is a source file, not a directory
700fn validate_target_path_as_source_file(
701    gctx: &GlobalContext,
702    target_path: &std::path::Path,
703    target_name: &str,
704    target_kind: &TargetKind,
705    unit_manifest_path: &std::path::Path,
706    error_count: &mut usize,
707) -> CargoResult<()> {
708    if !target_path.exists() {
709        *error_count += 1;
710
711        let err_msg = format!(
712            "can't find {} `{}` at path `{}`",
713            target_kind.description(),
714            target_name,
715            target_path.display()
716        );
717
718        let group = Group::with_title(Level::ERROR.primary_title(err_msg)).element(Origin::path(
719            unit_manifest_path.to_str().unwrap_or_default(),
720        ));
721
722        gctx.shell().print_report(&[group], true)?;
723    } else if target_path.is_dir() {
724        *error_count += 1;
725
726        // suggest setting the path to a likely entrypoint
727        let main_rs = target_path.join("main.rs");
728        let lib_rs = target_path.join("lib.rs");
729
730        let suggested_files_opt = match target_kind {
731            TargetKind::Lib(_) => {
732                if lib_rs.exists() {
733                    Some(format!("`{}`", lib_rs.display()))
734                } else {
735                    None
736                }
737            }
738            TargetKind::Bin => {
739                if main_rs.exists() {
740                    Some(format!("`{}`", main_rs.display()))
741                } else {
742                    None
743                }
744            }
745            TargetKind::Test => {
746                if main_rs.exists() {
747                    Some(format!("`{}`", main_rs.display()))
748                } else {
749                    None
750                }
751            }
752            TargetKind::ExampleBin => {
753                if main_rs.exists() {
754                    Some(format!("`{}`", main_rs.display()))
755                } else {
756                    None
757                }
758            }
759            TargetKind::Bench => {
760                if main_rs.exists() {
761                    Some(format!("`{}`", main_rs.display()))
762                } else {
763                    None
764                }
765            }
766            TargetKind::ExampleLib(_) => {
767                if lib_rs.exists() {
768                    Some(format!("`{}`", lib_rs.display()))
769                } else {
770                    None
771                }
772            }
773            TargetKind::CustomBuild => None,
774        };
775
776        let err_msg = format!(
777            "path `{}` for {} `{}` is a directory, but a source file was expected.",
778            target_path.display(),
779            target_kind.description(),
780            target_name,
781        );
782        let mut group = Group::with_title(Level::ERROR.primary_title(err_msg)).element(
783            Origin::path(unit_manifest_path.to_str().unwrap_or_default()),
784        );
785
786        if let Some(suggested_files) = suggested_files_opt {
787            group = group.element(
788                Level::HELP.message(format!("an entry point exists at {}", suggested_files)),
789            );
790        }
791
792        gctx.shell().print_report(&[group], true)?;
793    }
794
795    Ok(())
796}
797
/// This is used to rebuild the unit graph, sharing host dependencies if possible,
/// and applying other unit adjustments based on the whole graph.
///
/// This will translate any unit's `CompileKind::Target(host)` to
/// `CompileKind::Host` if `to_host` is not `None` and the kind is equal to `to_host`.
/// This also handles generating the unit `dep_hash`, and merging shared units if possible.
///
/// This is necessary because if normal dependencies used `CompileKind::Host`,
/// there would be no way to distinguish those units from build-dependency
/// units or artifact dependency units.
/// This can cause a problem if a shared normal/build/artifact dependency needs
/// to link to another dependency whose features differ based on whether or
/// not it is a normal, build or artifact dependency. If all units used
/// `CompileKind::Host`, then they would end up being identical, causing a
/// collision in the `UnitGraph`, and Cargo would end up randomly choosing one
/// value or the other.
///
/// The solution is to keep normal, build and artifact dependencies separate when
/// building the unit graph, and then run this second pass which will try to
/// combine shared dependencies safely. By adding a hash of the dependencies
/// to the `Unit`, this allows the `CompileKind` to be changed back to `Host`
/// and `artifact_target_for_features` to be removed without fear of an unwanted
/// collision for build or artifact dependencies.
///
/// This is also responsible for adjusting the `strip` profile option to
/// opportunistically strip if debug is 0 for all dependencies. This helps
/// remove debuginfo added by the standard library.
///
/// This is also responsible for adjusting the `debug` setting for host
/// dependencies, turning off debug if the user has not explicitly enabled it,
/// and the unit is not shared with a target unit.
///
/// This is also responsible for adjusting whether each unit should be compiled
/// or not regarding `--compile-time-deps` flag.
832fn rebuild_unit_graph_shared(
833    interner: &UnitInterner,
834    unit_graph: UnitGraph,
835    roots: &[Unit],
836    scrape_units: &[Unit],
837    to_host: Option<CompileKind>,
838    compile_time_deps_only: bool,
839) -> (Vec<Unit>, Vec<Unit>, UnitGraph) {
840    let mut result = UnitGraph::new();
841    // Map of the old unit to the new unit, used to avoid recursing into units
842    // that have already been computed to improve performance.
843    let mut memo = HashMap::new();
844    let new_roots = roots
845        .iter()
846        .map(|root| {
847            traverse_and_share(
848                interner,
849                &mut memo,
850                &mut result,
851                &unit_graph,
852                root,
853                true,
854                false,
855                to_host,
856                compile_time_deps_only,
857            )
858        })
859        .collect();
860    // If no unit in the unit graph ended up having scrape units attached as dependencies,
861    // then they won't have been discovered in traverse_and_share and hence won't be in
862    // memo. So we filter out missing scrape units.
863    let new_scrape_units = scrape_units
864        .iter()
865        .map(|unit| memo.get(unit).unwrap().clone())
866        .collect();
867    (new_roots, new_scrape_units, result)
868}
869
/// Recursive function for rebuilding the graph.
///
/// This walks `unit_graph`, starting at the given `unit`. It inserts the new
/// units into `new_graph`, and returns a new updated version of the given
/// unit (`dep_hash` is filled in, and `kind` switched if necessary).
///
/// Arguments of note:
/// - `memo`: maps already-processed old units to their rebuilt counterparts,
///   so shared subgraphs are traversed only once.
/// - `unit_is_root`: whether `unit` is one of the top-level root units; roots
///   get special memoization treatment under `compile_time_deps_only` (see
///   the end of this function).
/// - `unit_is_for_host`: whether the edge that led to this unit was a host
///   (build-time) dependency edge; gates the debuginfo-weakening logic below.
/// - `to_host`: when `Some`, units whose `kind` equals it are canonicalized
///   to `CompileKind::Host` so they can unify with host units.
/// - `compile_time_deps_only`: when set, units that are not compile-time
///   dependencies are flagged to be skipped (`--compile-time-deps`).
fn traverse_and_share(
    interner: &UnitInterner,
    memo: &mut HashMap<Unit, Unit>,
    new_graph: &mut UnitGraph,
    unit_graph: &UnitGraph,
    unit: &Unit,
    unit_is_root: bool,
    unit_is_for_host: bool,
    to_host: Option<CompileKind>,
    compile_time_deps_only: bool,
) -> Unit {
    if let Some(new_unit) = memo.get(unit) {
        // Already computed, no need to recompute.
        return new_unit.clone();
    }
    let mut dep_hash = StableHasher::new();
    // Whether this unit itself should be flagged as skippable under
    // `--compile-time-deps`.
    let skip_non_compile_time_deps = compile_time_deps_only
        && (!unit.target.is_compile_time_dependency() ||
            // Root unit is not a dependency unless other units are dependant
            // to it.
            unit_is_root);
    // Rebuild all dependency edges first (post-order), hashing each new
    // dependency unit into `dep_hash` as we go.
    let new_deps: Vec<_> = unit_graph[unit]
        .iter()
        .map(|dep| {
            let new_dep_unit = traverse_and_share(
                interner,
                memo,
                new_graph,
                unit_graph,
                &dep.unit,
                false,
                dep.unit_for.is_for_host(),
                to_host,
                // If we should compile the current unit, we should also compile
                // its dependencies. And if not, we should compile compile time
                // dependencies only.
                skip_non_compile_time_deps,
            );
            new_dep_unit.hash(&mut dep_hash);
            UnitDep {
                unit: new_dep_unit,
                ..dep.clone()
            }
        })
        .collect();
    // Here, we have recursively traversed this unit's dependencies, and hashed them: we can
    // finalize the dep hash.
    let new_dep_hash = Hasher::finish(&dep_hash);

    // This is the key part of the sharing process: if the unit is a runtime dependency, whose
    // target is the same as the host, we canonicalize the compile kind to `CompileKind::Host`.
    // A possible host dependency counterpart to this unit would have that kind, and if such a unit
    // exists in the current `unit_graph`, they will unify in the new unit graph map `new_graph`.
    // The resulting unit graph will be optimized with less units, thanks to sharing these host
    // dependencies.
    let canonical_kind = match to_host {
        Some(to_host) if to_host == unit.kind => CompileKind::Host,
        _ => unit.kind,
    };

    let mut profile = unit.profile.clone();
    if profile.strip.is_deferred() {
        // If strip was not manually set, and all dependencies of this unit together
        // with this unit have debuginfo turned off, we enable debuginfo stripping.
        // This will remove pre-existing debug symbols coming from the standard library.
        if !profile.debuginfo.is_turned_on()
            && new_deps
                .iter()
                .all(|dep| !dep.unit.profile.debuginfo.is_turned_on())
        {
            profile.strip = profile.strip.strip_debuginfo();
        }
    }

    // If this is a build dependency, and it's not shared with runtime dependencies, we can weaken
    // its debuginfo level to optimize build times. We do nothing if it's an artifact dependency,
    // as it and its debuginfo may end up embedded in the main program.
    if unit_is_for_host
        && to_host.is_some()
        && profile.debuginfo.is_deferred()
        && !unit.artifact.is_true()
    {
        // We create a "probe" test to see if a unit with the same explicit debuginfo level exists
        // in the graph. This is the level we'd expect if it was set manually or the default value
        // set by a profile for a runtime dependency: its canonical value.
        let canonical_debuginfo = profile.debuginfo.finalize();
        let mut canonical_profile = profile.clone();
        canonical_profile.debuginfo = canonical_debuginfo;
        let unit_probe = interner.intern(
            &unit.pkg,
            &unit.target,
            canonical_profile,
            to_host.unwrap(),
            unit.mode,
            unit.features.clone(),
            unit.rustflags.clone(),
            unit.rustdocflags.clone(),
            unit.links_overrides.clone(),
            unit.is_std,
            unit.dep_hash,
            unit.artifact,
            unit.artifact_target_for_features,
            unit.skip_non_compile_time_dep,
        );

        // We can now turn the deferred value into its actual final value.
        profile.debuginfo = if unit_graph.contains_key(&unit_probe) {
            // The unit is present in both build time and runtime subgraphs: we canonicalize its
            // level to the other unit's, thus ensuring reuse between the two to optimize build times.
            canonical_debuginfo
        } else {
            // The unit is only present in the build time subgraph, we can weaken its debuginfo
            // level to optimize build times.
            canonical_debuginfo.weaken()
        }
    }

    let new_unit = interner.intern(
        &unit.pkg,
        &unit.target,
        profile,
        canonical_kind,
        unit.mode,
        unit.features.clone(),
        unit.rustflags.clone(),
        unit.rustdocflags.clone(),
        unit.links_overrides.clone(),
        unit.is_std,
        new_dep_hash,
        unit.artifact,
        // Since `dep_hash` is now filled in, there's no need to specify the artifact target
        // for target-dependent feature resolution
        None,
        skip_non_compile_time_deps,
    );
    // NOTE: roots are deliberately not memoized under `--compile-time-deps`:
    // a root can be reached again as a *dependency* of another unit, where
    // `skip_non_compile_time_deps` is computed differently, so it must be
    // re-traversed rather than reused from `memo`.
    if !unit_is_root || !compile_time_deps_only {
        assert!(memo.insert(unit.clone(), new_unit.clone()).is_none());
    }
    new_graph.entry(new_unit.clone()).or_insert(new_deps);
    new_unit
}
1016
1017/// Removes duplicate `CompileMode::Doc` units that would cause problems with
1018/// filename collisions.
1019///
1020/// Rustdoc only separates units by crate name in the file directory
1021/// structure. If any two units with the same crate name exist, this would
1022/// cause a filename collision, causing different rustdoc invocations to stomp
1023/// on one another's files.
1024///
1025/// Unfortunately this does not remove all duplicates, as some of them are
1026/// either user error, or difficult to remove. Cases that I can think of:
1027///
1028/// - Same target name in different packages. See the `collision_doc` test.
1029/// - Different sources. See `collision_doc_sources` test.
1030///
1031/// Ideally this would not be necessary.
1032fn remove_duplicate_doc(
1033    build_config: &BuildConfig,
1034    root_units: &[Unit],
1035    unit_graph: &mut UnitGraph,
1036) {
1037    // First, create a mapping of crate_name -> Unit so we can see where the
1038    // duplicates are.
1039    let mut all_docs: HashMap<String, Vec<Unit>> = HashMap::new();
1040    for unit in unit_graph.keys() {
1041        if unit.mode.is_doc() {
1042            all_docs
1043                .entry(unit.target.crate_name())
1044                .or_default()
1045                .push(unit.clone());
1046        }
1047    }
1048    // Keep track of units to remove so that they can be efficiently removed
1049    // from the unit_deps.
1050    let mut removed_units: HashSet<Unit> = HashSet::new();
1051    let mut remove = |units: Vec<Unit>, reason: &str, cb: &dyn Fn(&Unit) -> bool| -> Vec<Unit> {
1052        let (to_remove, remaining_units): (Vec<Unit>, Vec<Unit>) = units
1053            .into_iter()
1054            .partition(|unit| cb(unit) && !root_units.contains(unit));
1055        for unit in to_remove {
1056            tracing::debug!(
1057                "removing duplicate doc due to {} for package {} target `{}`",
1058                reason,
1059                unit.pkg,
1060                unit.target.name()
1061            );
1062            unit_graph.remove(&unit);
1063            removed_units.insert(unit);
1064        }
1065        remaining_units
1066    };
1067    // Iterate over the duplicates and try to remove them from unit_graph.
1068    for (_crate_name, mut units) in all_docs {
1069        if units.len() == 1 {
1070            continue;
1071        }
1072        // Prefer target over host if --target was not specified.
1073        if build_config
1074            .requested_kinds
1075            .iter()
1076            .all(CompileKind::is_host)
1077        {
1078            // Note these duplicates may not be real duplicates, since they
1079            // might get merged in rebuild_unit_graph_shared. Either way, it
1080            // shouldn't hurt to remove them early (although the report in the
1081            // log might be confusing).
1082            units = remove(units, "host/target merger", &|unit| unit.kind.is_host());
1083            if units.len() == 1 {
1084                continue;
1085            }
1086        }
1087        // Prefer newer versions over older.
1088        let mut source_map: HashMap<(InternedString, SourceId, CompileKind), Vec<Unit>> =
1089            HashMap::new();
1090        for unit in units {
1091            let pkg_id = unit.pkg.package_id();
1092            // Note, this does not detect duplicates from different sources.
1093            source_map
1094                .entry((pkg_id.name(), pkg_id.source_id(), unit.kind))
1095                .or_default()
1096                .push(unit);
1097        }
1098        let mut remaining_units = Vec::new();
1099        for (_key, mut units) in source_map {
1100            if units.len() > 1 {
1101                units.sort_by(|a, b| a.pkg.version().partial_cmp(b.pkg.version()).unwrap());
1102                // Remove any entries with version < newest.
1103                let newest_version = units.last().unwrap().pkg.version().clone();
1104                let keep_units = remove(units, "older version", &|unit| {
1105                    unit.pkg.version() < &newest_version
1106                });
1107                remaining_units.extend(keep_units);
1108            } else {
1109                remaining_units.extend(units);
1110            }
1111        }
1112        if remaining_units.len() == 1 {
1113            continue;
1114        }
1115        // Are there other heuristics to remove duplicates that would make
1116        // sense? Maybe prefer path sources over all others?
1117    }
1118    // Also remove units from the unit_deps so there aren't any dangling edges.
1119    for unit_deps in unit_graph.values_mut() {
1120        unit_deps.retain(|unit_dep| !removed_units.contains(&unit_dep.unit));
1121    }
1122    // Remove any orphan units that were detached from the graph.
1123    let mut visited = HashSet::new();
1124    fn visit(unit: &Unit, graph: &UnitGraph, visited: &mut HashSet<Unit>) {
1125        if !visited.insert(unit.clone()) {
1126            return;
1127        }
1128        for dep in &graph[unit] {
1129            visit(&dep.unit, graph, visited);
1130        }
1131    }
1132    for unit in root_units {
1133        visit(unit, unit_graph, &mut visited);
1134    }
1135    unit_graph.retain(|unit, _| visited.contains(unit));
1136}
1137
1138/// Override crate types for given units.
1139///
1140/// This is primarily used by `cargo rustc --crate-type`.
1141fn override_rustc_crate_types(
1142    units: &mut [Unit],
1143    args: &[String],
1144    interner: &UnitInterner,
1145) -> CargoResult<()> {
1146    if units.len() != 1 {
1147        anyhow::bail!(
1148            "crate types to rustc can only be passed to one \
1149            target, consider filtering\nthe package by passing, \
1150            e.g., `--lib` or `--example` to specify a single target"
1151        );
1152    }
1153
1154    let unit = &units[0];
1155    let override_unit = |f: fn(Vec<CrateType>) -> TargetKind| {
1156        let crate_types = args.iter().map(|s| s.into()).collect();
1157        let mut target = unit.target.clone();
1158        target.set_kind(f(crate_types));
1159        interner.intern(
1160            &unit.pkg,
1161            &target,
1162            unit.profile.clone(),
1163            unit.kind,
1164            unit.mode,
1165            unit.features.clone(),
1166            unit.rustflags.clone(),
1167            unit.rustdocflags.clone(),
1168            unit.links_overrides.clone(),
1169            unit.is_std,
1170            unit.dep_hash,
1171            unit.artifact,
1172            unit.artifact_target_for_features,
1173            unit.skip_non_compile_time_dep,
1174        )
1175    };
1176    units[0] = match unit.target.kind() {
1177        TargetKind::Lib(_) => override_unit(TargetKind::Lib),
1178        TargetKind::ExampleLib(_) => override_unit(TargetKind::ExampleLib),
1179        _ => {
1180            anyhow::bail!(
1181                "crate types can only be specified for libraries and example libraries.\n\
1182                Binaries, tests, and benchmarks are always the `bin` crate type"
1183            );
1184        }
1185    };
1186
1187    Ok(())
1188}
1189
1190/// Gets all of the features enabled for a package, plus its dependencies'
1191/// features.
1192///
1193/// Dependencies are added as `dep_name/feat_name` because `required-features`
1194/// wants to support that syntax.
1195pub fn resolve_all_features(
1196    resolve_with_overrides: &Resolve,
1197    resolved_features: &features::ResolvedFeatures,
1198    package_set: &PackageSet<'_>,
1199    package_id: PackageId,
1200    has_dev_units: HasDevUnits,
1201    requested_kinds: &[CompileKind],
1202    target_data: &RustcTargetData<'_>,
1203    force_all_targets: ForceAllTargets,
1204) -> HashSet<String> {
1205    let mut features: HashSet<String> = resolved_features
1206        .activated_features(package_id, FeaturesFor::NormalOrDev)
1207        .iter()
1208        .map(|s| s.to_string())
1209        .collect();
1210
1211    // Include features enabled for use by dependencies so targets can also use them with the
1212    // required-features field when deciding whether to be built or skipped.
1213    let filtered_deps = PackageSet::filter_deps(
1214        package_id,
1215        resolve_with_overrides,
1216        has_dev_units,
1217        requested_kinds,
1218        target_data,
1219        force_all_targets,
1220    );
1221    for (dep_id, deps) in filtered_deps {
1222        let is_proc_macro = package_set
1223            .get_one(dep_id)
1224            .expect("packages downloaded")
1225            .proc_macro();
1226        for dep in deps {
1227            let features_for = FeaturesFor::from_for_host(is_proc_macro || dep.is_build());
1228            for feature in resolved_features
1229                .activated_features_unverified(dep_id, features_for)
1230                .unwrap_or_default()
1231            {
1232                features.insert(format!("{}/{}", dep.name_in_toml(), feature));
1233            }
1234        }
1235    }
1236
1237    features
1238}