
cargo/ops/cargo_compile/mod.rs

//! The entry point for starting the compilation process for commands like
//! `build`, `test`, `doc`, `rustc`, etc.
//!
//! The [`compile`] function will do all the work to compile a workspace. A
//! rough outline is:
//!
//! 1. Resolve the dependency graph (see [`ops::resolve`]).
//! 2. Download any packages needed (see [`PackageSet`]).
//! 3. Generate a list of top-level "units" of work for the targets the user
//!   requested on the command-line. Each [`Unit`] corresponds to a compiler
//!   invocation. This is done in this module ([`UnitGenerator::generate_root_units`]).
//! 4. Starting from the root [`Unit`]s, generate the [`UnitGraph`] by walking the dependency graph
//!   from the resolver.  See also [`unit_dependencies`].
//! 5. Construct the [`BuildContext`] with all of the information collected so
//!   far. This is the end of the "front end" of compilation.
//! 6. Create a [`BuildRunner`] which coordinates the compilation process
//!   and will perform the following steps:
//!     1. Prepare the `target` directory (see [`Layout`]).
//!     2. Create a [`JobQueue`]. The queue checks the
//!       fingerprint of each `Unit` to determine if it should run or be
//!       skipped.
//!     3. Execute the queue via [`drain_the_queue`]. Each leaf in the queue's dependency graph is
//!        executed, and then removed from the graph when finished. This repeats until the queue is
//!        empty.  Note that this is the only point in cargo that currently uses threads.
//! 7. The result of the compilation is stored in the [`Compilation`] struct. This can be used for
//!    various things, such as running tests after the compilation has finished.
//!
//! **Note**: "target" inside this module generally refers to ["Cargo Target"],
//! which corresponds to an artifact that will be built in a package. Not to be
//! confused with target-triple or target architecture.
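//!
//! # Example
//!
//! A minimal, illustrative sketch of driving a build from library code using
//! the entry points in this module (the manifest path and error handling are
//! placeholders, not something this module prescribes):
//!
//! ```ignore
//! use cargo::core::Workspace;
//! use cargo::core::compiler::UserIntent;
//! use cargo::ops::{self, CompileOptions};
//! use cargo::util::context::GlobalContext;
//!
//! // Resolve, download, build the unit graph, and run the compilation as
//! // outlined in the steps above.
//! let gctx = GlobalContext::default()?;
//! let ws = Workspace::new(&gctx.cwd().join("Cargo.toml"), &gctx)?;
//! let options = CompileOptions::new(&gctx, UserIntent::Build)?;
//! let compilation = ops::compile(&ws, &options)?;
//! ```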
//!
//! [`unit_dependencies`]: crate::core::compiler::unit_dependencies
//! [`Layout`]: crate::core::compiler::Layout
//! [`JobQueue`]: crate::core::compiler::job_queue
//! [`drain_the_queue`]: crate::core::compiler::job_queue
//! ["Cargo Target"]: https://doc.rust-lang.org/nightly/cargo/reference/cargo-targets.html

use std::collections::{HashMap, HashSet};
use std::hash::{Hash, Hasher};
use std::sync::Arc;

use crate::core::compiler::UnitIndex;
use crate::core::compiler::UserIntent;
use crate::core::compiler::unit_dependencies::build_unit_dependencies;
use crate::core::compiler::unit_graph::{self, UnitDep, UnitGraph};
use crate::core::compiler::{BuildConfig, BuildContext, BuildRunner, Compilation};
use crate::core::compiler::{CompileKind, CompileTarget, RustcTargetData, Unit};
use crate::core::compiler::{CrateType, TargetInfo, apply_env_config, standard_lib};
use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner};
use crate::core::profiles::Profiles;
use crate::core::resolver::features::{self, CliFeatures, FeaturesFor};
use crate::core::resolver::{ForceAllTargets, HasDevUnits, Resolve};
use crate::core::{PackageId, PackageSet, SourceId, TargetKind, Workspace};
use crate::drop_println;
use crate::ops;
use crate::ops::resolve::{SpecsAndResolvedFeatures, WorkspaceResolve};
use crate::util::BuildLogger;
use crate::util::context::{GlobalContext, WarningHandling};
use crate::util::interning::InternedString;
use crate::util::log_message::LogMessage;
use crate::util::{CargoResult, StableHasher};

mod compile_filter;
use annotate_snippets::{Group, Level, Origin};
pub use compile_filter::{CompileFilter, FilterRule, LibRule};

pub(super) mod unit_generator;
use itertools::Itertools as _;
use unit_generator::UnitGenerator;

mod packages;

pub use packages::Packages;

/// Contains information about how a package should be compiled.
///
/// Note on the distinction between `CompileOptions` and [`BuildConfig`]:
/// `BuildConfig` contains values that need to be retained after
/// [`BuildContext`] is created; the other fields are no longer necessary. Think
/// of `CompileOptions` as the high-level settings requested on the
/// command-line, and `BuildConfig` as the low-level settings for actually
/// driving `rustc`.
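///
/// # Example
///
/// A minimal sketch (illustrative only; the package name below is
/// hypothetical, and a `GlobalContext` named `gctx` is assumed to be in
/// scope) of requesting a build of one package, using only constructors
/// defined in this module:
///
/// ```ignore
/// let mut options = CompileOptions::new(gctx, UserIntent::Build)?;
/// // Restrict the build to a single, hypothetical package.
/// options.spec = Packages::Packages(vec!["my-crate".to_string()]);
/// // `new_all(false)` is the same default-features configuration used by
/// // `CompileOptions::new` above.
/// options.cli_features = CliFeatures::new_all(false);
/// ```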
#[derive(Debug, Clone)]
pub struct CompileOptions {
    /// Configuration information for a rustc build
    pub build_config: BuildConfig,
    /// Feature flags requested by the user.
    pub cli_features: CliFeatures,
    /// A set of packages to build.
    pub spec: Packages,
    /// Filter to apply to the root package to select which targets will be
    /// built.
    pub filter: CompileFilter,
    /// Extra arguments to be passed to rustdoc (single target only)
    pub target_rustdoc_args: Option<Vec<String>>,
    /// The specified target will be compiled with all the available arguments,
    /// note that this only accounts for the *final* invocation of rustc
    pub target_rustc_args: Option<Vec<String>>,
    /// Crate types to be passed to rustc (single target only)
    pub target_rustc_crate_types: Option<Vec<String>>,
    /// Whether the `--document-private-items` flag was specified and should
    /// be forwarded to `rustdoc`.
    pub rustdoc_document_private_items: bool,
    /// Whether the build process should check the minimum Rust version
    /// defined in the cargo metadata for a crate.
    pub honor_rust_version: Option<bool>,
}

impl CompileOptions {
    pub fn new(gctx: &GlobalContext, intent: UserIntent) -> CargoResult<CompileOptions> {
        let jobs = None;
        let keep_going = false;
        Ok(CompileOptions {
            build_config: BuildConfig::new(gctx, jobs, keep_going, &[], intent)?,
            cli_features: CliFeatures::new_all(false),
            spec: ops::Packages::Packages(Vec::new()),
            filter: CompileFilter::Default {
                required_features_filterable: false,
            },
            target_rustdoc_args: None,
            target_rustc_args: None,
            target_rustc_crate_types: None,
            rustdoc_document_private_items: false,
            honor_rust_version: None,
        })
    }
}

/// Compiles!
///
/// This uses the [`DefaultExecutor`]. To use a custom [`Executor`], see [`compile_with_exec`].
pub fn compile<'a>(ws: &Workspace<'a>, options: &CompileOptions) -> CargoResult<Compilation<'a>> {
    let exec: Arc<dyn Executor> = Arc::new(DefaultExecutor);
    compile_with_exec(ws, options, &exec)
}

/// Like [`compile`] but allows specifying a custom [`Executor`]
/// that will be able to intercept build calls and add custom logic.
///
/// [`compile`] uses [`DefaultExecutor`] which just passes calls through.
pub fn compile_with_exec<'a>(
    ws: &Workspace<'a>,
    options: &CompileOptions,
    exec: &Arc<dyn Executor>,
) -> CargoResult<Compilation<'a>> {
    ws.emit_warnings()?;
    let compilation = compile_ws(ws, options, exec)?;
    if ws.gctx().warning_handling()? == WarningHandling::Deny && compilation.lint_warning_count > 0
    {
        anyhow::bail!("warnings are denied by `build.warnings` configuration")
    }
    Ok(compilation)
}

/// Like [`compile_with_exec`] but without warnings from manifest parsing.
#[tracing::instrument(skip_all)]
pub fn compile_ws<'a>(
    ws: &Workspace<'a>,
    options: &CompileOptions,
    exec: &Arc<dyn Executor>,
) -> CargoResult<Compilation<'a>> {
    let interner = UnitInterner::new();
    let logger = BuildLogger::maybe_new(ws, &options.build_config)?;

    if let Some(ref logger) = logger {
        let rustc = ws.gctx().load_global_rustc(Some(ws))?;
        let num_cpus = std::thread::available_parallelism()
            .ok()
            .map(|x| x.get() as u64);
        logger.log(LogMessage::BuildStarted {
            command: std::env::args_os()
                .map(|arg| arg.to_string_lossy().into_owned())
                .collect(),
            cwd: ws.gctx().cwd().to_path_buf(),
            host: rustc.host.to_string(),
            jobs: options.build_config.jobs,
            num_cpus,
            profile: options.build_config.requested_profile.to_string(),
            rustc_version: rustc.version.to_string(),
            rustc_version_verbose: rustc.verbose_version.clone(),
            target_dir: ws.target_dir().as_path_unlocked().to_path_buf(),
            workspace_root: ws.root().to_path_buf(),
        });
    }

    let bcx = create_bcx(ws, options, &interner, logger.as_ref())?;

    if options.build_config.unit_graph {
        unit_graph::emit_serialized_unit_graph(&bcx.roots, &bcx.unit_graph, ws.gctx())?;
        return Compilation::new(&bcx);
    }
    crate::core::gc::auto_gc(bcx.gctx);
    let build_runner = BuildRunner::new(&bcx)?;
    if options.build_config.dry_run {
        build_runner.dry_run()
    } else {
        build_runner.compile(exec)
    }
}

/// Executes `rustc --print <VALUE>`.
///
/// * `print_opt_value` is the VALUE passed through.
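///
/// For example (illustrative; any value accepted by `rustc --print` works),
/// passing `"cfg"` runs `rustc --print cfg` once per requested target kind:
///
/// ```ignore
/// ops::print(&ws, &options, "cfg")?;
/// ```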
pub fn print<'a>(
    ws: &Workspace<'a>,
    options: &CompileOptions,
    print_opt_value: &str,
) -> CargoResult<()> {
    let CompileOptions {
        ref build_config,
        ref target_rustc_args,
        ..
    } = *options;
    let gctx = ws.gctx();
    let rustc = gctx.load_global_rustc(Some(ws))?;
    for (index, kind) in build_config.requested_kinds.iter().enumerate() {
        if index != 0 {
            drop_println!(gctx);
        }
        let target_info = TargetInfo::new(gctx, &build_config.requested_kinds, &rustc, *kind)?;
        let mut process = rustc.process();
        apply_env_config(gctx, &mut process)?;
        process.args(&target_info.rustflags);
        if let Some(args) = target_rustc_args {
            process.args(args);
        }
        kind.add_target_arg(&mut process);
        process.arg("--print").arg(print_opt_value);
        process.exec()?;
    }
    Ok(())
}

/// Prepares all required information for the actual compilation.
///
/// For how it works and what data it collects,
/// please see the [module-level documentation](self).
#[tracing::instrument(skip_all)]
pub fn create_bcx<'a, 'gctx>(
    ws: &'a Workspace<'gctx>,
    options: &'a CompileOptions,
    interner: &'a UnitInterner,
    logger: Option<&'a BuildLogger>,
) -> CargoResult<BuildContext<'a, 'gctx>> {
    let CompileOptions {
        ref build_config,
        ref spec,
        ref cli_features,
        ref filter,
        ref target_rustdoc_args,
        ref target_rustc_args,
        ref target_rustc_crate_types,
        rustdoc_document_private_items,
        honor_rust_version,
    } = *options;
    let gctx = ws.gctx();

    // Perform some pre-flight validation.
    match build_config.intent {
        UserIntent::Test | UserIntent::Build | UserIntent::Check { .. } | UserIntent::Bench => {
            if ws.gctx().get_env("RUST_FLAGS").is_ok() {
                gctx.shell().print_report(
                    &[Level::WARNING
                        .secondary_title("ignoring environment variable `RUST_FLAGS`")
                        .element(Level::HELP.message("rust flags are passed via `RUSTFLAGS`"))],
                    false,
                )?;
            }
        }
        UserIntent::Doc { .. } | UserIntent::Doctest => {
            if ws.gctx().get_env("RUSTDOC_FLAGS").is_ok() {
                gctx.shell().print_report(
                    &[Level::WARNING
                        .secondary_title("ignoring environment variable `RUSTDOC_FLAGS`")
                        .element(
                            Level::HELP.message("rustdoc flags are passed via `RUSTDOCFLAGS`"),
                        )],
                    false,
                )?;
            }
        }
    }
    gctx.validate_term_config()?;

    let mut target_data = RustcTargetData::new(ws, &build_config.requested_kinds)?;

    let specs = spec.to_package_id_specs(ws)?;
    let has_dev_units = {
        // Rustdoc itself doesn't need dev-dependencies. But to scrape examples from packages in the
        // workspace, if any of those packages need dev-dependencies, then we need to include
        // dev-dependencies to scrape those packages.
        let any_pkg_has_scrape_enabled = ws
            .members_with_features(&specs, cli_features)?
            .iter()
            .any(|(pkg, _)| {
                pkg.targets()
                    .iter()
                    .any(|target| target.is_example() && target.doc_scrape_examples().is_enabled())
            });

        if filter.need_dev_deps(build_config.intent)
            || (build_config.intent.is_doc() && any_pkg_has_scrape_enabled)
        {
            HasDevUnits::Yes
        } else {
            HasDevUnits::No
        }
    };
    let dry_run = false;

    if let Some(logger) = logger {
        let elapsed = ws.gctx().creation_time().elapsed().as_secs_f64();
        logger.log(LogMessage::ResolutionStarted { elapsed });
    }

    let resolve = ops::resolve_ws_with_opts(
        ws,
        &mut target_data,
        &build_config.requested_kinds,
        cli_features,
        &specs,
        has_dev_units,
        ForceAllTargets::No,
        dry_run,
    )?;
    let WorkspaceResolve {
        mut pkg_set,
        workspace_resolve,
        targeted_resolve: resolve,
        specs_and_features,
    } = resolve;

    if let Some(logger) = logger {
        let elapsed = ws.gctx().creation_time().elapsed().as_secs_f64();
        logger.log(LogMessage::ResolutionFinished { elapsed });
    }

    let std_resolve_features = if let Some(crates) = &gctx.cli_unstable().build_std {
        let (std_package_set, std_resolve, std_features) = standard_lib::resolve_std(
            ws,
            &mut target_data,
            &build_config,
            crates,
            &build_config.requested_kinds,
        )?;
        pkg_set.add_set(std_package_set);
        Some((std_resolve, std_features))
    } else {
        None
    };

    // Find the packages in the resolver that the user wants to build (those
    // passed in with `-p` or the defaults from the workspace), and convert
    // Vec<PackageIdSpec> to a Vec<PackageId>.
    let to_build_ids = resolve.specs_to_ids(&specs)?;
    // Now get the `Package` for each `PackageId`. This may trigger a download
    // if the user specified `-p` for a dependency that is not downloaded.
    // Dependencies will be downloaded during build_unit_dependencies.
    let mut to_builds = pkg_set.get_many(to_build_ids)?;

    // The ordering here affects some error messages coming out of cargo, so
    // let's be test and CLI friendly by always printing in the same order if
    // there's an error.
    to_builds.sort_by_key(|p| p.package_id());

    for pkg in to_builds.iter() {
        pkg.manifest().print_teapot(gctx);

        if build_config.intent.is_any_test()
            && !ws.is_member(pkg)
            && pkg.dependencies().iter().any(|dep| !dep.is_transitive())
        {
            anyhow::bail!(
                "package `{}` cannot be tested because it requires dev-dependencies \
                 and is not a member of the workspace",
                pkg.name()
            );
        }
    }

    let (extra_args, extra_args_name) = match (target_rustc_args, target_rustdoc_args) {
        (Some(args), _) => (Some(args.clone()), "rustc"),
        (_, Some(args)) => (Some(args.clone()), "rustdoc"),
        _ => (None, ""),
    };

    if extra_args.is_some() && to_builds.len() != 1 {
        panic!(
            "`{}` should not accept multiple `-p` flags",
            extra_args_name
        );
    }

    let profiles = Profiles::new(ws, build_config.requested_profile)?;
    profiles.validate_packages(
        ws.profiles(),
        &mut gctx.shell(),
        workspace_resolve.as_ref().unwrap_or(&resolve),
    )?;

    // If `--target` has not been specified, then the unit graph is built
    // assuming `--target $HOST` was specified. See
    // `rebuild_unit_graph_shared` for more on why this is done.
    let explicit_host_kind = CompileKind::Target(CompileTarget::new(
        &target_data.rustc.host,
        gctx.cli_unstable().json_target_spec,
    )?);
    let explicit_host_kinds: Vec<_> = build_config
        .requested_kinds
        .iter()
        .map(|kind| match kind {
            CompileKind::Host => explicit_host_kind,
            CompileKind::Target(t) => CompileKind::Target(*t),
        })
        .collect();

    let mut root_units = Vec::new();
    let mut unit_graph = HashMap::new();
    let mut scrape_units = Vec::new();

    if let Some(logger) = logger {
        let elapsed = ws.gctx().creation_time().elapsed().as_secs_f64();
        logger.log(LogMessage::UnitGraphStarted { elapsed });
    }

    for SpecsAndResolvedFeatures {
        specs,
        resolved_features,
    } in &specs_and_features
    {
        // Passing `build_config.requested_kinds` instead of
        // `explicit_host_kinds` here so that `generate_root_units` can do
        // its own special handling of `CompileKind::Host`. It will
        // internally replace the host kind by the `explicit_host_kind`
        // before setting as a unit.
        let spec_names = specs.iter().map(|spec| spec.name()).collect::<Vec<_>>();
        let packages = to_builds
            .iter()
            .filter(|package| spec_names.contains(&package.name().as_str()))
            .cloned()
            .collect::<Vec<_>>();
        let generator = UnitGenerator {
            ws,
            packages: &packages,
            spec,
            target_data: &target_data,
            filter,
            requested_kinds: &build_config.requested_kinds,
            explicit_host_kind,
            intent: build_config.intent,
            resolve: &resolve,
            workspace_resolve: &workspace_resolve,
            resolved_features: &resolved_features,
            package_set: &pkg_set,
            profiles: &profiles,
            interner,
            has_dev_units,
        };
        let mut targeted_root_units = generator.generate_root_units()?;

        if let Some(args) = target_rustc_crate_types {
            override_rustc_crate_types(&mut targeted_root_units, args, interner)?;
        }

        let should_scrape =
            build_config.intent.is_doc() && gctx.cli_unstable().rustdoc_scrape_examples;
        let targeted_scrape_units = if should_scrape {
            generator.generate_scrape_units(&targeted_root_units)?
        } else {
            Vec::new()
        };

        let std_roots = if let Some(crates) = gctx.cli_unstable().build_std.as_ref() {
            let (std_resolve, std_features) = std_resolve_features.as_ref().unwrap();
            standard_lib::generate_std_roots(
                &crates,
                &targeted_root_units,
                std_resolve,
                std_features,
                &explicit_host_kinds,
                &pkg_set,
                interner,
                &profiles,
                &target_data,
            )?
        } else {
            Default::default()
        };

        unit_graph.extend(build_unit_dependencies(
            ws,
            &pkg_set,
            &resolve,
            &resolved_features,
            std_resolve_features.as_ref(),
            &targeted_root_units,
            &targeted_scrape_units,
            &std_roots,
            build_config.intent,
            &target_data,
            &profiles,
            interner,
        )?);
        root_units.extend(targeted_root_units);
        scrape_units.extend(targeted_scrape_units);
    }

    // TODO: In theory, Cargo should also dedupe the roots, but I'm uncertain
    // what heuristics to use in that case.
    if build_config.intent.wants_deps_docs() {
        remove_duplicate_doc(build_config, &root_units, &mut unit_graph);
    }

    let host_kind_requested = build_config
        .requested_kinds
        .iter()
        .any(CompileKind::is_host);
    // Rebuild the unit graph, replacing the explicit host targets with
    // CompileKind::Host, removing `artifact_target_for_features` and merging any dependencies
    // shared with build and artifact dependencies.
    //
    // NOTE: after this point, all units and the unit graph must be immutable.
    let (root_units, scrape_units, unit_graph) = rebuild_unit_graph_shared(
        interner,
        unit_graph,
        &root_units,
        &scrape_units,
        host_kind_requested.then_some(explicit_host_kind),
        build_config.compile_time_deps_only,
    );

    let units: Vec<_> = unit_graph.keys().sorted().collect();
    let unit_to_index: HashMap<_, _> = units
        .iter()
        .enumerate()
        .map(|(i, &unit)| (unit.clone(), UnitIndex(i as u64)))
        .collect();

    if let Some(logger) = logger {
        let root_unit_indexes: HashSet<_> =
            root_units.iter().map(|unit| unit_to_index[&unit]).collect();

        for (index, unit) in units.into_iter().enumerate() {
            let index = UnitIndex(index as u64);
            let dependencies = unit_graph
                .get(unit)
                .map(|deps| {
                    deps.iter()
                        .filter_map(|dep| unit_to_index.get(&dep.unit).copied())
                        .collect()
                })
                .unwrap_or_default();
            logger.log(LogMessage::UnitRegistered {
                package_id: unit.pkg.package_id().to_spec(),
                target: (&unit.target).into(),
                mode: unit.mode,
                platform: target_data.short_name(&unit.kind).to_owned(),
                index,
                features: unit
                    .features
                    .iter()
                    .map(|s| s.as_str().to_owned())
                    .collect(),
                requested: root_unit_indexes.contains(&index),
                dependencies,
            });
        }
        let elapsed = ws.gctx().creation_time().elapsed().as_secs_f64();
        logger.log(LogMessage::UnitGraphFinished { elapsed });
    }

    let mut extra_compiler_args = HashMap::new();
    if let Some(args) = extra_args {
        if root_units.len() != 1 {
            anyhow::bail!(
                "extra arguments to `{}` can only be passed to one \
                 target, consider filtering\nthe package by passing, \
                 e.g., `--lib` or `--bin NAME` to specify a single target",
                extra_args_name
            );
        }
        extra_compiler_args.insert(root_units[0].clone(), args);
    }

    for unit in root_units
        .iter()
        .filter(|unit| unit.mode.is_doc() || unit.mode.is_doc_test())
        .filter(|unit| rustdoc_document_private_items || unit.target.is_bin())
    {
        // Add `--document-private-items` rustdoc flag if requested or if
        // the target is a binary. Binary crates get their private items
        // documented by default.
        let mut args = vec!["--document-private-items".into()];
        if unit.target.is_bin() {
            // This warning only makes sense if it's possible to document private items
            // sometimes and ignore them at other times. But cargo consistently passes
            // `--document-private-items`, so the warning isn't useful.
            args.push("-Arustdoc::private-intra-doc-links".into());
        }
        extra_compiler_args
            .entry(unit.clone())
            .or_default()
            .extend(args);
    }

    // Validate target src path for each root unit
    let mut error_count: usize = 0;
    for unit in &root_units {
        if let Some(target_src_path) = unit.target.src_path().path() {
            validate_target_path_as_source_file(
                gctx,
                target_src_path,
                unit.target.name(),
                unit.target.kind(),
                unit.pkg.manifest_path(),
                &mut error_count,
            )?
        }
    }
    if error_count > 0 {
        let plural: &str = if error_count > 1 { "s" } else { "" };
        anyhow::bail!(
            "could not compile due to {error_count} previous target resolution error{plural}"
        );
    }

    if honor_rust_version.unwrap_or(true) {
        let rustc_version = target_data.rustc.version.clone().into();

        let mut incompatible = Vec::new();
        let mut local_incompatible = false;
        for unit in unit_graph.keys() {
            let Some(pkg_msrv) = unit.pkg.rust_version() else {
                continue;
            };

            if pkg_msrv.is_compatible_with(&rustc_version) {
                continue;
            }

            local_incompatible |= unit.is_local();
            incompatible.push((unit, pkg_msrv));
        }
        if !incompatible.is_empty() {
            use std::fmt::Write as _;

            let plural = if incompatible.len() == 1 { "" } else { "s" };
            let mut message = format!(
                "rustc {rustc_version} is not supported by the following package{plural}:\n"
            );
            incompatible.sort_by_key(|(unit, _)| (unit.pkg.name(), unit.pkg.version()));
            for (unit, msrv) in incompatible {
                let name = &unit.pkg.name();
                let version = &unit.pkg.version();
                writeln!(&mut message, "  {name}@{version} requires rustc {msrv}").unwrap();
            }
            if ws.is_ephemeral() {
                if ws.ignore_lock() {
                    writeln!(
                        &mut message,
                        "Try re-running `cargo install` with `--locked`"
                    )
                    .unwrap();
                }
            } else if !local_incompatible {
                writeln!(
                    &mut message,
                    "Either upgrade rustc or select compatible dependency versions with
`cargo update <name>@<current-ver> --precise <compatible-ver>`
where `<compatible-ver>` is the latest version supporting rustc {rustc_version}",
                )
                .unwrap();
            }
            return Err(anyhow::Error::msg(message));
        }
    }

    let bcx = BuildContext::new(
        ws,
        logger,
        pkg_set,
        build_config,
        profiles,
        extra_compiler_args,
        target_data,
        root_units,
        unit_graph,
        unit_to_index,
        scrape_units,
    )?;

    Ok(bcx)
}

// Checks if a target path exists and is a source file, not a directory
fn validate_target_path_as_source_file(
    gctx: &GlobalContext,
    target_path: &std::path::Path,
    target_name: &str,
    target_kind: &TargetKind,
    unit_manifest_path: &std::path::Path,
    error_count: &mut usize,
) -> CargoResult<()> {
    if !target_path.exists() {
        *error_count += 1;

        let err_msg = format!(
            "can't find {} `{}` at path `{}`",
            target_kind.description(),
            target_name,
            target_path.display()
        );

        let group = Group::with_title(Level::ERROR.primary_title(err_msg)).element(Origin::path(
            unit_manifest_path.to_str().unwrap_or_default(),
        ));

        gctx.shell().print_report(&[group], true)?;
    } else if target_path.is_dir() {
        *error_count += 1;

        // suggest setting the path to a likely entrypoint
        let main_rs = target_path.join("main.rs");
        let lib_rs = target_path.join("lib.rs");

        let suggested_files_opt = match target_kind {
            TargetKind::Lib(_) | TargetKind::ExampleLib(_) => {
                lib_rs.exists().then(|| format!("`{}`", lib_rs.display()))
            }
            TargetKind::Bin | TargetKind::Test | TargetKind::ExampleBin | TargetKind::Bench => {
                main_rs.exists().then(|| format!("`{}`", main_rs.display()))
            }
            TargetKind::CustomBuild => None,
        };

        let err_msg = format!(
            "path `{}` for {} `{}` is a directory, but a source file was expected.",
            target_path.display(),
            target_kind.description(),
            target_name,
        );
        let mut group = Group::with_title(Level::ERROR.primary_title(err_msg)).element(
            Origin::path(unit_manifest_path.to_str().unwrap_or_default()),
        );

        if let Some(suggested_files) = suggested_files_opt {
            group = group.element(
                Level::HELP.message(format!("an entry point exists at {}", suggested_files)),
            );
        }

        gctx.shell().print_report(&[group], true)?;
    }

    Ok(())
}

/// This is used to rebuild the unit graph, sharing host dependencies if possible,
/// and applying other unit adjustments based on the whole graph.
///
/// This will translate any unit's `CompileKind::Target(host)` to
/// `CompileKind::Host` if `to_host` is not `None` and the kind is equal to `to_host`.
/// This also handles generating the unit `dep_hash`, and merging shared units if possible.
///
/// This is necessary because if normal dependencies used `CompileKind::Host`,
/// there would be no way to distinguish those units from build-dependency
/// units or artifact dependency units.
/// This can cause a problem if a shared normal/build/artifact dependency needs
/// to link to another dependency whose features differ based on whether or
/// not it is a normal, build or artifact dependency. If all units used
/// `CompileKind::Host`, then they would end up being identical, causing a
/// collision in the `UnitGraph`, and Cargo would end up randomly choosing one
/// value or the other.
///
/// The solution is to keep normal, build and artifact dependencies separate when
/// building the unit graph, and then run this second pass which will try to
/// combine shared dependencies safely. By adding a hash of the dependencies
/// to the `Unit`, this allows the `CompileKind` to be changed back to `Host`
/// and `artifact_target_for_features` to be removed without fear of an unwanted
/// collision for build or artifact dependencies.
///
/// This is also responsible for adjusting the `strip` profile option to
/// opportunistically strip if debug is 0 for all dependencies. This helps
/// remove debuginfo added by the standard library.
///
/// This is also responsible for adjusting the `debug` setting for host
/// dependencies, turning off debug if the user has not explicitly enabled it,
/// and the unit is not shared with a target unit.
///
/// This is also responsible for adjusting whether each unit should be compiled
/// at all, according to the `--compile-time-deps` flag.
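///
/// As a rough sketch, the per-unit kind canonicalization performed below
/// boils down to the following (the real pass also recomputes `dep_hash` and
/// adjusts the `strip`/`debug` profile settings):
///
/// ```ignore
/// let canonical_kind = match to_host {
///     // A unit explicitly targeting the host triple is folded back into
///     // `CompileKind::Host` so it can unify with host-only dependencies.
///     Some(host_kind) if host_kind == unit.kind => CompileKind::Host,
///     _ => unit.kind,
/// };
/// ```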
fn rebuild_unit_graph_shared(
    interner: &UnitInterner,
    unit_graph: UnitGraph,
    roots: &[Unit],
    scrape_units: &[Unit],
    to_host: Option<CompileKind>,
    compile_time_deps_only: bool,
) -> (Vec<Unit>, Vec<Unit>, UnitGraph) {
    let mut result = UnitGraph::new();
    // Map of the old unit to the new unit, used to avoid recursing into units
    // that have already been computed to improve performance.
    let mut memo = HashMap::new();
    let new_roots = roots
        .iter()
        .map(|root| {
            traverse_and_share(
                interner,
                &mut memo,
                &mut result,
                &unit_graph,
                root,
                true,
                false,
                to_host,
                compile_time_deps_only,
            )
        })
        .collect();
    // If no unit in the unit graph ended up having scrape units attached as dependencies,
    // then they won't have been discovered in traverse_and_share and hence won't be in
    // memo. So we filter out missing scrape units.
    let new_scrape_units = scrape_units
        .iter()
        .map(|unit| memo.get(unit).unwrap().clone())
        .collect();
    (new_roots, new_scrape_units, result)
}

/// Recursive function for rebuilding the graph.
///
/// This walks `unit_graph`, starting at the given `unit`. It inserts the new
/// units into `new_graph`, and returns a new updated version of the given
/// unit (`dep_hash` is filled in, and `kind` switched if necessary).
fn traverse_and_share(
    interner: &UnitInterner,
    memo: &mut HashMap<Unit, Unit>,
    new_graph: &mut UnitGraph,
    unit_graph: &UnitGraph,
    unit: &Unit,
    unit_is_root: bool,
    unit_is_for_host: bool,
    to_host: Option<CompileKind>,
    compile_time_deps_only: bool,
) -> Unit {
    if let Some(new_unit) = memo.get(unit) {
        // Already computed, no need to recompute.
        return new_unit.clone();
    }
    let mut dep_hash = StableHasher::new();
    let skip_non_compile_time_deps = compile_time_deps_only
        && (!unit.target.is_compile_time_dependency() ||
            // A root unit is not a dependency unless other units depend on it.
            unit_is_root);
    let new_deps: Vec<_> = unit_graph[unit]
        .iter()
        .map(|dep| {
            let new_dep_unit = traverse_and_share(
                interner,
                memo,
                new_graph,
                unit_graph,
                &dep.unit,
                false,
                dep.unit_for.is_for_host(),
                to_host,
                // If we should compile the current unit, we should also compile
                // its dependencies. If not, we should compile only its
                // compile-time dependencies.
                skip_non_compile_time_deps,
            );
            new_dep_unit.hash(&mut dep_hash);
            UnitDep {
                unit: new_dep_unit,
                ..dep.clone()
            }
        })
        .collect();
    // Here, we have recursively traversed this unit's dependencies, and hashed them: we can
    // finalize the dep hash.
    let new_dep_hash = Hasher::finish(&dep_hash);

    // This is the key part of the sharing process: if the unit is a runtime dependency whose
    // target is the same as the host, we canonicalize the compile kind to `CompileKind::Host`.
    // A possible host dependency counterpart to this unit would have that kind, and if such a unit
    // exists in the current `unit_graph`, they will unify in the new unit graph map `new_graph`.
    // The resulting unit graph will be optimized with fewer units, thanks to sharing these host
    // dependencies.
    let canonical_kind = match to_host {
        Some(to_host) if to_host == unit.kind => CompileKind::Host,
        _ => unit.kind,
    };

    let mut profile = unit.profile.clone();
    if profile.strip.is_deferred() {
        // If strip was not manually set, and all dependencies of this unit together
        // with this unit have debuginfo turned off, we enable debuginfo stripping.
        // This will remove pre-existing debug symbols coming from the standard library.
        if !profile.debuginfo.is_turned_on()
            && new_deps
                .iter()
                .all(|dep| !dep.unit.profile.debuginfo.is_turned_on())
        {
            profile.strip = profile.strip.strip_debuginfo();
        }
    }

    // If this is a build dependency, and it's not shared with runtime dependencies, we can weaken
    // its debuginfo level to optimize build times. We do nothing if it's an artifact dependency,
    // as it and its debuginfo may end up embedded in the main program.
    if unit_is_for_host
        && to_host.is_some()
        && profile.debuginfo.is_deferred()
        && !unit.artifact.is_true()
    {
        // We create a "probe" test to see if a unit with the same explicit debuginfo level exists
        // in the graph. This is the level we'd expect if it was set manually or the default value
        // set by a profile for a runtime dependency: its canonical value.
        let canonical_debuginfo = profile.debuginfo.finalize();
        let mut canonical_profile = profile.clone();
        canonical_profile.debuginfo = canonical_debuginfo;
        let unit_probe = interner.intern(
            &unit.pkg,
            &unit.target,
            canonical_profile,
            to_host.unwrap(),
            unit.mode,
            unit.features.clone(),
            unit.rustflags.clone(),
            unit.rustdocflags.clone(),
            unit.links_overrides.clone(),
            unit.is_std,
            unit.dep_hash,
            unit.artifact,
            unit.artifact_target_for_features,
            unit.skip_non_compile_time_dep,
        );

        // We can now turn the deferred value into its actual final value.
        profile.debuginfo = if unit_graph.contains_key(&unit_probe) {
            // The unit is present in both build time and runtime subgraphs: we canonicalize its
            // level to the other unit's, thus ensuring reuse between the two to optimize build times.
            canonical_debuginfo
        } else {
            // The unit is only present in the build time subgraph, we can weaken its debuginfo
            // level to optimize build times.
            canonical_debuginfo.weaken()
        }
    }

    let new_unit = interner.intern(
        &unit.pkg,
        &unit.target,
        profile,
        canonical_kind,
        unit.mode,
        unit.features.clone(),
        unit.rustflags.clone(),
        unit.rustdocflags.clone(),
        unit.links_overrides.clone(),
        unit.is_std,
        new_dep_hash,
        unit.artifact,
        // Since `dep_hash` is now filled in, there's no need to specify the artifact target
        // for target-dependent feature resolution
        None,
        skip_non_compile_time_deps,
    );
    if !unit_is_root || !compile_time_deps_only {
        assert!(memo.insert(unit.clone(), new_unit.clone()).is_none());
    }
    new_graph.entry(new_unit.clone()).or_insert(new_deps);
    new_unit
}

/// Removes duplicate `CompileMode::Doc` units that would cause problems with
/// filename collisions.
///
/// Rustdoc only separates units by crate name in the file directory
/// structure. If any two units with the same crate name exist, this would
/// cause a filename collision, causing different rustdoc invocations to stomp
/// on one another's files.
///
/// Unfortunately this does not remove all duplicates, as some of them are
/// either user error, or difficult to remove. Cases that I can think of:
///
/// - Same target name in different packages. See the `collision_doc` test.
/// - Different sources. See `collision_doc_sources` test.
///
/// Ideally this would not be necessary.
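///
/// For example (versions are illustrative), if `foo v1.0.0` and `foo v2.0.0`
/// both appear as doc units for the same source and compile kind, the older
/// unit is dropped, unless it is one of the root units requested by the user.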
fn remove_duplicate_doc(
    build_config: &BuildConfig,
    root_units: &[Unit],
    unit_graph: &mut UnitGraph,
) {
    // First, create a mapping of crate_name -> Unit so we can see where the
    // duplicates are.
    let mut all_docs: HashMap<String, Vec<Unit>> = HashMap::new();
    for unit in unit_graph.keys() {
        if unit.mode.is_doc() {
            all_docs
                .entry(unit.target.crate_name())
                .or_default()
                .push(unit.clone());
        }
    }
    // Keep track of units to remove so that they can be efficiently removed
    // from the unit_deps.
    let mut removed_units: HashSet<Unit> = HashSet::new();
    let mut remove = |units: Vec<Unit>, reason: &str, cb: &dyn Fn(&Unit) -> bool| -> Vec<Unit> {
        let (to_remove, remaining_units): (Vec<Unit>, Vec<Unit>) = units
            .into_iter()
            .partition(|unit| cb(unit) && !root_units.contains(unit));
        for unit in to_remove {
            tracing::debug!(
                "removing duplicate doc due to {} for package {} target `{}`",
                reason,
                unit.pkg,
                unit.target.name()
            );
            unit_graph.remove(&unit);
            removed_units.insert(unit);
        }
        remaining_units
    };
    // Iterate over the duplicates and try to remove them from unit_graph.
    for (_crate_name, mut units) in all_docs {
        if units.len() == 1 {
            continue;
        }
        // Prefer target over host if --target was not specified.
        if build_config
            .requested_kinds
            .iter()
            .all(CompileKind::is_host)
        {
            // Note these duplicates may not be real duplicates, since they
            // might get merged in rebuild_unit_graph_shared. Either way, it
            // shouldn't hurt to remove them early (although the report in the
            // log might be confusing).
            units = remove(units, "host/target merger", &|unit| unit.kind.is_host());
            if units.len() == 1 {
                continue;
            }
        }
        // Prefer newer versions over older.
        let mut source_map: HashMap<(InternedString, SourceId, CompileKind), Vec<Unit>> =
            HashMap::new();
        for unit in units {
            let pkg_id = unit.pkg.package_id();
            // Note, this does not detect duplicates from different sources.
            source_map
                .entry((pkg_id.name(), pkg_id.source_id(), unit.kind))
                .or_default()
                .push(unit);
        }
        let mut remaining_units = Vec::new();
        for (_key, mut units) in source_map {
            if units.len() > 1 {
                units.sort_by(|a, b| a.pkg.version().partial_cmp(b.pkg.version()).unwrap());
                // Remove any entries with version < newest.
                let newest_version = units.last().unwrap().pkg.version().clone();
                let keep_units = remove(units, "older version", &|unit| {
                    unit.pkg.version() < &newest_version
                });
                remaining_units.extend(keep_units);
            } else {
                remaining_units.extend(units);
            }
        }
        if remaining_units.len() == 1 {
            continue;
        }
        // Are there other heuristics to remove duplicates that would make
        // sense? Maybe prefer path sources over all others?
    }
    // Also remove units from the unit_deps so there aren't any dangling edges.
    for unit_deps in unit_graph.values_mut() {
        unit_deps.retain(|unit_dep| !removed_units.contains(&unit_dep.unit));
    }
    // Remove any orphan units that were detached from the graph.
    let mut visited = HashSet::new();
    fn visit(unit: &Unit, graph: &UnitGraph, visited: &mut HashSet<Unit>) {
        if !visited.insert(unit.clone()) {
            return;
        }
        for dep in &graph[unit] {
            visit(&dep.unit, graph, visited);
        }
    }
    for unit in root_units {
        visit(unit, unit_graph, &mut visited);
    }
    unit_graph.retain(|unit, _| visited.contains(unit));
}

/// Override crate types for given units.
///
/// This is primarily used by `cargo rustc --crate-type`.
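///
/// For example, `cargo rustc --lib --crate-type cdylib` replaces the library
/// target's crate types with `cdylib` for that invocation (the crate type
/// here is only an illustration).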
fn override_rustc_crate_types(
    units: &mut [Unit],
    args: &[String],
    interner: &UnitInterner,
) -> CargoResult<()> {
    if units.len() != 1 {
        anyhow::bail!(
            "crate types to rustc can only be passed to one \
            target, consider filtering\nthe package by passing, \
            e.g., `--lib` or `--example` to specify a single target"
        );
    }

    let unit = &units[0];
    let override_unit = |f: fn(Vec<CrateType>) -> TargetKind| {
        let crate_types = args.iter().map(|s| s.into()).collect();
        let mut target = unit.target.clone();
        target.set_kind(f(crate_types));
        interner.intern(
            &unit.pkg,
            &target,
            unit.profile.clone(),
            unit.kind,
            unit.mode,
            unit.features.clone(),
            unit.rustflags.clone(),
            unit.rustdocflags.clone(),
            unit.links_overrides.clone(),
            unit.is_std,
            unit.dep_hash,
            unit.artifact,
            unit.artifact_target_for_features,
            unit.skip_non_compile_time_dep,
        )
    };
    units[0] = match unit.target.kind() {
        TargetKind::Lib(_) => override_unit(TargetKind::Lib),
        TargetKind::ExampleLib(_) => override_unit(TargetKind::ExampleLib),
        _ => {
            anyhow::bail!(
                "crate types can only be specified for libraries and example libraries.\n\
                Binaries, tests, and benchmarks are always the `bin` crate type"
            );
        }
    };

    Ok(())
}

/// Gets all of the features enabled for a package, plus its dependencies'
/// features.
///
/// Dependencies are added as `dep_name/feat_name` because `required-features`
/// wants to support that syntax.
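///
/// For example (names are illustrative), if the package activates its own
/// `serde` feature and its dependency `bar` has its `json` feature enabled,
/// the returned set contains both `"serde"` and `"bar/json"`.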
pub fn resolve_all_features(
    resolve_with_overrides: &Resolve,
    resolved_features: &features::ResolvedFeatures,
    package_set: &PackageSet<'_>,
    package_id: PackageId,
    has_dev_units: HasDevUnits,
    requested_kinds: &[CompileKind],
    target_data: &RustcTargetData<'_>,
    force_all_targets: ForceAllTargets,
) -> HashSet<String> {
    let mut features: HashSet<String> = resolved_features
        .activated_features(package_id, FeaturesFor::NormalOrDev)
        .iter()
        .map(|s| s.to_string())
        .collect();

    // Include features enabled for use by dependencies so targets can also use them with the
    // required-features field when deciding whether to be built or skipped.
    let filtered_deps = PackageSet::filter_deps(
        package_id,
        resolve_with_overrides,
        has_dev_units,
        requested_kinds,
        target_data,
        force_all_targets,
    );
    for (dep_id, deps) in filtered_deps {
        let is_proc_macro = package_set
            .get_one(dep_id)
            .expect("packages downloaded")
            .proc_macro();
        for dep in deps {
            let features_for = FeaturesFor::from_for_host(is_proc_macro || dep.is_build());
            for feature in resolved_features
                .activated_features_unverified(dep_id, features_for)
                .unwrap_or_default()
            {
                features.insert(format!("{}/{}", dep.name_in_toml(), feature));
            }
        }
    }

    features
}