cargo/ops/cargo_compile/mod.rs

1//! The entry point for starting the compilation process for commands like
2//! `build`, `test`, `doc`, `rustc`, etc.
3//!
4//! The [`compile`] function will do all the work to compile a workspace. A
5//! rough outline is:
6//!
7//! 1. Resolve the dependency graph (see [`ops::resolve`]).
8//! 2. Download any packages needed (see [`PackageSet`]).
9//! 3. Generate a list of top-level "units" of work for the targets the user
10//!   requested on the command-line. Each [`Unit`] corresponds to a compiler
11//!   invocation. This is done in this module ([`UnitGenerator::generate_root_units`]).
12//! 4. Starting from the root [`Unit`]s, generate the [`UnitGraph`] by walking the dependency graph
13//!   from the resolver.  See also [`unit_dependencies`].
14//! 5. Construct the [`BuildContext`] with all of the information collected so
15//!   far. This is the end of the "front end" of compilation.
16//! 6. Create a [`BuildRunner`] which coordinates the compilation process
17//!   and will perform the following steps:
18//!     1. Prepare the `target` directory (see [`Layout`]).
19//!     2. Create a [`JobQueue`]. The queue checks the
20//!       fingerprint of each `Unit` to determine if it should run or be
21//!       skipped.
22//!     3. Execute the queue via [`drain_the_queue`]. Each leaf in the queue's dependency graph is
23//!        executed, and then removed from the graph when finished. This repeats until the queue is
24//!        empty.  Note that this is the only point in cargo that currently uses threads.
//! 7. The result of the compilation is stored in the [`Compilation`] struct. This can be used for
//!    various things, such as running tests after the compilation has finished.
27//!
//! **Note**: "target" inside this module generally refers to a ["Cargo Target"],
//! which corresponds to an artifact that will be built in a package. Not to be
//! confused with a target triple or target architecture.
31//!
32//! [`unit_dependencies`]: crate::core::compiler::unit_dependencies
33//! [`Layout`]: crate::core::compiler::Layout
34//! [`JobQueue`]: crate::core::compiler::job_queue
35//! [`drain_the_queue`]: crate::core::compiler::job_queue
36//! ["Cargo Target"]: https://doc.rust-lang.org/nightly/cargo/reference/cargo-targets.html
37
38use std::collections::{HashMap, HashSet};
39use std::hash::{Hash, Hasher};
40use std::sync::Arc;
41
42use crate::core::compiler::UserIntent;
43use crate::core::compiler::unit_dependencies::build_unit_dependencies;
44use crate::core::compiler::unit_graph::{self, UnitDep, UnitGraph};
45use crate::core::compiler::{BuildConfig, BuildContext, BuildRunner, Compilation};
46use crate::core::compiler::{CompileKind, CompileTarget, RustcTargetData, Unit};
47use crate::core::compiler::{CrateType, TargetInfo, apply_env_config, standard_lib};
48use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner};
49use crate::core::profiles::Profiles;
50use crate::core::resolver::features::{self, CliFeatures, FeaturesFor};
51use crate::core::resolver::{HasDevUnits, Resolve};
52use crate::core::{PackageId, PackageSet, SourceId, TargetKind, Workspace};
53use crate::drop_println;
54use crate::ops;
55use crate::ops::resolve::{SpecsAndResolvedFeatures, WorkspaceResolve};
56use crate::util::BuildLogger;
57use crate::util::context::{GlobalContext, WarningHandling};
58use crate::util::interning::InternedString;
59use crate::util::log_message::LogMessage;
60use crate::util::{CargoResult, StableHasher};
61
62mod compile_filter;
63use annotate_snippets::{Group, Level, Origin};
64pub use compile_filter::{CompileFilter, FilterRule, LibRule};
65
66pub(super) mod unit_generator;
67use unit_generator::UnitGenerator;
68
69mod packages;
70
71pub use packages::Packages;
72
73/// Contains information about how a package should be compiled.
74///
/// Note on the distinction between `CompileOptions` and [`BuildConfig`]:
/// `BuildConfig` contains values that need to be retained after the
/// [`BuildContext`] is created; the remaining fields are no longer needed at
/// that point. Think of it this way: `CompileOptions` holds the high-level
/// settings requested on the command-line, while `BuildConfig` holds the
/// low-level settings for actually driving `rustc`.
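///
/// As a hedged sketch, a caller might start from [`CompileOptions::new`] and then
/// override individual fields; the `CliFeatures::from_command_line` signature used
/// here is an assumption and may differ:
///
/// ```ignore
/// use cargo::core::compiler::UserIntent;
/// use cargo::core::resolver::features::CliFeatures;
/// use cargo::ops::{CompileOptions, Packages};
///
/// // Assumes a `gctx: &GlobalContext` is in scope and errors propagate via `?`.
/// // Roughly `cargo build -p foo --features serde`.
/// let mut options = CompileOptions::new(&gctx, UserIntent::Build)?;
/// options.spec = Packages::Packages(vec!["foo".to_string()]);
/// options.cli_features =
///     CliFeatures::from_command_line(&["serde".to_string()], false, true)?;
/// ```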
81#[derive(Debug, Clone)]
82pub struct CompileOptions {
83    /// Configuration information for a rustc build
84    pub build_config: BuildConfig,
85    /// Feature flags requested by the user.
86    pub cli_features: CliFeatures,
87    /// A set of packages to build.
88    pub spec: Packages,
89    /// Filter to apply to the root package to select which targets will be
90    /// built.
91    pub filter: CompileFilter,
92    /// Extra arguments to be passed to rustdoc (single target only)
93    pub target_rustdoc_args: Option<Vec<String>>,
    /// Extra arguments to be passed to `rustc` for the specified target (single
    /// target only); note that these only apply to the *final* invocation of `rustc`.
96    pub target_rustc_args: Option<Vec<String>>,
97    /// Crate types to be passed to rustc (single target only)
98    pub target_rustc_crate_types: Option<Vec<String>>,
    /// Whether the `--document-private-items` flag was specified and should
100    /// be forwarded to `rustdoc`.
101    pub rustdoc_document_private_items: bool,
102    /// Whether the build process should check the minimum Rust version
103    /// defined in the cargo metadata for a crate.
104    pub honor_rust_version: Option<bool>,
105}
106
107impl CompileOptions {
108    pub fn new(gctx: &GlobalContext, intent: UserIntent) -> CargoResult<CompileOptions> {
109        let jobs = None;
110        let keep_going = false;
111        Ok(CompileOptions {
112            build_config: BuildConfig::new(gctx, jobs, keep_going, &[], intent)?,
113            cli_features: CliFeatures::new_all(false),
114            spec: ops::Packages::Packages(Vec::new()),
115            filter: CompileFilter::Default {
116                required_features_filterable: false,
117            },
118            target_rustdoc_args: None,
119            target_rustc_args: None,
120            target_rustc_crate_types: None,
121            rustdoc_document_private_items: false,
122            honor_rust_version: None,
123        })
124    }
125}
126
127/// Compiles!
128///
129/// This uses the [`DefaultExecutor`]. To use a custom [`Executor`], see [`compile_with_exec`].
130pub fn compile<'a>(ws: &Workspace<'a>, options: &CompileOptions) -> CargoResult<Compilation<'a>> {
131    let exec: Arc<dyn Executor> = Arc::new(DefaultExecutor);
132    compile_with_exec(ws, options, &exec)
133}
134
135/// Like [`compile`] but allows specifying a custom [`Executor`]
136/// that will be able to intercept build calls and add custom logic.
137///
138/// [`compile`] uses [`DefaultExecutor`] which just passes calls through.
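///
/// As a hedged sketch (the exact [`Executor`] trait methods live in
/// `core::compiler` and may differ from what is assumed here), an executor that
/// logs every compiler invocation before delegating could look like:
///
/// ```ignore
/// use std::sync::Arc;
/// use cargo::CargoResult;
/// use cargo::core::{PackageId, Target};
/// use cargo::core::compiler::{CompileMode, Executor};
/// use cargo::ops;
/// use cargo_util::ProcessBuilder;
///
/// struct LoggingExecutor;
///
/// impl Executor for LoggingExecutor {
///     fn exec(
///         &self,
///         cmd: &ProcessBuilder,
///         _id: PackageId,
///         _target: &Target,
///         _mode: CompileMode,
///         on_stdout_line: &mut dyn FnMut(&str) -> CargoResult<()>,
///         on_stderr_line: &mut dyn FnMut(&str) -> CargoResult<()>,
///     ) -> CargoResult<()> {
///         // Log the command, then run it exactly as the default executor would.
///         eprintln!("running: {cmd}");
///         cmd.exec_with_streaming(on_stdout_line, on_stderr_line, false)
///             .map(drop)
///     }
/// }
///
/// // `ws` and `options` as in [`compile`].
/// let exec: Arc<dyn Executor> = Arc::new(LoggingExecutor);
/// let compilation = ops::compile_with_exec(&ws, &options, &exec)?;
/// ```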
139pub fn compile_with_exec<'a>(
140    ws: &Workspace<'a>,
141    options: &CompileOptions,
142    exec: &Arc<dyn Executor>,
143) -> CargoResult<Compilation<'a>> {
144    ws.emit_warnings()?;
145    let compilation = compile_ws(ws, options, exec)?;
146    if ws.gctx().warning_handling()? == WarningHandling::Deny && compilation.lint_warning_count > 0
147    {
148        anyhow::bail!("warnings are denied by `build.warnings` configuration")
149    }
150    Ok(compilation)
151}
152
153/// Like [`compile_with_exec`] but without warnings from manifest parsing.
154#[tracing::instrument(skip_all)]
155pub fn compile_ws<'a>(
156    ws: &Workspace<'a>,
157    options: &CompileOptions,
158    exec: &Arc<dyn Executor>,
159) -> CargoResult<Compilation<'a>> {
160    let interner = UnitInterner::new();
161    let logger = BuildLogger::maybe_new(ws)?;
162
163    if let Some(ref logger) = logger {
164        let rustc = ws.gctx().load_global_rustc(Some(ws))?;
165        let num_cpus = std::thread::available_parallelism()
166            .ok()
167            .map(|x| x.get() as u64);
168        logger.log(LogMessage::BuildStarted {
169            cwd: ws.gctx().cwd().to_path_buf(),
170            host: rustc.host.to_string(),
171            jobs: options.build_config.jobs,
172            num_cpus,
173            profile: options.build_config.requested_profile.to_string(),
174            rustc_version: rustc.version.to_string(),
175            rustc_version_verbose: rustc.verbose_version.clone(),
176            target_dir: ws.target_dir().as_path_unlocked().to_path_buf(),
177            workspace_root: ws.root().to_path_buf(),
178        });
179    }
180
181    let bcx = create_bcx(ws, options, &interner, logger.as_ref())?;
182
183    if options.build_config.unit_graph {
184        unit_graph::emit_serialized_unit_graph(&bcx.roots, &bcx.unit_graph, ws.gctx())?;
185        return Compilation::new(&bcx);
186    }
187    crate::core::gc::auto_gc(bcx.gctx);
188    let build_runner = BuildRunner::new(&bcx)?;
189    if options.build_config.dry_run {
190        build_runner.dry_run()
191    } else {
192        build_runner.compile(exec)
193    }
194}
195
196/// Executes `rustc --print <VALUE>`.
197///
198/// * `print_opt_value` is the VALUE passed through.
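///
/// As a hedged sketch, this is roughly what backs `cargo rustc --print cfg`:
///
/// ```ignore
/// // `ws` and `options` assumed in scope; prints the compiler's `--print cfg`
/// // output once per requested target kind, with configured rustflags applied.
/// ops::print(&ws, &options, "cfg")?;
/// ```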
199pub fn print<'a>(
200    ws: &Workspace<'a>,
201    options: &CompileOptions,
202    print_opt_value: &str,
203) -> CargoResult<()> {
204    let CompileOptions {
205        ref build_config,
206        ref target_rustc_args,
207        ..
208    } = *options;
209    let gctx = ws.gctx();
210    let rustc = gctx.load_global_rustc(Some(ws))?;
211    for (index, kind) in build_config.requested_kinds.iter().enumerate() {
212        if index != 0 {
213            drop_println!(gctx);
214        }
215        let target_info = TargetInfo::new(gctx, &build_config.requested_kinds, &rustc, *kind)?;
216        let mut process = rustc.process();
217        apply_env_config(gctx, &mut process)?;
218        process.args(&target_info.rustflags);
219        if let Some(args) = target_rustc_args {
220            process.args(args);
221        }
222        if let CompileKind::Target(t) = kind {
223            process.arg("--target").arg(t.rustc_target());
224        }
225        process.arg("--print").arg(print_opt_value);
226        process.exec()?;
227    }
228    Ok(())
229}
230
231/// Prepares all required information for the actual compilation.
232///
233/// For how it works and what data it collects,
234/// please see the [module-level documentation](self).
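///
/// A hedged usage sketch (with `ws` and `options` assumed in scope; the `logger`
/// parameter may be `None` when build logging is not enabled):
///
/// ```ignore
/// let interner = UnitInterner::new();
/// let bcx = create_bcx(&ws, &options, &interner, None)?;
/// // The "front end" is now done; `bcx.unit_graph` holds every unit Cargo plans to run.
/// for (unit, deps) in &bcx.unit_graph {
///     println!("{} `{}` has {} dependencies", unit.pkg, unit.target.name(), deps.len());
/// }
/// ```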
235#[tracing::instrument(skip_all)]
236pub fn create_bcx<'a, 'gctx>(
237    ws: &'a Workspace<'gctx>,
238    options: &'a CompileOptions,
239    interner: &'a UnitInterner,
240    logger: Option<&'a BuildLogger>,
241) -> CargoResult<BuildContext<'a, 'gctx>> {
242    let CompileOptions {
243        ref build_config,
244        ref spec,
245        ref cli_features,
246        ref filter,
247        ref target_rustdoc_args,
248        ref target_rustc_args,
249        ref target_rustc_crate_types,
250        rustdoc_document_private_items,
251        honor_rust_version,
252    } = *options;
253    let gctx = ws.gctx();
254
255    // Perform some pre-flight validation.
256    match build_config.intent {
257        UserIntent::Test | UserIntent::Build | UserIntent::Check { .. } | UserIntent::Bench => {
258            if ws.gctx().get_env("RUST_FLAGS").is_ok() {
259                gctx.shell().print_report(
260                    &[Level::WARNING
261                        .secondary_title("ignoring environment variable `RUST_FLAGS`")
262                        .element(Level::HELP.message("rust flags are passed via `RUSTFLAGS`"))],
263                    false,
264                )?;
265            }
266        }
267        UserIntent::Doc { .. } | UserIntent::Doctest => {
268            if ws.gctx().get_env("RUSTDOC_FLAGS").is_ok() {
269                gctx.shell().print_report(
270                    &[Level::WARNING
271                        .secondary_title("ignoring environment variable `RUSTDOC_FLAGS`")
272                        .element(
273                            Level::HELP.message("rustdoc flags are passed via `RUSTDOCFLAGS`"),
274                        )],
275                    false,
276                )?;
277            }
278        }
279    }
280    gctx.validate_term_config()?;
281
282    let mut target_data = RustcTargetData::new(ws, &build_config.requested_kinds)?;
283
284    let specs = spec.to_package_id_specs(ws)?;
285    let has_dev_units = {
        // Rustdoc itself doesn't need dev-dependencies. But to scrape examples from packages in the
        // workspace, if any of those packages need dev-dependencies, then we need to include
        // dev-dependencies in order to scrape those packages.
289        let any_pkg_has_scrape_enabled = ws
290            .members_with_features(&specs, cli_features)?
291            .iter()
292            .any(|(pkg, _)| {
293                pkg.targets()
294                    .iter()
295                    .any(|target| target.is_example() && target.doc_scrape_examples().is_enabled())
296            });
297
298        if filter.need_dev_deps(build_config.intent)
299            || (build_config.intent.is_doc() && any_pkg_has_scrape_enabled)
300        {
301            HasDevUnits::Yes
302        } else {
303            HasDevUnits::No
304        }
305    };
306    let dry_run = false;
307    let resolve = ops::resolve_ws_with_opts(
308        ws,
309        &mut target_data,
310        &build_config.requested_kinds,
311        cli_features,
312        &specs,
313        has_dev_units,
314        crate::core::resolver::features::ForceAllTargets::No,
315        dry_run,
316    )?;
317    let WorkspaceResolve {
318        mut pkg_set,
319        workspace_resolve,
320        targeted_resolve: resolve,
321        specs_and_features,
322    } = resolve;
323
324    let std_resolve_features = if let Some(crates) = &gctx.cli_unstable().build_std {
325        let (std_package_set, std_resolve, std_features) = standard_lib::resolve_std(
326            ws,
327            &mut target_data,
328            &build_config,
329            crates,
330            &build_config.requested_kinds,
331        )?;
332        pkg_set.add_set(std_package_set);
333        Some((std_resolve, std_features))
334    } else {
335        None
336    };
337
338    // Find the packages in the resolver that the user wants to build (those
339    // passed in with `-p` or the defaults from the workspace), and convert
340    // Vec<PackageIdSpec> to a Vec<PackageId>.
341    let to_build_ids = resolve.specs_to_ids(&specs)?;
342    // Now get the `Package` for each `PackageId`. This may trigger a download
343    // if the user specified `-p` for a dependency that is not downloaded.
344    // Dependencies will be downloaded during build_unit_dependencies.
345    let mut to_builds = pkg_set.get_many(to_build_ids)?;
346
347    // The ordering here affects some error messages coming out of cargo, so
348    // let's be test and CLI friendly by always printing in the same order if
349    // there's an error.
350    to_builds.sort_by_key(|p| p.package_id());
351
352    for pkg in to_builds.iter() {
353        pkg.manifest().print_teapot(gctx);
354
355        if build_config.intent.is_any_test()
356            && !ws.is_member(pkg)
357            && pkg.dependencies().iter().any(|dep| !dep.is_transitive())
358        {
359            anyhow::bail!(
360                "package `{}` cannot be tested because it requires dev-dependencies \
361                 and is not a member of the workspace",
362                pkg.name()
363            );
364        }
365    }
366
367    let (extra_args, extra_args_name) = match (target_rustc_args, target_rustdoc_args) {
368        (Some(args), _) => (Some(args.clone()), "rustc"),
369        (_, Some(args)) => (Some(args.clone()), "rustdoc"),
370        _ => (None, ""),
371    };
372
373    if extra_args.is_some() && to_builds.len() != 1 {
374        panic!(
375            "`{}` should not accept multiple `-p` flags",
376            extra_args_name
377        );
378    }
379
380    let profiles = Profiles::new(ws, build_config.requested_profile)?;
381    profiles.validate_packages(
382        ws.profiles(),
383        &mut gctx.shell(),
384        workspace_resolve.as_ref().unwrap_or(&resolve),
385    )?;
386
387    // If `--target` has not been specified, then the unit graph is built
388    // assuming `--target $HOST` was specified. See
389    // `rebuild_unit_graph_shared` for more on why this is done.
390    let explicit_host_kind = CompileKind::Target(CompileTarget::new(&target_data.rustc.host)?);
391    let explicit_host_kinds: Vec<_> = build_config
392        .requested_kinds
393        .iter()
394        .map(|kind| match kind {
395            CompileKind::Host => explicit_host_kind,
396            CompileKind::Target(t) => CompileKind::Target(*t),
397        })
398        .collect();
399
400    let mut units = Vec::new();
401    let mut unit_graph = HashMap::new();
402    let mut scrape_units = Vec::new();
403
404    for SpecsAndResolvedFeatures {
405        specs,
406        resolved_features,
407    } in &specs_and_features
408    {
        // Passing `build_config.requested_kinds` instead of
        // `explicit_host_kinds` here so that `generate_root_units` can do
        // its own special handling of `CompileKind::Host`. It will
        // internally replace the host kind with `explicit_host_kind`
        // before creating each unit.
414        let spec_names = specs.iter().map(|spec| spec.name()).collect::<Vec<_>>();
415        let packages = to_builds
416            .iter()
417            .filter(|package| spec_names.contains(&package.name().as_str()))
418            .cloned()
419            .collect::<Vec<_>>();
420        let generator = UnitGenerator {
421            ws,
422            packages: &packages,
423            spec,
424            target_data: &target_data,
425            filter,
426            requested_kinds: &build_config.requested_kinds,
427            explicit_host_kind,
428            intent: build_config.intent,
429            resolve: &resolve,
430            workspace_resolve: &workspace_resolve,
431            resolved_features: &resolved_features,
432            package_set: &pkg_set,
433            profiles: &profiles,
434            interner,
435            has_dev_units,
436        };
437        let mut targeted_root_units = generator.generate_root_units()?;
438
439        if let Some(args) = target_rustc_crate_types {
440            override_rustc_crate_types(&mut targeted_root_units, args, interner)?;
441        }
442
443        let should_scrape =
444            build_config.intent.is_doc() && gctx.cli_unstable().rustdoc_scrape_examples;
445        let targeted_scrape_units = if should_scrape {
446            generator.generate_scrape_units(&targeted_root_units)?
447        } else {
448            Vec::new()
449        };
450
451        let std_roots = if let Some(crates) = gctx.cli_unstable().build_std.as_ref() {
452            let (std_resolve, std_features) = std_resolve_features.as_ref().unwrap();
453            standard_lib::generate_std_roots(
454                &crates,
455                &targeted_root_units,
456                std_resolve,
457                std_features,
458                &explicit_host_kinds,
459                &pkg_set,
460                interner,
461                &profiles,
462                &target_data,
463            )?
464        } else {
465            Default::default()
466        };
467
468        unit_graph.extend(build_unit_dependencies(
469            ws,
470            &pkg_set,
471            &resolve,
472            &resolved_features,
473            std_resolve_features.as_ref(),
474            &targeted_root_units,
475            &targeted_scrape_units,
476            &std_roots,
477            build_config.intent,
478            &target_data,
479            &profiles,
480            interner,
481        )?);
482        units.extend(targeted_root_units);
483        scrape_units.extend(targeted_scrape_units);
484    }
485
486    // TODO: In theory, Cargo should also dedupe the roots, but I'm uncertain
487    // what heuristics to use in that case.
488    if build_config.intent.wants_deps_docs() {
489        remove_duplicate_doc(build_config, &units, &mut unit_graph);
490    }
491
492    let host_kind_requested = build_config
493        .requested_kinds
494        .iter()
495        .any(CompileKind::is_host);
496    // Rebuild the unit graph, replacing the explicit host targets with
497    // CompileKind::Host, removing `artifact_target_for_features` and merging any dependencies
498    // shared with build and artifact dependencies.
499    (units, scrape_units, unit_graph) = rebuild_unit_graph_shared(
500        interner,
501        unit_graph,
502        &units,
503        &scrape_units,
504        host_kind_requested.then_some(explicit_host_kind),
505        build_config.compile_time_deps_only,
506    );
507
508    let mut extra_compiler_args = HashMap::new();
509    if let Some(args) = extra_args {
510        if units.len() != 1 {
511            anyhow::bail!(
512                "extra arguments to `{}` can only be passed to one \
513                 target, consider filtering\nthe package by passing, \
514                 e.g., `--lib` or `--bin NAME` to specify a single target",
515                extra_args_name
516            );
517        }
518        extra_compiler_args.insert(units[0].clone(), args);
519    }
520
521    for unit in units
522        .iter()
523        .filter(|unit| unit.mode.is_doc() || unit.mode.is_doc_test())
524        .filter(|unit| rustdoc_document_private_items || unit.target.is_bin())
525    {
526        // Add `--document-private-items` rustdoc flag if requested or if
527        // the target is a binary. Binary crates get their private items
528        // documented by default.
529        let mut args = vec!["--document-private-items".into()];
530        if unit.target.is_bin() {
531            // This warning only makes sense if it's possible to document private items
532            // sometimes and ignore them at other times. But cargo consistently passes
533            // `--document-private-items`, so the warning isn't useful.
534            args.push("-Arustdoc::private-intra-doc-links".into());
535        }
536        extra_compiler_args
537            .entry(unit.clone())
538            .or_default()
539            .extend(args);
540    }
541
542    // Validate target src path for each root unit
543    let mut error_count: usize = 0;
544    for unit in &units {
545        if let Some(target_src_path) = unit.target.src_path().path() {
546            validate_target_path_as_source_file(
547                gctx,
548                target_src_path,
549                unit.target.name(),
550                unit.target.kind(),
551                unit.pkg.manifest_path(),
552                &mut error_count,
553            )?
554        }
555    }
556    if error_count > 0 {
557        let plural: &str = if error_count > 1 { "s" } else { "" };
558        anyhow::bail!(
559            "could not compile due to {error_count} previous target resolution error{plural}"
560        );
561    }
562
563    if honor_rust_version.unwrap_or(true) {
564        let rustc_version = target_data.rustc.version.clone().into();
565
566        let mut incompatible = Vec::new();
567        let mut local_incompatible = false;
568        for unit in unit_graph.keys() {
569            let Some(pkg_msrv) = unit.pkg.rust_version() else {
570                continue;
571            };
572
573            if pkg_msrv.is_compatible_with(&rustc_version) {
574                continue;
575            }
576
577            local_incompatible |= unit.is_local();
578            incompatible.push((unit, pkg_msrv));
579        }
580        if !incompatible.is_empty() {
581            use std::fmt::Write as _;
582
583            let plural = if incompatible.len() == 1 { "" } else { "s" };
584            let mut message = format!(
585                "rustc {rustc_version} is not supported by the following package{plural}:\n"
586            );
587            incompatible.sort_by_key(|(unit, _)| (unit.pkg.name(), unit.pkg.version()));
588            for (unit, msrv) in incompatible {
589                let name = &unit.pkg.name();
590                let version = &unit.pkg.version();
591                writeln!(&mut message, "  {name}@{version} requires rustc {msrv}").unwrap();
592            }
593            if ws.is_ephemeral() {
594                if ws.ignore_lock() {
595                    writeln!(
596                        &mut message,
597                        "Try re-running `cargo install` with `--locked`"
598                    )
599                    .unwrap();
600                }
601            } else if !local_incompatible {
602                writeln!(
603                    &mut message,
604                    "Either upgrade rustc or select compatible dependency versions with
605`cargo update <name>@<current-ver> --precise <compatible-ver>`
606where `<compatible-ver>` is the latest version supporting rustc {rustc_version}",
607                )
608                .unwrap();
609            }
610            return Err(anyhow::Error::msg(message));
611        }
612    }
613
614    let bcx = BuildContext::new(
615        ws,
616        logger,
617        pkg_set,
618        build_config,
619        profiles,
620        extra_compiler_args,
621        target_data,
622        units,
623        unit_graph,
624        scrape_units,
625    )?;
626
627    Ok(bcx)
628}
629
630// Checks if a target path exists and is a source file, not a directory
631fn validate_target_path_as_source_file(
632    gctx: &GlobalContext,
633    target_path: &std::path::Path,
634    target_name: &str,
635    target_kind: &TargetKind,
636    unit_manifest_path: &std::path::Path,
637    error_count: &mut usize,
638) -> CargoResult<()> {
639    if !target_path.exists() {
640        *error_count += 1;
641
642        let err_msg = format!(
643            "can't find {} `{}` at path `{}`",
644            target_kind.description(),
645            target_name,
646            target_path.display()
647        );
648
649        let group = Group::with_title(Level::ERROR.primary_title(err_msg)).element(Origin::path(
650            unit_manifest_path.to_str().unwrap_or_default(),
651        ));
652
653        gctx.shell().print_report(&[group], true)?;
654    } else if target_path.is_dir() {
655        *error_count += 1;
656
657        // suggest setting the path to a likely entrypoint
658        let main_rs = target_path.join("main.rs");
659        let lib_rs = target_path.join("lib.rs");
660
        let suggested_files_opt = match target_kind {
            TargetKind::Lib(_) | TargetKind::ExampleLib(_) => lib_rs
                .exists()
                .then(|| format!("`{}`", lib_rs.display())),
            TargetKind::Bin | TargetKind::Test | TargetKind::ExampleBin | TargetKind::Bench => {
                main_rs
                    .exists()
                    .then(|| format!("`{}`", main_rs.display()))
            }
            TargetKind::CustomBuild => None,
        };
706
707        let err_msg = format!(
708            "path `{}` for {} `{}` is a directory, but a source file was expected.",
709            target_path.display(),
710            target_kind.description(),
711            target_name,
712        );
713        let mut group = Group::with_title(Level::ERROR.primary_title(err_msg)).element(
714            Origin::path(unit_manifest_path.to_str().unwrap_or_default()),
715        );
716
717        if let Some(suggested_files) = suggested_files_opt {
718            group = group.element(
719                Level::HELP.message(format!("an entry point exists at {}", suggested_files)),
720            );
721        }
722
723        gctx.shell().print_report(&[group], true)?;
724    }
725
726    Ok(())
727}
728
729/// This is used to rebuild the unit graph, sharing host dependencies if possible,
730/// and applying other unit adjustments based on the whole graph.
731///
732/// This will translate any unit's `CompileKind::Target(host)` to
733/// `CompileKind::Host` if `to_host` is not `None` and the kind is equal to `to_host`.
734/// This also handles generating the unit `dep_hash`, and merging shared units if possible.
735///
736/// This is necessary because if normal dependencies used `CompileKind::Host`,
737/// there would be no way to distinguish those units from build-dependency
738/// units or artifact dependency units.
739/// This can cause a problem if a shared normal/build/artifact dependency needs
740/// to link to another dependency whose features differ based on whether or
741/// not it is a normal, build or artifact dependency. If all units used
742/// `CompileKind::Host`, then they would end up being identical, causing a
743/// collision in the `UnitGraph`, and Cargo would end up randomly choosing one
744/// value or the other.
745///
746/// The solution is to keep normal, build and artifact dependencies separate when
747/// building the unit graph, and then run this second pass which will try to
748/// combine shared dependencies safely. By adding a hash of the dependencies
749/// to the `Unit`, this allows the `CompileKind` to be changed back to `Host`
750/// and `artifact_target_for_features` to be removed without fear of an unwanted
751/// collision for build or artifact dependencies.
752///
753/// This is also responsible for adjusting the `strip` profile option to
754/// opportunistically strip if debug is 0 for all dependencies. This helps
755/// remove debuginfo added by the standard library.
756///
757/// This is also responsible for adjusting the `debug` setting for host
758/// dependencies, turning off debug if the user has not explicitly enabled it,
759/// and the unit is not shared with a target unit.
760///
761/// This is also responsible for adjusting whether each unit should be compiled
/// or not with regard to the `--compile-time-deps` flag.
763fn rebuild_unit_graph_shared(
764    interner: &UnitInterner,
765    unit_graph: UnitGraph,
766    roots: &[Unit],
767    scrape_units: &[Unit],
768    to_host: Option<CompileKind>,
769    compile_time_deps_only: bool,
770) -> (Vec<Unit>, Vec<Unit>, UnitGraph) {
771    let mut result = UnitGraph::new();
772    // Map of the old unit to the new unit, used to avoid recursing into units
773    // that have already been computed to improve performance.
774    let mut memo = HashMap::new();
775    let new_roots = roots
776        .iter()
777        .map(|root| {
778            traverse_and_share(
779                interner,
780                &mut memo,
781                &mut result,
782                &unit_graph,
783                root,
784                true,
785                false,
786                to_host,
787                compile_time_deps_only,
788            )
789        })
790        .collect();
    // If no unit in the unit graph ended up having scrape units attached as dependencies,
    // then they won't have been discovered in traverse_and_share and hence won't be in
    // memo. So we filter out missing scrape units.
    let new_scrape_units = scrape_units
        .iter()
        .filter_map(|unit| memo.get(unit).cloned())
        .collect();
798    (new_roots, new_scrape_units, result)
799}
800
801/// Recursive function for rebuilding the graph.
802///
803/// This walks `unit_graph`, starting at the given `unit`. It inserts the new
804/// units into `new_graph`, and returns a new updated version of the given
805/// unit (`dep_hash` is filled in, and `kind` switched if necessary).
806fn traverse_and_share(
807    interner: &UnitInterner,
808    memo: &mut HashMap<Unit, Unit>,
809    new_graph: &mut UnitGraph,
810    unit_graph: &UnitGraph,
811    unit: &Unit,
812    unit_is_root: bool,
813    unit_is_for_host: bool,
814    to_host: Option<CompileKind>,
815    compile_time_deps_only: bool,
816) -> Unit {
817    if let Some(new_unit) = memo.get(unit) {
818        // Already computed, no need to recompute.
819        return new_unit.clone();
820    }
821    let mut dep_hash = StableHasher::new();
822    let skip_non_compile_time_deps = compile_time_deps_only
823        && (!unit.target.is_compile_time_dependency() ||
            // The root unit is not a dependency unless other units depend
            // on it.
826            unit_is_root);
827    let new_deps: Vec<_> = unit_graph[unit]
828        .iter()
829        .map(|dep| {
830            let new_dep_unit = traverse_and_share(
831                interner,
832                memo,
833                new_graph,
834                unit_graph,
835                &dep.unit,
836                false,
837                dep.unit_for.is_for_host(),
838                to_host,
                // If we should compile the current unit, we should also compile
                // its dependencies. If not, we should compile only its
                // compile-time dependencies.
842                skip_non_compile_time_deps,
843            );
844            new_dep_unit.hash(&mut dep_hash);
845            UnitDep {
846                unit: new_dep_unit,
847                ..dep.clone()
848            }
849        })
850        .collect();
851    // Here, we have recursively traversed this unit's dependencies, and hashed them: we can
852    // finalize the dep hash.
853    let new_dep_hash = Hasher::finish(&dep_hash);
854
855    // This is the key part of the sharing process: if the unit is a runtime dependency, whose
856    // target is the same as the host, we canonicalize the compile kind to `CompileKind::Host`.
857    // A possible host dependency counterpart to this unit would have that kind, and if such a unit
858    // exists in the current `unit_graph`, they will unify in the new unit graph map `new_graph`.
859    // The resulting unit graph will be optimized with less units, thanks to sharing these host
860    // dependencies.
861    let canonical_kind = match to_host {
862        Some(to_host) if to_host == unit.kind => CompileKind::Host,
863        _ => unit.kind,
864    };
865
866    let mut profile = unit.profile.clone();
867    if profile.strip.is_deferred() {
868        // If strip was not manually set, and all dependencies of this unit together
869        // with this unit have debuginfo turned off, we enable debuginfo stripping.
870        // This will remove pre-existing debug symbols coming from the standard library.
871        if !profile.debuginfo.is_turned_on()
872            && new_deps
873                .iter()
874                .all(|dep| !dep.unit.profile.debuginfo.is_turned_on())
875        {
876            profile.strip = profile.strip.strip_debuginfo();
877        }
878    }
879
880    // If this is a build dependency, and it's not shared with runtime dependencies, we can weaken
881    // its debuginfo level to optimize build times. We do nothing if it's an artifact dependency,
882    // as it and its debuginfo may end up embedded in the main program.
883    if unit_is_for_host
884        && to_host.is_some()
885        && profile.debuginfo.is_deferred()
886        && !unit.artifact.is_true()
887    {
888        // We create a "probe" test to see if a unit with the same explicit debuginfo level exists
889        // in the graph. This is the level we'd expect if it was set manually or the default value
890        // set by a profile for a runtime dependency: its canonical value.
891        let canonical_debuginfo = profile.debuginfo.finalize();
892        let mut canonical_profile = profile.clone();
893        canonical_profile.debuginfo = canonical_debuginfo;
894        let unit_probe = interner.intern(
895            &unit.pkg,
896            &unit.target,
897            canonical_profile,
898            to_host.unwrap(),
899            unit.mode,
900            unit.features.clone(),
901            unit.rustflags.clone(),
902            unit.rustdocflags.clone(),
903            unit.links_overrides.clone(),
904            unit.is_std,
905            unit.dep_hash,
906            unit.artifact,
907            unit.artifact_target_for_features,
908            unit.skip_non_compile_time_dep,
909        );
910
911        // We can now turn the deferred value into its actual final value.
912        profile.debuginfo = if unit_graph.contains_key(&unit_probe) {
913            // The unit is present in both build time and runtime subgraphs: we canonicalize its
914            // level to the other unit's, thus ensuring reuse between the two to optimize build times.
915            canonical_debuginfo
916        } else {
917            // The unit is only present in the build time subgraph, we can weaken its debuginfo
918            // level to optimize build times.
919            canonical_debuginfo.weaken()
920        }
921    }
922
923    let new_unit = interner.intern(
924        &unit.pkg,
925        &unit.target,
926        profile,
927        canonical_kind,
928        unit.mode,
929        unit.features.clone(),
930        unit.rustflags.clone(),
931        unit.rustdocflags.clone(),
932        unit.links_overrides.clone(),
933        unit.is_std,
934        new_dep_hash,
935        unit.artifact,
936        // Since `dep_hash` is now filled in, there's no need to specify the artifact target
937        // for target-dependent feature resolution
938        None,
939        skip_non_compile_time_deps,
940    );
941    if !unit_is_root || !compile_time_deps_only {
942        assert!(memo.insert(unit.clone(), new_unit.clone()).is_none());
943    }
944    new_graph.entry(new_unit.clone()).or_insert(new_deps);
945    new_unit
946}
947
948/// Removes duplicate `CompileMode::Doc` units that would cause problems with
949/// filename collisions.
950///
951/// Rustdoc only separates units by crate name in the file directory
952/// structure. If any two units with the same crate name exist, this would
953/// cause a filename collision, causing different rustdoc invocations to stomp
954/// on one another's files.
955///
956/// Unfortunately this does not remove all duplicates, as some of them are
957/// either user error, or difficult to remove. Cases that I can think of:
958///
959/// - Same target name in different packages. See the `collision_doc` test.
960/// - Different sources. See `collision_doc_sources` test.
961///
962/// Ideally this would not be necessary.
963fn remove_duplicate_doc(
964    build_config: &BuildConfig,
965    root_units: &[Unit],
966    unit_graph: &mut UnitGraph,
967) {
968    // First, create a mapping of crate_name -> Unit so we can see where the
969    // duplicates are.
970    let mut all_docs: HashMap<String, Vec<Unit>> = HashMap::new();
971    for unit in unit_graph.keys() {
972        if unit.mode.is_doc() {
973            all_docs
974                .entry(unit.target.crate_name())
975                .or_default()
976                .push(unit.clone());
977        }
978    }
979    // Keep track of units to remove so that they can be efficiently removed
980    // from the unit_deps.
981    let mut removed_units: HashSet<Unit> = HashSet::new();
982    let mut remove = |units: Vec<Unit>, reason: &str, cb: &dyn Fn(&Unit) -> bool| -> Vec<Unit> {
983        let (to_remove, remaining_units): (Vec<Unit>, Vec<Unit>) = units
984            .into_iter()
985            .partition(|unit| cb(unit) && !root_units.contains(unit));
986        for unit in to_remove {
987            tracing::debug!(
988                "removing duplicate doc due to {} for package {} target `{}`",
989                reason,
990                unit.pkg,
991                unit.target.name()
992            );
993            unit_graph.remove(&unit);
994            removed_units.insert(unit);
995        }
996        remaining_units
997    };
998    // Iterate over the duplicates and try to remove them from unit_graph.
999    for (_crate_name, mut units) in all_docs {
1000        if units.len() == 1 {
1001            continue;
1002        }
1003        // Prefer target over host if --target was not specified.
1004        if build_config
1005            .requested_kinds
1006            .iter()
1007            .all(CompileKind::is_host)
1008        {
1009            // Note these duplicates may not be real duplicates, since they
1010            // might get merged in rebuild_unit_graph_shared. Either way, it
1011            // shouldn't hurt to remove them early (although the report in the
1012            // log might be confusing).
1013            units = remove(units, "host/target merger", &|unit| unit.kind.is_host());
1014            if units.len() == 1 {
1015                continue;
1016            }
1017        }
1018        // Prefer newer versions over older.
1019        let mut source_map: HashMap<(InternedString, SourceId, CompileKind), Vec<Unit>> =
1020            HashMap::new();
1021        for unit in units {
1022            let pkg_id = unit.pkg.package_id();
1023            // Note, this does not detect duplicates from different sources.
1024            source_map
1025                .entry((pkg_id.name(), pkg_id.source_id(), unit.kind))
1026                .or_default()
1027                .push(unit);
1028        }
1029        let mut remaining_units = Vec::new();
1030        for (_key, mut units) in source_map {
1031            if units.len() > 1 {
1032                units.sort_by(|a, b| a.pkg.version().partial_cmp(b.pkg.version()).unwrap());
1033                // Remove any entries with version < newest.
1034                let newest_version = units.last().unwrap().pkg.version().clone();
1035                let keep_units = remove(units, "older version", &|unit| {
1036                    unit.pkg.version() < &newest_version
1037                });
1038                remaining_units.extend(keep_units);
1039            } else {
1040                remaining_units.extend(units);
1041            }
1042        }
1043        if remaining_units.len() == 1 {
1044            continue;
1045        }
1046        // Are there other heuristics to remove duplicates that would make
1047        // sense? Maybe prefer path sources over all others?
1048    }
1049    // Also remove units from the unit_deps so there aren't any dangling edges.
1050    for unit_deps in unit_graph.values_mut() {
1051        unit_deps.retain(|unit_dep| !removed_units.contains(&unit_dep.unit));
1052    }
1053    // Remove any orphan units that were detached from the graph.
1054    let mut visited = HashSet::new();
1055    fn visit(unit: &Unit, graph: &UnitGraph, visited: &mut HashSet<Unit>) {
1056        if !visited.insert(unit.clone()) {
1057            return;
1058        }
1059        for dep in &graph[unit] {
1060            visit(&dep.unit, graph, visited);
1061        }
1062    }
1063    for unit in root_units {
1064        visit(unit, unit_graph, &mut visited);
1065    }
1066    unit_graph.retain(|unit, _| visited.contains(unit));
1067}
1068
1069/// Override crate types for given units.
1070///
1071/// This is primarily used by `cargo rustc --crate-type`.
1072fn override_rustc_crate_types(
1073    units: &mut [Unit],
1074    args: &[String],
1075    interner: &UnitInterner,
1076) -> CargoResult<()> {
1077    if units.len() != 1 {
1078        anyhow::bail!(
1079            "crate types to rustc can only be passed to one \
1080            target, consider filtering\nthe package by passing, \
1081            e.g., `--lib` or `--example` to specify a single target"
1082        );
1083    }
1084
1085    let unit = &units[0];
1086    let override_unit = |f: fn(Vec<CrateType>) -> TargetKind| {
1087        let crate_types = args.iter().map(|s| s.into()).collect();
1088        let mut target = unit.target.clone();
1089        target.set_kind(f(crate_types));
1090        interner.intern(
1091            &unit.pkg,
1092            &target,
1093            unit.profile.clone(),
1094            unit.kind,
1095            unit.mode,
1096            unit.features.clone(),
1097            unit.rustflags.clone(),
1098            unit.rustdocflags.clone(),
1099            unit.links_overrides.clone(),
1100            unit.is_std,
1101            unit.dep_hash,
1102            unit.artifact,
1103            unit.artifact_target_for_features,
1104            unit.skip_non_compile_time_dep,
1105        )
1106    };
1107    units[0] = match unit.target.kind() {
1108        TargetKind::Lib(_) => override_unit(TargetKind::Lib),
1109        TargetKind::ExampleLib(_) => override_unit(TargetKind::ExampleLib),
1110        _ => {
1111            anyhow::bail!(
1112                "crate types can only be specified for libraries and example libraries.\n\
1113                Binaries, tests, and benchmarks are always the `bin` crate type"
1114            );
1115        }
1116    };
1117
1118    Ok(())
1119}
1120
1121/// Gets all of the features enabled for a package, plus its dependencies'
1122/// features.
1123///
1124/// Dependencies are added as `dep_name/feat_name` because `required-features`
1125/// wants to support that syntax.
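///
/// A hedged sketch of consuming the returned set (the feature names below are
/// purely illustrative, and the surrounding variables are assumed in scope):
///
/// ```ignore
/// let features = resolve_all_features(&resolve, &resolved_features, &pkg_set, pkg_id);
/// // Features of the package itself appear bare ("derive"), while features enabled
/// // on its dependencies use the `dep_name/feat_name` form ("serde/derive"), so a
/// // `required-features = ["serde/derive"]` entry can be matched directly:
/// let enabled = features.contains("serde/derive");
/// ```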
1126pub fn resolve_all_features(
1127    resolve_with_overrides: &Resolve,
1128    resolved_features: &features::ResolvedFeatures,
1129    package_set: &PackageSet<'_>,
1130    package_id: PackageId,
1131) -> HashSet<String> {
1132    let mut features: HashSet<String> = resolved_features
1133        .activated_features(package_id, FeaturesFor::NormalOrDev)
1134        .iter()
1135        .map(|s| s.to_string())
1136        .collect();
1137
1138    // Include features enabled for use by dependencies so targets can also use them with the
1139    // required-features field when deciding whether to be built or skipped.
1140    for (dep_id, deps) in resolve_with_overrides.deps(package_id) {
1141        let is_proc_macro = package_set
1142            .get_one(dep_id)
1143            .expect("packages downloaded")
1144            .proc_macro();
1145        for dep in deps {
1146            let features_for = FeaturesFor::from_for_host(is_proc_macro || dep.is_build());
1147            for feature in resolved_features
1148                .activated_features_unverified(dep_id, features_for)
1149                .unwrap_or_default()
1150            {
1151                features.insert(format!("{}/{}", dep.name_in_toml(), feature));
1152            }
1153        }
1154    }
1155
1156    features
1157}