cargo/core/compiler/build_runner/
mod.rs

1//! [`BuildRunner`] is the mutable state used during the build process.
2
3use std::collections::{BTreeSet, HashMap, HashSet};
4use std::path::{Path, PathBuf};
5use std::sync::{Arc, Mutex};
6
7use crate::core::PackageId;
8use crate::core::compiler::compilation::{self, UnitOutput};
9use crate::core::compiler::{self, Unit, artifact};
10use crate::util::cache_lock::CacheLockMode;
11use crate::util::errors::CargoResult;
12use annotate_snippets::{Level, Message};
13use anyhow::{Context as _, bail};
14use cargo_util::paths;
15use filetime::FileTime;
16use itertools::Itertools;
17use jobserver::Client;
18
19use super::build_plan::BuildPlan;
20use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts};
21use super::fingerprint::{Checksum, Fingerprint};
22use super::job_queue::JobQueue;
23use super::layout::Layout;
24use super::lto::Lto;
25use super::unit_graph::UnitDep;
26use super::{
27    BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor, RustDocFingerprint,
28};
29
30mod compilation_files;
31use self::compilation_files::CompilationFiles;
32pub use self::compilation_files::{Metadata, OutputFile, UnitHash};
33
/// Collection of all the stuff that is needed to perform a build.
///
/// Different from the [`BuildContext`], `BuildRunner` is a _mutable_ state used
/// throughout the entire build process. Everything is coordinated through this.
///
/// [`BuildContext`]: crate::core::compiler::BuildContext
pub struct BuildRunner<'a, 'gctx> {
    /// Mostly static information about the build task.
    pub bcx: &'a BuildContext<'a, 'gctx>,
    /// A large collection of information about the result of the entire compilation.
    pub compilation: Compilation<'gctx>,
    /// Output from build scripts, updated after each build script runs.
    pub build_script_outputs: Arc<Mutex<BuildScriptOutputs>>,
    /// Dependencies (like rerun-if-changed) declared by a build script.
    /// This is *only* populated from the output from previous runs.
    /// If the build script hasn't ever been run, then it must be run.
    pub build_explicit_deps: HashMap<Unit, BuildDeps>,
    /// Fingerprints used to detect if a unit is out-of-date.
    pub fingerprints: HashMap<Unit, Arc<Fingerprint>>,
    /// Cache of file mtimes to reduce filesystem hits.
    pub mtime_cache: HashMap<PathBuf, FileTime>,
    /// Cache of file checksums to reduce filesystem reads.
    pub checksum_cache: HashMap<PathBuf, Checksum>,
    /// A set used to track which units have been compiled.
    /// A unit may appear in the job graph multiple times as a dependency of
    /// multiple packages, but it only needs to run once.
    pub compiled: HashSet<Unit>,
    /// Linking information for each `Unit`.
    /// See `build_map` for details.
    pub build_scripts: HashMap<Unit, Arc<BuildScripts>>,
    /// Job server client to manage concurrency with other processes.
    pub jobserver: Client,
    /// "Primary" packages are the ones the user selected on the command-line
    /// with `-p` flags. If no flags are specified, then it is the defaults
    /// based on the current directory and the default workspace members.
    primary_packages: HashSet<PackageId>,
    /// An abstraction of the files and directories that will be generated by
    /// the compilation. This is `None` until after `unit_dependencies` has
    /// been computed.
    files: Option<CompilationFiles<'a, 'gctx>>,

    /// A set of units which are compiling rlibs and are expected to produce
    /// metadata files in addition to the rlib itself.
    rmeta_required: HashSet<Unit>,

    /// Map of the LTO-status of each unit. This indicates what sort of
    /// compilation is happening (only object, only bitcode, both, etc), and is
    /// precalculated early on.
    pub lto: HashMap<Unit, Lto>,

    /// Map of Doc/Docscrape units to metadata for their -Cmetadata flag.
    /// See [`BuildRunner::compute_metadata_for_doc_units`] for more details.
    pub metadata_for_doc_units: HashMap<Unit, Metadata>,

    /// Set of metadata of Docscrape units that fail before completion, e.g.
    /// because the target has a type error. This is in an `Arc<Mutex<..>>`
    /// because it is continuously updated as the job progresses.
    pub failed_scrape_units: Arc<Mutex<HashSet<UnitHash>>>,
}
93
94impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
    /// Creates a new `BuildRunner` for the given [`BuildContext`].
    ///
    /// Connects to the inherited jobserver (or creates a fresh one) and
    /// initializes all bookkeeping maps/sets to empty; `files` stays `None`
    /// until [`BuildRunner::prepare_units`] is called.
    pub fn new(bcx: &'a BuildContext<'a, 'gctx>) -> CargoResult<Self> {
        // Load up the jobserver that we'll use to manage our parallelism. This
        // is the same as the GNU make implementation of a jobserver, and
        // intentionally so! It's hoped that we can interact with GNU make and
        // all share the same jobserver.
        //
        // Note that if we don't have a jobserver in our environment then we
        // create our own, and we create it with `n` tokens, but immediately
        // acquire one, because one token is ourself, a running process.
        let jobserver = match bcx.gctx.jobserver_from_env() {
            Some(c) => c.clone(),
            None => {
                let client =
                    Client::new(bcx.jobs() as usize).context("failed to create jobserver")?;
                client.acquire_raw()?;
                client
            }
        };

        Ok(Self {
            bcx,
            compilation: Compilation::new(bcx)?,
            build_script_outputs: Arc::new(Mutex::new(BuildScriptOutputs::default())),
            fingerprints: HashMap::new(),
            mtime_cache: HashMap::new(),
            checksum_cache: HashMap::new(),
            compiled: HashSet::new(),
            build_scripts: HashMap::new(),
            build_explicit_deps: HashMap::new(),
            jobserver,
            primary_packages: HashSet::new(),
            files: None,
            rmeta_required: HashSet::new(),
            lto: HashMap::new(),
            metadata_for_doc_units: HashMap::new(),
            failed_scrape_units: Arc::new(Mutex::new(HashSet::new())),
        })
    }
133
    /// Dry-run the compilation without actually running it.
    ///
    /// This is expected to collect information like the location of output artifacts.
    /// Please keep in sync with non-compilation part in [`BuildRunner::compile`].
    pub fn dry_run(mut self) -> CargoResult<Compilation<'gctx>> {
        // Hold the shared package cache lock for the duration, mirroring
        // `compile`.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        self.check_collisions()?;

        // Only record where the artifacts would go; nothing is compiled.
        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;
        }

        Ok(self.compilation)
    }
154
    /// Starts compilation, waits for it to finish, and returns information
    /// about the result of compilation.
    ///
    /// See [`ops::cargo_compile`] for a higher-level view of the compile process.
    ///
    /// [`ops::cargo_compile`]: crate::ops::cargo_compile
    #[tracing::instrument(skip_all)]
    pub fn compile(mut self, exec: &Arc<dyn Executor>) -> CargoResult<Compilation<'gctx>> {
        // A shared lock is held during the duration of the build since rustc
        // needs to read from the `src` cache, and we don't want other
        // commands modifying the `src` cache while it is running.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        // Set up the job queue and the (optionally emitted) build plan.
        let mut queue = JobQueue::new(self.bcx);
        let mut plan = BuildPlan::new();
        let build_plan = self.bcx.build_config.build_plan;
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        // Populate `self.build_scripts` (see `custom_build::build_map`).
        custom_build::build_map(&mut self)?;
        self.check_collisions()?;
        self.compute_metadata_for_doc_units();

        // We need to make sure that if there were any previous docs
        // already compiled, they were compiled with the same Rustc version that we're currently
        // using. Otherwise we must remove the `doc/` folder and compile again forcing a rebuild.
        //
        // This is important because the `.js`/`.html` & `.css` files that are generated by Rustc don't have
        // any versioning (See https://github.com/rust-lang/cargo/issues/8461).
        // Therefore, we can end up with weird bugs and behaviours if we mix different
        // versions of these files.
        if self.bcx.build_config.intent.is_doc() {
            RustDocFingerprint::check_rustdoc_fingerprint(&self)?
        }

        // Queue up jobs for every root unit (dependencies are enqueued
        // recursively by `super::compile`).
        for unit in &self.bcx.roots {
            let force_rebuild = self.bcx.build_config.force_rebuild;
            super::compile(&mut self, &mut queue, &mut plan, unit, exec, force_rebuild)?;
        }

        // Now that we've got the full job queue and we've done all our
        // fingerprint analysis to determine what to run, bust all the memoized
        // fingerprint hashes to ensure that during the build they all get the
        // most up-to-date values. In theory we only need to bust hashes that
        // transitively depend on a dirty build script, but it shouldn't matter
        // that much for performance anyway.
        for fingerprint in self.fingerprints.values() {
            fingerprint.clear_memoized();
        }

        // Now that we've figured out everything that we're going to do, do it!
        queue.execute(&mut self, &mut plan)?;

        if build_plan {
            plan.set_inputs(self.build_plan_inputs()?);
            plan.output_plan(self.bcx.gctx);
        }

        // Add `OUT_DIR` to env vars if unit has a build script.
        let units_with_build_script = &self
            .bcx
            .roots
            .iter()
            .filter(|unit| self.build_scripts.contains_key(unit))
            .dedup_by(|x, y| x.pkg.package_id() == y.pkg.package_id())
            .collect::<Vec<_>>();
        for unit in units_with_build_script {
            for dep in &self.bcx.unit_graph[unit] {
                if dep.unit.mode.is_run_custom_build() {
                    let out_dir = self
                        .files()
                        .build_script_out_dir(&dep.unit)
                        .display()
                        .to_string();
                    let script_meta = self.get_run_build_script_metadata(&dep.unit);
                    self.compilation
                        .extra_env
                        .entry(script_meta)
                        .or_insert_with(Vec::new)
                        .push(("OUT_DIR".to_string(), out_dir));
                }
            }
        }

        // Collect the result of the build into `self.compilation`.
        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;

            // Collect information for `rustdoc --test`.
            if unit.mode.is_doc_test() {
                let mut unstable_opts = false;
                let mut args = compiler::extern_args(&self, unit, &mut unstable_opts)?;
                args.extend(compiler::lto_args(&self, unit));
                args.extend(compiler::features_args(unit));
                args.extend(compiler::check_cfg_args(unit));

                // Forward `cargo::rustc-cfg`, `cargo::rustc-check-cfg`, and
                // applicable linker args from this unit's build script(s).
                let script_metas = self.find_build_script_metadatas(unit);
                if let Some(meta_vec) = script_metas.clone() {
                    for meta in meta_vec {
                        if let Some(output) = self.build_script_outputs.lock().unwrap().get(meta) {
                            for cfg in &output.cfgs {
                                args.push("--cfg".into());
                                args.push(cfg.into());
                            }

                            for check_cfg in &output.check_cfgs {
                                args.push("--check-cfg".into());
                                args.push(check_cfg.into());
                            }

                            for (lt, arg) in &output.linker_args {
                                if lt.applies_to(&unit.target, unit.mode) {
                                    args.push("-C".into());
                                    args.push(format!("link-arg={}", arg).into());
                                }
                            }
                        }
                    }
                }
                args.extend(unit.rustdocflags.iter().map(Into::into));

                use super::MessageFormat;
                let format = match self.bcx.build_config.message_format {
                    MessageFormat::Short => "short",
                    MessageFormat::Human => "human",
                    MessageFormat::Json { .. } => "json",
                };
                args.push("--error-format".into());
                args.push(format.into());

                self.compilation.to_doc_test.push(compilation::Doctest {
                    unit: unit.clone(),
                    args,
                    unstable_opts,
                    linker: self.compilation.target_linker(unit.kind).clone(),
                    script_metas,
                    env: artifact::get_env(&self, self.unit_deps(unit))?,
                });
            }

            super::output_depinfo(&mut self, unit)?;
        }

        // Propagate build-script environment variables and native library
        // search paths into the final `Compilation`.
        for (script_meta, output) in self.build_script_outputs.lock().unwrap().iter() {
            self.compilation
                .extra_env
                .entry(*script_meta)
                .or_insert_with(Vec::new)
                .extend(output.env.iter().cloned());

            for dir in output.library_paths.iter() {
                self.compilation
                    .native_dirs
                    .insert(dir.clone().into_path_buf());
            }
        }
        Ok(self.compilation)
    }
315
316    fn collect_tests_and_executables(&mut self, unit: &Unit) -> CargoResult<()> {
317        for output in self.outputs(unit)?.iter() {
318            if matches!(
319                output.flavor,
320                FileFlavor::DebugInfo | FileFlavor::Auxiliary | FileFlavor::Sbom
321            ) {
322                continue;
323            }
324
325            let bindst = output.bin_dst();
326
327            if unit.mode == CompileMode::Test {
328                self.compilation
329                    .tests
330                    .push(self.unit_output(unit, &output.path));
331            } else if unit.target.is_executable() {
332                self.compilation
333                    .binaries
334                    .push(self.unit_output(unit, bindst));
335            } else if unit.target.is_cdylib()
336                && !self.compilation.cdylibs.iter().any(|uo| uo.unit == *unit)
337            {
338                self.compilation
339                    .cdylibs
340                    .push(self.unit_output(unit, bindst));
341            }
342        }
343        Ok(())
344    }
345
346    /// Returns the executable for the specified unit (if any).
347    pub fn get_executable(&mut self, unit: &Unit) -> CargoResult<Option<PathBuf>> {
348        let is_binary = unit.target.is_executable();
349        let is_test = unit.mode.is_any_test();
350        if !unit.mode.generates_executable() || !(is_binary || is_test) {
351            return Ok(None);
352        }
353        Ok(self
354            .outputs(unit)?
355            .iter()
356            .find(|o| o.flavor == FileFlavor::Normal)
357            .map(|output| output.bin_dst().clone()))
358    }
359
    /// Computes the output layouts and primary-package bookkeeping, then
    /// constructs [`CompilationFiles`] and stores it in `self.files`.
    #[tracing::instrument(skip_all)]
    pub fn prepare_units(&mut self) -> CargoResult<()> {
        // One `Layout` for the host, plus one per cross-compilation target.
        let dest = self.bcx.profiles.get_dir_name();
        let host_layout = Layout::new(self.bcx.ws, None, &dest)?;
        let mut targets = HashMap::new();
        for kind in self.bcx.all_kinds.iter() {
            if let CompileKind::Target(target) = *kind {
                let layout = Layout::new(self.bcx.ws, Some(target), &dest)?;
                targets.insert(target, layout);
            }
        }
        // Root units define the "primary" packages and root crate names.
        self.primary_packages
            .extend(self.bcx.roots.iter().map(|u| u.pkg.package_id()));
        self.compilation
            .root_crate_names
            .extend(self.bcx.roots.iter().map(|u| u.target.crate_name()));

        self.record_units_requiring_metadata();

        // `self.files` must be populated before `prepare`, `outputs`, or
        // `files()` can be used.
        let files = CompilationFiles::new(self, host_layout, targets);
        self.files = Some(files);
        Ok(())
    }
383
    /// Prepare this context, ensuring that all filesystem directories are in
    /// place.
    ///
    /// Also records the artifact and deps output directories for each
    /// [`CompileKind`] in `self.compilation`. Must be called after
    /// [`BuildRunner::prepare_units`] (which sets `self.files`).
    #[tracing::instrument(skip_all)]
    pub fn prepare(&mut self) -> CargoResult<()> {
        self.files
            .as_mut()
            .unwrap()
            .host
            .prepare()
            .context("couldn't prepare build directories")?;
        for target in self.files.as_mut().unwrap().target.values_mut() {
            target
                .prepare()
                .context("couldn't prepare build directories")?;
        }

        let files = self.files.as_ref().unwrap();
        for &kind in self.bcx.all_kinds.iter() {
            let layout = files.layout(kind);
            self.compilation
                .root_output
                .insert(kind, layout.artifact_dir().dest().to_path_buf());
            if self.bcx.gctx.cli_unstable().build_dir_new_layout {
                for (unit, _) in self.bcx.unit_graph.iter() {
                    let dep_dir = self.files().deps_dir(unit);
                    paths::create_dir_all(&dep_dir)?;
                    // NOTE(review): every unit's dep dir is created on disk,
                    // but the map entry for `kind` is overwritten on each
                    // iteration, so only the last unit's dir is recorded in
                    // `deps_output` — confirm this is intended.
                    self.compilation.deps_output.insert(kind, dep_dir);
                }
            } else {
                self.compilation
                    .deps_output
                    .insert(kind, layout.build_dir().legacy_deps().to_path_buf());
            }
        }
        Ok(())
    }
420
    /// Returns the [`CompilationFiles`] for this build.
    ///
    /// Panics if called before [`BuildRunner::prepare_units`], which is what
    /// populates `self.files`.
    pub fn files(&self) -> &CompilationFiles<'a, 'gctx> {
        self.files.as_ref().unwrap()
    }
424
425    /// Returns the filenames that the given unit will generate.
426    pub fn outputs(&self, unit: &Unit) -> CargoResult<Arc<Vec<OutputFile>>> {
427        self.files.as_ref().unwrap().outputs(unit, self.bcx)
428    }
429
    /// Direct dependencies for the given unit.
    ///
    /// Panics if `unit` is not present in the unit graph.
    pub fn unit_deps(&self, unit: &Unit) -> &[UnitDep] {
        &self.bcx.unit_graph[unit]
    }
434
435    /// Returns the `RunCustomBuild` Units associated with the given Unit.
436    ///
437    /// If the package does not have a build script, this returns None.
438    pub fn find_build_script_units(&self, unit: &Unit) -> Option<Vec<Unit>> {
439        if unit.mode.is_run_custom_build() {
440            return Some(vec![unit.clone()]);
441        }
442
443        let build_script_units: Vec<Unit> = self.bcx.unit_graph[unit]
444            .iter()
445            .filter(|unit_dep| {
446                unit_dep.unit.mode.is_run_custom_build()
447                    && unit_dep.unit.pkg.package_id() == unit.pkg.package_id()
448            })
449            .map(|unit_dep| unit_dep.unit.clone())
450            .collect();
451        if build_script_units.is_empty() {
452            None
453        } else {
454            Some(build_script_units)
455        }
456    }
457
458    /// Returns the metadata hash for the `RunCustomBuild` Unit associated with
459    /// the given unit.
460    ///
461    /// If the package does not have a build script, this returns None.
462    pub fn find_build_script_metadatas(&self, unit: &Unit) -> Option<Vec<UnitHash>> {
463        self.find_build_script_units(unit).map(|units| {
464            units
465                .iter()
466                .map(|u| self.get_run_build_script_metadata(u))
467                .collect()
468        })
469    }
470
471    /// Returns the metadata hash for a `RunCustomBuild` unit.
472    pub fn get_run_build_script_metadata(&self, unit: &Unit) -> UnitHash {
473        assert!(unit.mode.is_run_custom_build());
474        self.files().metadata(unit).unit_id()
475    }
476
477    /// Returns the list of SBOM output file paths for a given [`Unit`].
478    pub fn sbom_output_files(&self, unit: &Unit) -> CargoResult<Vec<PathBuf>> {
479        Ok(self
480            .outputs(unit)?
481            .iter()
482            .filter(|o| o.flavor == FileFlavor::Sbom)
483            .map(|o| o.path.clone())
484            .collect())
485    }
486
487    pub fn is_primary_package(&self, unit: &Unit) -> bool {
488        self.primary_packages.contains(&unit.pkg.package_id())
489    }
490
491    /// Returns the list of filenames read by cargo to generate the [`BuildContext`]
492    /// (all `Cargo.toml`, etc.).
493    pub fn build_plan_inputs(&self) -> CargoResult<Vec<PathBuf>> {
494        // Keep sorted for consistency.
495        let mut inputs = BTreeSet::new();
496        // Note: dev-deps are skipped if they are not present in the unit graph.
497        for unit in self.bcx.unit_graph.keys() {
498            inputs.insert(unit.pkg.manifest_path().to_path_buf());
499        }
500        Ok(inputs.into_iter().collect())
501    }
502
503    /// Returns a [`UnitOutput`] which represents some information about the
504    /// output of a unit.
505    pub fn unit_output(&self, unit: &Unit, path: &Path) -> UnitOutput {
506        let script_metas = self.find_build_script_metadatas(unit);
507        UnitOutput {
508            unit: unit.clone(),
509            path: path.to_path_buf(),
510            script_metas,
511        }
512    }
513
    /// Check if any output file name collision happens.
    /// See <https://github.com/rust-lang/cargo/issues/6313> for more.
    ///
    /// Most collisions are reported as warnings; duplicate doc libs or
    /// duplicate doc bins among primary packages are hard errors (a
    /// pre-1.0 behavior that is preserved for compatibility).
    #[tracing::instrument(skip_all)]
    fn check_collisions(&self) -> CargoResult<()> {
        // Maps each seen output path to the unit that first claimed it.
        let mut output_collisions = HashMap::new();
        let describe_collision = |unit: &Unit, other_unit: &Unit| -> String {
            format!(
                "the {} target `{}` in package `{}` has the same output filename as the {} target `{}` in package `{}`",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg.package_id(),
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg.package_id(),
            )
        };
        let suggestion = [
            Level::NOTE.message("this may become a hard error in the future; see <https://github.com/rust-lang/cargo/issues/6313>"),
            Level::HELP.message("consider changing their names to be unique or compiling them separately")
        ];
        let rustdoc_suggestion = [
            Level::NOTE.message("this is a known bug where multiple crates with the same name use the same path; see <https://github.com/rust-lang/cargo/issues/6313>")
        ];
        // Same-name collisions get the short report; different-name collisions
        // include extra diagnostics since they may indicate a Cargo bug.
        let report_collision = |unit: &Unit,
                                other_unit: &Unit,
                                path: &PathBuf,
                                messages: &[Message<'_>]|
         -> CargoResult<()> {
            if unit.target.name() == other_unit.target.name() {
                self.bcx.gctx.shell().print_report(
                    &[Level::WARNING
                        .secondary_title(format!("output filename collision at {}", path.display()))
                        .elements(
                            [Level::NOTE.message(describe_collision(unit, other_unit))]
                                .into_iter()
                                .chain(messages.iter().cloned()),
                        )],
                    false,
                )
            } else {
                self.bcx.gctx.shell().print_report(
                    &[Level::WARNING
                        .secondary_title(format!("output filename collision at {}", path.display()))
                        .elements([
                            Level::NOTE.message(describe_collision(unit, other_unit)),
                            Level::NOTE.message("if this looks unexpected, it may be a bug in Cargo. Please file a bug \
                                report at https://github.com/rust-lang/cargo/issues/ with as much information as you \
                                can provide."),
                            Level::NOTE.message(format!("cargo {} running on `{}` target `{}`",
                                crate::version(), self.bcx.host_triple(), self.bcx.target_data.short_name(&unit.kind))),
                            Level::NOTE.message(format!("first unit: {unit:?}")),
                            Level::NOTE.message(format!("second unit: {other_unit:?}")),
                        ])],
                    false,
                )
            }
        };

        // Hard error used for duplicate doc libs/bins (see below).
        fn doc_collision_error(unit: &Unit, other_unit: &Unit) -> CargoResult<()> {
            bail!(
                "document output filename collision\n\
                 The {} `{}` in package `{}` has the same name as the {} `{}` in package `{}`.\n\
                 Only one may be documented at once since they output to the same path.\n\
                 Consider documenting only one, renaming one, \
                 or marking one with `doc = false` in Cargo.toml.",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg,
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg,
            );
        }

        let mut keys = self
            .bcx
            .unit_graph
            .keys()
            .filter(|unit| !unit.mode.is_run_custom_build())
            .collect::<Vec<_>>();
        // Sort for consistent error messages.
        keys.sort_unstable();
        // These are kept separate to retain compatibility with older
        // versions, which generated an error when there was a duplicate lib
        // or bin (but the old code did not check bin<->lib collisions). To
        // retain backwards compatibility, this only generates an error for
        // duplicate libs or duplicate bins (but not both). Ideally this
        // shouldn't be here, but since there isn't a complete workaround,
        // yet, this retains the old behavior.
        let mut doc_libs = HashMap::new();
        let mut doc_bins = HashMap::new();
        for unit in keys {
            if unit.mode.is_doc() && self.is_primary_package(unit) {
                // These situations have been an error since before 1.0, so it
                // is not a warning like the other situations.
                if unit.target.is_lib() {
                    if let Some(prev) = doc_libs.insert((unit.target.crate_name(), unit.kind), unit)
                    {
                        doc_collision_error(unit, prev)?;
                    }
                } else if let Some(prev) =
                    doc_bins.insert((unit.target.crate_name(), unit.kind), unit)
                {
                    doc_collision_error(unit, prev)?;
                }
            }
            // Check the primary output path, the hardlink destination, and the
            // `--artifact-dir` export path for collisions with earlier units.
            for output in self.outputs(unit)?.iter() {
                if let Some(other_unit) = output_collisions.insert(output.path.clone(), unit) {
                    if unit.mode.is_doc() {
                        // See https://github.com/rust-lang/rust/issues/56169
                        // and https://github.com/rust-lang/rust/issues/61378
                        report_collision(unit, other_unit, &output.path, &rustdoc_suggestion)?;
                    } else {
                        report_collision(unit, other_unit, &output.path, &suggestion)?;
                    }
                }
                if let Some(hardlink) = output.hardlink.as_ref() {
                    if let Some(other_unit) = output_collisions.insert(hardlink.clone(), unit) {
                        report_collision(unit, other_unit, hardlink, &suggestion)?;
                    }
                }
                if let Some(ref export_path) = output.export_path {
                    if let Some(other_unit) = output_collisions.insert(export_path.clone(), unit) {
                        self.bcx.gctx.shell().print_report(
                            &[Level::WARNING
                                .secondary_title(format!(
                                    "`--artifact-dir` filename collision at {}",
                                    export_path.display()
                                ))
                                .elements(
                                    [Level::NOTE.message(describe_collision(unit, other_unit))]
                                        .into_iter()
                                        .chain(suggestion.iter().cloned()),
                                )],
                            false,
                        )?;
                    }
                }
            }
        }
        Ok(())
    }
656
657    /// Records the list of units which are required to emit metadata.
658    ///
659    /// Units which depend only on the metadata of others requires the others to
660    /// actually produce metadata, so we'll record that here.
661    fn record_units_requiring_metadata(&mut self) {
662        for (key, deps) in self.bcx.unit_graph.iter() {
663            for dep in deps {
664                if self.only_requires_rmeta(key, &dep.unit) {
665                    self.rmeta_required.insert(dep.unit.clone());
666                }
667            }
668        }
669    }
670
671    /// Returns whether when `parent` depends on `dep` if it only requires the
672    /// metadata file from `dep`.
673    pub fn only_requires_rmeta(&self, parent: &Unit, dep: &Unit) -> bool {
674        // We're only a candidate for requiring an `rmeta` file if we
675        // ourselves are building an rlib,
676        !parent.requires_upstream_objects()
677            && parent.mode == CompileMode::Build
678            // Our dependency must also be built as an rlib, otherwise the
679            // object code must be useful in some fashion
680            && !dep.requires_upstream_objects()
681            && dep.mode == CompileMode::Build
682    }
683
    /// Returns whether when `unit` is built whether it should emit metadata as
    /// well because some compilations rely on that.
    ///
    /// The set is populated by
    /// [`BuildRunner::record_units_requiring_metadata`] during
    /// [`BuildRunner::prepare_units`].
    pub fn rmeta_required(&self, unit: &Unit) -> bool {
        self.rmeta_required.contains(unit)
    }
689
    /// Finds metadata for Doc/Docscrape units.
    ///
    /// rustdoc needs a -Cmetadata flag in order to recognize StableCrateIds that refer to
    /// items in the crate being documented. The -Cmetadata flag used by reverse-dependencies
    /// will be the metadata of the Cargo unit that generated the current library's rmeta file,
    /// which should be a Check unit.
    ///
    /// If the current crate has reverse-dependencies, such a Check unit should exist, and so
    /// we use that crate's metadata. If not, we use the crate's Doc unit so at least examples
    /// scraped from the current crate can be used when documenting the current crate.
    #[tracing::instrument(skip_all)]
    pub fn compute_metadata_for_doc_units(&mut self) {
        for unit in self.bcx.unit_graph.keys() {
            // Only Doc and Docscrape units need an entry.
            if !unit.mode.is_doc() && !unit.mode.is_doc_scrape() {
                continue;
            }

            // All units for the same package/target, excluding scrape units.
            let matching_units = self
                .bcx
                .unit_graph
                .keys()
                .filter(|other| {
                    unit.pkg == other.pkg
                        && unit.target == other.target
                        && !other.mode.is_doc_scrape()
                })
                .collect::<Vec<_>>();
            // Prefer a Check unit's metadata, then a Doc unit's, and finally
            // fall back to the unit itself.
            let metadata_unit = matching_units
                .iter()
                .find(|other| other.mode.is_check())
                .or_else(|| matching_units.iter().find(|other| other.mode.is_doc()))
                .unwrap_or(&unit);
            self.metadata_for_doc_units
                .insert(unit.clone(), self.files().metadata(metadata_unit));
        }
    }
726}