// cargo/core/compiler/build_runner/mod.rs

//! [`BuildRunner`] is the mutable state used during the build process.

use std::collections::{BTreeSet, HashMap, HashSet};
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};

use crate::core::PackageId;
use crate::core::compiler::compilation::{self, UnitOutput};
use crate::core::compiler::{self, Unit, artifact};
use crate::util::cache_lock::CacheLockMode;
use crate::util::errors::CargoResult;
use anyhow::{Context as _, bail};
use cargo_util::paths;
use filetime::FileTime;
use itertools::Itertools;
use jobserver::Client;

use super::build_plan::BuildPlan;
use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts};
use super::fingerprint::{Checksum, Fingerprint};
use super::job_queue::JobQueue;
use super::layout::Layout;
use super::lto::Lto;
use super::unit_graph::UnitDep;
use super::{
    BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor, RustDocFingerprint,
};

mod compilation_files;
use self::compilation_files::CompilationFiles;
pub use self::compilation_files::{Metadata, OutputFile, UnitHash};
32
/// Collection of all the stuff that is needed to perform a build.
///
/// Different from the [`BuildContext`], `BuildRunner` is a _mutable_ state used
/// throughout the entire build process. Everything is coordinated through this.
///
/// [`BuildContext`]: crate::core::compiler::BuildContext
pub struct BuildRunner<'a, 'gctx> {
    /// Mostly static information about the build task.
    pub bcx: &'a BuildContext<'a, 'gctx>,
    /// A large collection of information about the result of the entire compilation.
    pub compilation: Compilation<'gctx>,
    /// Output from build scripts, updated after each build script runs.
    pub build_script_outputs: Arc<Mutex<BuildScriptOutputs>>,
    /// Dependencies (like rerun-if-changed) declared by a build script.
    /// This is *only* populated from the output from previous runs.
    /// If the build script hasn't ever been run, then it must be run.
    pub build_explicit_deps: HashMap<Unit, BuildDeps>,
    /// Fingerprints used to detect if a unit is out-of-date.
    pub fingerprints: HashMap<Unit, Arc<Fingerprint>>,
    /// Cache of file mtimes to reduce filesystem hits.
    pub mtime_cache: HashMap<PathBuf, FileTime>,
    /// Cache of file checksums to reduce filesystem reads.
    pub checksum_cache: HashMap<PathBuf, Checksum>,
    /// A set used to track which units have been compiled.
    /// A unit may appear in the job graph multiple times as a dependency of
    /// multiple packages, but it only needs to run once.
    pub compiled: HashSet<Unit>,
    /// Linking information for each `Unit`.
    /// See `build_map` for details.
    pub build_scripts: HashMap<Unit, Arc<BuildScripts>>,
    /// Job server client to manage concurrency with other processes.
    pub jobserver: Client,
    /// "Primary" packages are the ones the user selected on the command-line
    /// with `-p` flags. If no flags are specified, then it is the defaults
    /// based on the current directory and the default workspace members.
    primary_packages: HashSet<PackageId>,
    /// An abstraction of the files and directories that will be generated by
    /// the compilation. This is `None` until after `unit_dependencies` has
    /// been computed (see [`BuildRunner::prepare_units`]).
    files: Option<CompilationFiles<'a, 'gctx>>,

    /// A set of units which are compiling rlibs and are expected to produce
    /// metadata files in addition to the rlib itself.
    rmeta_required: HashSet<Unit>,

    /// Map of the LTO-status of each unit. This indicates what sort of
    /// compilation is happening (only object, only bitcode, both, etc), and is
    /// precalculated early on.
    pub lto: HashMap<Unit, Lto>,

    /// Map of Doc/Docscrape units to metadata for their -Cmetadata flag.
    /// See [`BuildRunner::compute_metadata_for_doc_units`] for more details.
    pub metadata_for_doc_units: HashMap<Unit, Metadata>,

    /// Set of metadata of Docscrape units that fail before completion, e.g.
    /// because the target has a type error. This is in an `Arc<Mutex<..>>`
    /// because it is continuously updated as the job progresses.
    pub failed_scrape_units: Arc<Mutex<HashSet<UnitHash>>>,
}
92
93impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
    /// Creates a new `BuildRunner` for the given [`BuildContext`].
    ///
    /// All mutable state starts out empty and is filled in as the build
    /// progresses; the only real work done here is jobserver setup.
    pub fn new(bcx: &'a BuildContext<'a, 'gctx>) -> CargoResult<Self> {
        // Load up the jobserver that we'll use to manage our parallelism. This
        // is the same as the GNU make implementation of a jobserver, and
        // intentionally so! It's hoped that we can interact with GNU make and
        // all share the same jobserver.
        //
        // Note that if we don't have a jobserver in our environment then we
        // create our own, and we create it with `n` tokens, but immediately
        // acquire one, because one token is ourself, a running process.
        let jobserver = match bcx.gctx.jobserver_from_env() {
            Some(c) => c.clone(),
            None => {
                let client =
                    Client::new(bcx.jobs() as usize).context("failed to create jobserver")?;
                client.acquire_raw()?;
                client
            }
        };

        Ok(Self {
            bcx,
            compilation: Compilation::new(bcx)?,
            build_script_outputs: Arc::new(Mutex::new(BuildScriptOutputs::default())),
            fingerprints: HashMap::new(),
            mtime_cache: HashMap::new(),
            checksum_cache: HashMap::new(),
            compiled: HashSet::new(),
            build_scripts: HashMap::new(),
            build_explicit_deps: HashMap::new(),
            jobserver,
            primary_packages: HashSet::new(),
            files: None,
            rmeta_required: HashSet::new(),
            lto: HashMap::new(),
            metadata_for_doc_units: HashMap::new(),
            failed_scrape_units: Arc::new(Mutex::new(HashSet::new())),
        })
    }
132
    /// Dry-run the compilation without actually running it.
    ///
    /// This is expected to collect information like the location of output artifacts.
    /// Please keep in sync with the non-compilation part in [`BuildRunner::compile`].
    pub fn dry_run(mut self) -> CargoResult<Compilation<'gctx>> {
        // Same shared package-cache lock that `compile` takes, so nothing can
        // mutate the `src` cache while we inspect it.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        self.check_collisions()?;

        // Record where each root unit's artifacts would end up, without
        // running any jobs.
        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;
        }

        Ok(self.compilation)
    }
153
    /// Starts compilation, waits for it to finish, and returns information
    /// about the result of compilation.
    ///
    /// See [`ops::cargo_compile`] for a higher-level view of the compile process.
    ///
    /// [`ops::cargo_compile`]: crate::ops::cargo_compile
    #[tracing::instrument(skip_all)]
    pub fn compile(mut self, exec: &Arc<dyn Executor>) -> CargoResult<Compilation<'gctx>> {
        // A shared lock is held during the duration of the build since rustc
        // needs to read from the `src` cache, and we don't want other
        // commands modifying the `src` cache while it is running.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        let mut queue = JobQueue::new(self.bcx);
        let mut plan = BuildPlan::new();
        let build_plan = self.bcx.build_config.build_plan;
        // Precompute per-unit state before any jobs are queued. Keep this
        // sequence in sync with the non-compilation part of `dry_run`.
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        custom_build::build_map(&mut self)?;
        self.check_collisions()?;
        self.compute_metadata_for_doc_units();

        // We need to make sure that if there were any previous docs
        // already compiled, they were compiled with the same Rustc version that we're currently
        // using. Otherwise we must remove the `doc/` folder and compile again forcing a rebuild.
        //
        // This is important because the `.js`/`.html` & `.css` files that are generated by Rustc don't have
        // any versioning (See https://github.com/rust-lang/cargo/issues/8461).
        // Therefore, we can end up with weird bugs and behaviours if we mix different
        // versions of these files.
        if self.bcx.build_config.intent.is_doc() {
            RustDocFingerprint::check_rustdoc_fingerprint(&self)?
        }

        // Queue a job for every root unit; `super::compile` recursively
        // enqueues dependencies as needed.
        for unit in &self.bcx.roots {
            let force_rebuild = self.bcx.build_config.force_rebuild;
            super::compile(&mut self, &mut queue, &mut plan, unit, exec, force_rebuild)?;
        }

        // Now that we've got the full job queue and we've done all our
        // fingerprint analysis to determine what to run, bust all the memoized
        // fingerprint hashes to ensure that during the build they all get the
        // most up-to-date values. In theory we only need to bust hashes that
        // transitively depend on a dirty build script, but it shouldn't matter
        // that much for performance anyway.
        for fingerprint in self.fingerprints.values() {
            fingerprint.clear_memoized();
        }

        // Now that we've figured out everything that we're going to do, do it!
        queue.execute(&mut self, &mut plan)?;

        if build_plan {
            plan.set_inputs(self.build_plan_inputs()?);
            plan.output_plan(self.bcx.gctx);
        }

        // Add `OUT_DIR` to env vars if unit has a build script.
        // `dedup_by` collapses consecutive roots from the same package so we
        // only record each package's build-script dirs once.
        let units_with_build_script = &self
            .bcx
            .roots
            .iter()
            .filter(|unit| self.build_scripts.contains_key(unit))
            .dedup_by(|x, y| x.pkg.package_id() == y.pkg.package_id())
            .collect::<Vec<_>>();
        for unit in units_with_build_script {
            for dep in &self.bcx.unit_graph[unit] {
                if dep.unit.mode.is_run_custom_build() {
                    let out_dir = self
                        .files()
                        .build_script_out_dir(&dep.unit)
                        .display()
                        .to_string();
                    let script_meta = self.get_run_build_script_metadata(&dep.unit);
                    self.compilation
                        .extra_env
                        .entry(script_meta)
                        .or_insert_with(Vec::new)
                        .push(("OUT_DIR".to_string(), out_dir));
                }
            }
        }

        // Collect the result of the build into `self.compilation`.
        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;

            // Collect information for `rustdoc --test`.
            if unit.mode.is_doc_test() {
                let mut unstable_opts = false;
                let mut args = compiler::extern_args(&self, unit, &mut unstable_opts)?;
                args.extend(compiler::lto_args(&self, unit));
                args.extend(compiler::features_args(unit));
                args.extend(compiler::check_cfg_args(unit));

                // Forward `--cfg`/`--check-cfg`/linker args emitted by this
                // unit's build script(s) to rustdoc as well.
                let script_metas = self.find_build_script_metadatas(unit);
                if let Some(meta_vec) = script_metas.clone() {
                    for meta in meta_vec {
                        if let Some(output) = self.build_script_outputs.lock().unwrap().get(meta) {
                            for cfg in &output.cfgs {
                                args.push("--cfg".into());
                                args.push(cfg.into());
                            }

                            for check_cfg in &output.check_cfgs {
                                args.push("--check-cfg".into());
                                args.push(check_cfg.into());
                            }

                            for (lt, arg) in &output.linker_args {
                                if lt.applies_to(&unit.target, unit.mode) {
                                    args.push("-C".into());
                                    args.push(format!("link-arg={}", arg).into());
                                }
                            }
                        }
                    }
                }
                args.extend(unit.rustdocflags.iter().map(Into::into));

                use super::MessageFormat;
                let format = match self.bcx.build_config.message_format {
                    MessageFormat::Short => "short",
                    MessageFormat::Human => "human",
                    MessageFormat::Json { .. } => "json",
                };
                args.push("--error-format".into());
                args.push(format.into());

                self.compilation.to_doc_test.push(compilation::Doctest {
                    unit: unit.clone(),
                    args,
                    unstable_opts,
                    linker: self.compilation.target_linker(unit.kind).clone(),
                    script_metas,
                    env: artifact::get_env(&self, self.unit_deps(unit))?,
                });
            }

            super::output_depinfo(&mut self, unit)?;
        }

        // Propagate build-script environment variables and native library
        // search paths into the final `Compilation`.
        for (script_meta, output) in self.build_script_outputs.lock().unwrap().iter() {
            self.compilation
                .extra_env
                .entry(*script_meta)
                .or_insert_with(Vec::new)
                .extend(output.env.iter().cloned());

            for dir in output.library_paths.iter() {
                self.compilation
                    .native_dirs
                    .insert(dir.clone().into_path_buf());
            }
        }
        Ok(self.compilation)
    }
314
315    fn collect_tests_and_executables(&mut self, unit: &Unit) -> CargoResult<()> {
316        for output in self.outputs(unit)?.iter() {
317            if matches!(
318                output.flavor,
319                FileFlavor::DebugInfo | FileFlavor::Auxiliary | FileFlavor::Sbom
320            ) {
321                continue;
322            }
323
324            let bindst = output.bin_dst();
325
326            if unit.mode == CompileMode::Test {
327                self.compilation
328                    .tests
329                    .push(self.unit_output(unit, &output.path));
330            } else if unit.target.is_executable() {
331                self.compilation
332                    .binaries
333                    .push(self.unit_output(unit, bindst));
334            } else if unit.target.is_cdylib()
335                && !self.compilation.cdylibs.iter().any(|uo| uo.unit == *unit)
336            {
337                self.compilation
338                    .cdylibs
339                    .push(self.unit_output(unit, bindst));
340            }
341        }
342        Ok(())
343    }
344
345    /// Returns the executable for the specified unit (if any).
346    pub fn get_executable(&mut self, unit: &Unit) -> CargoResult<Option<PathBuf>> {
347        let is_binary = unit.target.is_executable();
348        let is_test = unit.mode.is_any_test();
349        if !unit.mode.generates_executable() || !(is_binary || is_test) {
350            return Ok(None);
351        }
352        Ok(self
353            .outputs(unit)?
354            .iter()
355            .find(|o| o.flavor == FileFlavor::Normal)
356            .map(|output| output.bin_dst().clone()))
357    }
358
    /// Computes the output [`Layout`]s (host plus one per cross-compile
    /// target), records the primary packages and root crate names, and
    /// finally constructs [`CompilationFiles`].
    ///
    /// Must run before [`BuildRunner::prepare`] and before any call to
    /// [`BuildRunner::files`], which panics while `self.files` is `None`.
    #[tracing::instrument(skip_all)]
    pub fn prepare_units(&mut self) -> CargoResult<()> {
        let dest = self.bcx.profiles.get_dir_name();
        let host_layout = Layout::new(self.bcx.ws, None, &dest)?;
        let mut targets = HashMap::new();
        for kind in self.bcx.all_kinds.iter() {
            if let CompileKind::Target(target) = *kind {
                let layout = Layout::new(self.bcx.ws, Some(target), &dest)?;
                targets.insert(target, layout);
            }
        }
        self.primary_packages
            .extend(self.bcx.roots.iter().map(|u| u.pkg.package_id()));
        self.compilation
            .root_crate_names
            .extend(self.bcx.roots.iter().map(|u| u.target.crate_name()));

        self.record_units_requiring_metadata();

        let files = CompilationFiles::new(self, host_layout, targets);
        self.files = Some(files);
        Ok(())
    }
382
    /// Prepare this context, ensuring that all filesystem directories are in
    /// place.
    ///
    /// Requires [`BuildRunner::prepare_units`] to have run first (`self.files`
    /// is unwrapped here).
    #[tracing::instrument(skip_all)]
    pub fn prepare(&mut self) -> CargoResult<()> {
        // Create the host output directories, then one set per cross target.
        self.files
            .as_mut()
            .unwrap()
            .host
            .prepare()
            .context("couldn't prepare build directories")?;
        for target in self.files.as_mut().unwrap().target.values_mut() {
            target
                .prepare()
                .context("couldn't prepare build directories")?;
        }

        // Record the artifact and deps output directories for each compile kind.
        let files = self.files.as_ref().unwrap();
        for &kind in self.bcx.all_kinds.iter() {
            let layout = files.layout(kind);
            self.compilation
                .root_output
                .insert(kind, layout.artifact_dir().dest().to_path_buf());
            if self.bcx.gctx.cli_unstable().build_dir_new_layout {
                // NOTE(review): each iteration overwrites the `deps_output`
                // entry for `kind`, so only the last unit's deps dir is
                // retained — confirm this is intended under the new layout.
                for (unit, _) in self.bcx.unit_graph.iter() {
                    let dep_dir = self.files().deps_dir(unit);
                    paths::create_dir_all(&dep_dir)?;
                    self.compilation.deps_output.insert(kind, dep_dir);
                }
            } else {
                self.compilation
                    .deps_output
                    .insert(kind, layout.build_dir().legacy_deps().to_path_buf());
            }
        }
        Ok(())
    }
419
    /// Returns the [`CompilationFiles`] for this build.
    ///
    /// # Panics
    ///
    /// Panics if called before [`BuildRunner::prepare_units`] has populated
    /// `self.files`.
    pub fn files(&self) -> &CompilationFiles<'a, 'gctx> {
        self.files.as_ref().unwrap()
    }
423
    /// Returns the filenames that the given unit will generate.
    ///
    /// # Panics
    ///
    /// Panics if called before [`BuildRunner::prepare_units`] has populated
    /// `self.files`.
    pub fn outputs(&self, unit: &Unit) -> CargoResult<Arc<Vec<OutputFile>>> {
        self.files.as_ref().unwrap().outputs(unit, self.bcx)
    }
428
    /// Direct dependencies for the given unit, as recorded in the unit graph.
    pub fn unit_deps(&self, unit: &Unit) -> &[UnitDep] {
        &self.bcx.unit_graph[unit]
    }
433
434    /// Returns the `RunCustomBuild` Units associated with the given Unit.
435    ///
436    /// If the package does not have a build script, this returns None.
437    pub fn find_build_script_units(&self, unit: &Unit) -> Option<Vec<Unit>> {
438        if unit.mode.is_run_custom_build() {
439            return Some(vec![unit.clone()]);
440        }
441
442        let build_script_units: Vec<Unit> = self.bcx.unit_graph[unit]
443            .iter()
444            .filter(|unit_dep| {
445                unit_dep.unit.mode.is_run_custom_build()
446                    && unit_dep.unit.pkg.package_id() == unit.pkg.package_id()
447            })
448            .map(|unit_dep| unit_dep.unit.clone())
449            .collect();
450        if build_script_units.is_empty() {
451            None
452        } else {
453            Some(build_script_units)
454        }
455    }
456
457    /// Returns the metadata hash for the `RunCustomBuild` Unit associated with
458    /// the given unit.
459    ///
460    /// If the package does not have a build script, this returns None.
461    pub fn find_build_script_metadatas(&self, unit: &Unit) -> Option<Vec<UnitHash>> {
462        self.find_build_script_units(unit).map(|units| {
463            units
464                .iter()
465                .map(|u| self.get_run_build_script_metadata(u))
466                .collect()
467        })
468    }
469
    /// Returns the metadata hash for a `RunCustomBuild` unit.
    ///
    /// # Panics
    ///
    /// Panics if `unit` is not a `RunCustomBuild` unit.
    pub fn get_run_build_script_metadata(&self, unit: &Unit) -> UnitHash {
        assert!(unit.mode.is_run_custom_build());
        self.files().metadata(unit).unit_id()
    }
475
476    /// Returns the list of SBOM output file paths for a given [`Unit`].
477    pub fn sbom_output_files(&self, unit: &Unit) -> CargoResult<Vec<PathBuf>> {
478        Ok(self
479            .outputs(unit)?
480            .iter()
481            .filter(|o| o.flavor == FileFlavor::Sbom)
482            .map(|o| o.path.clone())
483            .collect())
484    }
485
    /// Whether `unit` belongs to one of the user-selected ("primary")
    /// packages; see the `primary_packages` field.
    pub fn is_primary_package(&self, unit: &Unit) -> bool {
        self.primary_packages.contains(&unit.pkg.package_id())
    }
489
490    /// Returns the list of filenames read by cargo to generate the [`BuildContext`]
491    /// (all `Cargo.toml`, etc.).
492    pub fn build_plan_inputs(&self) -> CargoResult<Vec<PathBuf>> {
493        // Keep sorted for consistency.
494        let mut inputs = BTreeSet::new();
495        // Note: dev-deps are skipped if they are not present in the unit graph.
496        for unit in self.bcx.unit_graph.keys() {
497            inputs.insert(unit.pkg.manifest_path().to_path_buf());
498        }
499        Ok(inputs.into_iter().collect())
500    }
501
502    /// Returns a [`UnitOutput`] which represents some information about the
503    /// output of a unit.
504    pub fn unit_output(&self, unit: &Unit, path: &Path) -> UnitOutput {
505        let script_metas = self.find_build_script_metadatas(unit);
506        UnitOutput {
507            unit: unit.clone(),
508            path: path.to_path_buf(),
509            script_metas,
510        }
511    }
512
    /// Check if any output file name collision happens.
    /// See <https://github.com/rust-lang/cargo/issues/6313> for more.
    #[tracing::instrument(skip_all)]
    fn check_collisions(&self) -> CargoResult<()> {
        // Maps each output path to the unit that claimed it first; an insert
        // that returns `Some` is a collision.
        let mut output_collisions = HashMap::new();
        let describe_collision = |unit: &Unit, other_unit: &Unit, path: &PathBuf| -> String {
            format!(
                "The {} target `{}` in package `{}` has the same output \
                     filename as the {} target `{}` in package `{}`.\n\
                     Colliding filename is: {}\n",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg.package_id(),
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg.package_id(),
                path.display()
            )
        };
        let suggestion = "Consider changing their names to be unique or compiling them separately.\n\
             This may become a hard error in the future; see \
             <https://github.com/rust-lang/cargo/issues/6313>.";
        let rustdoc_suggestion = "This is a known bug where multiple crates with the same name use\n\
             the same path; see <https://github.com/rust-lang/cargo/issues/6313>.";
        // Same-name collisions get a short warning; different-name collisions
        // are unexpected and ask the user to file a bug report.
        let report_collision = |unit: &Unit,
                                other_unit: &Unit,
                                path: &PathBuf,
                                suggestion: &str|
         -> CargoResult<()> {
            if unit.target.name() == other_unit.target.name() {
                self.bcx.gctx.shell().warn(format!(
                    "output filename collision.\n\
                     {}\
                     The targets should have unique names.\n\
                     {}",
                    describe_collision(unit, other_unit, path),
                    suggestion
                ))
            } else {
                self.bcx.gctx.shell().warn(format!(
                    "output filename collision.\n\
                    {}\
                    The output filenames should be unique.\n\
                    {}\n\
                    If this looks unexpected, it may be a bug in Cargo. Please file a bug report at\n\
                    https://github.com/rust-lang/cargo/issues/ with as much information as you\n\
                    can provide.\n\
                    cargo {} running on `{}` target `{}`\n\
                    First unit: {:?}\n\
                    Second unit: {:?}",
                    describe_collision(unit, other_unit, path),
                    suggestion,
                    crate::version(),
                    self.bcx.host_triple(),
                    self.bcx.target_data.short_name(&unit.kind),
                    unit,
                    other_unit))
            }
        };

        fn doc_collision_error(unit: &Unit, other_unit: &Unit) -> CargoResult<()> {
            bail!(
                "document output filename collision\n\
                 The {} `{}` in package `{}` has the same name as the {} `{}` in package `{}`.\n\
                 Only one may be documented at once since they output to the same path.\n\
                 Consider documenting only one, renaming one, \
                 or marking one with `doc = false` in Cargo.toml.",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg,
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg,
            );
        }

        let mut keys = self
            .bcx
            .unit_graph
            .keys()
            .filter(|unit| !unit.mode.is_run_custom_build())
            .collect::<Vec<_>>();
        // Sort for consistent error messages.
        keys.sort_unstable();
        // These are kept separate to retain compatibility with older
        // versions, which generated an error when there was a duplicate lib
        // or bin (but the old code did not check bin<->lib collisions). To
        // retain backwards compatibility, this only generates an error for
        // duplicate libs or duplicate bins (but not both). Ideally this
        // shouldn't be here, but since there isn't a complete workaround,
        // yet, this retains the old behavior.
        let mut doc_libs = HashMap::new();
        let mut doc_bins = HashMap::new();
        for unit in keys {
            if unit.mode.is_doc() && self.is_primary_package(unit) {
                // These situations have been an error since before 1.0, so it
                // is not a warning like the other situations.
                if unit.target.is_lib() {
                    if let Some(prev) = doc_libs.insert((unit.target.crate_name(), unit.kind), unit)
                    {
                        doc_collision_error(unit, prev)?;
                    }
                } else if let Some(prev) =
                    doc_bins.insert((unit.target.crate_name(), unit.kind), unit)
                {
                    doc_collision_error(unit, prev)?;
                }
            }
            // Check the primary output, its hardlink, and any `--artifact-dir`
            // export path against everything seen so far.
            for output in self.outputs(unit)?.iter() {
                if let Some(other_unit) = output_collisions.insert(output.path.clone(), unit) {
                    if unit.mode.is_doc() {
                        // See https://github.com/rust-lang/rust/issues/56169
                        // and https://github.com/rust-lang/rust/issues/61378
                        report_collision(unit, other_unit, &output.path, rustdoc_suggestion)?;
                    } else {
                        report_collision(unit, other_unit, &output.path, suggestion)?;
                    }
                }
                if let Some(hardlink) = output.hardlink.as_ref() {
                    if let Some(other_unit) = output_collisions.insert(hardlink.clone(), unit) {
                        report_collision(unit, other_unit, hardlink, suggestion)?;
                    }
                }
                if let Some(ref export_path) = output.export_path {
                    if let Some(other_unit) = output_collisions.insert(export_path.clone(), unit) {
                        self.bcx.gctx.shell().warn(format!(
                            "`--artifact-dir` filename collision.\n\
                             {}\
                             The exported filenames should be unique.\n\
                             {}",
                            describe_collision(unit, other_unit, export_path),
                            suggestion
                        ))?;
                    }
                }
            }
        }
        Ok(())
    }
652
653    /// Records the list of units which are required to emit metadata.
654    ///
655    /// Units which depend only on the metadata of others requires the others to
656    /// actually produce metadata, so we'll record that here.
657    fn record_units_requiring_metadata(&mut self) {
658        for (key, deps) in self.bcx.unit_graph.iter() {
659            for dep in deps {
660                if self.only_requires_rmeta(key, &dep.unit) {
661                    self.rmeta_required.insert(dep.unit.clone());
662                }
663            }
664        }
665    }
666
667    /// Returns whether when `parent` depends on `dep` if it only requires the
668    /// metadata file from `dep`.
669    pub fn only_requires_rmeta(&self, parent: &Unit, dep: &Unit) -> bool {
670        // We're only a candidate for requiring an `rmeta` file if we
671        // ourselves are building an rlib,
672        !parent.requires_upstream_objects()
673            && parent.mode == CompileMode::Build
674            // Our dependency must also be built as an rlib, otherwise the
675            // object code must be useful in some fashion
676            && !dep.requires_upstream_objects()
677            && dep.mode == CompileMode::Build
678    }
679
    /// Returns whether `unit` should emit metadata when it is built, because
    /// some other compilations rely on that metadata (see
    /// [`BuildRunner::record_units_requiring_metadata`]).
    pub fn rmeta_required(&self, unit: &Unit) -> bool {
        self.rmeta_required.contains(unit)
    }
685
    /// Finds metadata for Doc/Docscrape units.
    ///
    /// rustdoc needs a -Cmetadata flag in order to recognize StableCrateIds that refer to
    /// items in the crate being documented. The -Cmetadata flag used by reverse-dependencies
    /// will be the metadata of the Cargo unit that generated the current library's rmeta file,
    /// which should be a Check unit.
    ///
    /// If the current crate has reverse-dependencies, such a Check unit should exist, and so
    /// we use that crate's metadata. If not, we use the crate's Doc unit so at least examples
    /// scraped from the current crate can be used when documenting the current crate.
    #[tracing::instrument(skip_all)]
    pub fn compute_metadata_for_doc_units(&mut self) {
        for unit in self.bcx.unit_graph.keys() {
            if !unit.mode.is_doc() && !unit.mode.is_doc_scrape() {
                continue;
            }

            // All units for the same package/target, ignoring Docscrape units.
            let matching_units = self
                .bcx
                .unit_graph
                .keys()
                .filter(|other| {
                    unit.pkg == other.pkg
                        && unit.target == other.target
                        && !other.mode.is_doc_scrape()
                })
                .collect::<Vec<_>>();
            // Prefer a Check unit's metadata, then a Doc unit's, and finally
            // fall back to the unit itself.
            let metadata_unit = matching_units
                .iter()
                .find(|other| other.mode.is_check())
                .or_else(|| matching_units.iter().find(|other| other.mode.is_doc()))
                .unwrap_or(&unit);
            self.metadata_for_doc_units
                .insert(unit.clone(), self.files().metadata(metadata_unit));
        }
    }
722}