cargo/core/compiler/build_runner/mod.rs

1//! [`BuildRunner`] is the mutable state used during the build process.
2
3use std::collections::{HashMap, HashSet};
4use std::path::{Path, PathBuf};
5use std::sync::{Arc, Mutex};
6
7use crate::core::PackageId;
8use crate::core::compiler::compilation::{self, UnitOutput};
9use crate::core::compiler::{self, Unit, UserIntent, artifact};
10use crate::util::cache_lock::CacheLockMode;
11use crate::util::errors::CargoResult;
12use annotate_snippets::{Level, Message};
13use anyhow::{Context as _, bail};
14use cargo_util::paths;
15use filetime::FileTime;
16use itertools::Itertools;
17use jobserver::Client;
18
19use super::RustdocFingerprint;
20use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts};
21use super::fingerprint::{Checksum, Fingerprint};
22use super::job_queue::JobQueue;
23use super::layout::Layout;
24use super::lto::Lto;
25use super::unit_graph::UnitDep;
26use super::{BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor};
27
28mod compilation_files;
29use self::compilation_files::CompilationFiles;
30pub use self::compilation_files::{Metadata, OutputFile, UnitHash};
31
/// Collection of all the stuff that is needed to perform a build.
///
/// Different from the [`BuildContext`], `BuildRunner` is a _mutable_ state used
/// throughout the entire build process. Everything is coordinated through this.
///
/// [`BuildContext`]: crate::core::compiler::BuildContext
pub struct BuildRunner<'a, 'gctx> {
    /// Mostly static information about the build task.
    pub bcx: &'a BuildContext<'a, 'gctx>,
    /// A large collection of information about the result of the entire compilation.
    pub compilation: Compilation<'gctx>,
    /// Output from build scripts, updated after each build script runs.
    pub build_script_outputs: Arc<Mutex<BuildScriptOutputs>>,
    /// Dependencies (like rerun-if-changed) declared by a build script.
    /// This is *only* populated from the output from previous runs.
    /// If the build script hasn't ever been run, then it must be run.
    pub build_explicit_deps: HashMap<Unit, BuildDeps>,
    /// Fingerprints used to detect if a unit is out-of-date.
    pub fingerprints: HashMap<Unit, Arc<Fingerprint>>,
    /// Cache of file mtimes to reduce filesystem hits.
    pub mtime_cache: HashMap<PathBuf, FileTime>,
    /// Cache of file checksums to reduce filesystem reads.
    pub checksum_cache: HashMap<PathBuf, Checksum>,
    /// A set used to track which units have been compiled.
    /// A unit may appear in the job graph multiple times as a dependency of
    /// multiple packages, but it only needs to run once.
    pub compiled: HashSet<Unit>,
    /// Linking information for each `Unit`.
    /// See `build_map` for details.
    pub build_scripts: HashMap<Unit, Arc<BuildScripts>>,
    /// Job server client to manage concurrency with other processes.
    pub jobserver: Client,
    /// "Primary" packages are the ones the user selected on the command-line
    /// with `-p` flags. If no flags are specified, then it is the defaults
    /// based on the current directory and the default workspace members.
    primary_packages: HashSet<PackageId>,
    /// An abstraction of the files and directories that will be generated by
    /// the compilation. This is `None` until after `unit_dependencies` has
    /// been computed.
    files: Option<CompilationFiles<'a, 'gctx>>,

    /// A set of units which are compiling rlibs and are expected to produce
    /// metadata files in addition to the rlib itself.
    rmeta_required: HashSet<Unit>,

    /// Map of the LTO-status of each unit. This indicates what sort of
    /// compilation is happening (only object, only bitcode, both, etc), and is
    /// precalculated early on.
    pub lto: HashMap<Unit, Lto>,

    /// Map of Doc/Docscrape units to metadata for their -Cmetadata flag.
    /// See [`BuildRunner::compute_metadata_for_doc_units`] for more details.
    pub metadata_for_doc_units: HashMap<Unit, Metadata>,

    /// Set of metadata of Docscrape units that fail before completion, e.g.
    /// because the target has a type error. This is in an Arc<Mutex<..>>
    /// because it is continuously updated as the job progresses.
    pub failed_scrape_units: Arc<Mutex<HashSet<UnitHash>>>,
}
91
92impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
93    pub fn new(bcx: &'a BuildContext<'a, 'gctx>) -> CargoResult<Self> {
94        // Load up the jobserver that we'll use to manage our parallelism. This
95        // is the same as the GNU make implementation of a jobserver, and
96        // intentionally so! It's hoped that we can interact with GNU make and
97        // all share the same jobserver.
98        //
99        // Note that if we don't have a jobserver in our environment then we
100        // create our own, and we create it with `n` tokens, but immediately
101        // acquire one, because one token is ourself, a running process.
102        let jobserver = match bcx.gctx.jobserver_from_env() {
103            Some(c) => c.clone(),
104            None => {
105                let client =
106                    Client::new(bcx.jobs() as usize).context("failed to create jobserver")?;
107                client.acquire_raw()?;
108                client
109            }
110        };
111
112        Ok(Self {
113            bcx,
114            compilation: Compilation::new(bcx)?,
115            build_script_outputs: Arc::new(Mutex::new(BuildScriptOutputs::default())),
116            fingerprints: HashMap::new(),
117            mtime_cache: HashMap::new(),
118            checksum_cache: HashMap::new(),
119            compiled: HashSet::new(),
120            build_scripts: HashMap::new(),
121            build_explicit_deps: HashMap::new(),
122            jobserver,
123            primary_packages: HashSet::new(),
124            files: None,
125            rmeta_required: HashSet::new(),
126            lto: HashMap::new(),
127            metadata_for_doc_units: HashMap::new(),
128            failed_scrape_units: Arc::new(Mutex::new(HashSet::new())),
129        })
130    }
131
    /// Dry-run the compilation without actually running it.
    ///
    /// This is expected to collect information like the location of output artifacts.
    /// Please keep in sync with non-compilation part in [`BuildRunner::compile`].
    pub fn dry_run(mut self) -> CargoResult<Compilation<'gctx>> {
        // Hold the package cache lock for the whole dry run, mirroring
        // `compile`, so other cargo processes cannot mutate the `src` cache
        // underneath us.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        // Same preparation sequence as `compile`, minus the job queue:
        // compute LTO plans, lay out output files, create directories, and
        // verify there are no output filename collisions.
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        self.check_collisions()?;

        // Record where test/binary artifacts would be placed, without
        // building anything.
        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;
        }

        Ok(self.compilation)
    }
152
    /// Starts compilation, waits for it to finish, and returns information
    /// about the result of compilation.
    ///
    /// See [`ops::cargo_compile`] for a higher-level view of the compile process.
    ///
    /// [`ops::cargo_compile`]: crate::ops::cargo_compile
    #[tracing::instrument(skip_all)]
    pub fn compile(mut self, exec: &Arc<dyn Executor>) -> CargoResult<Compilation<'gctx>> {
        // A shared lock is held during the duration of the build since rustc
        // needs to read from the `src` cache, and we don't want other
        // commands modifying the `src` cache while it is running.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        let mut queue = JobQueue::new(self.bcx);
        // Pre-build bookkeeping: LTO plan, output layout, directory creation,
        // build-script linkage map, collision check, and doc metadata.
        // Keep this in sync with the non-compilation part of `dry_run`.
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        custom_build::build_map(&mut self)?;
        self.check_collisions()?;
        self.compute_metadata_for_doc_units();

        // We need to make sure that if there were any previous docs already compiled,
        // they were compiled with the same Rustc version that we're currently using.
        // See the function doc comment for more.
        if self.bcx.build_config.intent.is_doc() {
            RustdocFingerprint::check_rustdoc_fingerprint(&self)?
        }

        // Enqueue jobs for every root unit; `super::compile` recurses into
        // dependencies as needed.
        for unit in &self.bcx.roots {
            let force_rebuild = self.bcx.build_config.force_rebuild;
            super::compile(&mut self, &mut queue, unit, exec, force_rebuild)?;
        }

        // Now that we've got the full job queue and we've done all our
        // fingerprint analysis to determine what to run, bust all the memoized
        // fingerprint hashes to ensure that during the build they all get the
        // most up-to-date values. In theory we only need to bust hashes that
        // transitively depend on a dirty build script, but it shouldn't matter
        // that much for performance anyway.
        for fingerprint in self.fingerprints.values() {
            fingerprint.clear_memoized();
        }

        // Now that we've figured out everything that we're going to do, do it!
        queue.execute(&mut self)?;

        // Add `OUT_DIR` to env vars if unit has a build script.
        let units_with_build_script = &self
            .bcx
            .roots
            .iter()
            .filter(|unit| self.build_scripts.contains_key(unit))
            // `dedup_by` only collapses *consecutive* duplicates, so roots of
            // the same package are assumed to be adjacent here.
            .dedup_by(|x, y| x.pkg.package_id() == y.pkg.package_id())
            .collect::<Vec<_>>();
        for unit in units_with_build_script {
            for dep in &self.bcx.unit_graph[unit] {
                if dep.unit.mode.is_run_custom_build() {
                    let out_dir = self
                        .files()
                        .build_script_out_dir(&dep.unit)
                        .display()
                        .to_string();
                    let script_meta = self.get_run_build_script_metadata(&dep.unit);
                    self.compilation
                        .extra_env
                        .entry(script_meta)
                        .or_insert_with(Vec::new)
                        .push(("OUT_DIR".to_string(), out_dir));
                }
            }
        }

        self.collect_doc_merge_info()?;

        // Collect the result of the build into `self.compilation`.
        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;

            // Collect information for `rustdoc --test`.
            if unit.mode.is_doc_test() {
                let mut unstable_opts = false;
                // Assemble the rustdoc invocation: externs, library search
                // paths, LTO, feature and check-cfg flags.
                let mut args = compiler::extern_args(&self, unit, &mut unstable_opts)?;
                args.extend(compiler::lib_search_paths(&self, unit)?);
                args.extend(compiler::lto_args(&self, unit));
                args.extend(compiler::features_args(unit));
                args.extend(compiler::check_cfg_args(unit));

                // Forward cfgs, check-cfgs, and applicable linker args emitted
                // by this unit's build scripts.
                let script_metas = self.find_build_script_metadatas(unit);
                if let Some(meta_vec) = script_metas.clone() {
                    for meta in meta_vec {
                        if let Some(output) = self.build_script_outputs.lock().unwrap().get(meta) {
                            for cfg in &output.cfgs {
                                args.push("--cfg".into());
                                args.push(cfg.into());
                            }

                            for check_cfg in &output.check_cfgs {
                                args.push("--check-cfg".into());
                                args.push(check_cfg.into());
                            }

                            for (lt, arg) in &output.linker_args {
                                if lt.applies_to(&unit.target, unit.mode) {
                                    args.push("-C".into());
                                    args.push(format!("link-arg={}", arg).into());
                                }
                            }
                        }
                    }
                }
                args.extend(unit.rustdocflags.iter().map(Into::into));

                // Match the user's requested diagnostic output format.
                use super::MessageFormat;
                let format = match self.bcx.build_config.message_format {
                    MessageFormat::Short => "short",
                    MessageFormat::Human => "human",
                    MessageFormat::Json { .. } => "json",
                };
                args.push("--error-format".into());
                args.push(format.into());

                self.compilation.to_doc_test.push(compilation::Doctest {
                    unit: unit.clone(),
                    args,
                    unstable_opts,
                    linker: self.compilation.target_linker(unit.kind).clone(),
                    script_metas,
                    env: artifact::get_env(&self, self.unit_deps(unit))?,
                });
            }

            super::output_depinfo(&mut self, unit)?;
        }

        // Propagate build-script environment variables and native library
        // search paths into the final `Compilation`.
        for (script_meta, output) in self.build_script_outputs.lock().unwrap().iter() {
            self.compilation
                .extra_env
                .entry(*script_meta)
                .or_insert_with(Vec::new)
                .extend(output.env.iter().cloned());

            for dir in output.library_paths.iter() {
                self.compilation
                    .native_dirs
                    .insert(dir.clone().into_path_buf());
            }
        }
        Ok(self.compilation)
    }
304
305    fn collect_tests_and_executables(&mut self, unit: &Unit) -> CargoResult<()> {
306        for output in self.outputs(unit)?.iter() {
307            if matches!(
308                output.flavor,
309                FileFlavor::DebugInfo | FileFlavor::Auxiliary | FileFlavor::Sbom
310            ) {
311                continue;
312            }
313
314            let bindst = output.bin_dst();
315
316            if unit.mode == CompileMode::Test {
317                self.compilation
318                    .tests
319                    .push(self.unit_output(unit, &output.path));
320            } else if unit.target.is_executable() {
321                self.compilation
322                    .binaries
323                    .push(self.unit_output(unit, bindst));
324            } else if unit.target.is_cdylib()
325                && !self.compilation.cdylibs.iter().any(|uo| uo.unit == *unit)
326            {
327                self.compilation
328                    .cdylibs
329                    .push(self.unit_output(unit, bindst));
330            }
331        }
332        Ok(())
333    }
334
    /// Gathers rustdoc "doc parts" outputs (cross-crate info files) and stores
    /// their fingerprints in `self.compilation.rustdoc_fingerprints`, grouped
    /// by compile kind.
    ///
    /// Only active behind the unstable `rustdoc-mergeable-info` flag, for doc
    /// intents that do not request JSON output.
    fn collect_doc_merge_info(&mut self) -> CargoResult<()> {
        if !self.bcx.gctx.cli_unstable().rustdoc_mergeable_info {
            return Ok(());
        }

        if !self.bcx.build_config.intent.is_doc() {
            return Ok(());
        }

        if self.bcx.build_config.intent.wants_doc_json_output() {
            // rustdoc JSON output doesn't support merge (yet?)
            return Ok(());
        }

        // CompileKind -> paths of doc-parts outputs for that kind.
        let mut doc_parts_map: HashMap<_, Vec<_>> = HashMap::new();

        // Scan the whole unit graph when dependency docs are wanted,
        // otherwise only the root units.
        let unit_iter = if self.bcx.build_config.intent.wants_deps_docs() {
            itertools::Either::Left(self.bcx.unit_graph.keys())
        } else {
            itertools::Either::Right(self.bcx.roots.iter())
        };

        for unit in unit_iter {
            if !unit.mode.is_doc() {
                continue;
            }
            // Assumption: one `rustdoc` call generates only one cross-crate info JSON.
            let outputs = self.outputs(unit)?;

            let Some(doc_parts) = outputs
                .iter()
                .find(|o| matches!(o.flavor, FileFlavor::DocParts))
            else {
                continue;
            };

            doc_parts_map
                .entry(unit.kind)
                .or_default()
                .push(doc_parts.path.to_owned());
        }

        self.compilation.rustdoc_fingerprints = Some(
            doc_parts_map
                .into_iter()
                .map(|(kind, doc_parts)| (kind, RustdocFingerprint::new(self, kind, doc_parts)))
                .collect(),
        );

        Ok(())
    }
386
387    /// Returns the executable for the specified unit (if any).
388    pub fn get_executable(&mut self, unit: &Unit) -> CargoResult<Option<PathBuf>> {
389        let is_binary = unit.target.is_executable();
390        let is_test = unit.mode.is_any_test();
391        if !unit.mode.generates_executable() || !(is_binary || is_test) {
392            return Ok(None);
393        }
394        Ok(self
395            .outputs(unit)?
396            .iter()
397            .find(|o| o.flavor == FileFlavor::Normal)
398            .map(|output| output.bin_dst().clone()))
399    }
400
    /// Creates the output [`Layout`]s (host plus one per cross-compilation
    /// target), records primary packages and root crate names, and computes
    /// [`CompilationFiles`].
    ///
    /// Must run before [`BuildRunner::prepare`] and [`BuildRunner::files`].
    #[tracing::instrument(skip_all)]
    pub fn prepare_units(&mut self) -> CargoResult<()> {
        let dest = self.bcx.profiles.get_dir_name();
        // We try to only lock the artifact-dir if we need to.
        // For example, `cargo check` does not write any files to the artifact-dir so we don't need
        // to lock it.
        let must_take_artifact_dir_lock = match self.bcx.build_config.intent {
            UserIntent::Check { .. } => {
                // Generally cargo check does not need to take the artifact-dir lock but there is
                // one exception: If check has `--timings` we still need to lock artifact-dir since
                // we will output the report files.
                !self.bcx.build_config.timing_outputs.is_empty()
            }
            UserIntent::Build
            | UserIntent::Test
            | UserIntent::Doc { .. }
            | UserIntent::Doctest
            | UserIntent::Bench => true,
        };
        let host_layout = Layout::new(self.bcx.ws, None, &dest, must_take_artifact_dir_lock)?;
        // One additional layout per cross-compilation target.
        let mut targets = HashMap::new();
        for kind in self.bcx.all_kinds.iter() {
            if let CompileKind::Target(target) = *kind {
                let layout = Layout::new(
                    self.bcx.ws,
                    Some(target),
                    &dest,
                    must_take_artifact_dir_lock,
                )?;
                targets.insert(target, layout);
            }
        }
        // Root units define the "primary" packages and the crate names
        // reported in the final compilation.
        self.primary_packages
            .extend(self.bcx.roots.iter().map(|u| u.pkg.package_id()));
        self.compilation
            .root_crate_names
            .extend(self.bcx.roots.iter().map(|u| u.target.crate_name()));

        self.record_units_requiring_metadata();

        let files = CompilationFiles::new(self, host_layout, targets);
        self.files = Some(files);
        Ok(())
    }
445
    /// Prepare this context, ensuring that all filesystem directories are in
    /// place.
    ///
    /// Must run after [`BuildRunner::prepare_units`]; panics otherwise.
    #[tracing::instrument(skip_all)]
    pub fn prepare(&mut self) -> CargoResult<()> {
        // Create the host output directories...
        self.files
            .as_mut()
            .unwrap()
            .host
            .prepare()
            .context("couldn't prepare build directories")?;
        // ...and those of every cross-compilation target.
        for target in self.files.as_mut().unwrap().target.values_mut() {
            target
                .prepare()
                .context("couldn't prepare build directories")?;
        }

        // Record per-kind output locations in the final compilation.
        let files = self.files.as_ref().unwrap();
        for &kind in self.bcx.all_kinds.iter() {
            let layout = files.layout(kind);
            if let Some(artifact_dir) = layout.artifact_dir() {
                self.compilation
                    .root_output
                    .insert(kind, artifact_dir.dest().to_path_buf());
            }
            if self.bcx.gctx.cli_unstable().build_dir_new_layout {
                // NOTE(review): `deps_output` is keyed by `kind`, so this loop
                // over all units overwrites the same entry repeatedly and only
                // the last unit's deps dir is retained per kind (all dirs are
                // still created on disk) — confirm this is intended.
                for (unit, _) in self.bcx.unit_graph.iter() {
                    let dep_dir = self.files().deps_dir(unit);
                    paths::create_dir_all(&dep_dir)?;
                    self.compilation.deps_output.insert(kind, dep_dir);
                }
            } else {
                self.compilation
                    .deps_output
                    .insert(kind, layout.build_dir().legacy_deps().to_path_buf());
            }
        }
        Ok(())
    }
484
    /// Returns the computed [`CompilationFiles`] for this build.
    ///
    /// Panics if called before [`BuildRunner::prepare_units`] has populated it.
    pub fn files(&self) -> &CompilationFiles<'a, 'gctx> {
        self.files.as_ref().unwrap()
    }

    /// Returns the filenames that the given unit will generate.
    ///
    /// Like [`BuildRunner::files`], panics if called before `prepare_units`.
    pub fn outputs(&self, unit: &Unit) -> CargoResult<Arc<Vec<OutputFile>>> {
        self.files.as_ref().unwrap().outputs(unit, self.bcx)
    }

    /// Direct dependencies for the given unit.
    pub fn unit_deps(&self, unit: &Unit) -> &[UnitDep] {
        &self.bcx.unit_graph[unit]
    }
498
499    /// Returns the `RunCustomBuild` Units associated with the given Unit.
500    ///
501    /// If the package does not have a build script, this returns None.
502    pub fn find_build_script_units(&self, unit: &Unit) -> Option<Vec<Unit>> {
503        if unit.mode.is_run_custom_build() {
504            return Some(vec![unit.clone()]);
505        }
506
507        let build_script_units: Vec<Unit> = self.bcx.unit_graph[unit]
508            .iter()
509            .filter(|unit_dep| {
510                unit_dep.unit.mode.is_run_custom_build()
511                    && unit_dep.unit.pkg.package_id() == unit.pkg.package_id()
512            })
513            .map(|unit_dep| unit_dep.unit.clone())
514            .collect();
515        if build_script_units.is_empty() {
516            None
517        } else {
518            Some(build_script_units)
519        }
520    }
521
522    /// Returns the metadata hash for the `RunCustomBuild` Unit associated with
523    /// the given unit.
524    ///
525    /// If the package does not have a build script, this returns None.
526    pub fn find_build_script_metadatas(&self, unit: &Unit) -> Option<Vec<UnitHash>> {
527        self.find_build_script_units(unit).map(|units| {
528            units
529                .iter()
530                .map(|u| self.get_run_build_script_metadata(u))
531                .collect()
532        })
533    }
534
    /// Returns the metadata hash for a `RunCustomBuild` unit.
    ///
    /// # Panics
    ///
    /// Panics if `unit` is not a `RunCustomBuild` unit.
    pub fn get_run_build_script_metadata(&self, unit: &Unit) -> UnitHash {
        assert!(unit.mode.is_run_custom_build());
        self.files().metadata(unit).unit_id()
    }
540
541    /// Returns the list of SBOM output file paths for a given [`Unit`].
542    pub fn sbom_output_files(&self, unit: &Unit) -> CargoResult<Vec<PathBuf>> {
543        Ok(self
544            .outputs(unit)?
545            .iter()
546            .filter(|o| o.flavor == FileFlavor::Sbom)
547            .map(|o| o.path.clone())
548            .collect())
549    }
550
    /// Returns whether the given unit's package was selected by the user
    /// (e.g. via `-p` or as a default workspace member); see `primary_packages`.
    pub fn is_primary_package(&self, unit: &Unit) -> bool {
        self.primary_packages.contains(&unit.pkg.package_id())
    }
554
555    /// Returns a [`UnitOutput`] which represents some information about the
556    /// output of a unit.
557    pub fn unit_output(&self, unit: &Unit, path: &Path) -> UnitOutput {
558        let script_metas = self.find_build_script_metadatas(unit);
559        UnitOutput {
560            unit: unit.clone(),
561            path: path.to_path_buf(),
562            script_metas,
563        }
564    }
565
    /// Check if any output file name collision happens.
    /// See <https://github.com/rust-lang/cargo/issues/6313> for more.
    #[tracing::instrument(skip_all)]
    fn check_collisions(&self) -> CargoResult<()> {
        // Maps an output path to the first unit that claimed it.
        let mut output_collisions = HashMap::new();
        let describe_collision = |unit: &Unit, other_unit: &Unit| -> String {
            format!(
                "the {} target `{}` in package `{}` has the same output filename as the {} target `{}` in package `{}`",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg.package_id(),
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg.package_id(),
            )
        };
        // Trailing advice attached to "expected" (same-name) collisions.
        let suggestion = [
            Level::NOTE.message("this may become a hard error in the future; see <https://github.com/rust-lang/cargo/issues/6313>"),
            Level::HELP.message("consider changing their names to be unique or compiling them separately")
        ];
        let rustdoc_suggestion = [
            Level::NOTE.message("this is a known bug where multiple crates with the same name use the same path; see <https://github.com/rust-lang/cargo/issues/6313>")
        ];
        // Same-name collisions get a short warning; different-name collisions
        // suggest a Cargo bug, so the warning includes extra diagnostics for a
        // bug report.
        let report_collision = |unit: &Unit,
                                other_unit: &Unit,
                                path: &PathBuf,
                                messages: &[Message<'_>]|
         -> CargoResult<()> {
            if unit.target.name() == other_unit.target.name() {
                self.bcx.gctx.shell().print_report(
                    &[Level::WARNING
                        .secondary_title(format!("output filename collision at {}", path.display()))
                        .elements(
                            [Level::NOTE.message(describe_collision(unit, other_unit))]
                                .into_iter()
                                .chain(messages.iter().cloned()),
                        )],
                    false,
                )
            } else {
                self.bcx.gctx.shell().print_report(
                    &[Level::WARNING
                        .secondary_title(format!("output filename collision at {}", path.display()))
                        .elements([
                            Level::NOTE.message(describe_collision(unit, other_unit)),
                            Level::NOTE.message("if this looks unexpected, it may be a bug in Cargo. Please file a bug \
                                report at https://github.com/rust-lang/cargo/issues/ with as much information as you \
                                can provide."),
                            Level::NOTE.message(format!("cargo {} running on `{}` target `{}`",
                                crate::version(), self.bcx.host_triple(), self.bcx.target_data.short_name(&unit.kind))),
                            Level::NOTE.message(format!("first unit: {unit:?}")),
                            Level::NOTE.message(format!("second unit: {other_unit:?}")),
                        ])],
                    false,
                )
            }
        };

        // Duplicate documentation outputs are a hard error (see comment on
        // `doc_libs`/`doc_bins` below).
        fn doc_collision_error(unit: &Unit, other_unit: &Unit) -> CargoResult<()> {
            bail!(
                "document output filename collision\n\
                 The {} `{}` in package `{}` has the same name as the {} `{}` in package `{}`.\n\
                 Only one may be documented at once since they output to the same path.\n\
                 Consider documenting only one, renaming one, \
                 or marking one with `doc = false` in Cargo.toml.",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg,
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg,
            );
        }

        // Build-script runs produce no conflicting artifacts, so skip them.
        let mut keys = self
            .bcx
            .unit_graph
            .keys()
            .filter(|unit| !unit.mode.is_run_custom_build())
            .collect::<Vec<_>>();
        // Sort for consistent error messages.
        keys.sort_unstable();
        // These are kept separate to retain compatibility with older
        // versions, which generated an error when there was a duplicate lib
        // or bin (but the old code did not check bin<->lib collisions). To
        // retain backwards compatibility, this only generates an error for
        // duplicate libs or duplicate bins (but not both). Ideally this
        // shouldn't be here, but since there isn't a complete workaround,
        // yet, this retains the old behavior.
        let mut doc_libs = HashMap::new();
        let mut doc_bins = HashMap::new();
        for unit in keys {
            if unit.mode.is_doc() && self.is_primary_package(unit) {
                // These situations have been an error since before 1.0, so it
                // is not a warning like the other situations.
                if unit.target.is_lib() {
                    if let Some(prev) = doc_libs.insert((unit.target.crate_name(), unit.kind), unit)
                    {
                        doc_collision_error(unit, prev)?;
                    }
                } else if let Some(prev) =
                    doc_bins.insert((unit.target.crate_name(), unit.kind), unit)
                {
                    doc_collision_error(unit, prev)?;
                }
            }
            for output in self.outputs(unit)?.iter() {
                if let Some(other_unit) = output_collisions.insert(output.path.clone(), unit) {
                    if unit.mode.is_doc() {
                        // See https://github.com/rust-lang/rust/issues/56169
                        // and https://github.com/rust-lang/rust/issues/61378
                        report_collision(unit, other_unit, &output.path, &rustdoc_suggestion)?;
                    } else {
                        report_collision(unit, other_unit, &output.path, &suggestion)?;
                    }
                }
                // Hardlinked copies of the output can collide too.
                if let Some(hardlink) = output.hardlink.as_ref() {
                    if let Some(other_unit) = output_collisions.insert(hardlink.clone(), unit) {
                        report_collision(unit, other_unit, hardlink, &suggestion)?;
                    }
                }
                // Files exported via `--artifact-dir` land in one flat
                // directory and get their own collision warning.
                if let Some(ref export_path) = output.export_path {
                    if let Some(other_unit) = output_collisions.insert(export_path.clone(), unit) {
                        self.bcx.gctx.shell().print_report(
                            &[Level::WARNING
                                .secondary_title(format!(
                                    "`--artifact-dir` filename collision at {}",
                                    export_path.display()
                                ))
                                .elements(
                                    [Level::NOTE.message(describe_collision(unit, other_unit))]
                                        .into_iter()
                                        .chain(suggestion.iter().cloned()),
                                )],
                            false,
                        )?;
                    }
                }
            }
        }
        Ok(())
    }
708
709    /// Records the list of units which are required to emit metadata.
710    ///
711    /// Units which depend only on the metadata of others requires the others to
712    /// actually produce metadata, so we'll record that here.
713    fn record_units_requiring_metadata(&mut self) {
714        for (key, deps) in self.bcx.unit_graph.iter() {
715            for dep in deps {
716                if self.only_requires_rmeta(key, &dep.unit) {
717                    self.rmeta_required.insert(dep.unit.clone());
718                }
719            }
720        }
721    }
722
723    /// Returns whether when `parent` depends on `dep` if it only requires the
724    /// metadata file from `dep`.
725    pub fn only_requires_rmeta(&self, parent: &Unit, dep: &Unit) -> bool {
726        // We're only a candidate for requiring an `rmeta` file if we
727        // ourselves are building an rlib,
728        !parent.requires_upstream_objects()
729            && parent.mode == CompileMode::Build
730            // Our dependency must also be built as an rlib, otherwise the
731            // object code must be useful in some fashion
732            && !dep.requires_upstream_objects()
733            && dep.mode == CompileMode::Build
734    }
735
    /// Returns whether when `unit` is built whether it should emit metadata as
    /// well because some compilations rely on that.
    ///
    /// The underlying set is populated by
    /// [`BuildRunner::record_units_requiring_metadata`].
    pub fn rmeta_required(&self, unit: &Unit) -> bool {
        self.rmeta_required.contains(unit)
    }
741
    /// Finds metadata for Doc/Docscrape units.
    ///
    /// rustdoc needs a -Cmetadata flag in order to recognize StableCrateIds that refer to
    /// items in the crate being documented. The -Cmetadata flag used by reverse-dependencies
    /// will be the metadata of the Cargo unit that generated the current library's rmeta file,
    /// which should be a Check unit.
    ///
    /// If the current crate has reverse-dependencies, such a Check unit should exist, and so
    /// we use that crate's metadata. If not, we use the crate's Doc unit so at least examples
    /// scraped from the current crate can be used when documenting the current crate.
    #[tracing::instrument(skip_all)]
    pub fn compute_metadata_for_doc_units(&mut self) {
        for unit in self.bcx.unit_graph.keys() {
            if !unit.mode.is_doc() && !unit.mode.is_doc_scrape() {
                continue;
            }

            // All units for the same package and target, excluding scrape units.
            let matching_units = self
                .bcx
                .unit_graph
                .keys()
                .filter(|other| {
                    unit.pkg == other.pkg
                        && unit.target == other.target
                        && !other.mode.is_doc_scrape()
                })
                .collect::<Vec<_>>();
            // Preference order: a Check unit, then a Doc unit, then the unit
            // itself as a last resort.
            let metadata_unit = matching_units
                .iter()
                .find(|other| other.mode.is_check())
                .or_else(|| matching_units.iter().find(|other| other.mode.is_doc()))
                .unwrap_or(&unit);
            self.metadata_for_doc_units
                .insert(unit.clone(), self.files().metadata(metadata_unit));
        }
    }
778}