cargo/core/compiler/build_runner/mod.rs
1//! [`BuildRunner`] is the mutable state used during the build process.
2
3use std::collections::{HashMap, HashSet};
4use std::path::{Path, PathBuf};
5use std::sync::{Arc, Mutex};
6
7use crate::core::PackageId;
8use crate::core::compiler::compilation::{self, UnitOutput};
9use crate::core::compiler::locking::LockManager;
10use crate::core::compiler::{self, Unit, UserIntent, artifact};
11use crate::util::cache_lock::CacheLockMode;
12use crate::util::errors::CargoResult;
13use annotate_snippets::{Level, Message};
14use anyhow::{Context as _, bail};
15use cargo_util::paths;
16use filetime::FileTime;
17use itertools::Itertools;
18use jobserver::Client;
19
20use super::RustdocFingerprint;
21use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts};
22use super::fingerprint::{Checksum, Fingerprint};
23use super::job_queue::JobQueue;
24use super::layout::Layout;
25use super::lto::Lto;
26use super::unit_graph::UnitDep;
27use super::{BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor};
28
29mod compilation_files;
30use self::compilation_files::CompilationFiles;
31pub use self::compilation_files::{Metadata, OutputFile, UnitHash};
32
/// Collection of all the stuff that is needed to perform a build.
///
/// Different from the [`BuildContext`], `BuildRunner` is a _mutable_ state used
/// throughout the entire build process. Everything is coordinated through this.
///
/// [`BuildContext`]: crate::core::compiler::BuildContext
pub struct BuildRunner<'a, 'gctx> {
    /// Mostly static information about the build task.
    pub bcx: &'a BuildContext<'a, 'gctx>,
    /// A large collection of information about the result of the entire compilation.
    pub compilation: Compilation<'gctx>,
    /// Output from build scripts, updated after each build script runs.
    pub build_script_outputs: Arc<Mutex<BuildScriptOutputs>>,
    /// Dependencies (like rerun-if-changed) declared by a build script.
    /// This is *only* populated from the output from previous runs.
    /// If the build script hasn't ever been run, then it must be run.
    pub build_explicit_deps: HashMap<Unit, BuildDeps>,
    /// Fingerprints used to detect if a unit is out-of-date.
    pub fingerprints: HashMap<Unit, Arc<Fingerprint>>,
    /// Cache of file mtimes to reduce filesystem hits.
    pub mtime_cache: HashMap<PathBuf, FileTime>,
    /// Cache of file checksums to reduce filesystem reads.
    pub checksum_cache: HashMap<PathBuf, Checksum>,
    /// A set used to track which units have been compiled.
    /// A unit may appear in the job graph multiple times as a dependency of
    /// multiple packages, but it only needs to run once.
    pub compiled: HashSet<Unit>,
    /// Linking information for each `Unit`.
    /// See `build_map` for details.
    pub build_scripts: HashMap<Unit, Arc<BuildScripts>>,
    /// Job server client to manage concurrency with other processes.
    pub jobserver: Client,
    /// "Primary" packages are the ones the user selected on the command-line
    /// with `-p` flags. If no flags are specified, then it is the defaults
    /// based on the current directory and the default workspace members.
    primary_packages: HashSet<PackageId>,
    /// An abstraction of the files and directories that will be generated by
    /// the compilation. This is `None` until after
    /// [`BuildRunner::prepare_units`] has run.
    files: Option<CompilationFiles<'a, 'gctx>>,

    /// A set of units which are compiling rlibs and are expected to produce
    /// metadata files in addition to the rlib itself.
    rmeta_required: HashSet<Unit>,

    /// Map of the LTO-status of each unit. This indicates what sort of
    /// compilation is happening (only object, only bitcode, both, etc), and is
    /// precalculated early on.
    pub lto: HashMap<Unit, Lto>,

    /// Map of Doc/Docscrape units to metadata for their -Cmetadata flag.
    /// See [`BuildRunner::compute_metadata_for_doc_units`] for more details.
    pub metadata_for_doc_units: HashMap<Unit, Metadata>,

    /// Set of metadata of Docscrape units that fail before completion, e.g.
    /// because the target has a type error. This is in an Arc<Mutex<..>>
    /// because it is continuously updated as the job progresses.
    pub failed_scrape_units: Arc<Mutex<HashSet<UnitHash>>>,

    /// Manages locks for build units when fine grain locking is enabled.
    pub lock_manager: Arc<LockManager>,
}
95
96impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
97    pub fn new(bcx: &'a BuildContext<'a, 'gctx>) -> CargoResult<Self> {
98        // Load up the jobserver that we'll use to manage our parallelism. This
99        // is the same as the GNU make implementation of a jobserver, and
100        // intentionally so! It's hoped that we can interact with GNU make and
101        // all share the same jobserver.
102        //
103        // Note that if we don't have a jobserver in our environment then we
104        // create our own, and we create it with `n` tokens, but immediately
105        // acquire one, because one token is ourself, a running process.
106        let jobserver = match bcx.gctx.jobserver_from_env() {
107            Some(c) => c.clone(),
108            None => {
109                let client =
110                    Client::new(bcx.jobs() as usize).context("failed to create jobserver")?;
111                client.acquire_raw()?;
112                client
113            }
114        };
115
116        Ok(Self {
117            bcx,
118            compilation: Compilation::new(bcx)?,
119            build_script_outputs: Arc::new(Mutex::new(BuildScriptOutputs::default())),
120            fingerprints: HashMap::new(),
121            mtime_cache: HashMap::new(),
122            checksum_cache: HashMap::new(),
123            compiled: HashSet::new(),
124            build_scripts: HashMap::new(),
125            build_explicit_deps: HashMap::new(),
126            jobserver,
127            primary_packages: HashSet::new(),
128            files: None,
129            rmeta_required: HashSet::new(),
130            lto: HashMap::new(),
131            metadata_for_doc_units: HashMap::new(),
132            failed_scrape_units: Arc::new(Mutex::new(HashSet::new())),
133            lock_manager: Arc::new(LockManager::new()),
134        })
135    }
136
    /// Dry-run the compilation without actually running it.
    ///
    /// This is expected to collect information like the location of output artifacts.
    /// Please keep in sync with non-compilation part in [`BuildRunner::compile`].
    pub fn dry_run(mut self) -> CargoResult<Compilation<'gctx>> {
        // Hold the package cache lock (shared) for the duration, mirroring
        // `compile`.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        // Same pre-compilation steps as `compile`, minus the job queue and
        // build-script map.
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        self.check_collisions()?;

        // Record where tests/binaries/cdylibs *would* be placed.
        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;
        }

        Ok(self.compilation)
    }
157
    /// Starts compilation, waits for it to finish, and returns information
    /// about the result of compilation.
    ///
    /// See [`ops::cargo_compile`] for a higher-level view of the compile process.
    ///
    /// [`ops::cargo_compile`]: crate::ops::cargo_compile
    #[tracing::instrument(skip_all)]
    pub fn compile(mut self, exec: &Arc<dyn Executor>) -> CargoResult<Compilation<'gctx>> {
        // A shared lock is held during the duration of the build since rustc
        // needs to read from the `src` cache, and we don't want other
        // commands modifying the `src` cache while it is running.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        let mut queue = JobQueue::new(self.bcx);
        // Pre-compilation bookkeeping: LTO plan, output layouts, build-script
        // dependency map, collision checks, and doc metadata. Keep in sync
        // with [`BuildRunner::dry_run`].
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        custom_build::build_map(&mut self)?;
        self.check_collisions()?;
        self.compute_metadata_for_doc_units();

        // We need to make sure that if there were any previous docs already compiled,
        // they were compiled with the same Rustc version that we're currently using.
        // See the function doc comment for more.
        if self.bcx.build_config.intent.is_doc() {
            RustdocFingerprint::check_rustdoc_fingerprint(&self)?
        }

        // Populate the job queue starting from the root units.
        for unit in &self.bcx.roots {
            let force_rebuild = self.bcx.build_config.force_rebuild;
            super::compile(&mut self, &mut queue, unit, exec, force_rebuild)?;
        }

        // Now that we've got the full job queue and we've done all our
        // fingerprint analysis to determine what to run, bust all the memoized
        // fingerprint hashes to ensure that during the build they all get the
        // most up-to-date values. In theory we only need to bust hashes that
        // transitively depend on a dirty build script, but it shouldn't matter
        // that much for performance anyway.
        for fingerprint in self.fingerprints.values() {
            fingerprint.clear_memoized();
        }

        // Now that we've figured out everything that we're going to do, do it!
        queue.execute(&mut self)?;

        // Add `OUT_DIR` to env vars if unit has a build script.
        let units_with_build_script = &self
            .bcx
            .roots
            .iter()
            .filter(|unit| self.build_scripts.contains_key(unit))
            .dedup_by(|x, y| x.pkg.package_id() == y.pkg.package_id())
            .collect::<Vec<_>>();
        for unit in units_with_build_script {
            for dep in &self.bcx.unit_graph[unit] {
                if dep.unit.mode.is_run_custom_build() {
                    // The build-script out dir location depends on whether the
                    // new build-dir layout is active.
                    let out_dir = if self.bcx.gctx.cli_unstable().build_dir_new_layout {
                        self.files().out_dir_new_layout(&dep.unit)
                    } else {
                        self.files().build_script_out_dir(&dep.unit)
                    };
                    let script_meta = self.get_run_build_script_metadata(&dep.unit);
                    self.compilation
                        .extra_env
                        .entry(script_meta)
                        .or_insert_with(Vec::new)
                        .push(("OUT_DIR".to_string(), out_dir.display().to_string()));
                }
            }
        }

        self.collect_doc_merge_info()?;

        // Collect the result of the build into `self.compilation`.
        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;

            // Collect information for `rustdoc --test`.
            if unit.mode.is_doc_test() {
                let mut unstable_opts = false;
                // Assemble the rustdoc argument list: externs, search paths,
                // LTO, features, and check-cfg flags.
                let mut args = compiler::extern_args(&self, unit, &mut unstable_opts)?;
                args.extend(compiler::lib_search_paths(&self, unit)?);
                args.extend(compiler::lto_args(&self, unit));
                args.extend(compiler::features_args(unit));
                args.extend(compiler::check_cfg_args(unit));

                // Forward cfgs, check-cfgs, and applicable linker args that
                // this unit's build scripts emitted.
                let script_metas = self.find_build_script_metadatas(unit);
                if let Some(meta_vec) = script_metas.clone() {
                    for meta in meta_vec {
                        if let Some(output) = self.build_script_outputs.lock().unwrap().get(meta) {
                            for cfg in &output.cfgs {
                                args.push("--cfg".into());
                                args.push(cfg.into());
                            }

                            for check_cfg in &output.check_cfgs {
                                args.push("--check-cfg".into());
                                args.push(check_cfg.into());
                            }

                            for (lt, arg) in &output.linker_args {
                                if lt.applies_to(&unit.target, unit.mode) {
                                    args.push("-C".into());
                                    args.push(format!("link-arg={}", arg).into());
                                }
                            }
                        }
                    }
                }
                args.extend(unit.rustdocflags.iter().map(Into::into));

                // Match the user's requested diagnostic format.
                use super::MessageFormat;
                let format = match self.bcx.build_config.message_format {
                    MessageFormat::Short => "short",
                    MessageFormat::Human => "human",
                    MessageFormat::Json { .. } => "json",
                };
                args.push("--error-format".into());
                args.push(format.into());

                self.compilation.to_doc_test.push(compilation::Doctest {
                    unit: unit.clone(),
                    args,
                    unstable_opts,
                    linker: self
                        .compilation
                        .target_linker(unit.kind)
                        .map(|p| p.to_path_buf()),
                    script_metas,
                    env: artifact::get_env(&self, unit, self.unit_deps(unit))?,
                });
            }

            super::output_depinfo(&mut self, unit)?;
        }

        // Propagate env vars and native library search paths declared by
        // build scripts into the final `Compilation`.
        for (script_meta, output) in self.build_script_outputs.lock().unwrap().iter() {
            self.compilation
                .extra_env
                .entry(*script_meta)
                .or_insert_with(Vec::new)
                .extend(output.env.iter().cloned());

            for dir in output.library_paths.iter() {
                self.compilation
                    .native_dirs
                    .insert(dir.clone().into_path_buf());
            }
        }
        Ok(self.compilation)
    }
312
313    fn collect_tests_and_executables(&mut self, unit: &Unit) -> CargoResult<()> {
314        for output in self.outputs(unit)?.iter() {
315            if matches!(
316                output.flavor,
317                FileFlavor::DebugInfo | FileFlavor::Auxiliary | FileFlavor::Sbom
318            ) {
319                continue;
320            }
321
322            let bindst = output.bin_dst();
323
324            if unit.mode == CompileMode::Test {
325                self.compilation
326                    .tests
327                    .push(self.unit_output(unit, &output.path)?);
328            } else if unit.target.is_executable() {
329                self.compilation
330                    .binaries
331                    .push(self.unit_output(unit, bindst)?);
332            } else if unit.target.is_cdylib()
333                && !self.compilation.cdylibs.iter().any(|uo| uo.unit == *unit)
334            {
335                self.compilation
336                    .cdylibs
337                    .push(self.unit_output(unit, bindst)?);
338            }
339        }
340        Ok(())
341    }
342
343    fn collect_doc_merge_info(&mut self) -> CargoResult<()> {
344        if !self.bcx.gctx.cli_unstable().rustdoc_mergeable_info {
345            return Ok(());
346        }
347
348        if !self.bcx.build_config.intent.is_doc() {
349            return Ok(());
350        }
351
352        if self.bcx.build_config.intent.wants_doc_json_output() {
353            // rustdoc JSON output doesn't support merge (yet?)
354            return Ok(());
355        }
356
357        let mut doc_parts_map: HashMap<_, Vec<_>> = HashMap::new();
358
359        let unit_iter = if self.bcx.build_config.intent.wants_deps_docs() {
360            itertools::Either::Left(self.bcx.unit_graph.keys())
361        } else {
362            itertools::Either::Right(self.bcx.roots.iter())
363        };
364
365        for unit in unit_iter {
366            if !unit.mode.is_doc() {
367                continue;
368            }
369            // Assumption: one `rustdoc` call generates only one cross-crate info JSON.
370            let outputs = self.outputs(unit)?;
371
372            let Some(doc_parts) = outputs
373                .iter()
374                .find(|o| matches!(o.flavor, FileFlavor::DocParts))
375            else {
376                continue;
377            };
378
379            doc_parts_map
380                .entry(unit.kind)
381                .or_default()
382                .push(doc_parts.path.to_owned());
383        }
384
385        self.compilation.rustdoc_fingerprints = Some(
386            doc_parts_map
387                .into_iter()
388                .map(|(kind, doc_parts)| (kind, RustdocFingerprint::new(self, kind, doc_parts)))
389                .collect(),
390        );
391
392        Ok(())
393    }
394
395    /// Returns the executable for the specified unit (if any).
396    pub fn get_executable(&mut self, unit: &Unit) -> CargoResult<Option<PathBuf>> {
397        let is_binary = unit.target.is_executable();
398        let is_test = unit.mode.is_any_test();
399        if !unit.mode.generates_executable() || !(is_binary || is_test) {
400            return Ok(None);
401        }
402        Ok(self
403            .outputs(unit)?
404            .iter()
405            .find(|o| o.flavor == FileFlavor::Normal)
406            .map(|output| output.bin_dst().clone()))
407    }
408
    /// Creates the output [`Layout`]s (host plus one per cross-compilation
    /// target), records primary packages and root crate names, and builds the
    /// [`CompilationFiles`] map. Must run before [`BuildRunner::prepare`].
    #[tracing::instrument(skip_all)]
    pub fn prepare_units(&mut self) -> CargoResult<()> {
        // The profile's destination directory name.
        let dest = self.bcx.profiles.get_dir_name();
        // We try to only lock the artifact-dir if we need to.
        // For example, `cargo check` does not write any files to the artifact-dir so we don't need
        // to lock it.
        let must_take_artifact_dir_lock = match self.bcx.build_config.intent {
            UserIntent::Check { .. } => {
                // Generally cargo check does not need to take the artifact-dir lock but there is
                // one exception: If check has `--timings` we still need to lock artifact-dir since
                // we will output the report files.
                self.bcx.build_config.timing_report
            }
            UserIntent::Build
            | UserIntent::Test
            | UserIntent::Doc { .. }
            | UserIntent::Doctest
            | UserIntent::Bench => true,
        };
        let host_layout =
            Layout::new(self.bcx.ws, None, &dest, must_take_artifact_dir_lock, false)?;
        // One additional layout per cross-compilation target kind.
        let mut targets = HashMap::new();
        for kind in self.bcx.all_kinds.iter() {
            if let CompileKind::Target(target) = *kind {
                let layout = Layout::new(
                    self.bcx.ws,
                    Some(target),
                    &dest,
                    must_take_artifact_dir_lock,
                    false,
                )?;
                targets.insert(target, layout);
            }
        }
        // Root units define the "primary" packages and root crate names.
        self.primary_packages
            .extend(self.bcx.roots.iter().map(|u| u.pkg.package_id()));
        self.compilation
            .root_crate_names
            .extend(self.bcx.roots.iter().map(|u| u.target.crate_name()));

        self.record_units_requiring_metadata();

        let files = CompilationFiles::new(self, host_layout, targets);
        self.files = Some(files);
        Ok(())
    }
455
    /// Prepare this context, ensuring that all filesystem directories are in
    /// place.
    #[tracing::instrument(skip_all)]
    pub fn prepare(&mut self) -> CargoResult<()> {
        // Create the host layout's directories...
        self.files
            .as_mut()
            .unwrap()
            .host
            .prepare()
            .context("couldn't prepare build directories")?;
        // ...and those of every cross-compilation target layout.
        for target in self.files.as_mut().unwrap().target.values_mut() {
            target
                .prepare()
                .context("couldn't prepare build directories")?;
        }

        // Record the artifact and deps output directories per compile kind.
        let files = self.files.as_ref().unwrap();
        for &kind in self.bcx.all_kinds.iter() {
            let layout = files.layout(kind);
            if let Some(artifact_dir) = layout.artifact_dir() {
                self.compilation
                    .root_output
                    .insert(kind, artifact_dir.dest().to_path_buf());
            }
            if self.bcx.gctx.cli_unstable().build_dir_new_layout {
                // New layout: create every unit's deps dir eagerly.
                // NOTE(review): `insert(kind, dep_dir)` overwrites on every
                // iteration, so `deps_output[kind]` ends up holding only the
                // *last* unit's deps dir — confirm this is intended.
                for (unit, _) in self.bcx.unit_graph.iter() {
                    let dep_dir = self.files().deps_dir(unit);
                    paths::create_dir_all(&dep_dir)?;
                    self.compilation.deps_output.insert(kind, dep_dir);
                }
            } else {
                self.compilation
                    .deps_output
                    .insert(kind, layout.build_dir().legacy_deps().to_path_buf());
            }
        }
        Ok(())
    }
494
495    pub fn files(&self) -> &CompilationFiles<'a, 'gctx> {
496        self.files.as_ref().unwrap()
497    }
498
499    /// Returns the filenames that the given unit will generate.
500    pub fn outputs(&self, unit: &Unit) -> CargoResult<Arc<Vec<OutputFile>>> {
501        self.files.as_ref().unwrap().outputs(unit, self.bcx)
502    }
503
504    /// Direct dependencies for the given unit.
505    pub fn unit_deps(&self, unit: &Unit) -> &[UnitDep] {
506        &self.bcx.unit_graph[unit]
507    }
508
509    /// Returns the `RunCustomBuild` Units associated with the given Unit.
510    ///
511    /// If the package does not have a build script, this returns None.
512    pub fn find_build_script_units(&self, unit: &Unit) -> Option<Vec<Unit>> {
513        if unit.mode.is_run_custom_build() {
514            return Some(vec![unit.clone()]);
515        }
516
517        let build_script_units: Vec<Unit> = self.bcx.unit_graph[unit]
518            .iter()
519            .filter(|unit_dep| {
520                unit_dep.unit.mode.is_run_custom_build()
521                    && unit_dep.unit.pkg.package_id() == unit.pkg.package_id()
522            })
523            .map(|unit_dep| unit_dep.unit.clone())
524            .collect();
525        if build_script_units.is_empty() {
526            None
527        } else {
528            Some(build_script_units)
529        }
530    }
531
532    /// Returns the metadata hash for the `RunCustomBuild` Unit associated with
533    /// the given unit.
534    ///
535    /// If the package does not have a build script, this returns None.
536    pub fn find_build_script_metadatas(&self, unit: &Unit) -> Option<Vec<UnitHash>> {
537        self.find_build_script_units(unit).map(|units| {
538            units
539                .iter()
540                .map(|u| self.get_run_build_script_metadata(u))
541                .collect()
542        })
543    }
544
545    /// Returns the metadata hash for a `RunCustomBuild` unit.
546    pub fn get_run_build_script_metadata(&self, unit: &Unit) -> UnitHash {
547        assert!(unit.mode.is_run_custom_build());
548        self.files().metadata(unit).unit_id()
549    }
550
551    /// Returns the list of SBOM output file paths for a given [`Unit`].
552    pub fn sbom_output_files(&self, unit: &Unit) -> CargoResult<Vec<PathBuf>> {
553        Ok(self
554            .outputs(unit)?
555            .iter()
556            .filter(|o| o.flavor == FileFlavor::Sbom)
557            .map(|o| o.path.clone())
558            .collect())
559    }
560
561    pub fn is_primary_package(&self, unit: &Unit) -> bool {
562        self.primary_packages.contains(&unit.pkg.package_id())
563    }
564
565    /// Returns a [`UnitOutput`] which represents some information about the
566    /// output of a unit.
567    pub fn unit_output(&self, unit: &Unit, path: &Path) -> CargoResult<UnitOutput> {
568        let script_metas = self.find_build_script_metadatas(unit);
569        let env = artifact::get_env(&self, unit, self.unit_deps(unit))?;
570        Ok(UnitOutput {
571            unit: unit.clone(),
572            path: path.to_path_buf(),
573            script_metas,
574            env,
575        })
576    }
577
    /// Check if any output file name collision happens.
    /// See <https://github.com/rust-lang/cargo/issues/6313> for more.
    #[tracing::instrument(skip_all)]
    fn check_collisions(&self) -> CargoResult<()> {
        // Maps each claimed output path to the first unit that produced it;
        // a second insert for the same path is a collision.
        let mut output_collisions = HashMap::new();
        let describe_collision = |unit: &Unit, other_unit: &Unit| -> String {
            format!(
                "the {} target `{}` in package `{}` has the same output filename as the {} target `{}` in package `{}`",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg.package_id(),
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg.package_id(),
            )
        };
        let suggestion = [
            Level::NOTE.message("this may become a hard error in the future; see <https://github.com/rust-lang/cargo/issues/6313>"),
            Level::HELP.message("consider changing their names to be unique or compiling them separately")
        ];
        let rustdoc_suggestion = [
            Level::NOTE.message("this is a known bug where multiple crates with the same name use the same path; see <https://github.com/rust-lang/cargo/issues/6313>")
        ];
        // Emits a collision warning. Same-name collisions get the supplied
        // suggestion text; different-name collisions are unexpected (likely a
        // Cargo bug) and include extra details for filing a bug report.
        let report_collision = |unit: &Unit,
                                other_unit: &Unit,
                                path: &PathBuf,
                                messages: &[Message<'_>]|
         -> CargoResult<()> {
            if unit.target.name() == other_unit.target.name() {
                self.bcx.gctx.shell().print_report(
                    &[Level::WARNING
                        .secondary_title(format!("output filename collision at {}", path.display()))
                        .elements(
                            [Level::NOTE.message(describe_collision(unit, other_unit))]
                                .into_iter()
                                .chain(messages.iter().cloned()),
                        )],
                    false,
                )
            } else {
                self.bcx.gctx.shell().print_report(
                    &[Level::WARNING
                        .secondary_title(format!("output filename collision at {}", path.display()))
                        .elements([
                            Level::NOTE.message(describe_collision(unit, other_unit)),
                            Level::NOTE.message("if this looks unexpected, it may be a bug in Cargo. Please file a bug \
                                report at https://github.com/rust-lang/cargo/issues/ with as much information as you \
                                can provide."),
                            Level::NOTE.message(format!("cargo {} running on `{}` target `{}`",
                                crate::version(), self.bcx.host_triple(), self.bcx.target_data.short_name(&unit.kind))),
                            Level::NOTE.message(format!("first unit: {unit:?}")),
                            Level::NOTE.message(format!("second unit: {other_unit:?}")),
                        ])],
                    false,
                )
            }
        };

        // Unlike the warnings above, duplicate doc outputs are a hard error
        // (see the comment on `doc_libs`/`doc_bins` below).
        fn doc_collision_error(unit: &Unit, other_unit: &Unit) -> CargoResult<()> {
            bail!(
                "document output filename collision\n\
                 The {} `{}` in package `{}` has the same name as the {} `{}` in package `{}`.\n\
                 Only one may be documented at once since they output to the same path.\n\
                 Consider documenting only one, renaming one, \
                 or marking one with `doc = false` in Cargo.toml.",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg,
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg,
            );
        }

        let mut keys = self
            .bcx
            .unit_graph
            .keys()
            .filter(|unit| !unit.mode.is_run_custom_build())
            .collect::<Vec<_>>();
        // Sort for consistent error messages.
        keys.sort_unstable();
        // These are kept separate to retain compatibility with older
        // versions, which generated an error when there was a duplicate lib
        // or bin (but the old code did not check bin<->lib collisions). To
        // retain backwards compatibility, this only generates an error for
        // duplicate libs or duplicate bins (but not both). Ideally this
        // shouldn't be here, but since there isn't a complete workaround,
        // yet, this retains the old behavior.
        let mut doc_libs = HashMap::new();
        let mut doc_bins = HashMap::new();
        for unit in keys {
            if unit.mode.is_doc() && self.is_primary_package(unit) {
                // These situations have been an error since before 1.0, so it
                // is not a warning like the other situations.
                if unit.target.is_lib() {
                    if let Some(prev) = doc_libs.insert((unit.target.crate_name(), unit.kind), unit)
                    {
                        doc_collision_error(unit, prev)?;
                    }
                } else if let Some(prev) =
                    doc_bins.insert((unit.target.crate_name(), unit.kind), unit)
                {
                    doc_collision_error(unit, prev)?;
                }
            }
            for output in self.outputs(unit)?.iter() {
                if let Some(other_unit) = output_collisions.insert(output.path.clone(), unit) {
                    if unit.mode.is_doc() {
                        // See https://github.com/rust-lang/rust/issues/56169
                        // and https://github.com/rust-lang/rust/issues/61378
                        report_collision(unit, other_unit, &output.path, &rustdoc_suggestion)?;
                    } else {
                        report_collision(unit, other_unit, &output.path, &suggestion)?;
                    }
                }
                // Hardlinked copies of the output can collide too.
                if let Some(hardlink) = output.hardlink.as_ref() {
                    if let Some(other_unit) = output_collisions.insert(hardlink.clone(), unit) {
                        report_collision(unit, other_unit, hardlink, &suggestion)?;
                    }
                }
                // As can files exported via `--artifact-dir`.
                if let Some(ref export_path) = output.export_path {
                    if let Some(other_unit) = output_collisions.insert(export_path.clone(), unit) {
                        self.bcx.gctx.shell().print_report(
                            &[Level::WARNING
                                .secondary_title(format!(
                                    "`--artifact-dir` filename collision at {}",
                                    export_path.display()
                                ))
                                .elements(
                                    [Level::NOTE.message(describe_collision(unit, other_unit))]
                                        .into_iter()
                                        .chain(suggestion.iter().cloned()),
                                )],
                            false,
                        )?;
                    }
                }
            }
        }
        Ok(())
    }
720
721    /// Records the list of units which are required to emit metadata.
722    ///
723    /// Units which depend only on the metadata of others requires the others to
724    /// actually produce metadata, so we'll record that here.
725    fn record_units_requiring_metadata(&mut self) {
726        for (key, deps) in self.bcx.unit_graph.iter() {
727            for dep in deps {
728                if self.only_requires_rmeta(key, &dep.unit) {
729                    self.rmeta_required.insert(dep.unit.clone());
730                }
731            }
732        }
733    }
734
735    /// Returns whether when `parent` depends on `dep` if it only requires the
736    /// metadata file from `dep`.
737    pub fn only_requires_rmeta(&self, parent: &Unit, dep: &Unit) -> bool {
738        // We're only a candidate for requiring an `rmeta` file if we
739        // ourselves are building an rlib,
740        !parent.requires_upstream_objects()
741            && parent.mode == CompileMode::Build
742            // Our dependency must also be built as an rlib, otherwise the
743            // object code must be useful in some fashion
744            && !dep.requires_upstream_objects()
745            && dep.mode == CompileMode::Build
746    }
747
    /// Returns whether, when `unit` is built, it should also emit metadata,
    /// because some other compilations rely on that metadata file.
    ///
    /// See [`BuildRunner::record_units_requiring_metadata`] for how this set
    /// is populated.
    pub fn rmeta_required(&self, unit: &Unit) -> bool {
        self.rmeta_required.contains(unit)
    }
753
    /// Finds metadata for Doc/Docscrape units.
    ///
    /// rustdoc needs a -Cmetadata flag in order to recognize StableCrateIds that refer to
    /// items in the crate being documented. The -Cmetadata flag used by reverse-dependencies
    /// will be the metadata of the Cargo unit that generated the current library's rmeta file,
    /// which should be a Check unit.
    ///
    /// If the current crate has reverse-dependencies, such a Check unit should exist, and so
    /// we use that crate's metadata. If not, we use the crate's Doc unit so at least examples
    /// scraped from the current crate can be used when documenting the current crate.
    #[tracing::instrument(skip_all)]
    pub fn compute_metadata_for_doc_units(&mut self) {
        for unit in self.bcx.unit_graph.keys() {
            if !unit.mode.is_doc() && !unit.mode.is_doc_scrape() {
                continue;
            }

            // All units for the same package/target, excluding Docscrape
            // units themselves.
            let matching_units = self
                .bcx
                .unit_graph
                .keys()
                .filter(|other| {
                    unit.pkg == other.pkg
                        && unit.target == other.target
                        && !other.mode.is_doc_scrape()
                })
                .collect::<Vec<_>>();
            // Prefer a Check unit's metadata, then a Doc unit's, finally
            // falling back to this unit itself (see the doc comment above).
            let metadata_unit = matching_units
                .iter()
                .find(|other| other.mode.is_check())
                .or_else(|| matching_units.iter().find(|other| other.mode.is_doc()))
                .unwrap_or(&unit);
            self.metadata_for_doc_units
                .insert(unit.clone(), self.files().metadata(metadata_unit));
        }
    }
790}