1use std::collections::{HashMap, HashSet};
4use std::path::{Path, PathBuf};
5use std::sync::{Arc, Mutex};
6
7use crate::core::PackageId;
8use crate::core::compiler::compilation::{self, UnitOutput};
9use crate::core::compiler::{self, Unit, UserIntent, artifact};
10use crate::util::cache_lock::CacheLockMode;
11use crate::util::errors::CargoResult;
12use annotate_snippets::{Level, Message};
13use anyhow::{Context as _, bail};
14use cargo_util::paths;
15use filetime::FileTime;
16use itertools::Itertools;
17use jobserver::Client;
18
19use super::RustdocFingerprint;
20use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts};
21use super::fingerprint::{Checksum, Fingerprint};
22use super::job_queue::JobQueue;
23use super::layout::Layout;
24use super::lto::Lto;
25use super::unit_graph::UnitDep;
26use super::{BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor};
27
28mod compilation_files;
29use self::compilation_files::CompilationFiles;
30pub use self::compilation_files::{Metadata, OutputFile, UnitHash};
31
/// Central state for a single build session: drives compilation of all
/// [`Unit`]s and accumulates their results into a [`Compilation`].
pub struct BuildRunner<'a, 'gctx> {
    /// Immutable build configuration and the unit graph for this session.
    pub bcx: &'a BuildContext<'a, 'gctx>,
    /// The in-progress build results returned to the caller when done.
    pub compilation: Compilation<'gctx>,
    /// Outputs produced by build-script runs, shared with job threads.
    pub build_script_outputs: Arc<Mutex<BuildScriptOutputs>>,
    /// Explicit dependency information declared by build scripts, per unit.
    pub build_explicit_deps: HashMap<Unit, BuildDeps>,
    /// Fingerprint computed for each unit, used to decide what to rebuild.
    pub fingerprints: HashMap<Unit, Arc<Fingerprint>>,
    /// Cache of file mtimes, to avoid repeated filesystem queries.
    pub mtime_cache: HashMap<PathBuf, FileTime>,
    /// Cache of file checksums used during fingerprinting.
    pub checksum_cache: HashMap<PathBuf, Checksum>,
    /// Units whose compilation has already been handled.
    pub compiled: HashSet<Unit>,
    /// Build-script bookkeeping for each unit that has one.
    pub build_scripts: HashMap<Unit, Arc<BuildScripts>>,
    /// Jobserver client used to limit build parallelism.
    pub jobserver: Client,
    /// Package ids of the user-requested (root) packages; see
    /// [`BuildRunner::is_primary_package`].
    primary_packages: HashSet<PackageId>,
    /// Output-path bookkeeping; `None` until [`BuildRunner::prepare_units`]
    /// has run (accessors `unwrap` on this).
    files: Option<CompilationFiles<'a, 'gctx>>,
    /// Units for which dependants only require the metadata (`.rmeta`) file;
    /// populated by `record_units_requiring_metadata` via
    /// [`BuildRunner::only_requires_rmeta`].
    rmeta_required: HashSet<Unit>,
    /// The LTO mode selected for each unit (filled in by `lto::generate`).
    pub lto: HashMap<Unit, Lto>,
    /// Metadata chosen for each doc/doc-scrape unit; see
    /// [`BuildRunner::compute_metadata_for_doc_units`].
    pub metadata_for_doc_units: HashMap<Unit, Metadata>,
    /// Hashes of units whose doc scraping failed, shared with job threads.
    // NOTE(review): semantics inferred from the field name — confirm against
    // the code that populates this set.
    pub failed_scrape_units: Arc<Mutex<HashSet<UnitHash>>>,
}
91
92impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
93 pub fn new(bcx: &'a BuildContext<'a, 'gctx>) -> CargoResult<Self> {
94 let jobserver = match bcx.gctx.jobserver_from_env() {
103 Some(c) => c.clone(),
104 None => {
105 let client =
106 Client::new(bcx.jobs() as usize).context("failed to create jobserver")?;
107 client.acquire_raw()?;
108 client
109 }
110 };
111
112 Ok(Self {
113 bcx,
114 compilation: Compilation::new(bcx)?,
115 build_script_outputs: Arc::new(Mutex::new(BuildScriptOutputs::default())),
116 fingerprints: HashMap::new(),
117 mtime_cache: HashMap::new(),
118 checksum_cache: HashMap::new(),
119 compiled: HashSet::new(),
120 build_scripts: HashMap::new(),
121 build_explicit_deps: HashMap::new(),
122 jobserver,
123 primary_packages: HashSet::new(),
124 files: None,
125 rmeta_required: HashSet::new(),
126 lto: HashMap::new(),
127 metadata_for_doc_units: HashMap::new(),
128 failed_scrape_units: Arc::new(Mutex::new(HashSet::new())),
129 })
130 }
131
    /// Performs all setup steps of a build without invoking the compiler,
    /// then returns the resulting [`Compilation`].
    ///
    /// Mirrors the preparation phase of [`BuildRunner::compile`]: acquires
    /// the package cache lock, computes LTO settings, prepares unit layouts,
    /// checks for output collisions, and collects the test/executable output
    /// paths for the root units.
    pub fn dry_run(mut self) -> CargoResult<Compilation<'gctx>> {
        // Hold a shared package-cache lock for the duration of the dry run.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        self.check_collisions()?;

        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;
        }

        Ok(self.compilation)
    }
152
    /// Starts compilation, waits for it to finish, and returns the finished
    /// [`Compilation`].
    ///
    /// Drives the full pipeline: unit/layout preparation, build-script map
    /// construction, collision checks, scheduling every root unit onto the
    /// [`JobQueue`], executing the queue, and finally collecting outputs,
    /// doctest invocations, and build-script environment into the result.
    #[tracing::instrument(skip_all)]
    pub fn compile(mut self, exec: &Arc<dyn Executor>) -> CargoResult<Compilation<'gctx>> {
        // Hold a shared package-cache lock while building.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        let mut queue = JobQueue::new(self.bcx);
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        custom_build::build_map(&mut self)?;
        self.check_collisions()?;
        self.compute_metadata_for_doc_units();

        // For doc builds, validate the rustdoc fingerprint up front (see
        // `RustdocFingerprint`).
        if self.bcx.build_config.intent.is_doc() {
            RustdocFingerprint::check_rustdoc_fingerprint(&self)?
        }

        // Schedule every root unit (and, transitively, its dependencies).
        for unit in &self.bcx.roots {
            let force_rebuild = self.bcx.build_config.force_rebuild;
            super::compile(&mut self, &mut queue, unit, exec, force_rebuild)?;
        }

        // Drop memoized fingerprint values before execution.
        // NOTE(review): presumably so they are recomputed after build
        // scripts run during execution — confirm.
        for fingerprint in self.fingerprints.values() {
            fingerprint.clear_memoized();
        }

        // Everything is scheduled; run the actual jobs.
        queue.execute(&mut self)?;

        // Expose each root's build-script OUT_DIR via `extra_env`.
        // NOTE(review): `dedup_by` only collapses *adjacent* roots with the
        // same package id — this assumes roots are grouped by package.
        let units_with_build_script = &self
            .bcx
            .roots
            .iter()
            .filter(|unit| self.build_scripts.contains_key(unit))
            .dedup_by(|x, y| x.pkg.package_id() == y.pkg.package_id())
            .collect::<Vec<_>>();
        for unit in units_with_build_script {
            for dep in &self.bcx.unit_graph[unit] {
                if dep.unit.mode.is_run_custom_build() {
                    let out_dir = self
                        .files()
                        .build_script_out_dir(&dep.unit)
                        .display()
                        .to_string();
                    let script_meta = self.get_run_build_script_metadata(&dep.unit);
                    self.compilation
                        .extra_env
                        .entry(script_meta)
                        .or_insert_with(Vec::new)
                        .push(("OUT_DIR".to_string(), out_dir));
                }
            }
        }

        self.collect_doc_merge_info()?;

        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;

            // Doctest units record the full rustdoc argument list so the
            // doctest runner can replay it later.
            if unit.mode.is_doc_test() {
                let mut unstable_opts = false;
                let mut args = compiler::extern_args(&self, unit, &mut unstable_opts)?;
                args.extend(compiler::lto_args(&self, unit));
                args.extend(compiler::features_args(unit));
                args.extend(compiler::check_cfg_args(unit));

                // Forward build-script results: `--cfg`s, `--check-cfg`s,
                // and linker args that apply to this target/mode.
                let script_metas = self.find_build_script_metadatas(unit);
                if let Some(meta_vec) = script_metas.clone() {
                    for meta in meta_vec {
                        if let Some(output) = self.build_script_outputs.lock().unwrap().get(meta) {
                            for cfg in &output.cfgs {
                                args.push("--cfg".into());
                                args.push(cfg.into());
                            }

                            for check_cfg in &output.check_cfgs {
                                args.push("--check-cfg".into());
                                args.push(check_cfg.into());
                            }

                            for (lt, arg) in &output.linker_args {
                                if lt.applies_to(&unit.target, unit.mode) {
                                    args.push("-C".into());
                                    args.push(format!("link-arg={}", arg).into());
                                }
                            }
                        }
                    }
                }
                args.extend(unit.rustdocflags.iter().map(Into::into));

                // Match rustdoc's error format to the configured message
                // format.
                use super::MessageFormat;
                let format = match self.bcx.build_config.message_format {
                    MessageFormat::Short => "short",
                    MessageFormat::Human => "human",
                    MessageFormat::Json { .. } => "json",
                };
                args.push("--error-format".into());
                args.push(format.into());

                self.compilation.to_doc_test.push(compilation::Doctest {
                    unit: unit.clone(),
                    args,
                    unstable_opts,
                    linker: self.compilation.target_linker(unit.kind).clone(),
                    script_metas,
                    env: artifact::get_env(&self, self.unit_deps(unit))?,
                });
            }

            super::output_depinfo(&mut self, unit)?;
        }

        // Propagate build-script environment variables and native library
        // search paths into the final compilation.
        for (script_meta, output) in self.build_script_outputs.lock().unwrap().iter() {
            self.compilation
                .extra_env
                .entry(*script_meta)
                .or_insert_with(Vec::new)
                .extend(output.env.iter().cloned());

            for dir in output.library_paths.iter() {
                self.compilation
                    .native_dirs
                    .insert(dir.clone().into_path_buf());
            }
        }
        Ok(self.compilation)
    }
303
304 fn collect_tests_and_executables(&mut self, unit: &Unit) -> CargoResult<()> {
305 for output in self.outputs(unit)?.iter() {
306 if matches!(
307 output.flavor,
308 FileFlavor::DebugInfo | FileFlavor::Auxiliary | FileFlavor::Sbom
309 ) {
310 continue;
311 }
312
313 let bindst = output.bin_dst();
314
315 if unit.mode == CompileMode::Test {
316 self.compilation
317 .tests
318 .push(self.unit_output(unit, &output.path));
319 } else if unit.target.is_executable() {
320 self.compilation
321 .binaries
322 .push(self.unit_output(unit, bindst));
323 } else if unit.target.is_cdylib()
324 && !self.compilation.cdylibs.iter().any(|uo| uo.unit == *unit)
325 {
326 self.compilation
327 .cdylibs
328 .push(self.unit_output(unit, bindst));
329 }
330 }
331 Ok(())
332 }
333
334 fn collect_doc_merge_info(&mut self) -> CargoResult<()> {
335 if !self.bcx.gctx.cli_unstable().rustdoc_mergeable_info {
336 return Ok(());
337 }
338
339 if !self.bcx.build_config.intent.is_doc() {
340 return Ok(());
341 }
342
343 if self.bcx.build_config.intent.wants_doc_json_output() {
344 return Ok(());
346 }
347
348 let mut doc_parts_map: HashMap<_, Vec<_>> = HashMap::new();
349
350 let unit_iter = if self.bcx.build_config.intent.wants_deps_docs() {
351 itertools::Either::Left(self.bcx.unit_graph.keys())
352 } else {
353 itertools::Either::Right(self.bcx.roots.iter())
354 };
355
356 for unit in unit_iter {
357 if !unit.mode.is_doc() {
358 continue;
359 }
360 let outputs = self.outputs(unit)?;
362
363 let Some(doc_parts) = outputs
364 .iter()
365 .find(|o| matches!(o.flavor, FileFlavor::DocParts))
366 else {
367 continue;
368 };
369
370 doc_parts_map
371 .entry(unit.kind)
372 .or_default()
373 .push(doc_parts.path.to_owned());
374 }
375
376 self.compilation.rustdoc_fingerprints = Some(
377 doc_parts_map
378 .into_iter()
379 .map(|(kind, doc_parts)| (kind, RustdocFingerprint::new(self, kind, doc_parts)))
380 .collect(),
381 );
382
383 Ok(())
384 }
385
386 pub fn get_executable(&mut self, unit: &Unit) -> CargoResult<Option<PathBuf>> {
388 let is_binary = unit.target.is_executable();
389 let is_test = unit.mode.is_any_test();
390 if !unit.mode.generates_executable() || !(is_binary || is_test) {
391 return Ok(None);
392 }
393 Ok(self
394 .outputs(unit)?
395 .iter()
396 .find(|o| o.flavor == FileFlavor::Normal)
397 .map(|output| output.bin_dst().clone()))
398 }
399
    /// Creates the on-disk layouts for the host and every requested target,
    /// records primary packages and root crate names, and initializes
    /// [`CompilationFiles`] (available afterwards via [`BuildRunner::files`]).
    #[tracing::instrument(skip_all)]
    pub fn prepare_units(&mut self) -> CargoResult<()> {
        let dest = self.bcx.profiles.get_dir_name();
        // Check-style intents only need the artifact-dir lock when timing
        // reports will be written; artifact-producing intents always do.
        let must_take_artifact_dir_lock = match self.bcx.build_config.intent {
            UserIntent::Check { .. } => {
                !self.bcx.build_config.timing_outputs.is_empty()
            }
            UserIntent::Build
            | UserIntent::Test
            | UserIntent::Doc { .. }
            | UserIntent::Doctest
            | UserIntent::Bench => true,
        };
        // One layout for the host, plus one per explicit `--target`.
        let host_layout = Layout::new(self.bcx.ws, None, &dest, must_take_artifact_dir_lock)?;
        let mut targets = HashMap::new();
        for kind in self.bcx.all_kinds.iter() {
            if let CompileKind::Target(target) = *kind {
                let layout = Layout::new(
                    self.bcx.ws,
                    Some(target),
                    &dest,
                    must_take_artifact_dir_lock,
                )?;
                targets.insert(target, layout);
            }
        }
        self.primary_packages
            .extend(self.bcx.roots.iter().map(|u| u.pkg.package_id()));
        self.compilation
            .root_crate_names
            .extend(self.bcx.roots.iter().map(|u| u.target.crate_name()));

        self.record_units_requiring_metadata();

        let files = CompilationFiles::new(self, host_layout, targets);
        self.files = Some(files);
        Ok(())
    }
444
    /// Prepares the build directories on disk and records the artifact and
    /// deps output directories in the compilation.
    ///
    /// Must run after [`BuildRunner::prepare_units`] (panics otherwise).
    #[tracing::instrument(skip_all)]
    pub fn prepare(&mut self) -> CargoResult<()> {
        self.files
            .as_mut()
            .unwrap()
            .host
            .prepare()
            .context("couldn't prepare build directories")?;
        for target in self.files.as_mut().unwrap().target.values_mut() {
            target
                .prepare()
                .context("couldn't prepare build directories")?;
        }

        let files = self.files.as_ref().unwrap();
        for &kind in self.bcx.all_kinds.iter() {
            let layout = files.layout(kind);
            if let Some(artifact_dir) = layout.artifact_dir() {
                self.compilation
                    .root_output
                    .insert(kind, artifact_dir.dest().to_path_buf());
            }
            if self.bcx.gctx.cli_unstable().build_dir_new_layout {
                // NOTE(review): this inserts one entry per *unit* under the
                // same `kind` key, so each insert overwrites the previous
                // one and only the last unit's deps dir is kept — confirm
                // this is intentional.
                for (unit, _) in self.bcx.unit_graph.iter() {
                    let dep_dir = self.files().deps_dir(unit);
                    paths::create_dir_all(&dep_dir)?;
                    self.compilation.deps_output.insert(kind, dep_dir);
                }
            } else {
                self.compilation
                    .deps_output
                    .insert(kind, layout.build_dir().legacy_deps().to_path_buf());
            }
        }
        Ok(())
    }
483
484 pub fn files(&self) -> &CompilationFiles<'a, 'gctx> {
485 self.files.as_ref().unwrap()
486 }
487
488 pub fn outputs(&self, unit: &Unit) -> CargoResult<Arc<Vec<OutputFile>>> {
490 self.files.as_ref().unwrap().outputs(unit, self.bcx)
491 }
492
493 pub fn unit_deps(&self, unit: &Unit) -> &[UnitDep] {
495 &self.bcx.unit_graph[unit]
496 }
497
498 pub fn find_build_script_units(&self, unit: &Unit) -> Option<Vec<Unit>> {
502 if unit.mode.is_run_custom_build() {
503 return Some(vec![unit.clone()]);
504 }
505
506 let build_script_units: Vec<Unit> = self.bcx.unit_graph[unit]
507 .iter()
508 .filter(|unit_dep| {
509 unit_dep.unit.mode.is_run_custom_build()
510 && unit_dep.unit.pkg.package_id() == unit.pkg.package_id()
511 })
512 .map(|unit_dep| unit_dep.unit.clone())
513 .collect();
514 if build_script_units.is_empty() {
515 None
516 } else {
517 Some(build_script_units)
518 }
519 }
520
521 pub fn find_build_script_metadatas(&self, unit: &Unit) -> Option<Vec<UnitHash>> {
526 self.find_build_script_units(unit).map(|units| {
527 units
528 .iter()
529 .map(|u| self.get_run_build_script_metadata(u))
530 .collect()
531 })
532 }
533
    /// Returns the unit hash identifying the execution of a
    /// `RunCustomBuild` unit.
    ///
    /// # Panics
    ///
    /// Panics if `unit` is not in run-custom-build mode, or if called before
    /// [`BuildRunner::prepare_units`].
    pub fn get_run_build_script_metadata(&self, unit: &Unit) -> UnitHash {
        assert!(unit.mode.is_run_custom_build());
        self.files().metadata(unit).unit_id()
    }
539
540 pub fn sbom_output_files(&self, unit: &Unit) -> CargoResult<Vec<PathBuf>> {
542 Ok(self
543 .outputs(unit)?
544 .iter()
545 .filter(|o| o.flavor == FileFlavor::Sbom)
546 .map(|o| o.path.clone())
547 .collect())
548 }
549
550 pub fn is_primary_package(&self, unit: &Unit) -> bool {
551 self.primary_packages.contains(&unit.pkg.package_id())
552 }
553
554 pub fn unit_output(&self, unit: &Unit, path: &Path) -> UnitOutput {
557 let script_metas = self.find_build_script_metadatas(unit);
558 UnitOutput {
559 unit: unit.clone(),
560 path: path.to_path_buf(),
561 script_metas,
562 }
563 }
564
    /// Checks for multiple units writing to the same output path.
    ///
    /// Path collisions are reported as warnings, except for doc targets of
    /// primary packages sharing a crate name, which are a hard error since
    /// their documentation would overwrite each other.
    #[tracing::instrument(skip_all)]
    fn check_collisions(&self) -> CargoResult<()> {
        // Maps every seen output path to the unit that produced it.
        let mut output_collisions = HashMap::new();
        let describe_collision = |unit: &Unit, other_unit: &Unit| -> String {
            format!(
                "the {} target `{}` in package `{}` has the same output filename as the {} target `{}` in package `{}`",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg.package_id(),
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg.package_id(),
            )
        };
        let suggestion = [
            Level::NOTE.message("this may become a hard error in the future; see <https://github.com/rust-lang/cargo/issues/6313>"),
            Level::HELP.message("consider changing their names to be unique or compiling them separately")
        ];
        let rustdoc_suggestion = [
            Level::NOTE.message("this is a known bug where multiple crates with the same name use the same path; see <https://github.com/rust-lang/cargo/issues/6313>")
        ];
        // Same-name collisions get the short suggestion; different-name
        // collisions are unexpected and include full debug details so the
        // user can file a useful bug report.
        let report_collision = |unit: &Unit,
                                other_unit: &Unit,
                                path: &PathBuf,
                                messages: &[Message<'_>]|
         -> CargoResult<()> {
            if unit.target.name() == other_unit.target.name() {
                self.bcx.gctx.shell().print_report(
                    &[Level::WARNING
                        .secondary_title(format!("output filename collision at {}", path.display()))
                        .elements(
                            [Level::NOTE.message(describe_collision(unit, other_unit))]
                                .into_iter()
                                .chain(messages.iter().cloned()),
                        )],
                    false,
                )
            } else {
                self.bcx.gctx.shell().print_report(
                    &[Level::WARNING
                        .secondary_title(format!("output filename collision at {}", path.display()))
                        .elements([
                            Level::NOTE.message(describe_collision(unit, other_unit)),
                            Level::NOTE.message("if this looks unexpected, it may be a bug in Cargo. Please file a bug \
                            report at https://github.com/rust-lang/cargo/issues/ with as much information as you \
                            can provide."),
                            Level::NOTE.message(format!("cargo {} running on `{}` target `{}`",
                                crate::version(), self.bcx.host_triple(), self.bcx.target_data.short_name(&unit.kind))),
                            Level::NOTE.message(format!("first unit: {unit:?}")),
                            Level::NOTE.message(format!("second unit: {other_unit:?}")),
                        ])],
                    false,
                )
            }
        };

        // Doc name collisions are a hard error, not a warning.
        fn doc_collision_error(unit: &Unit, other_unit: &Unit) -> CargoResult<()> {
            bail!(
                "document output filename collision\n\
                The {} `{}` in package `{}` has the same name as the {} `{}` in package `{}`.\n\
                Only one may be documented at once since they output to the same path.\n\
                Consider documenting only one, renaming one, \
                or marking one with `doc = false` in Cargo.toml.",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg,
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg,
            );
        }

        // Sort the units so the warning output is deterministic.
        let mut keys = self
            .bcx
            .unit_graph
            .keys()
            .filter(|unit| !unit.mode.is_run_custom_build())
            .collect::<Vec<_>>();
        keys.sort_unstable();
        // Doc libs and bins are tracked separately, keyed by crate name and
        // compile kind.
        let mut doc_libs = HashMap::new();
        let mut doc_bins = HashMap::new();
        for unit in keys {
            if unit.mode.is_doc() && self.is_primary_package(unit) {
                if unit.target.is_lib() {
                    if let Some(prev) = doc_libs.insert((unit.target.crate_name(), unit.kind), unit)
                    {
                        doc_collision_error(unit, prev)?;
                    }
                } else if let Some(prev) =
                    doc_bins.insert((unit.target.crate_name(), unit.kind), unit)
                {
                    doc_collision_error(unit, prev)?;
                }
            }
            for output in self.outputs(unit)?.iter() {
                // Primary output path.
                if let Some(other_unit) = output_collisions.insert(output.path.clone(), unit) {
                    if unit.mode.is_doc() {
                        report_collision(unit, other_unit, &output.path, &rustdoc_suggestion)?;
                    } else {
                        report_collision(unit, other_unit, &output.path, &suggestion)?;
                    }
                }
                // Hardlinked copy, if any.
                if let Some(hardlink) = output.hardlink.as_ref() {
                    if let Some(other_unit) = output_collisions.insert(hardlink.clone(), unit) {
                        report_collision(unit, other_unit, hardlink, &suggestion)?;
                    }
                }
                // `--artifact-dir` export path, if any.
                if let Some(ref export_path) = output.export_path {
                    if let Some(other_unit) = output_collisions.insert(export_path.clone(), unit) {
                        self.bcx.gctx.shell().print_report(
                            &[Level::WARNING
                                .secondary_title(format!(
                                    "`--artifact-dir` filename collision at {}",
                                    export_path.display()
                                ))
                                .elements(
                                    [Level::NOTE.message(describe_collision(unit, other_unit))]
                                        .into_iter()
                                        .chain(suggestion.iter().cloned()),
                                )],
                            false,
                        )?;
                    }
                }
            }
        }
        Ok(())
    }
707
708 fn record_units_requiring_metadata(&mut self) {
713 for (key, deps) in self.bcx.unit_graph.iter() {
714 for dep in deps {
715 if self.only_requires_rmeta(key, &dep.unit) {
716 self.rmeta_required.insert(dep.unit.clone());
717 }
718 }
719 }
720 }
721
722 pub fn only_requires_rmeta(&self, parent: &Unit, dep: &Unit) -> bool {
725 !parent.requires_upstream_objects()
728 && parent.mode == CompileMode::Build
729 && !dep.requires_upstream_objects()
732 && dep.mode == CompileMode::Build
733 }
734
    /// Whether dependants of `unit` only require its metadata (`.rmeta`)
    /// file, as recorded by `record_units_requiring_metadata`.
    pub fn rmeta_required(&self, unit: &Unit) -> bool {
        self.rmeta_required.contains(unit)
    }
740
    /// For every doc or doc-scrape unit, records which unit's metadata hash
    /// it should use, in [`Self::metadata_for_doc_units`].
    ///
    /// Among units with the same package and target (excluding scrape
    /// units), a `check` unit is preferred, then a doc unit, falling back
    /// to the unit itself.
    #[tracing::instrument(skip_all)]
    pub fn compute_metadata_for_doc_units(&mut self) {
        for unit in self.bcx.unit_graph.keys() {
            if !unit.mode.is_doc() && !unit.mode.is_doc_scrape() {
                continue;
            }

            // All non-scrape units sharing this unit's package and target,
            // regardless of mode.
            let matching_units = self
                .bcx
                .unit_graph
                .keys()
                .filter(|other| {
                    unit.pkg == other.pkg
                        && unit.target == other.target
                        && !other.mode.is_doc_scrape()
                })
                .collect::<Vec<_>>();
            // `matching_units` holds `&Unit`, so `find` yields `&&Unit`;
            // `&unit` matches that level of indirection for the fallback.
            let metadata_unit = matching_units
                .iter()
                .find(|other| other.mode.is_check())
                .or_else(|| matching_units.iter().find(|other| other.mode.is_doc()))
                .unwrap_or(&unit);
            self.metadata_for_doc_units
                .insert(unit.clone(), self.files().metadata(metadata_unit));
        }
    }
777}