1use std::collections::{HashMap, HashSet};
4use std::path::{Path, PathBuf};
5use std::sync::{Arc, Mutex};
6
7use crate::core::PackageId;
8use crate::core::compiler::compilation::{self, UnitOutput};
9use crate::core::compiler::{self, Unit, UserIntent, artifact};
10use crate::util::cache_lock::CacheLockMode;
11use crate::util::errors::CargoResult;
12use annotate_snippets::{Level, Message};
13use anyhow::{Context as _, bail};
14use cargo_util::paths;
15use filetime::FileTime;
16use itertools::Itertools;
17use jobserver::Client;
18
19use super::RustdocFingerprint;
20use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts};
21use super::fingerprint::{Checksum, Fingerprint};
22use super::job_queue::JobQueue;
23use super::layout::Layout;
24use super::lto::Lto;
25use super::unit_graph::UnitDep;
26use super::{BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor};
27
28mod compilation_files;
29use self::compilation_files::CompilationFiles;
30pub use self::compilation_files::{Metadata, OutputFile, UnitHash};
31
/// Central state for a single build invocation.
///
/// Collects the caches, fingerprints, and build-script bookkeeping that the
/// compilation jobs read and update, and ultimately produces the
/// [`Compilation`] result via [`BuildRunner::compile`] or
/// [`BuildRunner::dry_run`].
pub struct BuildRunner<'a, 'gctx> {
    /// The build context covering the whole unit graph.
    pub bcx: &'a BuildContext<'a, 'gctx>,
    /// The result of this compilation, returned by `compile`/`dry_run`.
    pub compilation: Compilation<'gctx>,
    /// Outputs of executed build scripts, keyed by script metadata hash.
    pub build_script_outputs: Arc<Mutex<BuildScriptOutputs>>,
    /// Dependencies declared by build scripts (see [`BuildDeps`]).
    pub build_explicit_deps: HashMap<Unit, BuildDeps>,
    /// Fingerprint per unit, used to decide whether a rebuild is needed.
    pub fingerprints: HashMap<Unit, Arc<Fingerprint>>,
    /// Cache of file mtimes to avoid repeated filesystem stats.
    pub mtime_cache: HashMap<PathBuf, FileTime>,
    /// Cache of file checksums used by the fingerprint machinery.
    pub checksum_cache: HashMap<PathBuf, Checksum>,
    /// Units that have already been processed — NOTE(review): populated
    /// outside this view; confirm exact insertion point in `super::compile`.
    pub compiled: HashSet<Unit>,
    /// Build-script info per unit (populated by `custom_build::build_map`).
    pub build_scripts: HashMap<Unit, Arc<BuildScripts>>,
    /// Jobserver client used to limit build parallelism.
    pub jobserver: Client,
    /// Packages of the root units — see `is_primary_package`.
    primary_packages: HashSet<PackageId>,
    /// Output-path bookkeeping; `None` until `prepare_units` runs.
    files: Option<CompilationFiles<'a, 'gctx>>,
    /// Units for which all dependents only require the `.rmeta` metadata
    /// file (see `only_requires_rmeta` / `record_units_requiring_metadata`).
    rmeta_required: HashSet<Unit>,
    /// LTO mode per unit, computed by `super::lto::generate`.
    pub lto: HashMap<Unit, Lto>,
    /// Metadata to use when documenting each doc unit
    /// (see `compute_metadata_for_doc_units`).
    pub metadata_for_doc_units: HashMap<Unit, Metadata>,
    /// Units for which rustdoc example scraping failed.
    pub failed_scrape_units: Arc<Mutex<HashSet<UnitHash>>>,
}
91
92impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
93 pub fn new(bcx: &'a BuildContext<'a, 'gctx>) -> CargoResult<Self> {
94 let jobserver = match bcx.gctx.jobserver_from_env() {
103 Some(c) => c.clone(),
104 None => {
105 let client =
106 Client::new(bcx.jobs() as usize).context("failed to create jobserver")?;
107 client.acquire_raw()?;
108 client
109 }
110 };
111
112 Ok(Self {
113 bcx,
114 compilation: Compilation::new(bcx)?,
115 build_script_outputs: Arc::new(Mutex::new(BuildScriptOutputs::default())),
116 fingerprints: HashMap::new(),
117 mtime_cache: HashMap::new(),
118 checksum_cache: HashMap::new(),
119 compiled: HashSet::new(),
120 build_scripts: HashMap::new(),
121 build_explicit_deps: HashMap::new(),
122 jobserver,
123 primary_packages: HashSet::new(),
124 files: None,
125 rmeta_required: HashSet::new(),
126 lto: HashMap::new(),
127 metadata_for_doc_units: HashMap::new(),
128 failed_scrape_units: Arc::new(Mutex::new(HashSet::new())),
129 })
130 }
131
    /// Performs the orchestration work of [`BuildRunner::compile`] without
    /// spawning any compilation jobs.
    ///
    /// Prepares units and layouts, checks for output collisions, and records
    /// the tests/executables each root would produce, then returns the
    /// resulting [`Compilation`].
    pub fn dry_run(mut self) -> CargoResult<Compilation<'gctx>> {
        // Hold a shared package-cache lock for the duration of the run.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        self.check_collisions()?;

        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;
        }

        Ok(self.compilation)
    }
152
    /// Starts the compilation, waits for it to finish, and returns the
    /// populated [`Compilation`] describing the results.
    ///
    /// High-level flow: acquire the package-cache lock, prepare layouts and
    /// unit metadata, queue a job for every root unit, execute the job
    /// queue, then collect artifacts, doctest arguments, and build-script
    /// environment into `self.compilation`.
    #[tracing::instrument(skip_all)]
    pub fn compile(mut self, exec: &Arc<dyn Executor>) -> CargoResult<Compilation<'gctx>> {
        // Shared lock: readers may coexist, but the cache cannot be mutated
        // out from under the build.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        let mut queue = JobQueue::new(self.bcx);
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        custom_build::build_map(&mut self)?;
        self.check_collisions()?;
        self.compute_metadata_for_doc_units();

        if self.bcx.build_config.intent.is_doc() {
            RustdocFingerprint::check_rustdoc_fingerprint(&self)?
        }

        // Queue a job (recursively, with dependencies) for every root unit.
        for unit in &self.bcx.roots {
            let force_rebuild = self.bcx.build_config.force_rebuild;
            super::compile(&mut self, &mut queue, unit, exec, force_rebuild)?;
        }

        // Drop memoized fingerprint state before executing the queue —
        // NOTE(review): presumably so hashes are recomputed from fresh
        // on-disk state during execution; see the fingerprint module.
        for fingerprint in self.fingerprints.values() {
            fingerprint.clear_memoized();
        }

        queue.execute(&mut self)?;

        // Root units that have a build script, deduplicated by consecutive
        // package id.
        let units_with_build_script = &self
            .bcx
            .roots
            .iter()
            .filter(|unit| self.build_scripts.contains_key(unit))
            .dedup_by(|x, y| x.pkg.package_id() == y.pkg.package_id())
            .collect::<Vec<_>>();
        for unit in units_with_build_script {
            for dep in &self.bcx.unit_graph[unit] {
                if dep.unit.mode.is_run_custom_build() {
                    // Expose each script's output directory as OUT_DIR.
                    let out_dir = self
                        .files()
                        .build_script_out_dir(&dep.unit)
                        .display()
                        .to_string();
                    let script_meta = self.get_run_build_script_metadata(&dep.unit);
                    self.compilation
                        .extra_env
                        .entry(script_meta)
                        .or_insert_with(Vec::new)
                        .push(("OUT_DIR".to_string(), out_dir));
                }
            }
        }

        self.collect_doc_merge_info()?;

        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;

            if unit.mode.is_doc_test() {
                // Assemble the rustdoc argument list for this doctest unit:
                // externs, library search paths, LTO, features, check-cfg.
                let mut unstable_opts = false;
                let mut args = compiler::extern_args(&self, unit, &mut unstable_opts)?;
                args.extend(compiler::lib_search_paths(&self, unit)?);
                args.extend(compiler::lto_args(&self, unit));
                args.extend(compiler::features_args(unit));
                args.extend(compiler::check_cfg_args(unit));

                let script_metas = self.find_build_script_metadatas(unit);
                if let Some(meta_vec) = script_metas.clone() {
                    for meta in meta_vec {
                        if let Some(output) = self.build_script_outputs.lock().unwrap().get(meta) {
                            // Forward cfgs emitted by the build script.
                            for cfg in &output.cfgs {
                                args.push("--cfg".into());
                                args.push(cfg.into());
                            }

                            for check_cfg in &output.check_cfgs {
                                args.push("--check-cfg".into());
                                args.push(check_cfg.into());
                            }

                            // Only forward linker args applicable to this
                            // target and compile mode.
                            for (lt, arg) in &output.linker_args {
                                if lt.applies_to(&unit.target, unit.mode) {
                                    args.push("-C".into());
                                    args.push(format!("link-arg={}", arg).into());
                                }
                            }
                        }
                    }
                }
                args.extend(unit.rustdocflags.iter().map(Into::into));

                use super::MessageFormat;
                let format = match self.bcx.build_config.message_format {
                    MessageFormat::Short => "short",
                    MessageFormat::Human => "human",
                    MessageFormat::Json { .. } => "json",
                };
                args.push("--error-format".into());
                args.push(format.into());

                self.compilation.to_doc_test.push(compilation::Doctest {
                    unit: unit.clone(),
                    args,
                    unstable_opts,
                    linker: self.compilation.target_linker(unit.kind).clone(),
                    script_metas,
                    env: artifact::get_env(&self, self.unit_deps(unit))?,
                });
            }

            super::output_depinfo(&mut self, unit)?;
        }

        // Propagate every build script's environment variables and native
        // library search paths into the final `Compilation`.
        for (script_meta, output) in self.build_script_outputs.lock().unwrap().iter() {
            self.compilation
                .extra_env
                .entry(*script_meta)
                .or_insert_with(Vec::new)
                .extend(output.env.iter().cloned());

            for dir in output.library_paths.iter() {
                self.compilation
                    .native_dirs
                    .insert(dir.clone().into_path_buf());
            }
        }
        Ok(self.compilation)
    }
304
305 fn collect_tests_and_executables(&mut self, unit: &Unit) -> CargoResult<()> {
306 for output in self.outputs(unit)?.iter() {
307 if matches!(
308 output.flavor,
309 FileFlavor::DebugInfo | FileFlavor::Auxiliary | FileFlavor::Sbom
310 ) {
311 continue;
312 }
313
314 let bindst = output.bin_dst();
315
316 if unit.mode == CompileMode::Test {
317 self.compilation
318 .tests
319 .push(self.unit_output(unit, &output.path));
320 } else if unit.target.is_executable() {
321 self.compilation
322 .binaries
323 .push(self.unit_output(unit, bindst));
324 } else if unit.target.is_cdylib()
325 && !self.compilation.cdylibs.iter().any(|uo| uo.unit == *unit)
326 {
327 self.compilation
328 .cdylibs
329 .push(self.unit_output(unit, bindst));
330 }
331 }
332 Ok(())
333 }
334
    /// Collects rustdoc "doc parts" outputs for mergeable documentation and
    /// stores their fingerprints on the [`Compilation`].
    ///
    /// Only active under `-Z rustdoc-mergeable-info`, for doc intents, and
    /// when JSON doc output was not requested.
    fn collect_doc_merge_info(&mut self) -> CargoResult<()> {
        if !self.bcx.gctx.cli_unstable().rustdoc_mergeable_info {
            return Ok(());
        }

        if !self.bcx.build_config.intent.is_doc() {
            return Ok(());
        }

        if self.bcx.build_config.intent.wants_doc_json_output() {
            return Ok(());
        }

        // Doc-part paths grouped by compile kind.
        let mut doc_parts_map: HashMap<_, Vec<_>> = HashMap::new();

        // Walk every unit when dependency docs were requested, otherwise
        // just the roots.
        let unit_iter = if self.bcx.build_config.intent.wants_deps_docs() {
            itertools::Either::Left(self.bcx.unit_graph.keys())
        } else {
            itertools::Either::Right(self.bcx.roots.iter())
        };

        for unit in unit_iter {
            if !unit.mode.is_doc() {
                continue;
            }
            let outputs = self.outputs(unit)?;

            // Units without a `DocParts` output have nothing to merge.
            let Some(doc_parts) = outputs
                .iter()
                .find(|o| matches!(o.flavor, FileFlavor::DocParts))
            else {
                continue;
            };

            doc_parts_map
                .entry(unit.kind)
                .or_default()
                .push(doc_parts.path.to_owned());
        }

        self.compilation.rustdoc_fingerprints = Some(
            doc_parts_map
                .into_iter()
                .map(|(kind, doc_parts)| (kind, RustdocFingerprint::new(self, kind, doc_parts)))
                .collect(),
        );

        Ok(())
    }
386
387 pub fn get_executable(&mut self, unit: &Unit) -> CargoResult<Option<PathBuf>> {
389 let is_binary = unit.target.is_executable();
390 let is_test = unit.mode.is_any_test();
391 if !unit.mode.generates_executable() || !(is_binary || is_test) {
392 return Ok(None);
393 }
394 Ok(self
395 .outputs(unit)?
396 .iter()
397 .find(|o| o.flavor == FileFlavor::Normal)
398 .map(|output| output.bin_dst().clone()))
399 }
400
    /// Prepares the host/target layouts and seeds per-build bookkeeping.
    ///
    /// Must run before [`BuildRunner::prepare`] (which unwraps `self.files`).
    /// Creates a [`Layout`] for the host and each requested target, records
    /// the primary packages and root crate names, records units that only
    /// need `.rmeta`, and finally builds the [`CompilationFiles`] map.
    #[tracing::instrument(skip_all)]
    pub fn prepare_units(&mut self) -> CargoResult<()> {
        let dest = self.bcx.profiles.get_dir_name();
        // Lock the artifact dir only when artifacts will actually be
        // produced; `check` needs it only for timing outputs.
        let must_take_artifact_dir_lock = match self.bcx.build_config.intent {
            UserIntent::Check { .. } => {
                !self.bcx.build_config.timing_outputs.is_empty()
            }
            UserIntent::Build
            | UserIntent::Test
            | UserIntent::Doc { .. }
            | UserIntent::Doctest
            | UserIntent::Bench => true,
        };
        let host_layout = Layout::new(self.bcx.ws, None, &dest, must_take_artifact_dir_lock)?;
        let mut targets = HashMap::new();
        for kind in self.bcx.all_kinds.iter() {
            if let CompileKind::Target(target) = *kind {
                let layout = Layout::new(
                    self.bcx.ws,
                    Some(target),
                    &dest,
                    must_take_artifact_dir_lock,
                )?;
                targets.insert(target, layout);
            }
        }
        self.primary_packages
            .extend(self.bcx.roots.iter().map(|u| u.pkg.package_id()));
        self.compilation
            .root_crate_names
            .extend(self.bcx.roots.iter().map(|u| u.target.crate_name()));

        self.record_units_requiring_metadata();

        let files = CompilationFiles::new(self, host_layout, targets);
        self.files = Some(files);
        Ok(())
    }
445
    /// Prepares the filesystem: creates the output directories recorded by
    /// `prepare_units` and fills in `root_output`/`deps_output` on the
    /// resulting [`Compilation`].
    ///
    /// Panics if `prepare_units` has not been called first (`self.files`
    /// is still `None`).
    #[tracing::instrument(skip_all)]
    pub fn prepare(&mut self) -> CargoResult<()> {
        self.files
            .as_mut()
            .unwrap()
            .host
            .prepare()
            .context("couldn't prepare build directories")?;
        for target in self.files.as_mut().unwrap().target.values_mut() {
            target
                .prepare()
                .context("couldn't prepare build directories")?;
        }

        let files = self.files.as_ref().unwrap();
        for &kind in self.bcx.all_kinds.iter() {
            let layout = files.layout(kind);
            if let Some(artifact_dir) = layout.artifact_dir() {
                self.compilation
                    .root_output
                    .insert(kind, artifact_dir.dest().to_path_buf());
            }
            // New build-dir layout: a per-unit deps dir, created eagerly.
            // Legacy layout: one shared `deps` directory per layout.
            if self.bcx.gctx.cli_unstable().build_dir_new_layout {
                for (unit, _) in self.bcx.unit_graph.iter() {
                    let dep_dir = self.files().deps_dir(unit);
                    paths::create_dir_all(&dep_dir)?;
                    self.compilation.deps_output.insert(kind, dep_dir);
                }
            } else {
                self.compilation
                    .deps_output
                    .insert(kind, layout.build_dir().legacy_deps().to_path_buf());
            }
        }
        Ok(())
    }
484
    /// Returns the output-file bookkeeping for this build.
    ///
    /// Panics if called before `prepare_units` has populated `self.files`.
    pub fn files(&self) -> &CompilationFiles<'a, 'gctx> {
        self.files.as_ref().unwrap()
    }
488
489 pub fn outputs(&self, unit: &Unit) -> CargoResult<Arc<Vec<OutputFile>>> {
491 self.files.as_ref().unwrap().outputs(unit, self.bcx)
492 }
493
494 pub fn unit_deps(&self, unit: &Unit) -> &[UnitDep] {
496 &self.bcx.unit_graph[unit]
497 }
498
499 pub fn find_build_script_units(&self, unit: &Unit) -> Option<Vec<Unit>> {
503 if unit.mode.is_run_custom_build() {
504 return Some(vec![unit.clone()]);
505 }
506
507 let build_script_units: Vec<Unit> = self.bcx.unit_graph[unit]
508 .iter()
509 .filter(|unit_dep| {
510 unit_dep.unit.mode.is_run_custom_build()
511 && unit_dep.unit.pkg.package_id() == unit.pkg.package_id()
512 })
513 .map(|unit_dep| unit_dep.unit.clone())
514 .collect();
515 if build_script_units.is_empty() {
516 None
517 } else {
518 Some(build_script_units)
519 }
520 }
521
522 pub fn find_build_script_metadatas(&self, unit: &Unit) -> Option<Vec<UnitHash>> {
527 self.find_build_script_units(unit).map(|units| {
528 units
529 .iter()
530 .map(|u| self.get_run_build_script_metadata(u))
531 .collect()
532 })
533 }
534
    /// Returns the unit hash for the given run-build-script unit.
    ///
    /// Panics if `unit` is not a `run-custom-build` unit.
    pub fn get_run_build_script_metadata(&self, unit: &Unit) -> UnitHash {
        assert!(unit.mode.is_run_custom_build());
        self.files().metadata(unit).unit_id()
    }
540
541 pub fn sbom_output_files(&self, unit: &Unit) -> CargoResult<Vec<PathBuf>> {
543 Ok(self
544 .outputs(unit)?
545 .iter()
546 .filter(|o| o.flavor == FileFlavor::Sbom)
547 .map(|o| o.path.clone())
548 .collect())
549 }
550
551 pub fn is_primary_package(&self, unit: &Unit) -> bool {
552 self.primary_packages.contains(&unit.pkg.package_id())
553 }
554
555 pub fn unit_output(&self, unit: &Unit, path: &Path) -> UnitOutput {
558 let script_metas = self.find_build_script_metadatas(unit);
559 UnitOutput {
560 unit: unit.clone(),
561 path: path.to_path_buf(),
562 script_metas,
563 }
564 }
565
    /// Checks for output-filename collisions between units and reports them.
    ///
    /// Doc collisions between primary-package lib/bin targets with the same
    /// crate name are hard errors (they would write to the same path); all
    /// other collisions — regular outputs, hardlinks, and `--artifact-dir`
    /// exports — only emit warnings (see rust-lang/cargo#6313).
    #[tracing::instrument(skip_all)]
    fn check_collisions(&self) -> CargoResult<()> {
        // Maps each output path to the first unit that claimed it.
        let mut output_collisions = HashMap::new();
        let describe_collision = |unit: &Unit, other_unit: &Unit| -> String {
            format!(
                "the {} target `{}` in package `{}` has the same output filename as the {} target `{}` in package `{}`",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg.package_id(),
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg.package_id(),
            )
        };
        let suggestion = [
            Level::NOTE.message("this may become a hard error in the future; see <https://github.com/rust-lang/cargo/issues/6313>"),
            Level::HELP.message("consider changing their names to be unique or compiling them separately")
        ];
        let rustdoc_suggestion = [
            Level::NOTE.message("this is a known bug where multiple crates with the same name use the same path; see <https://github.com/rust-lang/cargo/issues/6313>")
        ];
        // Same-name collisions get the provided suggestion; different-name
        // collisions are reported as a likely Cargo bug with extra
        // diagnostics about both units.
        let report_collision = |unit: &Unit,
                                other_unit: &Unit,
                                path: &PathBuf,
                                messages: &[Message<'_>]|
         -> CargoResult<()> {
            if unit.target.name() == other_unit.target.name() {
                self.bcx.gctx.shell().print_report(
                    &[Level::WARNING
                        .secondary_title(format!("output filename collision at {}", path.display()))
                        .elements(
                            [Level::NOTE.message(describe_collision(unit, other_unit))]
                                .into_iter()
                                .chain(messages.iter().cloned()),
                        )],
                    false,
                )
            } else {
                self.bcx.gctx.shell().print_report(
                    &[Level::WARNING
                        .secondary_title(format!("output filename collision at {}", path.display()))
                        .elements([
                            Level::NOTE.message(describe_collision(unit, other_unit)),
                            Level::NOTE.message("if this looks unexpected, it may be a bug in Cargo. Please file a bug \
                                report at https://github.com/rust-lang/cargo/issues/ with as much information as you \
                                can provide."),
                            Level::NOTE.message(format!("cargo {} running on `{}` target `{}`",
                                crate::version(), self.bcx.host_triple(), self.bcx.target_data.short_name(&unit.kind))),
                            Level::NOTE.message(format!("first unit: {unit:?}")),
                            Level::NOTE.message(format!("second unit: {other_unit:?}")),
                        ])],
                    false,
                )
            }
        };

        // Hard error for doc targets that would document to the same path.
        fn doc_collision_error(unit: &Unit, other_unit: &Unit) -> CargoResult<()> {
            bail!(
                "document output filename collision\n\
                 The {} `{}` in package `{}` has the same name as the {} `{}` in package `{}`.\n\
                 Only one may be documented at once since they output to the same path.\n\
                 Consider documenting only one, renaming one, \
                 or marking one with `doc = false` in Cargo.toml.",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg,
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg,
            );
        }

        let mut keys = self
            .bcx
            .unit_graph
            .keys()
            .filter(|unit| !unit.mode.is_run_custom_build())
            .collect::<Vec<_>>();
        // Sort so that collisions are discovered/reported deterministically.
        keys.sort_unstable();
        // Doc units keyed by (crate name, compile kind), split by lib/bin.
        let mut doc_libs = HashMap::new();
        let mut doc_bins = HashMap::new();
        for unit in keys {
            if unit.mode.is_doc() && self.is_primary_package(unit) {
                if unit.target.is_lib() {
                    if let Some(prev) = doc_libs.insert((unit.target.crate_name(), unit.kind), unit)
                    {
                        doc_collision_error(unit, prev)?;
                    }
                } else if let Some(prev) =
                    doc_bins.insert((unit.target.crate_name(), unit.kind), unit)
                {
                    doc_collision_error(unit, prev)?;
                }
            }
            for output in self.outputs(unit)?.iter() {
                if let Some(other_unit) = output_collisions.insert(output.path.clone(), unit) {
                    if unit.mode.is_doc() {
                        report_collision(unit, other_unit, &output.path, &rustdoc_suggestion)?;
                    } else {
                        report_collision(unit, other_unit, &output.path, &suggestion)?;
                    }
                }
                if let Some(hardlink) = output.hardlink.as_ref() {
                    if let Some(other_unit) = output_collisions.insert(hardlink.clone(), unit) {
                        report_collision(unit, other_unit, hardlink, &suggestion)?;
                    }
                }
                if let Some(ref export_path) = output.export_path {
                    if let Some(other_unit) = output_collisions.insert(export_path.clone(), unit) {
                        self.bcx.gctx.shell().print_report(
                            &[Level::WARNING
                                .secondary_title(format!(
                                    "`--artifact-dir` filename collision at {}",
                                    export_path.display()
                                ))
                                .elements(
                                    [Level::NOTE.message(describe_collision(unit, other_unit))]
                                        .into_iter()
                                        .chain(suggestion.iter().cloned()),
                                )],
                            false,
                        )?;
                    }
                }
            }
        }
        Ok(())
    }
708
709 fn record_units_requiring_metadata(&mut self) {
714 for (key, deps) in self.bcx.unit_graph.iter() {
715 for dep in deps {
716 if self.only_requires_rmeta(key, &dep.unit) {
717 self.rmeta_required.insert(dep.unit.clone());
718 }
719 }
720 }
721 }
722
723 pub fn only_requires_rmeta(&self, parent: &Unit, dep: &Unit) -> bool {
726 !parent.requires_upstream_objects()
729 && parent.mode == CompileMode::Build
730 && !dep.requires_upstream_objects()
733 && dep.mode == CompileMode::Build
734 }
735
    /// Returns whether `unit` was recorded (by
    /// `record_units_requiring_metadata`) as only needing to produce its
    /// `.rmeta` for some dependent.
    pub fn rmeta_required(&self, unit: &Unit) -> bool {
        self.rmeta_required.contains(unit)
    }
741
    /// Chooses, for every doc (or doc-scrape) unit, which unit's metadata
    /// should be used when documenting it.
    ///
    /// Among units with the same package and target (excluding scrape
    /// units), a check unit is preferred, then another doc unit, falling
    /// back to the unit itself.
    #[tracing::instrument(skip_all)]
    pub fn compute_metadata_for_doc_units(&mut self) {
        for unit in self.bcx.unit_graph.keys() {
            if !unit.mode.is_doc() && !unit.mode.is_doc_scrape() {
                continue;
            }

            // Candidate units sharing this unit's package and target.
            let matching_units = self
                .bcx
                .unit_graph
                .keys()
                .filter(|other| {
                    unit.pkg == other.pkg
                        && unit.target == other.target
                        && !other.mode.is_doc_scrape()
                })
                .collect::<Vec<_>>();
            // Preference order: check unit, then doc unit, then `unit`.
            let metadata_unit = matching_units
                .iter()
                .find(|other| other.mode.is_check())
                .or_else(|| matching_units.iter().find(|other| other.mode.is_doc()))
                .unwrap_or(&unit);
            self.metadata_for_doc_units
                .insert(unit.clone(), self.files().metadata(metadata_unit));
        }
    }
778}