1use std::collections::{HashMap, HashSet};
4use std::path::{Path, PathBuf};
5use std::sync::{Arc, Mutex};
6
7use crate::core::PackageId;
8use crate::core::compiler::compilation::{self, UnitOutput};
9use crate::core::compiler::locking::LockManager;
10use crate::core::compiler::{self, Unit, UserIntent, artifact};
11use crate::util::cache_lock::CacheLockMode;
12use crate::util::errors::CargoResult;
13use annotate_snippets::{Level, Message};
14use anyhow::{Context as _, bail};
15use cargo_util::paths;
16use filetime::FileTime;
17use itertools::Itertools;
18use jobserver::Client;
19
20use super::RustdocFingerprint;
21use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts};
22use super::fingerprint::{Checksum, Fingerprint};
23use super::job_queue::JobQueue;
24use super::layout::Layout;
25use super::lto::Lto;
26use super::unit_graph::UnitDep;
27use super::{BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor};
28
29mod compilation_files;
30use self::compilation_files::CompilationFiles;
31pub use self::compilation_files::{Metadata, OutputFile, UnitHash};
32
/// Central state for a single build session.
///
/// Holds the per-build caches (fingerprints, mtimes, checksums), the unit
/// bookkeeping (what has been compiled, which build scripts have run), and
/// the [`Compilation`] result that is returned to the caller once the build
/// finishes (see `compile`/`dry_run`).
pub struct BuildRunner<'a, 'gctx> {
    /// The build plan and configuration this run executes against.
    pub bcx: &'a BuildContext<'a, 'gctx>,
    /// Accumulated results of the build, returned when the run completes.
    pub compilation: Compilation<'gctx>,
    /// Outputs of executed build scripts, shared across compile jobs.
    pub build_script_outputs: Arc<Mutex<BuildScriptOutputs>>,
    /// Per-unit build-script dependency information; populated by
    /// `custom_build::build_map` (called from `compile`).
    pub build_explicit_deps: HashMap<Unit, BuildDeps>,
    /// Fingerprint computed for each unit, used for rebuild detection.
    pub fingerprints: HashMap<Unit, Arc<Fingerprint>>,
    /// Cache of file modification times, keyed by path.
    pub mtime_cache: HashMap<PathBuf, FileTime>,
    /// Cache of file content checksums, keyed by path.
    pub checksum_cache: HashMap<PathBuf, Checksum>,
    /// Units that have already been processed during this run.
    pub compiled: HashSet<Unit>,
    /// Build-script information for each unit that has one.
    pub build_scripts: HashMap<Unit, Arc<BuildScripts>>,
    /// Jobserver client used to limit build parallelism.
    pub jobserver: Client,
    /// Package ids of the root units; see `is_primary_package`.
    /// Populated by `prepare_units`.
    primary_packages: HashSet<PackageId>,
    /// Output-layout helper; `None` until `prepare_units` runs.
    files: Option<CompilationFiles<'a, 'gctx>>,
    /// Units for which only the `.rmeta` output is required by their
    /// dependents; filled in by `record_units_requiring_metadata`.
    rmeta_required: HashSet<Unit>,
    /// LTO mode per unit, produced by `lto::generate`.
    pub lto: HashMap<Unit, Lto>,
    /// Metadata chosen for each doc unit; see
    /// `compute_metadata_for_doc_units`.
    pub metadata_for_doc_units: HashMap<Unit, Metadata>,
    /// Hashes of units whose doc scraping failed, shared across jobs.
    pub failed_scrape_units: Arc<Mutex<HashSet<UnitHash>>>,
    // NOTE(review): coordination of build locks, presumably shared with the
    // job queue — confirm against `LockManager`'s definition.
    pub lock_manager: Arc<LockManager>,
}
95
96impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
97 pub fn new(bcx: &'a BuildContext<'a, 'gctx>) -> CargoResult<Self> {
98 let jobserver = match bcx.gctx.jobserver_from_env() {
107 Some(c) => c.clone(),
108 None => {
109 let client =
110 Client::new(bcx.jobs() as usize).context("failed to create jobserver")?;
111 client.acquire_raw()?;
112 client
113 }
114 };
115
116 Ok(Self {
117 bcx,
118 compilation: Compilation::new(bcx)?,
119 build_script_outputs: Arc::new(Mutex::new(BuildScriptOutputs::default())),
120 fingerprints: HashMap::new(),
121 mtime_cache: HashMap::new(),
122 checksum_cache: HashMap::new(),
123 compiled: HashSet::new(),
124 build_scripts: HashMap::new(),
125 build_explicit_deps: HashMap::new(),
126 jobserver,
127 primary_packages: HashSet::new(),
128 files: None,
129 rmeta_required: HashSet::new(),
130 lto: HashMap::new(),
131 metadata_for_doc_units: HashMap::new(),
132 failed_scrape_units: Arc::new(Mutex::new(HashSet::new())),
133 lock_manager: Arc::new(LockManager::new()),
134 })
135 }
136
    /// Performs all pre-compilation bookkeeping without running any compiler
    /// jobs, then returns the resulting [`Compilation`].
    pub fn dry_run(mut self) -> CargoResult<Compilation<'gctx>> {
        // Hold a shared package-cache lock for the duration, mirroring
        // `compile`.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        self.check_collisions()?;

        // Even though nothing is built, still report the test/binary outputs
        // the root units would have produced.
        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;
        }

        Ok(self.compilation)
    }
157
    /// Compiles every unit in the build plan and returns the finished
    /// [`Compilation`].
    ///
    /// Main entry point for an actual build: prepares the output layouts,
    /// enqueues a job per root unit (plus dependencies), drains the job
    /// queue, and then gathers the environment and outputs that later
    /// consumers need (`OUT_DIR` values, doctest rustdoc arguments,
    /// build-script environment variables, native library paths).
    #[tracing::instrument(skip_all)]
    pub fn compile(mut self, exec: &Arc<dyn Executor>) -> CargoResult<Compilation<'gctx>> {
        // Shared package-cache lock held for the entire build.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        let mut queue = JobQueue::new(self.bcx);
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        custom_build::build_map(&mut self)?;
        self.check_collisions()?;
        self.compute_metadata_for_doc_units();

        if self.bcx.build_config.intent.is_doc() {
            RustdocFingerprint::check_rustdoc_fingerprint(&self)?
        }

        // Populate the job queue with one entry per root unit.
        for unit in &self.bcx.roots {
            let force_rebuild = self.bcx.build_config.force_rebuild;
            super::compile(&mut self, &mut queue, unit, exec, force_rebuild)?;
        }

        // Drop any fingerprint values memoized while building the queue so
        // jobs recompute them from fresh state.
        for fingerprint in self.fingerprints.values() {
            fingerprint.clear_memoized();
        }

        queue.execute(&mut self)?;

        // Expose each root's build-script OUT_DIR via the compilation's
        // extra environment. `dedup_by` drops *consecutive* roots with the
        // same package id, so each package is handled once.
        let units_with_build_script = &self
            .bcx
            .roots
            .iter()
            .filter(|unit| self.build_scripts.contains_key(unit))
            .dedup_by(|x, y| x.pkg.package_id() == y.pkg.package_id())
            .collect::<Vec<_>>();
        for unit in units_with_build_script {
            for dep in &self.bcx.unit_graph[unit] {
                if dep.unit.mode.is_run_custom_build() {
                    let out_dir = if self.bcx.gctx.cli_unstable().build_dir_new_layout {
                        self.files().out_dir_new_layout(&dep.unit)
                    } else {
                        self.files().build_script_out_dir(&dep.unit)
                    };
                    let script_meta = self.get_run_build_script_metadata(&dep.unit);
                    self.compilation
                        .extra_env
                        .entry(script_meta)
                        .or_insert_with(Vec::new)
                        .push(("OUT_DIR".to_string(), out_dir.display().to_string()));
                }
            }
        }

        self.collect_doc_merge_info()?;

        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;

            // For doctest units, assemble the rustdoc argument list up front
            // so the test runner can invoke rustdoc later.
            if unit.mode.is_doc_test() {
                let mut unstable_opts = false;
                let mut args = compiler::extern_args(&self, unit, &mut unstable_opts)?;
                args.extend(compiler::lib_search_paths(&self, unit)?);
                args.extend(compiler::lto_args(&self, unit));
                args.extend(compiler::features_args(unit));
                args.extend(compiler::check_cfg_args(unit));

                // Forward `--cfg`/`--check-cfg`/linker args emitted by this
                // unit's build script(s), if any ran.
                let script_metas = self.find_build_script_metadatas(unit);
                if let Some(meta_vec) = script_metas.clone() {
                    for meta in meta_vec {
                        if let Some(output) = self.build_script_outputs.lock().unwrap().get(meta) {
                            for cfg in &output.cfgs {
                                args.push("--cfg".into());
                                args.push(cfg.into());
                            }

                            for check_cfg in &output.check_cfgs {
                                args.push("--check-cfg".into());
                                args.push(check_cfg.into());
                            }

                            // Only forward linker args applicable to this
                            // target/mode.
                            for (lt, arg) in &output.linker_args {
                                if lt.applies_to(&unit.target, unit.mode) {
                                    args.push("-C".into());
                                    args.push(format!("link-arg={}", arg).into());
                                }
                            }
                        }
                    }
                }
                args.extend(unit.rustdocflags.iter().map(Into::into));

                // Match rustdoc's error format to the configured message
                // format.
                use super::MessageFormat;
                let format = match self.bcx.build_config.message_format {
                    MessageFormat::Short => "short",
                    MessageFormat::Human => "human",
                    MessageFormat::Json { .. } => "json",
                };
                args.push("--error-format".into());
                args.push(format.into());

                self.compilation.to_doc_test.push(compilation::Doctest {
                    unit: unit.clone(),
                    args,
                    unstable_opts,
                    linker: self
                        .compilation
                        .target_linker(unit.kind)
                        .map(|p| p.to_path_buf()),
                    script_metas,
                    env: artifact::get_env(&self, unit, self.unit_deps(unit))?,
                });
            }

            super::output_depinfo(&mut self, unit)?;
        }

        // Propagate env vars and native search paths emitted by every build
        // script into the final compilation.
        for (script_meta, output) in self.build_script_outputs.lock().unwrap().iter() {
            self.compilation
                .extra_env
                .entry(*script_meta)
                .or_insert_with(Vec::new)
                .extend(output.env.iter().cloned());

            for dir in output.library_paths.iter() {
                self.compilation
                    .native_dirs
                    .insert(dir.clone().into_path_buf());
            }
        }
        Ok(self.compilation)
    }
312
313 fn collect_tests_and_executables(&mut self, unit: &Unit) -> CargoResult<()> {
314 for output in self.outputs(unit)?.iter() {
315 if matches!(
316 output.flavor,
317 FileFlavor::DebugInfo | FileFlavor::Auxiliary | FileFlavor::Sbom
318 ) {
319 continue;
320 }
321
322 let bindst = output.bin_dst();
323
324 if unit.mode == CompileMode::Test {
325 self.compilation
326 .tests
327 .push(self.unit_output(unit, &output.path)?);
328 } else if unit.target.is_executable() {
329 self.compilation
330 .binaries
331 .push(self.unit_output(unit, bindst)?);
332 } else if unit.target.is_cdylib()
333 && !self.compilation.cdylibs.iter().any(|uo| uo.unit == *unit)
334 {
335 self.compilation
336 .cdylibs
337 .push(self.unit_output(unit, bindst)?);
338 }
339 }
340 Ok(())
341 }
342
343 fn collect_doc_merge_info(&mut self) -> CargoResult<()> {
344 if !self.bcx.gctx.cli_unstable().rustdoc_mergeable_info {
345 return Ok(());
346 }
347
348 if !self.bcx.build_config.intent.is_doc() {
349 return Ok(());
350 }
351
352 if self.bcx.build_config.intent.wants_doc_json_output() {
353 return Ok(());
355 }
356
357 let mut doc_parts_map: HashMap<_, Vec<_>> = HashMap::new();
358
359 let unit_iter = if self.bcx.build_config.intent.wants_deps_docs() {
360 itertools::Either::Left(self.bcx.unit_graph.keys())
361 } else {
362 itertools::Either::Right(self.bcx.roots.iter())
363 };
364
365 for unit in unit_iter {
366 if !unit.mode.is_doc() {
367 continue;
368 }
369 let outputs = self.outputs(unit)?;
371
372 let Some(doc_parts) = outputs
373 .iter()
374 .find(|o| matches!(o.flavor, FileFlavor::DocParts))
375 else {
376 continue;
377 };
378
379 doc_parts_map
380 .entry(unit.kind)
381 .or_default()
382 .push(doc_parts.path.to_owned());
383 }
384
385 self.compilation.rustdoc_fingerprints = Some(
386 doc_parts_map
387 .into_iter()
388 .map(|(kind, doc_parts)| (kind, RustdocFingerprint::new(self, kind, doc_parts)))
389 .collect(),
390 );
391
392 Ok(())
393 }
394
395 pub fn get_executable(&mut self, unit: &Unit) -> CargoResult<Option<PathBuf>> {
397 let is_binary = unit.target.is_executable();
398 let is_test = unit.mode.is_any_test();
399 if !unit.mode.generates_executable() || !(is_binary || is_test) {
400 return Ok(None);
401 }
402 Ok(self
403 .outputs(unit)?
404 .iter()
405 .find(|o| o.flavor == FileFlavor::Normal)
406 .map(|output| output.bin_dst().clone()))
407 }
408
409 #[tracing::instrument(skip_all)]
410 pub fn prepare_units(&mut self) -> CargoResult<()> {
411 let dest = self.bcx.profiles.get_dir_name();
412 let must_take_artifact_dir_lock = match self.bcx.build_config.intent {
416 UserIntent::Check { .. } => {
417 self.bcx.build_config.timing_report
421 }
422 UserIntent::Build
423 | UserIntent::Test
424 | UserIntent::Doc { .. }
425 | UserIntent::Doctest
426 | UserIntent::Bench => true,
427 };
428 let host_layout =
429 Layout::new(self.bcx.ws, None, &dest, must_take_artifact_dir_lock, false)?;
430 let mut targets = HashMap::new();
431 for kind in self.bcx.all_kinds.iter() {
432 if let CompileKind::Target(target) = *kind {
433 let layout = Layout::new(
434 self.bcx.ws,
435 Some(target),
436 &dest,
437 must_take_artifact_dir_lock,
438 false,
439 )?;
440 targets.insert(target, layout);
441 }
442 }
443 self.primary_packages
444 .extend(self.bcx.roots.iter().map(|u| u.pkg.package_id()));
445 self.compilation
446 .root_crate_names
447 .extend(self.bcx.roots.iter().map(|u| u.target.crate_name()));
448
449 self.record_units_requiring_metadata();
450
451 let files = CompilationFiles::new(self, host_layout, targets);
452 self.files = Some(files);
453 Ok(())
454 }
455
    /// Prepares the build directories on disk for every layout created by
    /// [`BuildRunner::prepare_units`], and records the root artifact and
    /// `deps` output directories on the compilation.
    #[tracing::instrument(skip_all)]
    pub fn prepare(&mut self) -> CargoResult<()> {
        self.files
            .as_mut()
            .unwrap()
            .host
            .prepare()
            .context("couldn't prepare build directories")?;
        for target in self.files.as_mut().unwrap().target.values_mut() {
            target
                .prepare()
                .context("couldn't prepare build directories")?;
        }

        let files = self.files.as_ref().unwrap();
        for &kind in self.bcx.all_kinds.iter() {
            let layout = files.layout(kind);
            if let Some(artifact_dir) = layout.artifact_dir() {
                self.compilation
                    .root_output
                    .insert(kind, artifact_dir.dest().to_path_buf());
            }
            if self.bcx.gctx.cli_unstable().build_dir_new_layout {
                // NOTE(review): this walks *all* units for every `kind` and
                // repeatedly overwrites `deps_output[kind]`, so only the last
                // unit's deps dir survives per kind — confirm this is the
                // intended behavior of the new layout.
                for (unit, _) in self.bcx.unit_graph.iter() {
                    let dep_dir = self.files().deps_dir(unit);
                    paths::create_dir_all(&dep_dir)?;
                    self.compilation.deps_output.insert(kind, dep_dir);
                }
            } else {
                self.compilation
                    .deps_output
                    .insert(kind, layout.build_dir().legacy_deps().to_path_buf());
            }
        }
        Ok(())
    }
494
    /// Returns the [`CompilationFiles`] output-layout helper.
    ///
    /// # Panics
    ///
    /// Panics if [`BuildRunner::prepare_units`] has not been called yet.
    pub fn files(&self) -> &CompilationFiles<'a, 'gctx> {
        self.files.as_ref().unwrap()
    }
498
    /// Returns the list of output files that `unit` will produce.
    ///
    /// # Panics
    ///
    /// Panics if [`BuildRunner::prepare_units`] has not been called yet.
    pub fn outputs(&self, unit: &Unit) -> CargoResult<Arc<Vec<OutputFile>>> {
        self.files.as_ref().unwrap().outputs(unit, self.bcx)
    }
503
    /// Returns the direct dependencies of `unit` from the unit graph.
    pub fn unit_deps(&self, unit: &Unit) -> &[UnitDep] {
        &self.bcx.unit_graph[unit]
    }
508
509 pub fn find_build_script_units(&self, unit: &Unit) -> Option<Vec<Unit>> {
513 if unit.mode.is_run_custom_build() {
514 return Some(vec![unit.clone()]);
515 }
516
517 let build_script_units: Vec<Unit> = self.bcx.unit_graph[unit]
518 .iter()
519 .filter(|unit_dep| {
520 unit_dep.unit.mode.is_run_custom_build()
521 && unit_dep.unit.pkg.package_id() == unit.pkg.package_id()
522 })
523 .map(|unit_dep| unit_dep.unit.clone())
524 .collect();
525 if build_script_units.is_empty() {
526 None
527 } else {
528 Some(build_script_units)
529 }
530 }
531
532 pub fn find_build_script_metadatas(&self, unit: &Unit) -> Option<Vec<UnitHash>> {
537 self.find_build_script_units(unit).map(|units| {
538 units
539 .iter()
540 .map(|u| self.get_run_build_script_metadata(u))
541 .collect()
542 })
543 }
544
    /// Returns the metadata hash for a run-build-script unit.
    ///
    /// # Panics
    ///
    /// Panics if `unit` is not a run-build-script unit.
    pub fn get_run_build_script_metadata(&self, unit: &Unit) -> UnitHash {
        assert!(unit.mode.is_run_custom_build());
        self.files().metadata(unit).unit_id()
    }
550
551 pub fn sbom_output_files(&self, unit: &Unit) -> CargoResult<Vec<PathBuf>> {
553 Ok(self
554 .outputs(unit)?
555 .iter()
556 .filter(|o| o.flavor == FileFlavor::Sbom)
557 .map(|o| o.path.clone())
558 .collect())
559 }
560
    /// Returns whether `unit` belongs to one of the root packages recorded
    /// by [`BuildRunner::prepare_units`].
    pub fn is_primary_package(&self, unit: &Unit) -> bool {
        self.primary_packages.contains(&unit.pkg.package_id())
    }
564
565 pub fn unit_output(&self, unit: &Unit, path: &Path) -> CargoResult<UnitOutput> {
568 let script_metas = self.find_build_script_metadatas(unit);
569 let env = artifact::get_env(&self, unit, self.unit_deps(unit))?;
570 Ok(UnitOutput {
571 unit: unit.clone(),
572 path: path.to_path_buf(),
573 script_metas,
574 env,
575 })
576 }
577
    /// Detects units whose output files would land on the same path.
    ///
    /// Collisions between doc units of primary packages are hard errors;
    /// all other collisions (including `--artifact-dir` and hardlink
    /// collisions) are reported as warnings.
    #[tracing::instrument(skip_all)]
    fn check_collisions(&self) -> CargoResult<()> {
        let mut output_collisions = HashMap::new();
        // Human-readable description of the two colliding units.
        let describe_collision = |unit: &Unit, other_unit: &Unit| -> String {
            format!(
                "the {} target `{}` in package `{}` has the same output filename as the {} target `{}` in package `{}`",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg.package_id(),
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg.package_id(),
            )
        };
        // Extra diagnostic lines appended depending on the collision kind.
        let suggestion = [
            Level::NOTE.message("this may become a hard error in the future; see <https://github.com/rust-lang/cargo/issues/6313>"),
            Level::HELP.message("consider changing their names to be unique or compiling them separately")
        ];
        let rustdoc_suggestion = [
            Level::NOTE.message("this is a known bug where multiple crates with the same name use the same path; see <https://github.com/rust-lang/cargo/issues/6313>")
        ];
        // Emits the warning report. Same-name collisions use the provided
        // `messages`; different-name collisions are unexpected and ask the
        // user to file a bug with full unit details.
        let report_collision = |unit: &Unit,
                                other_unit: &Unit,
                                path: &PathBuf,
                                messages: &[Message<'_>]|
         -> CargoResult<()> {
            if unit.target.name() == other_unit.target.name() {
                self.bcx.gctx.shell().print_report(
                    &[Level::WARNING
                        .secondary_title(format!("output filename collision at {}", path.display()))
                        .elements(
                            [Level::NOTE.message(describe_collision(unit, other_unit))]
                                .into_iter()
                                .chain(messages.iter().cloned()),
                        )],
                    false,
                )
            } else {
                self.bcx.gctx.shell().print_report(
                    &[Level::WARNING
                        .secondary_title(format!("output filename collision at {}", path.display()))
                        .elements([
                            Level::NOTE.message(describe_collision(unit, other_unit)),
                            Level::NOTE.message("if this looks unexpected, it may be a bug in Cargo. Please file a bug \
                            report at https://github.com/rust-lang/cargo/issues/ with as much information as you \
                            can provide."),
                            Level::NOTE.message(format!("cargo {} running on `{}` target `{}`",
                                crate::version(), self.bcx.host_triple(), self.bcx.target_data.short_name(&unit.kind))),
                            Level::NOTE.message(format!("first unit: {unit:?}")),
                            Level::NOTE.message(format!("second unit: {other_unit:?}")),
                        ])],
                    false,
                )
            }
        };

        // Doc collisions are hard errors: both units write to the same
        // rustdoc output path.
        fn doc_collision_error(unit: &Unit, other_unit: &Unit) -> CargoResult<()> {
            bail!(
                "document output filename collision\n\
                 The {} `{}` in package `{}` has the same name as the {} `{}` in package `{}`.\n\
                 Only one may be documented at once since they output to the same path.\n\
                 Consider documenting only one, renaming one, \
                 or marking one with `doc = false` in Cargo.toml.",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg,
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg,
            );
        }

        // Sort units for deterministic reporting; build-script runs produce
        // no collidable outputs and are skipped.
        let mut keys = self
            .bcx
            .unit_graph
            .keys()
            .filter(|unit| !unit.mode.is_run_custom_build())
            .collect::<Vec<_>>();
        keys.sort_unstable();
        let mut doc_libs = HashMap::new();
        let mut doc_bins = HashMap::new();
        for unit in keys {
            // Doc units of primary packages: libs and bins are tracked in
            // separate maps keyed by (crate name, kind).
            if unit.mode.is_doc() && self.is_primary_package(unit) {
                if unit.target.is_lib() {
                    if let Some(prev) = doc_libs.insert((unit.target.crate_name(), unit.kind), unit)
                    {
                        doc_collision_error(unit, prev)?;
                    }
                } else if let Some(prev) =
                    doc_bins.insert((unit.target.crate_name(), unit.kind), unit)
                {
                    doc_collision_error(unit, prev)?;
                }
            }
            // Check every output path, hardlink, and export path against all
            // previously seen ones.
            for output in self.outputs(unit)?.iter() {
                if let Some(other_unit) = output_collisions.insert(output.path.clone(), unit) {
                    if unit.mode.is_doc() {
                        report_collision(unit, other_unit, &output.path, &rustdoc_suggestion)?;
                    } else {
                        report_collision(unit, other_unit, &output.path, &suggestion)?;
                    }
                }
                if let Some(hardlink) = output.hardlink.as_ref() {
                    if let Some(other_unit) = output_collisions.insert(hardlink.clone(), unit) {
                        report_collision(unit, other_unit, hardlink, &suggestion)?;
                    }
                }
                if let Some(ref export_path) = output.export_path {
                    if let Some(other_unit) = output_collisions.insert(export_path.clone(), unit) {
                        self.bcx.gctx.shell().print_report(
                            &[Level::WARNING
                                .secondary_title(format!(
                                    "`--artifact-dir` filename collision at {}",
                                    export_path.display()
                                ))
                                .elements(
                                    [Level::NOTE.message(describe_collision(unit, other_unit))]
                                        .into_iter()
                                        .chain(suggestion.iter().cloned()),
                                )],
                            false,
                        )?;
                    }
                }
            }
        }
        Ok(())
    }
720
721 fn record_units_requiring_metadata(&mut self) {
726 for (key, deps) in self.bcx.unit_graph.iter() {
727 for dep in deps {
728 if self.only_requires_rmeta(key, &dep.unit) {
729 self.rmeta_required.insert(dep.unit.clone());
730 }
731 }
732 }
733 }
734
735 pub fn only_requires_rmeta(&self, parent: &Unit, dep: &Unit) -> bool {
738 !parent.requires_upstream_objects()
741 && parent.mode == CompileMode::Build
742 && !dep.requires_upstream_objects()
745 && dep.mode == CompileMode::Build
746 }
747
    /// Returns whether some dependent of `unit` only requires its `.rmeta`
    /// output, as recorded by `record_units_requiring_metadata`.
    pub fn rmeta_required(&self, unit: &Unit) -> bool {
        self.rmeta_required.contains(unit)
    }
753
    /// For each doc / doc-scrape unit, chooses and records the metadata of a
    /// "matching" unit (same package and target).
    ///
    /// Preference order: a matching check unit, then a matching doc unit,
    /// falling back to the unit itself.
    #[tracing::instrument(skip_all)]
    pub fn compute_metadata_for_doc_units(&mut self) {
        for unit in self.bcx.unit_graph.keys() {
            if !unit.mode.is_doc() && !unit.mode.is_doc_scrape() {
                continue;
            }

            // All non-scrape units that share this unit's package and
            // target.
            let matching_units = self
                .bcx
                .unit_graph
                .keys()
                .filter(|other| {
                    unit.pkg == other.pkg
                        && unit.target == other.target
                        && !other.mode.is_doc_scrape()
                })
                .collect::<Vec<_>>();
            let metadata_unit = matching_units
                .iter()
                .find(|other| other.mode.is_check())
                .or_else(|| matching_units.iter().find(|other| other.mode.is_doc()))
                .unwrap_or(&unit);
            self.metadata_for_doc_units
                .insert(unit.clone(), self.files().metadata(metadata_unit));
        }
    }
790}