cargo/core/compiler/build_runner/
mod.rs1use std::collections::{BTreeSet, HashMap, HashSet};
4use std::path::{Path, PathBuf};
5use std::sync::{Arc, Mutex};
6
7use crate::core::PackageId;
8use crate::core::compiler::compilation::{self, UnitOutput};
9use crate::core::compiler::{self, Unit, artifact};
10use crate::util::cache_lock::CacheLockMode;
11use crate::util::errors::CargoResult;
12use annotate_snippets::{Level, Message};
13use anyhow::{Context as _, bail};
14use cargo_util::paths;
15use filetime::FileTime;
16use itertools::Itertools;
17use jobserver::Client;
18
19use super::build_plan::BuildPlan;
20use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts};
21use super::fingerprint::{Checksum, Fingerprint};
22use super::job_queue::JobQueue;
23use super::layout::Layout;
24use super::lto::Lto;
25use super::unit_graph::UnitDep;
26use super::{
27 BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor, RustDocFingerprint,
28};
29
30mod compilation_files;
31use self::compilation_files::CompilationFiles;
32pub use self::compilation_files::{Metadata, OutputFile, UnitHash};
33
pub struct BuildRunner<'a, 'gctx> {
    /// Shared build context for the whole compilation.
    pub bcx: &'a BuildContext<'a, 'gctx>,
    /// Accumulated results of the build, returned from `compile`/`dry_run`.
    pub compilation: Compilation<'gctx>,
    /// Outputs of executed build scripts, shared behind a mutex so job
    /// threads can record results as scripts finish.
    pub build_script_outputs: Arc<Mutex<BuildScriptOutputs>>,
    /// Explicit dependency information for build-script units.
    /// NOTE(review): presumably filled in by `custom_build::build_map` —
    /// confirm against that module.
    pub build_explicit_deps: HashMap<Unit, BuildDeps>,
    /// Computed fingerprint for each unit, used for freshness tracking.
    pub fingerprints: HashMap<Unit, Arc<Fingerprint>>,
    /// Cache of file modification times, consulted during fingerprinting.
    pub mtime_cache: HashMap<PathBuf, FileTime>,
    /// Cache of file checksums, consulted during fingerprinting.
    pub checksum_cache: HashMap<PathBuf, Checksum>,
    /// Units whose compilation has already been handled.
    /// NOTE(review): only initialized here; presumably updated by
    /// `super::compile` to avoid re-enqueueing units — confirm.
    pub compiled: HashSet<Unit>,
    /// Build-script information for each unit that has build scripts.
    pub build_scripts: HashMap<Unit, Arc<BuildScripts>>,
    /// Jobserver client used to limit compilation parallelism.
    pub jobserver: Client,
    /// Package ids of the root (user-requested) units; queried via
    /// `is_primary_package`.
    primary_packages: HashSet<PackageId>,
    /// Output-file layout information; `None` until `prepare_units` runs.
    files: Option<CompilationFiles<'a, 'gctx>>,
    /// Units for which some parent unit only needs the `.rmeta` file,
    /// enabling pipelined compilation; see `record_units_requiring_metadata`.
    rmeta_required: HashSet<Unit>,
    /// LTO configuration per unit, computed by `super::lto::generate`.
    pub lto: HashMap<Unit, Lto>,
    /// Metadata hash chosen for each doc unit's output; see
    /// `compute_metadata_for_doc_units`.
    pub metadata_for_doc_units: HashMap<Unit, Metadata>,
    /// Scrape units that failed, shared behind a mutex with job threads.
    pub failed_scrape_units: Arc<Mutex<HashSet<UnitHash>>>,
}
93
94impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
95 pub fn new(bcx: &'a BuildContext<'a, 'gctx>) -> CargoResult<Self> {
96 let jobserver = match bcx.gctx.jobserver_from_env() {
105 Some(c) => c.clone(),
106 None => {
107 let client =
108 Client::new(bcx.jobs() as usize).context("failed to create jobserver")?;
109 client.acquire_raw()?;
110 client
111 }
112 };
113
114 Ok(Self {
115 bcx,
116 compilation: Compilation::new(bcx)?,
117 build_script_outputs: Arc::new(Mutex::new(BuildScriptOutputs::default())),
118 fingerprints: HashMap::new(),
119 mtime_cache: HashMap::new(),
120 checksum_cache: HashMap::new(),
121 compiled: HashSet::new(),
122 build_scripts: HashMap::new(),
123 build_explicit_deps: HashMap::new(),
124 jobserver,
125 primary_packages: HashSet::new(),
126 files: None,
127 rmeta_required: HashSet::new(),
128 lto: HashMap::new(),
129 metadata_for_doc_units: HashMap::new(),
130 failed_scrape_units: Arc::new(Mutex::new(HashSet::new())),
131 })
132 }
133
    /// Performs a "dry run" of the build: prepares units and output
    /// directories, checks for output collisions, and collects the would-be
    /// tests/executables — without invoking the compiler — then returns the
    /// resulting [`Compilation`].
    pub fn dry_run(mut self) -> CargoResult<Compilation<'gctx>> {
        // Hold the package cache lock for the duration, mirroring `compile`.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        self.check_collisions()?;

        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;
        }

        Ok(self.compilation)
    }
154
    /// Starts compilation, waits for it to finish, and returns information
    /// about the result of compilation via the [`Compilation`].
    ///
    /// The supplied `exec` is consulted for each rustc invocation.
    #[tracing::instrument(skip_all)]
    pub fn compile(mut self, exec: &Arc<dyn Executor>) -> CargoResult<Compilation<'gctx>> {
        // Hold the package cache lock for the whole build.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        let mut queue = JobQueue::new(self.bcx);
        let mut plan = BuildPlan::new();
        let build_plan = self.bcx.build_config.build_plan;
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        custom_build::build_map(&mut self)?;
        self.check_collisions()?;
        self.compute_metadata_for_doc_units();

        if self.bcx.build_config.intent.is_doc() {
            // For doc builds, verify the rustdoc fingerprint so output from
            // a different rustdoc version is detected up front.
            RustDocFingerprint::check_rustdoc_fingerprint(&self)?
        }

        // Enqueue jobs for every root unit (dependencies are pulled in by
        // `super::compile` recursively).
        for unit in &self.bcx.roots {
            let force_rebuild = self.bcx.build_config.force_rebuild;
            super::compile(&mut self, &mut queue, &mut plan, unit, exec, force_rebuild)?;
        }

        // Drop the fingerprints' memoized state before execution.
        // NOTE(review): presumably so the jobs re-read fresh state while
        // running — confirm against the fingerprint module.
        for fingerprint in self.fingerprints.values() {
            fingerprint.clear_memoized();
        }

        // Run the queued jobs to completion.
        queue.execute(&mut self, &mut plan)?;

        if build_plan {
            plan.set_inputs(self.build_plan_inputs()?);
            plan.output_plan(self.bcx.gctx);
        }

        // Record OUT_DIR in `extra_env` for every root whose package ran a
        // build script (deduped per package id).
        let units_with_build_script = &self
            .bcx
            .roots
            .iter()
            .filter(|unit| self.build_scripts.contains_key(unit))
            .dedup_by(|x, y| x.pkg.package_id() == y.pkg.package_id())
            .collect::<Vec<_>>();
        for unit in units_with_build_script {
            for dep in &self.bcx.unit_graph[unit] {
                if dep.unit.mode.is_run_custom_build() {
                    let out_dir = self
                        .files()
                        .build_script_out_dir(&dep.unit)
                        .display()
                        .to_string();
                    let script_meta = self.get_run_build_script_metadata(&dep.unit);
                    self.compilation
                        .extra_env
                        .entry(script_meta)
                        .or_insert_with(Vec::new)
                        .push(("OUT_DIR".to_string(), out_dir));
                }
            }
        }

        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;

            if unit.mode.is_doc_test() {
                // Assemble the rustdoc argument list for doctests: extern
                // flags, LTO, features, check-cfg, then anything the
                // package's build script(s) emitted.
                let mut unstable_opts = false;
                let mut args = compiler::extern_args(&self, unit, &mut unstable_opts)?;
                args.extend(compiler::lto_args(&self, unit));
                args.extend(compiler::features_args(unit));
                args.extend(compiler::check_cfg_args(unit));

                let script_metas = self.find_build_script_metadatas(unit);
                if let Some(meta_vec) = script_metas.clone() {
                    for meta in meta_vec {
                        if let Some(output) = self.build_script_outputs.lock().unwrap().get(meta) {
                            for cfg in &output.cfgs {
                                args.push("--cfg".into());
                                args.push(cfg.into());
                            }

                            for check_cfg in &output.check_cfgs {
                                args.push("--check-cfg".into());
                                args.push(check_cfg.into());
                            }

                            // Linker args only apply when the linker-arg
                            // kind matches this target/mode.
                            for (lt, arg) in &output.linker_args {
                                if lt.applies_to(&unit.target, unit.mode) {
                                    args.push("-C".into());
                                    args.push(format!("link-arg={}", arg).into());
                                }
                            }
                        }
                    }
                }
                args.extend(unit.rustdocflags.iter().map(Into::into));

                use super::MessageFormat;
                let format = match self.bcx.build_config.message_format {
                    MessageFormat::Short => "short",
                    MessageFormat::Human => "human",
                    MessageFormat::Json { .. } => "json",
                };
                args.push("--error-format".into());
                args.push(format.into());

                self.compilation.to_doc_test.push(compilation::Doctest {
                    unit: unit.clone(),
                    args,
                    unstable_opts,
                    linker: self.compilation.target_linker(unit.kind).clone(),
                    script_metas,
                    env: artifact::get_env(&self, self.unit_deps(unit))?,
                });
            }

            super::output_depinfo(&mut self, unit)?;
        }

        // Propagate build-script environment variables and native library
        // search paths into the final compilation result.
        for (script_meta, output) in self.build_script_outputs.lock().unwrap().iter() {
            self.compilation
                .extra_env
                .entry(*script_meta)
                .or_insert_with(Vec::new)
                .extend(output.env.iter().cloned());

            for dir in output.library_paths.iter() {
                self.compilation
                    .native_dirs
                    .insert(dir.clone().into_path_buf());
            }
        }
        Ok(self.compilation)
    }
315
316 fn collect_tests_and_executables(&mut self, unit: &Unit) -> CargoResult<()> {
317 for output in self.outputs(unit)?.iter() {
318 if matches!(
319 output.flavor,
320 FileFlavor::DebugInfo | FileFlavor::Auxiliary | FileFlavor::Sbom
321 ) {
322 continue;
323 }
324
325 let bindst = output.bin_dst();
326
327 if unit.mode == CompileMode::Test {
328 self.compilation
329 .tests
330 .push(self.unit_output(unit, &output.path));
331 } else if unit.target.is_executable() {
332 self.compilation
333 .binaries
334 .push(self.unit_output(unit, bindst));
335 } else if unit.target.is_cdylib()
336 && !self.compilation.cdylibs.iter().any(|uo| uo.unit == *unit)
337 {
338 self.compilation
339 .cdylibs
340 .push(self.unit_output(unit, bindst));
341 }
342 }
343 Ok(())
344 }
345
346 pub fn get_executable(&mut self, unit: &Unit) -> CargoResult<Option<PathBuf>> {
348 let is_binary = unit.target.is_executable();
349 let is_test = unit.mode.is_any_test();
350 if !unit.mode.generates_executable() || !(is_binary || is_test) {
351 return Ok(None);
352 }
353 Ok(self
354 .outputs(unit)?
355 .iter()
356 .find(|o| o.flavor == FileFlavor::Normal)
357 .map(|output| output.bin_dst().clone()))
358 }
359
360 #[tracing::instrument(skip_all)]
361 pub fn prepare_units(&mut self) -> CargoResult<()> {
362 let dest = self.bcx.profiles.get_dir_name();
363 let host_layout = Layout::new(self.bcx.ws, None, &dest)?;
364 let mut targets = HashMap::new();
365 for kind in self.bcx.all_kinds.iter() {
366 if let CompileKind::Target(target) = *kind {
367 let layout = Layout::new(self.bcx.ws, Some(target), &dest)?;
368 targets.insert(target, layout);
369 }
370 }
371 self.primary_packages
372 .extend(self.bcx.roots.iter().map(|u| u.pkg.package_id()));
373 self.compilation
374 .root_crate_names
375 .extend(self.bcx.roots.iter().map(|u| u.target.crate_name()));
376
377 self.record_units_requiring_metadata();
378
379 let files = CompilationFiles::new(self, host_layout, targets);
380 self.files = Some(files);
381 Ok(())
382 }
383
    /// Prepares the output directories on disk (host and per-target) and
    /// records the root/deps output directories in `self.compilation`.
    ///
    /// Must run after `prepare_units` (which sets `self.files`); unwraps on
    /// `self.files` otherwise.
    #[tracing::instrument(skip_all)]
    pub fn prepare(&mut self) -> CargoResult<()> {
        self.files
            .as_mut()
            .unwrap()
            .host
            .prepare()
            .context("couldn't prepare build directories")?;
        for target in self.files.as_mut().unwrap().target.values_mut() {
            target
                .prepare()
                .context("couldn't prepare build directories")?;
        }

        let files = self.files.as_ref().unwrap();
        for &kind in self.bcx.all_kinds.iter() {
            let layout = files.layout(kind);
            self.compilation
                .root_output
                .insert(kind, layout.artifact_dir().dest().to_path_buf());
            if self.bcx.gctx.cli_unstable().build_dir_new_layout {
                // NOTE(review): this inserts one deps dir per unit under the
                // same `kind` key, so only the last unit's dir survives in
                // `deps_output` (though every dir is still created on disk)
                // — confirm this is intended for the new build-dir layout.
                for (unit, _) in self.bcx.unit_graph.iter() {
                    let dep_dir = self.files().deps_dir(unit);
                    paths::create_dir_all(&dep_dir)?;
                    self.compilation.deps_output.insert(kind, dep_dir);
                }
            } else {
                self.compilation
                    .deps_output
                    .insert(kind, layout.build_dir().legacy_deps().to_path_buf());
            }
        }
        Ok(())
    }
420
    /// Returns the [`CompilationFiles`] for this run.
    ///
    /// Panics if called before `prepare_units` has initialized them.
    pub fn files(&self) -> &CompilationFiles<'a, 'gctx> {
        self.files.as_ref().unwrap()
    }
424
    /// Returns the list of output files (with flavors) that `unit` produces.
    ///
    /// Panics if called before `prepare_units` has initialized `self.files`.
    pub fn outputs(&self, unit: &Unit) -> CargoResult<Arc<Vec<OutputFile>>> {
        self.files.as_ref().unwrap().outputs(unit, self.bcx)
    }
429
    /// Returns the direct dependencies of `unit` from the unit graph.
    pub fn unit_deps(&self, unit: &Unit) -> &[UnitDep] {
        &self.bcx.unit_graph[unit]
    }
434
435 pub fn find_build_script_units(&self, unit: &Unit) -> Option<Vec<Unit>> {
439 if unit.mode.is_run_custom_build() {
440 return Some(vec![unit.clone()]);
441 }
442
443 let build_script_units: Vec<Unit> = self.bcx.unit_graph[unit]
444 .iter()
445 .filter(|unit_dep| {
446 unit_dep.unit.mode.is_run_custom_build()
447 && unit_dep.unit.pkg.package_id() == unit.pkg.package_id()
448 })
449 .map(|unit_dep| unit_dep.unit.clone())
450 .collect();
451 if build_script_units.is_empty() {
452 None
453 } else {
454 Some(build_script_units)
455 }
456 }
457
458 pub fn find_build_script_metadatas(&self, unit: &Unit) -> Option<Vec<UnitHash>> {
463 self.find_build_script_units(unit).map(|units| {
464 units
465 .iter()
466 .map(|u| self.get_run_build_script_metadata(u))
467 .collect()
468 })
469 }
470
    /// Returns the [`UnitHash`] for a run-build-script unit.
    ///
    /// Panics if `unit` is not in `RunCustomBuild` mode.
    pub fn get_run_build_script_metadata(&self, unit: &Unit) -> UnitHash {
        assert!(unit.mode.is_run_custom_build());
        self.files().metadata(unit).unit_id()
    }
476
477 pub fn sbom_output_files(&self, unit: &Unit) -> CargoResult<Vec<PathBuf>> {
479 Ok(self
480 .outputs(unit)?
481 .iter()
482 .filter(|o| o.flavor == FileFlavor::Sbom)
483 .map(|o| o.path.clone())
484 .collect())
485 }
486
    /// Whether `unit` belongs to one of the root (user-requested) packages.
    pub fn is_primary_package(&self, unit: &Unit) -> bool {
        self.primary_packages.contains(&unit.pkg.package_id())
    }
490
491 pub fn build_plan_inputs(&self) -> CargoResult<Vec<PathBuf>> {
494 let mut inputs = BTreeSet::new();
496 for unit in self.bcx.unit_graph.keys() {
498 inputs.insert(unit.pkg.manifest_path().to_path_buf());
499 }
500 Ok(inputs.into_iter().collect())
501 }
502
503 pub fn unit_output(&self, unit: &Unit, path: &Path) -> UnitOutput {
506 let script_metas = self.find_build_script_metadatas(unit);
507 UnitOutput {
508 unit: unit.clone(),
509 path: path.to_path_buf(),
510 script_metas,
511 }
512 }
513
    /// Checks whether any units would write to the same output path.
    ///
    /// Filename collisions between units are reported as warnings; rustdoc
    /// output collisions between primary-package doc units are hard errors
    /// (only one can be documented to a given path).
    #[tracing::instrument(skip_all)]
    fn check_collisions(&self) -> CargoResult<()> {
        let mut output_collisions = HashMap::new();
        let describe_collision = |unit: &Unit, other_unit: &Unit| -> String {
            format!(
                "the {} target `{}` in package `{}` has the same output filename as the {} target `{}` in package `{}`",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg.package_id(),
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg.package_id(),
            )
        };
        let suggestion = [
            Level::NOTE.message("this may become a hard error in the future; see <https://github.com/rust-lang/cargo/issues/6313>"),
            Level::HELP.message("consider changing their names to be unique or compiling them separately")
        ];
        let rustdoc_suggestion = [
            Level::NOTE.message("this is a known bug where multiple crates with the same name use the same path; see <https://github.com/rust-lang/cargo/issues/6313>")
        ];
        // Warn about two units colliding on `path`. Same-name collisions use
        // the caller-supplied suggestion; different-name collisions are
        // likely a Cargo bug, so extra diagnostic context is attached.
        let report_collision = |unit: &Unit,
                                other_unit: &Unit,
                                path: &PathBuf,
                                messages: &[Message<'_>]|
         -> CargoResult<()> {
            if unit.target.name() == other_unit.target.name() {
                self.bcx.gctx.shell().print_report(
                    &[Level::WARNING
                        .secondary_title(format!("output filename collision at {}", path.display()))
                        .elements(
                            [Level::NOTE.message(describe_collision(unit, other_unit))]
                                .into_iter()
                                .chain(messages.iter().cloned()),
                        )],
                    false,
                )
            } else {
                self.bcx.gctx.shell().print_report(
                    &[Level::WARNING
                        .secondary_title(format!("output filename collision at {}", path.display()))
                        .elements([
                            Level::NOTE.message(describe_collision(unit, other_unit)),
                            Level::NOTE.message("if this looks unexpected, it may be a bug in Cargo. Please file a bug \
                            report at https://github.com/rust-lang/cargo/issues/ with as much information as you \
                            can provide."),
                            Level::NOTE.message(format!("cargo {} running on `{}` target `{}`",
                                crate::version(), self.bcx.host_triple(), self.bcx.target_data.short_name(&unit.kind))),
                            Level::NOTE.message(format!("first unit: {unit:?}")),
                            Level::NOTE.message(format!("second unit: {other_unit:?}")),
                        ])],
                    false,
                )
            }
        };

        // Hard error for two doc units writing to the same rustdoc path.
        fn doc_collision_error(unit: &Unit, other_unit: &Unit) -> CargoResult<()> {
            bail!(
                "document output filename collision\n\
                 The {} `{}` in package `{}` has the same name as the {} `{}` in package `{}`.\n\
                 Only one may be documented at once since they output to the same path.\n\
                 Consider documenting only one, renaming one, \
                 or marking one with `doc = false` in Cargo.toml.",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg,
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg,
            );
        }

        // Build-script runs never collide on output filenames; skip them.
        let mut keys = self
            .bcx
            .unit_graph
            .keys()
            .filter(|unit| !unit.mode.is_run_custom_build())
            .collect::<Vec<_>>();
        // Sort for deterministic collision reports.
        keys.sort_unstable();
        // Rustdoc names its output by crate name (per kind), so doc lib/bin
        // units of primary packages must be unique per (crate_name, kind).
        let mut doc_libs = HashMap::new();
        let mut doc_bins = HashMap::new();
        for unit in keys {
            if unit.mode.is_doc() && self.is_primary_package(unit) {
                if unit.target.is_lib() {
                    if let Some(prev) = doc_libs.insert((unit.target.crate_name(), unit.kind), unit)
                    {
                        doc_collision_error(unit, prev)?;
                    }
                } else if let Some(prev) =
                    doc_bins.insert((unit.target.crate_name(), unit.kind), unit)
                {
                    doc_collision_error(unit, prev)?;
                }
            }
            for output in self.outputs(unit)?.iter() {
                if let Some(other_unit) = output_collisions.insert(output.path.clone(), unit) {
                    if unit.mode.is_doc() {
                        report_collision(unit, other_unit, &output.path, &rustdoc_suggestion)?;
                    } else {
                        report_collision(unit, other_unit, &output.path, &suggestion)?;
                    }
                }
                // Hardlinked destinations can also collide.
                if let Some(hardlink) = output.hardlink.as_ref() {
                    if let Some(other_unit) = output_collisions.insert(hardlink.clone(), unit) {
                        report_collision(unit, other_unit, hardlink, &suggestion)?;
                    }
                }
                // As can files copied into an `--artifact-dir`.
                if let Some(ref export_path) = output.export_path {
                    if let Some(other_unit) = output_collisions.insert(export_path.clone(), unit) {
                        self.bcx.gctx.shell().print_report(
                            &[Level::WARNING
                                .secondary_title(format!(
                                    "`--artifact-dir` filename collision at {}",
                                    export_path.display()
                                ))
                                .elements(
                                    [Level::NOTE.message(describe_collision(unit, other_unit))]
                                        .into_iter()
                                        .chain(suggestion.iter().cloned()),
                                )],
                            false,
                        )?;
                    }
                }
            }
        }
        Ok(())
    }
656
657 fn record_units_requiring_metadata(&mut self) {
662 for (key, deps) in self.bcx.unit_graph.iter() {
663 for dep in deps {
664 if self.only_requires_rmeta(key, &dep.unit) {
665 self.rmeta_required.insert(dep.unit.clone());
666 }
667 }
668 }
669 }
670
671 pub fn only_requires_rmeta(&self, parent: &Unit, dep: &Unit) -> bool {
674 !parent.requires_upstream_objects()
677 && parent.mode == CompileMode::Build
678 && !dep.requires_upstream_objects()
681 && dep.mode == CompileMode::Build
682 }
683
    /// Whether some parent unit only requires the `.rmeta` file of `unit`
    /// (see `record_units_requiring_metadata`).
    pub fn rmeta_required(&self, unit: &Unit) -> bool {
        self.rmeta_required.contains(unit)
    }
689
    /// For every doc / doc-scrape unit, selects which unit's metadata hash
    /// should name the rustdoc output and records it in
    /// `metadata_for_doc_units`.
    #[tracing::instrument(skip_all)]
    pub fn compute_metadata_for_doc_units(&mut self) {
        for unit in self.bcx.unit_graph.keys() {
            if !unit.mode.is_doc() && !unit.mode.is_doc_scrape() {
                continue;
            }

            // Other units building the same target of the same package
            // (excluding scrape units themselves).
            let matching_units = self
                .bcx
                .unit_graph
                .keys()
                .filter(|other| {
                    unit.pkg == other.pkg
                        && unit.target == other.target
                        && !other.mode.is_doc_scrape()
                })
                .collect::<Vec<_>>();
            // Prefer a check-mode unit's metadata, then a doc-mode one,
            // falling back to this unit itself.
            // NOTE(review): presumably this keeps rustdoc output paths
            // consistent with the unit rustdoc reuses — confirm.
            let metadata_unit = matching_units
                .iter()
                .find(|other| other.mode.is_check())
                .or_else(|| matching_units.iter().find(|other| other.mode.is_doc()))
                .unwrap_or(&unit);
            self.metadata_for_doc_units
                .insert(unit.clone(), self.files().metadata(metadata_unit));
        }
    }
726}