cargo/core/compiler/build_runner/
mod.rs1use std::collections::{BTreeSet, HashMap, HashSet};
4use std::path::{Path, PathBuf};
5use std::sync::{Arc, Mutex};
6
7use crate::core::PackageId;
8use crate::core::compiler::compilation::{self, UnitOutput};
9use crate::core::compiler::{self, Unit, artifact};
10use crate::util::cache_lock::CacheLockMode;
11use crate::util::errors::CargoResult;
12use anyhow::{Context as _, bail};
13use cargo_util::paths;
14use filetime::FileTime;
15use itertools::Itertools;
16use jobserver::Client;
17
18use super::build_plan::BuildPlan;
19use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts};
20use super::fingerprint::{Checksum, Fingerprint};
21use super::job_queue::JobQueue;
22use super::layout::Layout;
23use super::lto::Lto;
24use super::unit_graph::UnitDep;
25use super::{
26 BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor, RustDocFingerprint,
27};
28
29mod compilation_files;
30use self::compilation_files::CompilationFiles;
31pub use self::compilation_files::{Metadata, OutputFile, UnitHash};
32
/// Central state shared while driving one compilation session.
///
/// Created via [`BuildRunner::new`] and consumed by [`BuildRunner::compile`]
/// or [`BuildRunner::dry_run`], both of which return the finished
/// [`Compilation`].
pub struct BuildRunner<'a, 'gctx> {
    /// The build context: unit graph, build configuration, workspace, etc.
    pub bcx: &'a BuildContext<'a, 'gctx>,
    /// The result of the compilation, accumulated as the build progresses.
    pub compilation: Compilation<'gctx>,
    /// Parsed outputs of build scripts that have run, behind a mutex so
    /// concurrently running jobs can record results.
    pub build_script_outputs: Arc<Mutex<BuildScriptOutputs>>,
    /// Per-unit [`BuildDeps`] for build scripts.
    pub build_explicit_deps: HashMap<Unit, BuildDeps>,
    /// Cached [`Fingerprint`] for each unit; memoized state is cleared in
    /// [`BuildRunner::compile`] before the job queue runs.
    pub fingerprints: HashMap<Unit, Arc<Fingerprint>>,
    /// Cache of file modification times, keyed by path.
    pub mtime_cache: HashMap<PathBuf, FileTime>,
    /// Cache of file content checksums, keyed by path.
    pub checksum_cache: HashMap<PathBuf, Checksum>,
    /// Units that have already been compiled (maintained by compilation code
    /// outside this module).
    pub compiled: HashSet<Unit>,
    /// Build-script bookkeeping for each unit; populated by
    /// `custom_build::build_map` during [`BuildRunner::compile`].
    pub build_scripts: HashMap<Unit, Arc<BuildScripts>>,
    /// Jobserver client used to limit build parallelism; either inherited
    /// from the environment or created in [`BuildRunner::new`].
    pub jobserver: Client,
    /// Package ids of the root units; see [`BuildRunner::is_primary_package`].
    primary_packages: HashSet<PackageId>,
    /// Output-path calculations; `None` until
    /// [`BuildRunner::prepare_units`] runs.
    files: Option<CompilationFiles<'a, 'gctx>>,

    /// Units for which some parent only needs the `.rmeta` metadata file;
    /// filled by `record_units_requiring_metadata`.
    rmeta_required: HashSet<Unit>,

    /// LTO mode for each unit, computed by `super::lto::generate` at the
    /// start of a build.
    pub lto: HashMap<Unit, Lto>,

    /// Which unit's metadata each doc unit should use for its output paths;
    /// filled by [`BuildRunner::compute_metadata_for_doc_units`].
    pub metadata_for_doc_units: HashMap<Unit, Metadata>,

    /// Hashes of scrape units that failed, shared behind a mutex
    /// (populated by code outside this module).
    pub failed_scrape_units: Arc<Mutex<HashSet<UnitHash>>>,
}
92
93impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
94 pub fn new(bcx: &'a BuildContext<'a, 'gctx>) -> CargoResult<Self> {
95 let jobserver = match bcx.gctx.jobserver_from_env() {
104 Some(c) => c.clone(),
105 None => {
106 let client =
107 Client::new(bcx.jobs() as usize).context("failed to create jobserver")?;
108 client.acquire_raw()?;
109 client
110 }
111 };
112
113 Ok(Self {
114 bcx,
115 compilation: Compilation::new(bcx)?,
116 build_script_outputs: Arc::new(Mutex::new(BuildScriptOutputs::default())),
117 fingerprints: HashMap::new(),
118 mtime_cache: HashMap::new(),
119 checksum_cache: HashMap::new(),
120 compiled: HashSet::new(),
121 build_scripts: HashMap::new(),
122 build_explicit_deps: HashMap::new(),
123 jobserver,
124 primary_packages: HashSet::new(),
125 files: None,
126 rmeta_required: HashSet::new(),
127 lto: HashMap::new(),
128 metadata_for_doc_units: HashMap::new(),
129 failed_scrape_units: Arc::new(Mutex::new(HashSet::new())),
130 })
131 }
132
    /// Performs the pre-compilation work — LTO planning, unit/layout
    /// preparation, and collision checks — and collects the would-be test
    /// and executable outputs, without executing any compile jobs.
    ///
    /// Returns the (empty of artifacts, but fully described) [`Compilation`].
    pub fn dry_run(mut self) -> CargoResult<Compilation<'gctx>> {
        // Same shared package-cache lock `compile` takes.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        self.check_collisions()?;

        // Record what each root would produce (tests/binaries/cdylibs).
        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;
        }

        Ok(self.compilation)
    }
153
    /// Compiles all units rooted at `self.bcx.roots`, driving the job queue
    /// to completion, and returns the finished [`Compilation`].
    ///
    /// `exec` is the [`Executor`] used to actually invoke the compiler for
    /// each job.
    #[tracing::instrument(skip_all)]
    pub fn compile(mut self, exec: &Arc<dyn Executor>) -> CargoResult<Compilation<'gctx>> {
        // Hold a shared package-cache lock for the duration of the build.
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        let mut queue = JobQueue::new(self.bcx);
        let mut plan = BuildPlan::new();
        let build_plan = self.bcx.build_config.build_plan;
        self.lto = super::lto::generate(self.bcx)?;
        // Compute layouts/output paths and create the build directories.
        self.prepare_units()?;
        self.prepare()?;
        custom_build::build_map(&mut self)?;
        self.check_collisions()?;
        self.compute_metadata_for_doc_units();

        if self.bcx.build_config.intent.is_doc() {
            // Doc builds additionally validate the rustdoc fingerprint.
            RustDocFingerprint::check_rustdoc_fingerprint(&self)?
        }

        // Enqueue jobs for each root unit and (transitively) its deps.
        for unit in &self.bcx.roots {
            let force_rebuild = self.bcx.build_config.force_rebuild;
            super::compile(&mut self, &mut queue, &mut plan, unit, exec, force_rebuild)?;
        }

        // Drop memoized fingerprint state built up while constructing jobs.
        for fingerprint in self.fingerprints.values() {
            fingerprint.clear_memoized();
        }

        // Run the queued jobs to completion.
        queue.execute(&mut self, &mut plan)?;

        if build_plan {
            plan.set_inputs(self.build_plan_inputs()?);
            plan.output_plan(self.bcx.gctx);
        }

        // For every root that has a build script, expose that script's
        // OUT_DIR through `extra_env` (one entry per package id).
        let units_with_build_script = &self
            .bcx
            .roots
            .iter()
            .filter(|unit| self.build_scripts.contains_key(unit))
            .dedup_by(|x, y| x.pkg.package_id() == y.pkg.package_id())
            .collect::<Vec<_>>();
        for unit in units_with_build_script {
            for dep in &self.bcx.unit_graph[unit] {
                if dep.unit.mode.is_run_custom_build() {
                    let out_dir = self
                        .files()
                        .build_script_out_dir(&dep.unit)
                        .display()
                        .to_string();
                    let script_meta = self.get_run_build_script_metadata(&dep.unit);
                    self.compilation
                        .extra_env
                        .entry(script_meta)
                        .or_insert_with(Vec::new)
                        .push(("OUT_DIR".to_string(), out_dir));
                }
            }
        }

        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;

            if unit.mode.is_doc_test() {
                // Assemble the rustdoc invocation for doctests: extern
                // flags, LTO, features, check-cfg, build-script outputs,
                // user rustdocflags, and the error format.
                let mut unstable_opts = false;
                let mut args = compiler::extern_args(&self, unit, &mut unstable_opts)?;
                args.extend(compiler::lto_args(&self, unit));
                args.extend(compiler::features_args(unit));
                args.extend(compiler::check_cfg_args(unit));

                let script_metas = self.find_build_script_metadatas(unit);
                if let Some(meta_vec) = script_metas.clone() {
                    for meta in meta_vec {
                        if let Some(output) = self.build_script_outputs.lock().unwrap().get(meta) {
                            // `cargo::rustc-cfg=` values from the script.
                            for cfg in &output.cfgs {
                                args.push("--cfg".into());
                                args.push(cfg.into());
                            }

                            // `cargo::rustc-check-cfg=` values.
                            for check_cfg in &output.check_cfgs {
                                args.push("--check-cfg".into());
                                args.push(check_cfg.into());
                            }

                            // Linker args the script requested, filtered to
                            // those applicable to this target/mode.
                            for (lt, arg) in &output.linker_args {
                                if lt.applies_to(&unit.target, unit.mode) {
                                    args.push("-C".into());
                                    args.push(format!("link-arg={}", arg).into());
                                }
                            }
                        }
                    }
                }
                args.extend(unit.rustdocflags.iter().map(Into::into));

                use super::MessageFormat;
                let format = match self.bcx.build_config.message_format {
                    MessageFormat::Short => "short",
                    MessageFormat::Human => "human",
                    MessageFormat::Json { .. } => "json",
                };
                args.push("--error-format".into());
                args.push(format.into());

                self.compilation.to_doc_test.push(compilation::Doctest {
                    unit: unit.clone(),
                    args,
                    unstable_opts,
                    linker: self.compilation.target_linker(unit.kind).clone(),
                    script_metas,
                    env: artifact::get_env(&self, self.unit_deps(unit))?,
                });
            }

            super::output_depinfo(&mut self, unit)?;
        }

        // Propagate build-script environment variables and native library
        // search paths into the final `Compilation`.
        for (script_meta, output) in self.build_script_outputs.lock().unwrap().iter() {
            self.compilation
                .extra_env
                .entry(*script_meta)
                .or_insert_with(Vec::new)
                .extend(output.env.iter().cloned());

            for dir in output.library_paths.iter() {
                self.compilation
                    .native_dirs
                    .insert(dir.clone().into_path_buf());
            }
        }
        Ok(self.compilation)
    }
314
315 fn collect_tests_and_executables(&mut self, unit: &Unit) -> CargoResult<()> {
316 for output in self.outputs(unit)?.iter() {
317 if matches!(
318 output.flavor,
319 FileFlavor::DebugInfo | FileFlavor::Auxiliary | FileFlavor::Sbom
320 ) {
321 continue;
322 }
323
324 let bindst = output.bin_dst();
325
326 if unit.mode == CompileMode::Test {
327 self.compilation
328 .tests
329 .push(self.unit_output(unit, &output.path));
330 } else if unit.target.is_executable() {
331 self.compilation
332 .binaries
333 .push(self.unit_output(unit, bindst));
334 } else if unit.target.is_cdylib()
335 && !self.compilation.cdylibs.iter().any(|uo| uo.unit == *unit)
336 {
337 self.compilation
338 .cdylibs
339 .push(self.unit_output(unit, bindst));
340 }
341 }
342 Ok(())
343 }
344
345 pub fn get_executable(&mut self, unit: &Unit) -> CargoResult<Option<PathBuf>> {
347 let is_binary = unit.target.is_executable();
348 let is_test = unit.mode.is_any_test();
349 if !unit.mode.generates_executable() || !(is_binary || is_test) {
350 return Ok(None);
351 }
352 Ok(self
353 .outputs(unit)?
354 .iter()
355 .find(|o| o.flavor == FileFlavor::Normal)
356 .map(|output| output.bin_dst().clone()))
357 }
358
359 #[tracing::instrument(skip_all)]
360 pub fn prepare_units(&mut self) -> CargoResult<()> {
361 let dest = self.bcx.profiles.get_dir_name();
362 let host_layout = Layout::new(self.bcx.ws, None, &dest)?;
363 let mut targets = HashMap::new();
364 for kind in self.bcx.all_kinds.iter() {
365 if let CompileKind::Target(target) = *kind {
366 let layout = Layout::new(self.bcx.ws, Some(target), &dest)?;
367 targets.insert(target, layout);
368 }
369 }
370 self.primary_packages
371 .extend(self.bcx.roots.iter().map(|u| u.pkg.package_id()));
372 self.compilation
373 .root_crate_names
374 .extend(self.bcx.roots.iter().map(|u| u.target.crate_name()));
375
376 self.record_units_requiring_metadata();
377
378 let files = CompilationFiles::new(self, host_layout, targets);
379 self.files = Some(files);
380 Ok(())
381 }
382
    /// Creates the build directories on disk and records the per-kind
    /// artifact and deps output directories on `self.compilation`.
    ///
    /// Must run after [`BuildRunner::prepare_units`] (which populates
    /// `self.files`).
    #[tracing::instrument(skip_all)]
    pub fn prepare(&mut self) -> CargoResult<()> {
        self.files
            .as_mut()
            .unwrap()
            .host
            .prepare()
            .context("couldn't prepare build directories")?;
        for target in self.files.as_mut().unwrap().target.values_mut() {
            target
                .prepare()
                .context("couldn't prepare build directories")?;
        }

        let files = self.files.as_ref().unwrap();
        for &kind in self.bcx.all_kinds.iter() {
            let layout = files.layout(kind);
            self.compilation
                .root_output
                .insert(kind, layout.artifact_dir().dest().to_path_buf());
            if self.bcx.gctx.cli_unstable().build_dir_new_layout {
                // NOTE(review): every iteration inserts under the same
                // `kind` key, so only the last unit's deps dir survives in
                // `deps_output` (the directory creation side effect happens
                // for all units) — confirm this is intentional.
                for (unit, _) in self.bcx.unit_graph.iter() {
                    let dep_dir = self.files().deps_dir(unit);
                    paths::create_dir_all(&dep_dir)?;
                    self.compilation.deps_output.insert(kind, dep_dir);
                }
            } else {
                self.compilation
                    .deps_output
                    .insert(kind, layout.build_dir().legacy_deps().to_path_buf());
            }
        }
        Ok(())
    }
419
420 pub fn files(&self) -> &CompilationFiles<'a, 'gctx> {
421 self.files.as_ref().unwrap()
422 }
423
424 pub fn outputs(&self, unit: &Unit) -> CargoResult<Arc<Vec<OutputFile>>> {
426 self.files.as_ref().unwrap().outputs(unit, self.bcx)
427 }
428
429 pub fn unit_deps(&self, unit: &Unit) -> &[UnitDep] {
431 &self.bcx.unit_graph[unit]
432 }
433
434 pub fn find_build_script_units(&self, unit: &Unit) -> Option<Vec<Unit>> {
438 if unit.mode.is_run_custom_build() {
439 return Some(vec![unit.clone()]);
440 }
441
442 let build_script_units: Vec<Unit> = self.bcx.unit_graph[unit]
443 .iter()
444 .filter(|unit_dep| {
445 unit_dep.unit.mode.is_run_custom_build()
446 && unit_dep.unit.pkg.package_id() == unit.pkg.package_id()
447 })
448 .map(|unit_dep| unit_dep.unit.clone())
449 .collect();
450 if build_script_units.is_empty() {
451 None
452 } else {
453 Some(build_script_units)
454 }
455 }
456
457 pub fn find_build_script_metadatas(&self, unit: &Unit) -> Option<Vec<UnitHash>> {
462 self.find_build_script_units(unit).map(|units| {
463 units
464 .iter()
465 .map(|u| self.get_run_build_script_metadata(u))
466 .collect()
467 })
468 }
469
    /// Returns the metadata hash for a run-build-script unit.
    ///
    /// # Panics
    ///
    /// Panics if `unit` is not a run-custom-build unit.
    pub fn get_run_build_script_metadata(&self, unit: &Unit) -> UnitHash {
        assert!(unit.mode.is_run_custom_build());
        self.files().metadata(unit).unit_id()
    }
475
476 pub fn sbom_output_files(&self, unit: &Unit) -> CargoResult<Vec<PathBuf>> {
478 Ok(self
479 .outputs(unit)?
480 .iter()
481 .filter(|o| o.flavor == FileFlavor::Sbom)
482 .map(|o| o.path.clone())
483 .collect())
484 }
485
486 pub fn is_primary_package(&self, unit: &Unit) -> bool {
487 self.primary_packages.contains(&unit.pkg.package_id())
488 }
489
490 pub fn build_plan_inputs(&self) -> CargoResult<Vec<PathBuf>> {
493 let mut inputs = BTreeSet::new();
495 for unit in self.bcx.unit_graph.keys() {
497 inputs.insert(unit.pkg.manifest_path().to_path_buf());
498 }
499 Ok(inputs.into_iter().collect())
500 }
501
502 pub fn unit_output(&self, unit: &Unit, path: &Path) -> UnitOutput {
505 let script_metas = self.find_build_script_metadatas(unit);
506 UnitOutput {
507 unit: unit.clone(),
508 path: path.to_path_buf(),
509 script_metas,
510 }
511 }
512
    /// Checks for units that would write the same output file, emitting
    /// warnings for general collisions and a hard error for documentation
    /// collisions among primary packages.
    #[tracing::instrument(skip_all)]
    fn check_collisions(&self) -> CargoResult<()> {
        // Maps output path -> the first unit seen producing it.
        let mut output_collisions = HashMap::new();
        // Renders a human-readable description of two colliding units.
        let describe_collision = |unit: &Unit, other_unit: &Unit, path: &PathBuf| -> String {
            format!(
                "The {} target `{}` in package `{}` has the same output \
                 filename as the {} target `{}` in package `{}`.\n\
                 Colliding filename is: {}\n",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg.package_id(),
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg.package_id(),
                path.display()
            )
        };
        let suggestion = "Consider changing their names to be unique or compiling them separately.\n\
             This may become a hard error in the future; see \
             <https://github.com/rust-lang/cargo/issues/6313>.";
        let rustdoc_suggestion = "This is a known bug where multiple crates with the same name use\n\
             the same path; see <https://github.com/rust-lang/cargo/issues/6313>.";
        // Warns about a collision; the message differs depending on whether
        // the target names match (likely user-fixable) or not (likely a
        // Cargo bug worth reporting).
        let report_collision = |unit: &Unit,
                                other_unit: &Unit,
                                path: &PathBuf,
                                suggestion: &str|
         -> CargoResult<()> {
            if unit.target.name() == other_unit.target.name() {
                self.bcx.gctx.shell().warn(format!(
                    "output filename collision.\n\
                     {}\
                     The targets should have unique names.\n\
                     {}",
                    describe_collision(unit, other_unit, path),
                    suggestion
                ))
            } else {
                self.bcx.gctx.shell().warn(format!(
                    "output filename collision.\n\
                     {}\
                     The output filenames should be unique.\n\
                     {}\n\
                     If this looks unexpected, it may be a bug in Cargo. Please file a bug report at\n\
                     https://github.com/rust-lang/cargo/issues/ with as much information as you\n\
                     can provide.\n\
                     cargo {} running on `{}` target `{}`\n\
                     First unit: {:?}\n\
                     Second unit: {:?}",
                    describe_collision(unit, other_unit, path),
                    suggestion,
                    crate::version(),
                    self.bcx.host_triple(),
                    self.bcx.target_data.short_name(&unit.kind),
                    unit,
                    other_unit))
            }
        };

        // Doc collisions are a hard error: both units would write docs to
        // the same path, so only one can win.
        fn doc_collision_error(unit: &Unit, other_unit: &Unit) -> CargoResult<()> {
            bail!(
                "document output filename collision\n\
                 The {} `{}` in package `{}` has the same name as the {} `{}` in package `{}`.\n\
                 Only one may be documented at once since they output to the same path.\n\
                 Consider documenting only one, renaming one, \
                 or marking one with `doc = false` in Cargo.toml.",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg,
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg,
            );
        }

        // Deterministic iteration order so warnings are stable run-to-run.
        let mut keys = self
            .bcx
            .unit_graph
            .keys()
            .filter(|unit| !unit.mode.is_run_custom_build())
            .collect::<Vec<_>>();
        keys.sort_unstable();
        // Doc units are tracked separately, keyed by (crate name, kind),
        // with libs and bins in distinct maps.
        let mut doc_libs = HashMap::new();
        let mut doc_bins = HashMap::new();
        for unit in keys {
            if unit.mode.is_doc() && self.is_primary_package(unit) {
                if unit.target.is_lib() {
                    if let Some(prev) = doc_libs.insert((unit.target.crate_name(), unit.kind), unit)
                    {
                        doc_collision_error(unit, prev)?;
                    }
                } else if let Some(prev) =
                    doc_bins.insert((unit.target.crate_name(), unit.kind), unit)
                {
                    doc_collision_error(unit, prev)?;
                }
            }
            // Check the unit's outputs, hardlinks, and exported copies for
            // path collisions with anything seen so far.
            for output in self.outputs(unit)?.iter() {
                if let Some(other_unit) = output_collisions.insert(output.path.clone(), unit) {
                    if unit.mode.is_doc() {
                        report_collision(unit, other_unit, &output.path, rustdoc_suggestion)?;
                    } else {
                        report_collision(unit, other_unit, &output.path, suggestion)?;
                    }
                }
                if let Some(hardlink) = output.hardlink.as_ref() {
                    if let Some(other_unit) = output_collisions.insert(hardlink.clone(), unit) {
                        report_collision(unit, other_unit, hardlink, suggestion)?;
                    }
                }
                if let Some(ref export_path) = output.export_path {
                    if let Some(other_unit) = output_collisions.insert(export_path.clone(), unit) {
                        self.bcx.gctx.shell().warn(format!(
                            "`--artifact-dir` filename collision.\n\
                             {}\
                             The exported filenames should be unique.\n\
                             {}",
                            describe_collision(unit, other_unit, export_path),
                            suggestion
                        ))?;
                    }
                }
            }
        }
        Ok(())
    }
652
653 fn record_units_requiring_metadata(&mut self) {
658 for (key, deps) in self.bcx.unit_graph.iter() {
659 for dep in deps {
660 if self.only_requires_rmeta(key, &dep.unit) {
661 self.rmeta_required.insert(dep.unit.clone());
662 }
663 }
664 }
665 }
666
667 pub fn only_requires_rmeta(&self, parent: &Unit, dep: &Unit) -> bool {
670 !parent.requires_upstream_objects()
673 && parent.mode == CompileMode::Build
674 && !dep.requires_upstream_objects()
677 && dep.mode == CompileMode::Build
678 }
679
    /// Whether some parent of `unit` only requires its `.rmeta` metadata,
    /// as recorded earlier by `record_units_requiring_metadata`.
    pub fn rmeta_required(&self, unit: &Unit) -> bool {
        self.rmeta_required.contains(unit)
    }
685
    /// For every doc / doc-scrape unit, records in `metadata_for_doc_units`
    /// which unit's metadata hash it should use.
    ///
    /// Among units with the same package and target (excluding scrape
    /// units), a check-mode unit is preferred, then a doc-mode unit, and
    /// finally the unit itself as a fallback.
    #[tracing::instrument(skip_all)]
    pub fn compute_metadata_for_doc_units(&mut self) {
        for unit in self.bcx.unit_graph.keys() {
            if !unit.mode.is_doc() && !unit.mode.is_doc_scrape() {
                continue;
            }

            // All non-scrape units sharing this unit's package and target.
            let matching_units = self
                .bcx
                .unit_graph
                .keys()
                .filter(|other| {
                    unit.pkg == other.pkg
                        && unit.target == other.target
                        && !other.mode.is_doc_scrape()
                })
                .collect::<Vec<_>>();
            // Preference order: check-mode, then doc-mode, then `unit`.
            let metadata_unit = matching_units
                .iter()
                .find(|other| other.mode.is_check())
                .or_else(|| matching_units.iter().find(|other| other.mode.is_doc()))
                .unwrap_or(&unit);
            self.metadata_for_doc_units
                .insert(unit.clone(), self.files().metadata(metadata_unit));
        }
    }
722}