use std::collections::{BTreeSet, HashMap, HashSet};
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};

use crate::core::compiler::compilation::{self, UnitOutput};
use crate::core::compiler::{self, artifact, Unit};
use crate::core::PackageId;
use crate::util::cache_lock::CacheLockMode;
use crate::util::errors::CargoResult;
use anyhow::{bail, Context as _};
use filetime::FileTime;
use itertools::Itertools;
use jobserver::Client;

use super::build_plan::BuildPlan;
use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts};
use super::fingerprint::{Checksum, Fingerprint};
use super::job_queue::JobQueue;
use super::layout::Layout;
use super::lto::Lto;
use super::unit_graph::UnitDep;
use super::{
    BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor, RustDocFingerprint,
};

mod compilation_files;
use self::compilation_files::CompilationFiles;
pub use self::compilation_files::{Metadata, OutputFile, UnitHash};

/// Collects the mutable state needed while the unit graph is being compiled.
pub struct BuildRunner<'a, 'gctx> {
    /// Mostly static information about the build task.
    pub bcx: &'a BuildContext<'a, 'gctx>,
    /// A large collection of information about the result of the entire compilation.
    pub compilation: Compilation<'gctx>,
    /// Output collected from the build scripts that have run so far.
    pub build_script_outputs: Arc<Mutex<BuildScriptOutputs>>,
    /// Dependencies (like `rerun-if-changed`) declared by build scripts.
    pub build_explicit_deps: HashMap<Unit, BuildDeps>,
    /// Fingerprints used to detect whether a unit is up to date.
    pub fingerprints: HashMap<Unit, Arc<Fingerprint>>,
    /// Cache of file mtimes, to avoid repeated filesystem lookups.
    pub mtime_cache: HashMap<PathBuf, FileTime>,
    /// Cache of file checksums, used by checksum-based fingerprinting.
    pub checksum_cache: HashMap<PathBuf, Checksum>,
    /// Units that have already been scheduled, so each unit is only compiled once.
    pub compiled: HashSet<Unit>,
    /// Build script information tracked for each unit.
    pub build_scripts: HashMap<Unit, Arc<BuildScripts>>,
    /// The jobserver client used to limit build parallelism.
    pub jobserver: Client,
    /// Packages from the root units (typically those selected on the command line).
    primary_packages: HashSet<PackageId>,
    /// Output file layout information; `None` until [`BuildRunner::prepare_units`] runs.
    files: Option<CompilationFiles<'a, 'gctx>>,

    /// Units whose dependents only need their `.rmeta` output (used for pipelining).
    rmeta_required: HashSet<Unit>,

    /// The LTO setting computed for each unit.
    pub lto: HashMap<Unit, Lto>,

    /// Metadata hashes to use when documenting units.
    pub metadata_for_doc_units: HashMap<Unit, Metadata>,

    /// Doc-scraping units that failed, tracked so their output can be skipped.
    pub failed_scrape_units: Arc<Mutex<HashSet<UnitHash>>>,
}

impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
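    /// Creates a new [`BuildRunner`], reusing the jobserver client inherited
    /// from the environment or creating a fresh one sized to the configured
    /// job count.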
    pub fn new(bcx: &'a BuildContext<'a, 'gctx>) -> CargoResult<Self> {
        // Use the jobserver inherited from the environment if there is one;
        // otherwise create our own with `jobs` tokens and immediately acquire
        // one token to account for this process itself.
        let jobserver = match bcx.gctx.jobserver_from_env() {
            Some(c) => c.clone(),
            None => {
                let client =
                    Client::new(bcx.jobs() as usize).context("failed to create jobserver")?;
                client.acquire_raw()?;
                client
            }
        };

        Ok(Self {
            bcx,
            compilation: Compilation::new(bcx)?,
            build_script_outputs: Arc::new(Mutex::new(BuildScriptOutputs::default())),
            fingerprints: HashMap::new(),
            mtime_cache: HashMap::new(),
            checksum_cache: HashMap::new(),
            compiled: HashSet::new(),
            build_scripts: HashMap::new(),
            build_explicit_deps: HashMap::new(),
            jobserver,
            primary_packages: HashSet::new(),
            files: None,
            rmeta_required: HashSet::new(),
            lto: HashMap::new(),
            metadata_for_doc_units: HashMap::new(),
            failed_scrape_units: Arc::new(Mutex::new(HashSet::new())),
        })
    }

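    /// Performs a dry run of the compilation: prepares the unit layout and
    /// collects information about the expected output artifacts without
    /// running any compilation jobs.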
    pub fn dry_run(mut self) -> CargoResult<Compilation<'gctx>> {
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        self.check_collisions()?;

        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;
        }

        Ok(self.compilation)
    }

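    /// Starts the compilation: builds the job queue, schedules every root unit
    /// and its dependencies, runs the queue, and then collects the results
    /// (executables, doctest arguments, build script environment) into the
    /// returned [`Compilation`].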
    #[tracing::instrument(skip_all)]
    pub fn compile(mut self, exec: &Arc<dyn Executor>) -> CargoResult<Compilation<'gctx>> {
        let _lock = self
            .bcx
            .gctx
            .acquire_package_cache_lock(CacheLockMode::Shared)?;
        let mut queue = JobQueue::new(self.bcx);
        let mut plan = BuildPlan::new();
        let build_plan = self.bcx.build_config.build_plan;
        self.lto = super::lto::generate(self.bcx)?;
        self.prepare_units()?;
        self.prepare()?;
        custom_build::build_map(&mut self)?;
        self.check_collisions()?;
        self.compute_metadata_for_doc_units();

        // When documenting, verify the recorded rustdoc fingerprint; a
        // rustdoc version mismatch invalidates the existing doc output.
        if self.bcx.build_config.mode.is_doc() {
            RustDocFingerprint::check_rustdoc_fingerprint(&self)?
        }

        for unit in &self.bcx.roots {
            let force_rebuild = self.bcx.build_config.force_rebuild;
            super::compile(&mut self, &mut queue, &mut plan, unit, exec, force_rebuild)?;
        }

        // Now that the fingerprint analysis is done, drop the memoized hashes
        // so they are recomputed with up-to-date values during the build.
        for fingerprint in self.fingerprints.values() {
            fingerprint.clear_memoized();
        }

        queue.execute(&mut self, &mut plan)?;

        if build_plan {
            plan.set_inputs(self.build_plan_inputs()?);
            plan.output_plan(self.bcx.gctx);
        }

        // Record `OUT_DIR` for every root unit that has a build script.
        let units_with_build_script = &self
            .bcx
            .roots
            .iter()
            .filter(|unit| self.build_scripts.contains_key(unit))
            .dedup_by(|x, y| x.pkg.package_id() == y.pkg.package_id())
            .collect::<Vec<_>>();
        for unit in units_with_build_script {
            for dep in &self.bcx.unit_graph[unit] {
                if dep.unit.mode.is_run_custom_build() {
                    let out_dir = self
                        .files()
                        .build_script_out_dir(&dep.unit)
                        .display()
                        .to_string();
                    let script_meta = self.get_run_build_script_metadata(&dep.unit);
                    self.compilation
                        .extra_env
                        .entry(script_meta)
                        .or_insert_with(Vec::new)
                        .push(("OUT_DIR".to_string(), out_dir));
                }
            }
        }

        for unit in &self.bcx.roots {
            self.collect_tests_and_executables(unit)?;

            // Collect the arguments rustdoc needs to run this unit's doctests.
            if unit.mode.is_doc_test() {
                let mut unstable_opts = false;
                let mut args = compiler::extern_args(&self, unit, &mut unstable_opts)?;
                args.extend(compiler::lto_args(&self, unit));
                args.extend(compiler::features_args(unit));
                args.extend(compiler::check_cfg_args(unit));

                let script_meta = self.find_build_script_metadata(unit);
                if let Some(meta) = script_meta {
                    if let Some(output) = self.build_script_outputs.lock().unwrap().get(meta) {
                        for cfg in &output.cfgs {
                            args.push("--cfg".into());
                            args.push(cfg.into());
                        }

                        for check_cfg in &output.check_cfgs {
                            args.push("--check-cfg".into());
                            args.push(check_cfg.into());
                        }

                        for (lt, arg) in &output.linker_args {
                            if lt.applies_to(&unit.target) {
                                args.push("-C".into());
                                args.push(format!("link-arg={}", arg).into());
                            }
                        }
                    }
                }
                args.extend(unit.rustdocflags.iter().map(Into::into));

                use super::MessageFormat;
                let format = match self.bcx.build_config.message_format {
                    MessageFormat::Short => "short",
                    MessageFormat::Human => "human",
                    MessageFormat::Json { .. } => "json",
                };
                args.push("--error-format".into());
                args.push(format.into());

                self.compilation.to_doc_test.push(compilation::Doctest {
                    unit: unit.clone(),
                    args,
                    unstable_opts,
                    linker: self.compilation.target_linker(unit.kind).clone(),
                    script_meta,
                    env: artifact::get_env(&self, self.unit_deps(unit))?,
                });
            }

            super::output_depinfo(&mut self, unit)?;
        }

        // Propagate build script environment variables and native library
        // search paths into the final compilation result.
        for (script_meta, output) in self.build_script_outputs.lock().unwrap().iter() {
            self.compilation
                .extra_env
                .entry(*script_meta)
                .or_insert_with(Vec::new)
                .extend(output.env.iter().cloned());

            for dir in output.library_paths.iter() {
                self.compilation.native_dirs.insert(dir.clone());
            }
        }
        Ok(self.compilation)
    }

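    /// Records the test, binary, and cdylib artifacts produced by `unit` in
    /// `self.compilation`.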
    fn collect_tests_and_executables(&mut self, unit: &Unit) -> CargoResult<()> {
        for output in self.outputs(unit)?.iter() {
            if matches!(
                output.flavor,
                FileFlavor::DebugInfo | FileFlavor::Auxiliary | FileFlavor::Sbom
            ) {
                continue;
            }

            let bindst = output.bin_dst();

            if unit.mode == CompileMode::Test {
                self.compilation
                    .tests
                    .push(self.unit_output(unit, &output.path));
            } else if unit.target.is_executable() {
                self.compilation
                    .binaries
                    .push(self.unit_output(unit, bindst));
            } else if unit.target.is_cdylib()
                && !self.compilation.cdylibs.iter().any(|uo| uo.unit == *unit)
            {
                self.compilation
                    .cdylibs
                    .push(self.unit_output(unit, bindst));
            }
        }
        Ok(())
    }

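    /// Returns the executable for the specified unit, if any (i.e. the unit is
    /// a binary or test that actually produces an executable).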
    pub fn get_executable(&mut self, unit: &Unit) -> CargoResult<Option<PathBuf>> {
        let is_binary = unit.target.is_executable();
        let is_test = unit.mode.is_any_test();
        if !unit.mode.generates_executable() || !(is_binary || is_test) {
            return Ok(None);
        }
        Ok(self
            .outputs(unit)?
            .iter()
            .find(|o| o.flavor == FileFlavor::Normal)
            .map(|output| output.bin_dst().clone()))
    }

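    /// Creates the directory [`Layout`]s for the host and each compile target,
    /// and initializes the [`CompilationFiles`] for all units.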
    #[tracing::instrument(skip_all)]
    pub fn prepare_units(&mut self) -> CargoResult<()> {
        let dest = self.bcx.profiles.get_dir_name();
        let host_layout = Layout::new(self.bcx.ws, None, &dest)?;
        let mut targets = HashMap::new();
        for kind in self.bcx.all_kinds.iter() {
            if let CompileKind::Target(target) = *kind {
                let layout = Layout::new(self.bcx.ws, Some(target), &dest)?;
                targets.insert(target, layout);
            }
        }
        self.primary_packages
            .extend(self.bcx.roots.iter().map(|u| u.pkg.package_id()));
        self.compilation
            .root_crate_names
            .extend(self.bcx.roots.iter().map(|u| u.target.crate_name()));

        self.record_units_requiring_metadata();

        let files = CompilationFiles::new(self, host_layout, targets);
        self.files = Some(files);
        Ok(())
    }

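    /// Prepares the filesystem: creates the output directories for the host
    /// and every target layout, and records their paths in `self.compilation`.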
    #[tracing::instrument(skip_all)]
    pub fn prepare(&mut self) -> CargoResult<()> {
        self.files
            .as_mut()
            .unwrap()
            .host
            .prepare()
            .context("couldn't prepare build directories")?;
        for target in self.files.as_mut().unwrap().target.values_mut() {
            target
                .prepare()
                .context("couldn't prepare build directories")?;
        }

        let files = self.files.as_ref().unwrap();
        for &kind in self.bcx.all_kinds.iter() {
            let layout = files.layout(kind);
            self.compilation
                .root_output
                .insert(kind, layout.dest().to_path_buf());
            self.compilation
                .deps_output
                .insert(kind, layout.deps().to_path_buf());
        }
        Ok(())
    }

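    /// Returns the [`CompilationFiles`] for this build.
    ///
    /// Panics if [`BuildRunner::prepare_units`] has not been called yet.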
    pub fn files(&self) -> &CompilationFiles<'a, 'gctx> {
        self.files.as_ref().unwrap()
    }

    pub fn outputs(&self, unit: &Unit) -> CargoResult<Arc<Vec<OutputFile>>> {
        self.files.as_ref().unwrap().outputs(unit, self.bcx)
    }

    pub fn unit_deps(&self, unit: &Unit) -> &[UnitDep] {
        &self.bcx.unit_graph[unit]
    }

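    /// Returns the "run custom build" unit associated with the given unit, if any.
    ///
    /// If the unit is itself a run-custom-build unit, it is returned directly.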
    pub fn find_build_script_unit(&self, unit: &Unit) -> Option<Unit> {
        if unit.mode.is_run_custom_build() {
            return Some(unit.clone());
        }
        self.bcx.unit_graph[unit]
            .iter()
            .find(|unit_dep| {
                unit_dep.unit.mode.is_run_custom_build()
                    && unit_dep.unit.pkg.package_id() == unit.pkg.package_id()
            })
            .map(|unit_dep| unit_dep.unit.clone())
    }

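    /// Returns the metadata hash of the "run custom build" unit associated
    /// with the given unit, or `None` if the package has no build script.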
    pub fn find_build_script_metadata(&self, unit: &Unit) -> Option<UnitHash> {
        let script_unit = self.find_build_script_unit(unit)?;
        Some(self.get_run_build_script_metadata(&script_unit))
    }

    pub fn get_run_build_script_metadata(&self, unit: &Unit) -> UnitHash {
        assert!(unit.mode.is_run_custom_build());
        self.files().metadata(unit).unit_id()
    }

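    /// Returns the paths of the SBOM output files, if any, for the given unit.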
    pub fn sbom_output_files(&self, unit: &Unit) -> CargoResult<Vec<PathBuf>> {
        Ok(self
            .outputs(unit)?
            .iter()
            .filter(|o| o.flavor == FileFlavor::Sbom)
            .map(|o| o.path.clone())
            .collect())
    }

    pub fn is_primary_package(&self, unit: &Unit) -> bool {
        self.primary_packages.contains(&unit.pkg.package_id())
    }

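    /// Returns the sorted set of filesystem inputs (the manifest paths of
    /// every unit in the graph) recorded in the build plan.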
    pub fn build_plan_inputs(&self) -> CargoResult<Vec<PathBuf>> {
        let mut inputs = BTreeSet::new();
        for unit in self.bcx.unit_graph.keys() {
            inputs.insert(unit.pkg.manifest_path().to_path_buf());
        }
        Ok(inputs.into_iter().collect())
    }

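    /// Builds a [`UnitOutput`] for the given unit and artifact path, attaching
    /// the metadata of its build script (if any).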
    pub fn unit_output(&self, unit: &Unit, path: &Path) -> UnitOutput {
        let script_meta = self.find_build_script_metadata(unit);
        UnitOutput {
            unit: unit.clone(),
            path: path.to_path_buf(),
            script_meta,
        }
    }

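    /// Checks whether any two units would write to the same output path and
    /// warns (or errors, for documentation outputs) if they do.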
    #[tracing::instrument(skip_all)]
    fn check_collisions(&self) -> CargoResult<()> {
        let mut output_collisions = HashMap::new();
        let describe_collision = |unit: &Unit, other_unit: &Unit, path: &PathBuf| -> String {
            format!(
                "The {} target `{}` in package `{}` has the same output \
                 filename as the {} target `{}` in package `{}`.\n\
                 Colliding filename is: {}\n",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg.package_id(),
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg.package_id(),
                path.display()
            )
        };
        let suggestion =
            "Consider changing their names to be unique or compiling them separately.\n\
             This may become a hard error in the future; see \
             <https://github.com/rust-lang/cargo/issues/6313>.";
        let rustdoc_suggestion =
            "This is a known bug where multiple crates with the same name use\n\
             the same path; see <https://github.com/rust-lang/cargo/issues/6313>.";
        let report_collision = |unit: &Unit,
                                other_unit: &Unit,
                                path: &PathBuf,
                                suggestion: &str|
         -> CargoResult<()> {
            if unit.target.name() == other_unit.target.name() {
                self.bcx.gctx.shell().warn(format!(
                    "output filename collision.\n\
                     {}\
                     The targets should have unique names.\n\
                     {}",
                    describe_collision(unit, other_unit, path),
                    suggestion
                ))
            } else {
                self.bcx.gctx.shell().warn(format!(
                    "output filename collision.\n\
                     {}\
                     The output filenames should be unique.\n\
                     {}\n\
                     If this looks unexpected, it may be a bug in Cargo. Please file a bug report at\n\
                     https://github.com/rust-lang/cargo/issues/ with as much information as you\n\
                     can provide.\n\
                     cargo {} running on `{}` target `{}`\n\
                     First unit: {:?}\n\
                     Second unit: {:?}",
                    describe_collision(unit, other_unit, path),
                    suggestion,
                    crate::version(),
                    self.bcx.host_triple(),
                    self.bcx.target_data.short_name(&unit.kind),
                    unit,
                    other_unit
                ))
            }
        };

        fn doc_collision_error(unit: &Unit, other_unit: &Unit) -> CargoResult<()> {
            bail!(
                "document output filename collision\n\
                 The {} `{}` in package `{}` has the same name as the {} `{}` in package `{}`.\n\
                 Only one may be documented at once since they output to the same path.\n\
                 Consider documenting only one, renaming one, \
                 or marking one with `doc = false` in Cargo.toml.",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg,
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg,
            );
        }

        let mut keys = self
            .bcx
            .unit_graph
            .keys()
            .filter(|unit| !unit.mode.is_run_custom_build())
            .collect::<Vec<_>>();
        // Sort for consistent error messages.
        keys.sort_unstable();
        // Doc collisions are tracked separately for libs and bins: only two
        // libs or two bins with the same crate name are a hard error.
        let mut doc_libs = HashMap::new();
        let mut doc_bins = HashMap::new();
        for unit in keys {
            if unit.mode.is_doc() && self.is_primary_package(unit) {
                if unit.target.is_lib() {
                    if let Some(prev) = doc_libs.insert((unit.target.crate_name(), unit.kind), unit)
                    {
                        doc_collision_error(unit, prev)?;
                    }
                } else if let Some(prev) =
                    doc_bins.insert((unit.target.crate_name(), unit.kind), unit)
                {
                    doc_collision_error(unit, prev)?;
                }
            }
            for output in self.outputs(unit)?.iter() {
                if let Some(other_unit) = output_collisions.insert(output.path.clone(), unit) {
                    if unit.mode.is_doc() {
                        report_collision(unit, other_unit, &output.path, rustdoc_suggestion)?;
                    } else {
                        report_collision(unit, other_unit, &output.path, suggestion)?;
                    }
                }
                if let Some(hardlink) = output.hardlink.as_ref() {
                    if let Some(other_unit) = output_collisions.insert(hardlink.clone(), unit) {
                        report_collision(unit, other_unit, hardlink, suggestion)?;
                    }
                }
                if let Some(ref export_path) = output.export_path {
                    if let Some(other_unit) = output_collisions.insert(export_path.clone(), unit) {
                        self.bcx.gctx.shell().warn(format!(
                            "`--artifact-dir` filename collision.\n\
                             {}\
                             The exported filenames should be unique.\n\
                             {}",
                            describe_collision(unit, other_unit, export_path),
                            suggestion
                        ))?;
                    }
                }
            }
        }
        Ok(())
    }

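    /// Records the set of units whose dependents only need their metadata
    /// (`.rmeta`) output, which enables pipelined compilation.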
    fn record_units_requiring_metadata(&mut self) {
        for (key, deps) in self.bcx.unit_graph.iter() {
            for dep in deps {
                if self.only_requires_rmeta(key, &dep.unit) {
                    self.rmeta_required.insert(dep.unit.clone());
                }
            }
        }
    }

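    /// Returns whether `parent`, when depending on `dep`, only needs the
    /// `.rmeta` (metadata) output of `dep`.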
    pub fn only_requires_rmeta(&self, parent: &Unit, dep: &Unit) -> bool {
        // The parent is only a candidate if it does not itself need upstream
        // object code and is a plain build.
        !parent.requires_upstream_objects()
            && parent.mode == CompileMode::Build
            // The dependency must likewise not require object code from its
            // own dependencies and be a plain build.
            && !dep.requires_upstream_objects()
            && dep.mode == CompileMode::Build
    }

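    /// Returns whether some dependent of `unit` only requires its metadata
    /// (`.rmeta`), as recorded by [`Self::record_units_requiring_metadata`].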
    pub fn rmeta_required(&self, unit: &Unit) -> bool {
        self.rmeta_required.contains(unit)
    }

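    /// Computes the metadata hash to use for each doc or doc-scrape unit.
    ///
    /// Rustdoc needs a `-C metadata` value that matches the unit that produced
    /// the library's `.rmeta` (normally a check unit) so cross-crate item
    /// references resolve; if no such unit exists, the doc unit's own metadata
    /// is used.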
    #[tracing::instrument(skip_all)]
    pub fn compute_metadata_for_doc_units(&mut self) {
        for unit in self.bcx.unit_graph.keys() {
            if !unit.mode.is_doc() && !unit.mode.is_doc_scrape() {
                continue;
            }

            let matching_units = self
                .bcx
                .unit_graph
                .keys()
                .filter(|other| {
                    unit.pkg == other.pkg
                        && unit.target == other.target
                        && !other.mode.is_doc_scrape()
                })
                .collect::<Vec<_>>();
            let metadata_unit = matching_units
                .iter()
                .find(|other| other.mode.is_check())
                .or_else(|| matching_units.iter().find(|other| other.mode.is_doc()))
                .unwrap_or(&unit);
            self.metadata_for_doc_units
                .insert(unit.clone(), self.files().metadata(metadata_unit));
        }
    }
}