// cargo/core/compiler/mod.rs
//! # Interact with the compiler
//!
//! If you consider [`ops::cargo_compile::compile`] as a `rustc` driver but on
//! the Cargo side, this module is, by analogy, the `rustc_interface` for it.
//! It contains all the interaction between Cargo and the rustc compiler,
//! from preparing the context for the entire build process, to scheduling
//! and executing each unit of work (e.g. running `rustc`), to managing and
//! caching the output artifact of a build.
//!
//! However, it hasn't yet exposed a clear definition of each phase or session,
//! like what rustc has done. Also, no one knows if Cargo really needs that.
//! To be pragmatic, here we list a handful of items you may want to learn:
//!
//! * [`BuildContext`] is a static context containing all information you need
//!   before a build gets started.
//! * [`BuildRunner`] is the center of the world, coordinating a running build and
//!   collecting information from it.
//! * [`custom_build`] is the home of build script executions and output parsing.
//! * [`fingerprint`] not only defines but also executes a set of rules to
//!   determine if a re-compile is needed.
//! * [`job_queue`] is where the parallelism, job scheduling, and communication
//!   machinery happen between Cargo and the compiler.
//! * [`layout`] defines and manages output artifacts of a build in the filesystem.
//! * [`unit_dependencies`] is for building a dependency graph for compilation
//!   from a result of dependency resolution.
//! * [`Unit`] contains sufficient information to build something, usually
//!   turning into a compiler invocation in a later phase.
//!
//! [`ops::cargo_compile::compile`]: crate::ops::compile

// Submodules implementing each concern of the build; see the module-level
// docs above for a guided tour of the most important ones.
pub mod artifact;
mod build_config;
pub(crate) mod build_context;
pub(crate) mod build_runner;
mod compilation;
mod compile_kind;
mod crate_type;
mod custom_build;
pub(crate) mod fingerprint;
pub mod future_incompat;
pub(crate) mod job_queue;
pub(crate) mod layout;
mod links;
mod locking;
mod lto;
mod output_depinfo;
mod output_sbom;
pub mod rustdoc;
pub mod standard_lib;
pub mod timings;
mod unit;
pub mod unit_dependencies;
pub mod unit_graph;
54
55use std::borrow::Cow;
56use std::cell::OnceCell;
57use std::collections::{BTreeMap, HashMap, HashSet};
58use std::env;
59use std::ffi::{OsStr, OsString};
60use std::fmt::Display;
61use std::fs::{self, File};
62use std::io::{BufRead, BufWriter, Write};
63use std::ops::Range;
64use std::path::{Path, PathBuf};
65use std::sync::{Arc, LazyLock};
66
67use annotate_snippets::{AnnotationKind, Group, Level, Renderer, Snippet};
68use anyhow::{Context as _, Error};
69use cargo_platform::{Cfg, Platform};
70use itertools::Itertools;
71use regex::Regex;
72use tracing::{debug, instrument, trace};
73
74pub use self::build_config::UserIntent;
75pub use self::build_config::{BuildConfig, CompileMode, MessageFormat};
76pub use self::build_context::BuildContext;
77pub use self::build_context::FileFlavor;
78pub use self::build_context::FileType;
79pub use self::build_context::RustcTargetData;
80pub use self::build_context::TargetInfo;
81pub use self::build_runner::{BuildRunner, Metadata, UnitHash};
82pub use self::compilation::{Compilation, Doctest, UnitOutput};
83pub use self::compile_kind::{CompileKind, CompileKindFallback, CompileTarget};
84pub use self::crate_type::CrateType;
85pub use self::custom_build::LinkArgTarget;
86pub use self::custom_build::{BuildOutput, BuildScriptOutputs, BuildScripts, LibraryPath};
87pub(crate) use self::fingerprint::DirtyReason;
88pub use self::fingerprint::RustdocFingerprint;
89pub use self::job_queue::Freshness;
90use self::job_queue::{Job, JobQueue, JobState, Work};
91pub(crate) use self::layout::Layout;
92pub use self::lto::Lto;
93use self::output_depinfo::output_depinfo;
94use self::output_sbom::build_sbom;
95use self::unit_graph::UnitDep;
96
97use crate::core::compiler::future_incompat::FutureIncompatReport;
98use crate::core::compiler::locking::LockKey;
99use crate::core::compiler::timings::SectionTiming;
100pub use crate::core::compiler::unit::Unit;
101pub use crate::core::compiler::unit::UnitIndex;
102pub use crate::core::compiler::unit::UnitInterner;
103use crate::core::manifest::TargetSourcePath;
104use crate::core::profiles::{PanicStrategy, Profile, StripInner};
105use crate::core::{Feature, PackageId, Target, Verbosity};
106use crate::lints::get_key_value;
107use crate::util::OnceExt;
108use crate::util::context::WarningHandling;
109use crate::util::errors::{CargoResult, VerboseError};
110use crate::util::interning::InternedString;
111use crate::util::machine_message::{self, Message};
112use crate::util::{add_path_args, internal, path_args};
113
114use cargo_util::{ProcessBuilder, ProcessError, paths};
115use cargo_util_schemas::manifest::TomlDebugInfo;
116use cargo_util_schemas::manifest::TomlTrimPaths;
117use cargo_util_schemas::manifest::TomlTrimPathsValue;
118use rustfix::diagnostics::Applicability;
119
/// The `rustdoc` CLI flag used to pass the crate version into generated docs.
const RUSTDOC_CRATE_VERSION_FLAG: &str = "--crate-version";
121
/// A glorified callback for executing calls to rustc. Rather than calling rustc
/// directly, we'll use an `Executor`, giving clients an opportunity to intercept
/// the build calls.
pub trait Executor: Send + Sync + 'static {
    /// Called after a rustc process invocation is prepared up-front for a given
    /// unit of work (may still be modified for runtime-known dependencies, when
    /// the work is actually executed).
    fn init(&self, _build_runner: &BuildRunner<'_, '_>, _unit: &Unit) {}

    /// Executes the prepared `cmd`, streaming its output through the two
    /// per-line callbacks.
    ///
    /// In case of an `Err`, Cargo will not continue with the build process for
    /// this package.
    fn exec(
        &self,
        cmd: &ProcessBuilder,
        id: PackageId,
        target: &Target,
        mode: CompileMode,
        on_stdout_line: &mut dyn FnMut(&str) -> CargoResult<()>,
        on_stderr_line: &mut dyn FnMut(&str) -> CargoResult<()>,
    ) -> CargoResult<()>;

    /// Queried when queuing each unit of work. If it returns true, then the
    /// unit will always be rebuilt, independent of whether it needs to be.
    fn force_rebuild(&self, _unit: &Unit) -> bool {
        false
    }
}
149
150/// A `DefaultExecutor` calls rustc without doing anything else. It is Cargo's
151/// default behaviour.
152#[derive(Copy, Clone)]
153pub struct DefaultExecutor;
154
155impl Executor for DefaultExecutor {
156    #[instrument(name = "rustc", skip_all, fields(package = id.name().as_str(), process = cmd.to_string()))]
157    fn exec(
158        &self,
159        cmd: &ProcessBuilder,
160        id: PackageId,
161        _target: &Target,
162        _mode: CompileMode,
163        on_stdout_line: &mut dyn FnMut(&str) -> CargoResult<()>,
164        on_stderr_line: &mut dyn FnMut(&str) -> CargoResult<()>,
165    ) -> CargoResult<()> {
166        cmd.exec_with_streaming(on_stdout_line, on_stderr_line, false)
167            .map(drop)
168    }
169}
170
/// Builds up and enqueue a list of pending jobs onto the `job` queue.
///
/// Starting from the `unit`, this function recursively calls itself to build
/// all jobs for dependencies of the `unit`. Each of these jobs represents
/// compiling a particular package.
///
/// Note that **no actual work is executed as part of this**, that's all done
/// next as part of [`JobQueue::execute`] function which will run everything
/// in order with proper parallelism.
#[tracing::instrument(skip(build_runner, jobs, exec))]
fn compile<'gctx>(
    build_runner: &mut BuildRunner<'_, 'gctx>,
    jobs: &mut JobQueue<'gctx>,
    unit: &Unit,
    exec: &Arc<dyn Executor>,
    force_rebuild: bool,
) -> CargoResult<()> {
    let bcx = build_runner.bcx;
    // Each unit is enqueued at most once; `compiled` records what we've seen.
    if !build_runner.compiled.insert(unit.clone()) {
        return Ok(());
    }

    // Under -Zfine-grain-locking, hold a shared lock on this unit for the
    // duration of queueing; it may be upgraded to exclusive below.
    let lock = if build_runner.bcx.gctx.cli_unstable().fine_grain_locking {
        Some(build_runner.lock_manager.lock_shared(build_runner, unit)?)
    } else {
        None
    };

    // If we are in `--compile-time-deps` and the given unit is not a compile
    // time dependency, skip compiling the unit and jump to its dependencies,
    // which may still be compile time dependencies.
    if !unit.skip_non_compile_time_dep {
        // Build up the work to be done to compile this unit, enqueuing it once
        // we've got everything constructed.
        fingerprint::prepare_init(build_runner, unit)?;

        let job = if unit.mode.is_run_custom_build() {
            custom_build::prepare(build_runner, unit)?
        } else if unit.mode.is_doc_test() {
            // We run these targets later, so this is just a no-op for now.
            Job::new_fresh()
        } else {
            let force = exec.force_rebuild(unit) || force_rebuild;
            let mut job = fingerprint::prepare_target(build_runner, unit, force)?;
            job.before(if job.freshness().is_dirty() {
                // Dirty: actually invoke rustdoc/rustc for this unit.
                let work = if unit.mode.is_doc() || unit.mode.is_doc_scrape() {
                    rustdoc(build_runner, unit)?
                } else {
                    rustc(build_runner, unit, exec)?
                };
                work.then(link_targets(build_runner, unit, false)?)
            } else {
                // We always replay the output cache,
                // since it might contain future-incompat-report messages
                let show_diagnostics = unit.show_warnings(bcx.gctx)
                    && build_runner.bcx.gctx.warning_handling()? != WarningHandling::Allow;
                let manifest = ManifestErrorContext::new(build_runner, unit);
                let work = replay_output_cache(
                    unit.pkg.package_id(),
                    manifest,
                    &unit.target,
                    build_runner.files().message_cache_path(unit),
                    build_runner.bcx.build_config.message_format,
                    show_diagnostics,
                );
                // Need to link targets on both the dirty and fresh.
                work.then(link_targets(build_runner, unit, true)?)
            });

            // If -Zfine-grain-locking is enabled, we wrap the job with an upgrade to exclusive
            // lock before starting, then downgrade to a shared lock after the job is finished.
            if build_runner.bcx.gctx.cli_unstable().fine_grain_locking && job.freshness().is_dirty()
            {
                if let Some(lock) = lock {
                    // Here we unlock the current shared lock to avoid deadlocking with other cargo
                    // processes. Then we configure our compile job to take an exclusive lock
                    // before starting. Once we are done compiling (including both rmeta and rlib)
                    // we downgrade to a shared lock to allow other cargo's to read the build unit.
                    // We will hold this shared lock for the remainder of compilation to prevent
                    // other cargo from re-compiling while we are still using the unit.
                    build_runner.lock_manager.unlock(&lock)?;
                    job.before(prebuild_lock_exclusive(lock.clone()));
                    job.after(downgrade_lock_to_shared(lock));
                }
            }

            job
        };
        jobs.enqueue(build_runner, unit, job)?;
    }

    // Be sure to compile all dependencies of this target as well.
    let deps = Vec::from(build_runner.unit_deps(unit)); // Create vec due to mutable borrow.
    for dep in deps {
        compile(build_runner, jobs, &dep.unit, exec, false)?;
    }

    Ok(())
}
270
271/// Generates the warning message used when fallible doc-scrape units fail,
272/// either for rustdoc or rustc.
273fn make_failed_scrape_diagnostic(
274    build_runner: &BuildRunner<'_, '_>,
275    unit: &Unit,
276    top_line: impl Display,
277) -> String {
278    let manifest_path = unit.pkg.manifest_path();
279    let relative_manifest_path = manifest_path
280        .strip_prefix(build_runner.bcx.ws.root())
281        .unwrap_or(&manifest_path);
282
283    format!(
284        "\
285{top_line}
286    Try running with `--verbose` to see the error message.
287    If an example should not be scanned, then consider adding `doc-scrape-examples = false` to its `[[example]]` definition in {}",
288        relative_manifest_path.display()
289    )
290}
291
/// Creates a unit of work invoking `rustc` for building the `unit`.
///
/// Everything the job needs at runtime is computed and cloned up front so the
/// returned [`Work`] closure owns its data and can run on the job queue
/// without borrowing `build_runner`.
fn rustc(
    build_runner: &mut BuildRunner<'_, '_>,
    unit: &Unit,
    exec: &Arc<dyn Executor>,
) -> CargoResult<Work> {
    let mut rustc = prepare_rustc(build_runner, unit)?;

    let name = unit.pkg.name();

    let outputs = build_runner.outputs(unit)?;
    let root = build_runner.files().output_dir(unit);

    // Prepare the native lib state (extra `-L` and `-l` flags).
    let build_script_outputs = Arc::clone(&build_runner.build_script_outputs);
    let current_id = unit.pkg.package_id();
    let manifest = ManifestErrorContext::new(build_runner, unit);
    let build_scripts = build_runner.build_scripts.get(unit).cloned();

    // If we are a binary and the package also contains a library, then we
    // don't pass the `-l` flags.
    let pass_l_flag = unit.target.is_lib() || !unit.pkg.targets().iter().any(|t| t.is_lib());

    // Location of the dep-info file rustc itself emits (possibly hashed).
    let dep_info_name =
        if let Some(c_extra_filename) = build_runner.files().metadata(unit).c_extra_filename() {
            format!("{}-{}.d", unit.target.crate_name(), c_extra_filename)
        } else {
            format!("{}.d", unit.target.crate_name())
        };
    let rustc_dep_info_loc = root.join(dep_info_name);
    let dep_info_loc = fingerprint::dep_info_loc(build_runner, unit);

    let mut output_options = OutputOptions::new(build_runner, unit);
    let package_id = unit.pkg.package_id();
    let target = Target::clone(&unit.target);
    let mode = unit.mode;

    exec.init(build_runner, unit);
    let exec = exec.clone();

    let root_output = build_runner.files().host_dest().map(|v| v.to_path_buf());
    let build_dir = build_runner.bcx.ws.build_dir().into_path_unlocked();
    let pkg_root = unit.pkg.root().to_path_buf();
    let cwd = rustc
        .get_cwd()
        .unwrap_or_else(|| build_runner.bcx.gctx.cwd())
        .to_path_buf();
    let fingerprint_dir = build_runner.files().fingerprint_dir(unit);
    let script_metadatas = build_runner.find_build_script_metadatas(unit);
    let is_local = unit.is_local();
    let artifact = unit.artifact;
    let sbom_files = build_runner.sbom_output_files(unit)?;
    let sbom = build_sbom(build_runner, unit)?;

    // Diagnostics for fallible doc-scrape units are hidden unless --verbose.
    let hide_diagnostics_for_scrape_unit = build_runner.bcx.unit_can_fail_for_docscraping(unit)
        && !matches!(
            build_runner.bcx.gctx.shell().verbosity(),
            Verbosity::Verbose
        );
    let failed_scrape_diagnostic = hide_diagnostics_for_scrape_unit.then(|| {
        // If this unit is needed for doc-scraping, then we generate a diagnostic that
        // describes the set of reverse-dependencies that cause the unit to be needed.
        let target_desc = unit.target.description_named();
        let mut for_scrape_units = build_runner
            .bcx
            .scrape_units_have_dep_on(unit)
            .into_iter()
            .map(|unit| unit.target.description_named())
            .collect::<Vec<_>>();
        for_scrape_units.sort();
        let for_scrape_units = for_scrape_units.join(", ");
        make_failed_scrape_diagnostic(build_runner, unit, format_args!("failed to check {target_desc} in package `{name}` as a prerequisite for scraping examples from: {for_scrape_units}"))
    });
    if hide_diagnostics_for_scrape_unit {
        output_options.show_diagnostics = false;
    }
    let env_config = Arc::clone(build_runner.bcx.gctx.env_config()?);
    return Ok(Work::new(move |state| {
        // Artifacts are in a different location than typical units,
        // hence we must assure the crate- and target-dependent
        // directory is present.
        if artifact.is_true() {
            paths::create_dir_all(&root)?;
        }

        // Only at runtime have we discovered what the extra -L and -l
        // arguments are for native libraries, so we process those here. We
        // also need to be sure to add any -L paths for our plugins to the
        // dynamic library load path as a plugin's dynamic library may be
        // located somewhere in there.
        // Finally, if custom environment variables have been produced by
        // previous build scripts, we include them in the rustc invocation.
        if let Some(build_scripts) = build_scripts {
            let script_outputs = build_script_outputs.lock().unwrap();
            add_native_deps(
                &mut rustc,
                &script_outputs,
                &build_scripts,
                pass_l_flag,
                &target,
                current_id,
                mode,
            )?;
            if let Some(ref root_output) = root_output {
                add_plugin_deps(&mut rustc, &script_outputs, &build_scripts, root_output)?;
            }
            add_custom_flags(&mut rustc, &script_outputs, script_metadatas)?;
        }

        for output in outputs.iter() {
            // If there is both an rmeta and rlib, rustc will prefer to use the
            // rlib, even if it is older. Therefore, we must delete the rlib to
            // force using the new rmeta.
            if output.path.extension() == Some(OsStr::new("rmeta")) {
                let dst = root.join(&output.path).with_extension("rlib");
                if dst.exists() {
                    paths::remove_file(&dst)?;
                }
            }

            // Some linkers do not remove the executable, but truncate and modify it.
            // That results in the old hard-link being modified even after renamed.
            // We delete the old artifact here to prevent this behavior from confusing users.
            // See rust-lang/cargo#8348.
            if output.hardlink.is_some() && output.path.exists() {
                // Best-effort removal: a failure is only logged, not fatal.
                _ = paths::remove_file(&output.path).map_err(|e| {
                    tracing::debug!(
                        "failed to delete previous output file `{:?}`: {e:?}",
                        output.path
                    );
                });
            }
        }

        state.running(&rustc);
        let timestamp = paths::set_invocation_time(&fingerprint_dir)?;
        for file in sbom_files {
            tracing::debug!("writing sbom to {}", file.display());
            let outfile = BufWriter::new(paths::create(&file)?);
            serde_json::to_writer(outfile, &sbom)?;
        }

        let result = exec
            .exec(
                &rustc,
                package_id,
                &target,
                mode,
                &mut |line| on_stdout_line(state, line, package_id, &target),
                &mut |line| {
                    on_stderr_line(
                        state,
                        line,
                        package_id,
                        &manifest,
                        &target,
                        &mut output_options,
                    )
                },
            )
            .map_err(|e| {
                if output_options.errors_seen == 0 {
                    // If we didn't expect an error, do not require --verbose to fail.
                    // This is intended to debug
                    // https://github.com/rust-lang/crater/issues/733, where we are seeing
                    // Cargo exit unsuccessfully while seeming to not show any errors.
                    e
                } else {
                    verbose_if_simple_exit_code(e)
                }
            })
            .with_context(|| {
                // adapted from rustc_errors/src/lib.rs
                let warnings = match output_options.warnings_seen {
                    0 => String::new(),
                    1 => "; 1 warning emitted".to_string(),
                    count => format!("; {} warnings emitted", count),
                };
                let errors = match output_options.errors_seen {
                    0 => String::new(),
                    1 => " due to 1 previous error".to_string(),
                    count => format!(" due to {} previous errors", count),
                };
                let name = descriptive_pkg_name(&name, &target, &mode);
                format!("could not compile {name}{errors}{warnings}")
            });

        if let Err(e) = result {
            if let Some(diagnostic) = failed_scrape_diagnostic {
                state.warning(diagnostic);
            }

            return Err(e);
        }

        // Exec should never return with success *and* generate an error.
        debug_assert_eq!(output_options.errors_seen, 0);

        if rustc_dep_info_loc.exists() {
            fingerprint::translate_dep_info(
                &rustc_dep_info_loc,
                &dep_info_loc,
                &cwd,
                &pkg_root,
                &build_dir,
                &rustc,
                // Do not track source files in the fingerprint for registry dependencies.
                is_local,
                &env_config,
            )
            .with_context(|| {
                internal(format!(
                    "could not parse/generate dep info at: {}",
                    rustc_dep_info_loc.display()
                ))
            })?;
            // This mtime shift allows Cargo to detect if a source file was
            // modified in the middle of the build.
            paths::set_file_time_no_err(dep_info_loc, timestamp);
        }

        // This mtime shift for .rmeta is a workaround as rustc incremental build
        // since rust-lang/rust#114669 (1.90.0) skips unnecessary rmeta generation.
        //
        // The situation is like this:
        //
        // 1. When build script execution's external dependencies
        //    (rerun-if-changed, rerun-if-env-changed) got updated,
        //    the execution unit reran and got a newer mtime.
        // 2. rustc type-checked the associated crate, though with incremental
        //    compilation, no rmeta regeneration. Its `.rmeta` stays old.
        // 3. Run `cargo check` again. Cargo found build script execution had
        //    a new mtime than existing crate rmeta, so re-checking the crate.
        //    However the check is a no-op (input has no change), so stuck.
        if mode.is_check() {
            for output in outputs.iter() {
                paths::set_file_time_no_err(&output.path, timestamp);
            }
        }

        Ok(())
    }));

    // Add all relevant `-L` and `-l` flags from dependencies (now calculated and
    // present in `state`) to the command provided.
    fn add_native_deps(
        rustc: &mut ProcessBuilder,
        build_script_outputs: &BuildScriptOutputs,
        build_scripts: &BuildScripts,
        pass_l_flag: bool,
        target: &Target,
        current_id: PackageId,
        mode: CompileMode,
    ) -> CargoResult<()> {
        let mut library_paths = vec![];

        for key in build_scripts.to_link.iter() {
            let output = build_script_outputs.get(key.1).ok_or_else(|| {
                internal(format!(
                    "couldn't find build script output for {}/{}",
                    key.0, key.1
                ))
            })?;
            library_paths.extend(output.library_paths.iter());
        }

        // NOTE: This very intentionally does not use the derived ord from LibraryPath because we need to
        // retain relative ordering within the same type (i.e. not lexicographic). The use of a stable sort
        // is also important here because it ensures that paths of the same type retain the same relative
        // ordering (for an unstable sort to work here, the list would need to retain the idx of each element
        // and then sort by that idx when the type is equivalent).
        library_paths.sort_by_key(|p| match p {
            LibraryPath::CargoArtifact(_) => 0,
            LibraryPath::External(_) => 1,
        });

        for path in library_paths.iter() {
            rustc.arg("-L").arg(path.as_ref());
        }

        for key in build_scripts.to_link.iter() {
            let output = build_script_outputs.get(key.1).ok_or_else(|| {
                internal(format!(
                    "couldn't find build script output for {}/{}",
                    key.0, key.1
                ))
            })?;

            if key.0 == current_id {
                if pass_l_flag {
                    for name in output.library_links.iter() {
                        rustc.arg("-l").arg(name);
                    }
                }
            }

            for (lt, arg) in &output.linker_args {
                // There was an unintentional change where cdylibs were
                // allowed to be passed via transitive dependencies. This
                // clause should have been kept in the `if` block above. For
                // now, continue allowing it for cdylib only.
                // See https://github.com/rust-lang/cargo/issues/9562
                if lt.applies_to(target, mode)
                    && (key.0 == current_id || *lt == LinkArgTarget::Cdylib)
                {
                    rustc.arg("-C").arg(format!("link-arg={}", arg));
                }
            }
        }
        Ok(())
    }
}
604
605fn verbose_if_simple_exit_code(err: Error) -> Error {
606    // If a signal on unix (`code == None`) or an abnormal termination
607    // on Windows (codes like `0xC0000409`), don't hide the error details.
608    match err
609        .downcast_ref::<ProcessError>()
610        .as_ref()
611        .and_then(|perr| perr.code)
612    {
613        Some(n) if cargo_util::is_simple_exit_code(n) => VerboseError::new(err).into(),
614        _ => err,
615    }
616}
617
/// Creates a unit of [`Work`] that acquires an exclusive lock on `lock`
/// before a dirty compile job starts (see the fine-grain-locking handling
/// in [`compile`]).
fn prebuild_lock_exclusive(lock: LockKey) -> Work {
    Work::new(move |state| {
        state.lock_exclusive(&lock)?;
        Ok(())
    })
}
624
/// Creates a unit of [`Work`] that downgrades the exclusive lock on `lock`
/// back to a shared lock once a compile job has finished, letting other
/// cargo processes read the built unit.
fn downgrade_lock_to_shared(lock: LockKey) -> Work {
    Work::new(move |state| {
        state.downgrade_to_shared(&lock)?;
        Ok(())
    })
}
631
/// Link the compiled target (often of form `foo-{metadata_hash}`) to the
/// final target. This must happen during both "Fresh" and "Compile".
///
/// `fresh` is forwarded verbatim into the JSON artifact message so consumers
/// can tell replayed artifacts from newly built ones.
fn link_targets(
    build_runner: &mut BuildRunner<'_, '_>,
    unit: &Unit,
    fresh: bool,
) -> CargoResult<Work> {
    let bcx = build_runner.bcx;
    let outputs = build_runner.outputs(unit)?;
    let export_dir = build_runner.files().export_dir();
    let package_id = unit.pkg.package_id();
    let manifest_path = PathBuf::from(unit.pkg.manifest_path());
    let profile = unit.profile.clone();
    let unit_mode = unit.mode;
    let features = unit.features.iter().map(|s| s.to_string()).collect();
    let json_messages = bcx.build_config.emit_json();
    let executable = build_runner.get_executable(unit)?;
    let mut target = Target::clone(&unit.target);
    if let TargetSourcePath::Metabuild = target.src_path() {
        // Give it something to serialize.
        let path = unit
            .pkg
            .manifest()
            .metabuild_path(build_runner.bcx.ws.build_dir());
        target.set_src_path(TargetSourcePath::Path(path));
    }

    Ok(Work::new(move |state| {
        // If we're a "root crate", e.g., the target of this compilation, then we
        // hard link our outputs out of the `deps` directory into the directory
        // above. This means that `cargo build` will produce binaries in
        // `target/debug` which one probably expects.
        let mut destinations = vec![];
        for output in outputs.iter() {
            let src = &output.path;
            // This may have been a `cargo rustc` command which changes the
            // output, so the source may not actually exist.
            if !src.exists() {
                continue;
            }
            let Some(dst) = output.hardlink.as_ref() else {
                destinations.push(src.clone());
                continue;
            };
            destinations.push(dst.clone());
            paths::link_or_copy(src, dst)?;
            if let Some(ref path) = output.export_path {
                // NOTE(review): `export_path` is only set when an export dir
                // was configured, so this unwrap should not fire — confirm.
                let export_dir = export_dir.as_ref().unwrap();
                paths::create_dir_all(export_dir)?;

                paths::link_or_copy(src, path)?;
            }
        }

        if json_messages {
            // Map the profile's debuginfo setting onto the machine-readable form.
            let debuginfo = match profile.debuginfo.into_inner() {
                TomlDebugInfo::None => machine_message::ArtifactDebuginfo::Int(0),
                TomlDebugInfo::Limited => machine_message::ArtifactDebuginfo::Int(1),
                TomlDebugInfo::Full => machine_message::ArtifactDebuginfo::Int(2),
                TomlDebugInfo::LineDirectivesOnly => {
                    machine_message::ArtifactDebuginfo::Named("line-directives-only")
                }
                TomlDebugInfo::LineTablesOnly => {
                    machine_message::ArtifactDebuginfo::Named("line-tables-only")
                }
            };
            let art_profile = machine_message::ArtifactProfile {
                opt_level: profile.opt_level.as_str(),
                debuginfo: Some(debuginfo),
                debug_assertions: profile.debug_assertions,
                overflow_checks: profile.overflow_checks,
                test: unit_mode.is_any_test(),
            };

            let msg = machine_message::Artifact {
                package_id: package_id.to_spec(),
                manifest_path,
                target: &target,
                profile: art_profile,
                features,
                filenames: destinations,
                executable,
                fresh,
            }
            .to_json_string();
            state.stdout(msg)?;
        }
        Ok(())
    }))
}
722
723// For all plugin dependencies, add their -L paths (now calculated and present
724// in `build_script_outputs`) to the dynamic library load path for the command
725// to execute.
726fn add_plugin_deps(
727    rustc: &mut ProcessBuilder,
728    build_script_outputs: &BuildScriptOutputs,
729    build_scripts: &BuildScripts,
730    root_output: &Path,
731) -> CargoResult<()> {
732    let var = paths::dylib_path_envvar();
733    let search_path = rustc.get_env(var).unwrap_or_default();
734    let mut search_path = env::split_paths(&search_path).collect::<Vec<_>>();
735    for (pkg_id, metadata) in &build_scripts.plugins {
736        let output = build_script_outputs
737            .get(*metadata)
738            .ok_or_else(|| internal(format!("couldn't find libs for plugin dep {}", pkg_id)))?;
739        search_path.append(&mut filter_dynamic_search_path(
740            output.library_paths.iter().map(AsRef::as_ref),
741            root_output,
742        ));
743    }
744    let search_path = paths::join_paths(&search_path, var)?;
745    rustc.env(var, &search_path);
746    Ok(())
747}
748
/// Strips a `-L` kind prefix (e.g. `native=`, `framework=`) off a search-path
/// entry, returning the bare directory path.
///
/// Entries without a recognized kind prefix (or that are not valid UTF-8) are
/// returned unchanged.
fn get_dynamic_search_path(path: &Path) -> &Path {
    // Use a char pattern for the single-character separator.
    match path.to_str().and_then(|s| s.split_once('=')) {
        Some(("native" | "crate" | "dependency" | "framework" | "all", path)) => Path::new(path),
        _ => path,
    }
}
755
756// Determine paths to add to the dynamic search path from -L entries
757//
758// Strip off prefixes like "native=" or "framework=" and filter out directories
759// **not** inside our output directory since they are likely spurious and can cause
760// clashes with system shared libraries (issue #3366).
761fn filter_dynamic_search_path<'a, I>(paths: I, root_output: &Path) -> Vec<PathBuf>
762where
763    I: Iterator<Item = &'a PathBuf>,
764{
765    let mut search_path = vec![];
766    for dir in paths {
767        let dir = get_dynamic_search_path(dir);
768        if dir.starts_with(&root_output) {
769            search_path.push(dir.to_path_buf());
770        } else {
771            debug!(
772                "Not including path {} in runtime library search path because it is \
773                 outside target root {}",
774                dir.display(),
775                root_output.display()
776            );
777        }
778    }
779    search_path
780}
781
782/// Prepares flags and environments we can compute for a `rustc` invocation
783/// before the job queue starts compiling any unit.
784///
785/// This builds a static view of the invocation. Flags depending on the
786/// completion of other units will be added later in runtime, such as flags
787/// from build scripts.
788fn prepare_rustc(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> CargoResult<ProcessBuilder> {
789    let gctx = build_runner.bcx.gctx;
790    let is_primary = build_runner.is_primary_package(unit);
791    let is_workspace = build_runner.bcx.ws.is_member(&unit.pkg);
792
793    let mut base = build_runner
794        .compilation
795        .rustc_process(unit, is_primary, is_workspace)?;
796    build_base_args(build_runner, &mut base, unit)?;
797    if unit.pkg.manifest().is_embedded() {
798        if !gctx.cli_unstable().script {
799            anyhow::bail!(
800                "parsing `{}` requires `-Zscript`",
801                unit.pkg.manifest_path().display()
802            );
803        }
804        base.arg("-Z").arg("crate-attr=feature(frontmatter)");
805    }
806
807    base.inherit_jobserver(&build_runner.jobserver);
808    build_deps_args(&mut base, build_runner, unit)?;
809    add_cap_lints(build_runner.bcx, unit, &mut base);
810    if let Some(args) = build_runner.bcx.extra_args_for(unit) {
811        base.args(args);
812    }
813    base.args(&unit.rustflags);
814    if gctx.cli_unstable().binary_dep_depinfo {
815        base.arg("-Z").arg("binary-dep-depinfo");
816    }
817    if build_runner.bcx.gctx.cli_unstable().checksum_freshness {
818        base.arg("-Z").arg("checksum-hash-algorithm=blake3");
819    }
820
821    if is_primary {
822        base.env("CARGO_PRIMARY_PACKAGE", "1");
823        let file_list = build_runner.sbom_output_files(unit)?;
824        if !file_list.is_empty() {
825            let file_list = std::env::join_paths(file_list)?;
826            base.env("CARGO_SBOM_PATH", file_list);
827        }
828    }
829
830    if unit.target.is_test() || unit.target.is_bench() {
831        let tmp = build_runner
832            .files()
833            .layout(unit.kind)
834            .build_dir()
835            .prepare_tmp()?;
836        base.env("CARGO_TARGET_TMPDIR", tmp.display().to_string());
837    }
838
839    Ok(base)
840}
841
/// Prepares flags and environments we can compute for a `rustdoc` invocation
/// before the job queue starts compiling any unit.
///
/// This builds a static view of the invocation. Flags depending on the
/// completion of other units will be added later in runtime, such as flags
/// from build scripts.
fn prepare_rustdoc(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> CargoResult<ProcessBuilder> {
    let bcx = build_runner.bcx;
    // script_metadata is not needed here, it is only for tests.
    let mut rustdoc = build_runner.compilation.rustdoc_process(unit, None)?;
    // Embedded manifests (single-file packages) are gated behind `-Zscript`.
    if unit.pkg.manifest().is_embedded() {
        if !bcx.gctx.cli_unstable().script {
            anyhow::bail!(
                "parsing `{}` requires `-Zscript`",
                unit.pkg.manifest_path().display()
            );
        }
        rustdoc.arg("-Z").arg("crate-attr=feature(frontmatter)");
    }
    rustdoc.inherit_jobserver(&build_runner.jobserver);
    let crate_name = unit.target.crate_name();
    rustdoc.arg("--crate-name").arg(&crate_name);
    add_path_args(bcx.ws, unit, &mut rustdoc);
    add_cap_lints(bcx, unit, &mut rustdoc);

    unit.kind.add_target_arg(&mut rustdoc);
    let doc_dir = build_runner.files().output_dir(unit);
    rustdoc.arg("-o").arg(&doc_dir);
    rustdoc.args(&features_args(unit));
    rustdoc.args(&check_cfg_args(unit));

    add_error_format_and_color(build_runner, &mut rustdoc);
    add_allow_features(build_runner, &mut rustdoc);

    // Select the `--emit` kinds depending on which unstable dep-info /
    // mergeable-CCI features are active; the combinations matter.
    if build_runner.bcx.gctx.cli_unstable().rustdoc_depinfo {
        // toolchain-shared-resources is required for keeping the shared styling resources
        // invocation-specific is required for keeping the original rustdoc emission
        let mut arg = if build_runner.bcx.gctx.cli_unstable().rustdoc_mergeable_info {
            // toolchain resources are written at the end, at the same time as merging
            OsString::from("--emit=invocation-specific,dep-info=")
        } else {
            // if not using mergeable CCI, everything is written every time
            OsString::from("--emit=toolchain-shared-resources,invocation-specific,dep-info=")
        };
        arg.push(rustdoc_dep_info_loc(build_runner, unit));
        rustdoc.arg(arg);

        if build_runner.bcx.gctx.cli_unstable().checksum_freshness {
            rustdoc.arg("-Z").arg("checksum-hash-algorithm=blake3");
        }

        rustdoc.arg("-Zunstable-options");
    } else if build_runner.bcx.gctx.cli_unstable().rustdoc_mergeable_info {
        // toolchain resources are written at the end, at the same time as merging
        rustdoc.arg("--emit=invocation-specific");
        rustdoc.arg("-Zunstable-options");
    }

    if build_runner.bcx.gctx.cli_unstable().rustdoc_mergeable_info {
        // write out mergeable data to be imported
        rustdoc.arg("--merge=none");
        let mut arg = OsString::from("--parts-out-dir=");
        // `-Zrustdoc-mergeable-info` always uses the new layout.
        arg.push(build_runner.files().out_dir_new_layout(unit));
        rustdoc.arg(arg);
    }

    if let Some(trim_paths) = unit.profile.trim_paths.as_ref() {
        trim_paths_args_rustdoc(&mut rustdoc, build_runner, unit, trim_paths)?;
    }

    rustdoc.args(unit.pkg.manifest().lint_rustflags());

    let metadata = build_runner.metadata_for_doc_units[unit];
    rustdoc
        .arg("-C")
        .arg(format!("metadata={}", metadata.c_metadata()));

    // Extra flags for units that scrape example code from the workspace.
    if unit.mode.is_doc_scrape() {
        debug_assert!(build_runner.bcx.scrape_units.contains(unit));

        if unit.target.is_test() {
            rustdoc.arg("--scrape-tests");
        }

        rustdoc.arg("-Zunstable-options");

        rustdoc
            .arg("--scrape-examples-output-path")
            .arg(scrape_output_path(build_runner, unit)?);

        // Only scrape example for items from crates in the workspace, to reduce generated file size
        for pkg in build_runner.bcx.packages.packages() {
            let names = pkg
                .targets()
                .iter()
                .map(|target| target.crate_name())
                .collect::<HashSet<_>>();
            for name in names {
                rustdoc.arg("--scrape-examples-target-crate").arg(name);
            }
        }
    }

    if should_include_scrape_units(build_runner.bcx, unit) {
        rustdoc.arg("-Zunstable-options");
    }

    build_deps_args(&mut rustdoc, build_runner, unit)?;
    rustdoc::add_root_urls(build_runner, unit, &mut rustdoc)?;

    rustdoc::add_output_format(build_runner, &mut rustdoc)?;

    if let Some(args) = build_runner.bcx.extra_args_for(unit) {
        rustdoc.args(args);
    }
    rustdoc.args(&unit.rustdocflags);

    // `--crate-version` may already come from RUSTDOCFLAGS or extra args;
    // only append our own if the user didn't supply one.
    if !crate_version_flag_already_present(&rustdoc) {
        append_crate_version_flag(unit, &mut rustdoc);
    }

    Ok(rustdoc)
}
966
/// Creates a unit of work invoking `rustdoc` for documenting the `unit`.
fn rustdoc(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResult<Work> {
    let mut rustdoc = prepare_rustdoc(build_runner, unit)?;

    let crate_name = unit.target.crate_name();
    let doc_dir = build_runner.files().output_dir(unit);
    // Create the documentation directory ahead of time as rustdoc currently has
    // a bug where concurrent invocations will race to create this directory if
    // it doesn't already exist.
    paths::create_dir_all(&doc_dir)?;

    // Capture everything the deferred `Work` closure below will need, since
    // the closure must own its data and outlives this borrow of `build_runner`.
    let target_desc = unit.target.description_named();
    let name = unit.pkg.name();
    let build_script_outputs = Arc::clone(&build_runner.build_script_outputs);
    let package_id = unit.pkg.package_id();
    let target = Target::clone(&unit.target);
    let manifest = ManifestErrorContext::new(build_runner, unit);

    let rustdoc_dep_info_loc = rustdoc_dep_info_loc(build_runner, unit);
    let dep_info_loc = fingerprint::dep_info_loc(build_runner, unit);
    let build_dir = build_runner.bcx.ws.build_dir().into_path_unlocked();
    let pkg_root = unit.pkg.root().to_path_buf();
    let cwd = rustdoc
        .get_cwd()
        .unwrap_or_else(|| build_runner.bcx.gctx.cwd())
        .to_path_buf();
    let fingerprint_dir = build_runner.files().fingerprint_dir(unit);
    let is_local = unit.is_local();
    let env_config = Arc::clone(build_runner.bcx.gctx.env_config()?);
    let rustdoc_depinfo_enabled = build_runner.bcx.gctx.cli_unstable().rustdoc_depinfo;

    let mut output_options = OutputOptions::new(build_runner, unit);
    let script_metadatas = build_runner.find_build_script_metadatas(unit);
    // Precompute the scrape-unit output paths; whether each one is actually
    // passed to rustdoc is decided later, once scraping has succeeded/failed.
    let scrape_outputs = if should_include_scrape_units(build_runner.bcx, unit) {
        Some(
            build_runner
                .bcx
                .scrape_units
                .iter()
                .map(|unit| {
                    Ok((
                        build_runner.files().metadata(unit).unit_id(),
                        scrape_output_path(build_runner, unit)?,
                    ))
                })
                .collect::<CargoResult<HashMap<_, _>>>()?,
        )
    } else {
        None
    };

    let failed_scrape_units = Arc::clone(&build_runner.failed_scrape_units);
    // Scrape units that are allowed to fail have their diagnostics suppressed
    // unless the user asked for verbose output.
    let hide_diagnostics_for_scrape_unit = build_runner.bcx.unit_can_fail_for_docscraping(unit)
        && !matches!(
            build_runner.bcx.gctx.shell().verbosity(),
            Verbosity::Verbose
        );
    let failed_scrape_diagnostic = hide_diagnostics_for_scrape_unit.then(|| {
        make_failed_scrape_diagnostic(
            build_runner,
            unit,
            format_args!("failed to scan {target_desc} in package `{name}` for example code usage"),
        )
    });
    if hide_diagnostics_for_scrape_unit {
        output_options.show_diagnostics = false;
    }

    Ok(Work::new(move |state| {
        // Flags from build scripts are only known once those scripts have run.
        add_custom_flags(
            &mut rustdoc,
            &build_script_outputs.lock().unwrap(),
            script_metadatas,
        )?;

        // Add the output of scraped examples to the rustdoc command.
        // This action must happen after the unit's dependencies have finished,
        // because some of those deps may be Docscrape units which have failed.
        // So we dynamically determine which `--with-examples` flags to pass here.
        if let Some(scrape_outputs) = scrape_outputs {
            let failed_scrape_units = failed_scrape_units.lock().unwrap();
            for (metadata, output_path) in &scrape_outputs {
                if !failed_scrape_units.contains(metadata) {
                    rustdoc.arg("--with-examples").arg(output_path);
                }
            }
        }

        let crate_dir = doc_dir.join(&crate_name);
        if crate_dir.exists() {
            // Remove output from a previous build. This ensures that stale
            // files for removed items are removed.
            debug!("removing pre-existing doc directory {:?}", crate_dir);
            paths::remove_dir_all(crate_dir)?;
        }
        state.running(&rustdoc);
        let timestamp = paths::set_invocation_time(&fingerprint_dir)?;

        let result = rustdoc
            .exec_with_streaming(
                &mut |line| on_stdout_line(state, line, package_id, &target),
                &mut |line| {
                    on_stderr_line(
                        state,
                        line,
                        package_id,
                        &manifest,
                        &target,
                        &mut output_options,
                    )
                },
                false,
            )
            .map_err(verbose_if_simple_exit_code)
            .with_context(|| format!("could not document `{}`", name));

        if let Err(e) = result {
            if let Some(diagnostic) = failed_scrape_diagnostic {
                state.warning(diagnostic);
            }

            return Err(e);
        }

        if rustdoc_depinfo_enabled && rustdoc_dep_info_loc.exists() {
            fingerprint::translate_dep_info(
                &rustdoc_dep_info_loc,
                &dep_info_loc,
                &cwd,
                &pkg_root,
                &build_dir,
                &rustdoc,
                // Should we track source file for doc gen?
                is_local,
                &env_config,
            )
            .with_context(|| {
                internal(format_args!(
                    "could not parse/generate dep info at: {}",
                    rustdoc_dep_info_loc.display()
                ))
            })?;
            // This mtime shift allows Cargo to detect if a source file was
            // modified in the middle of the build.
            paths::set_file_time_no_err(dep_info_loc, timestamp);
        }

        Ok(())
    }))
}
1117
1118// The --crate-version flag could have already been passed in RUSTDOCFLAGS
1119// or as an extra compiler argument for rustdoc
1120fn crate_version_flag_already_present(rustdoc: &ProcessBuilder) -> bool {
1121    rustdoc.get_args().any(|flag| {
1122        flag.to_str()
1123            .map_or(false, |flag| flag.starts_with(RUSTDOC_CRATE_VERSION_FLAG))
1124    })
1125}
1126
1127fn append_crate_version_flag(unit: &Unit, rustdoc: &mut ProcessBuilder) {
1128    rustdoc
1129        .arg(RUSTDOC_CRATE_VERSION_FLAG)
1130        .arg(unit.pkg.version().to_string());
1131}
1132
1133/// Adds [`--cap-lints`] to the command to execute.
1134///
1135/// [`--cap-lints`]: https://doc.rust-lang.org/nightly/rustc/lints/levels.html#capping-lints
1136fn add_cap_lints(bcx: &BuildContext<'_, '_>, unit: &Unit, cmd: &mut ProcessBuilder) {
1137    // If this is an upstream dep we don't want warnings from, turn off all
1138    // lints.
1139    if !unit.show_warnings(bcx.gctx) {
1140        cmd.arg("--cap-lints").arg("allow");
1141
1142    // If this is an upstream dep but we *do* want warnings, make sure that they
1143    // don't fail compilation.
1144    } else if !unit.is_local() {
1145        cmd.arg("--cap-lints").arg("warn");
1146    }
1147}
1148
1149/// Forwards [`-Zallow-features`] if it is set for cargo.
1150///
1151/// [`-Zallow-features`]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#allow-features
1152fn add_allow_features(build_runner: &BuildRunner<'_, '_>, cmd: &mut ProcessBuilder) {
1153    if let Some(allow) = &build_runner.bcx.gctx.cli_unstable().allow_features {
1154        use std::fmt::Write;
1155        let mut arg = String::from("-Zallow-features=");
1156        for f in allow {
1157            let _ = write!(&mut arg, "{f},");
1158        }
1159        cmd.arg(arg.trim_end_matches(','));
1160    }
1161}
1162
1163/// Adds [`--error-format`] to the command to execute.
1164///
1165/// Cargo always uses JSON output. This has several benefits, such as being
1166/// easier to parse, handles changing formats (for replaying cached messages),
1167/// ensures atomic output (so messages aren't interleaved), allows for
1168/// intercepting messages like rmeta artifacts, etc. rustc includes a
1169/// "rendered" field in the JSON message with the message properly formatted,
1170/// which Cargo will extract and display to the user.
1171///
1172/// [`--error-format`]: https://doc.rust-lang.org/nightly/rustc/command-line-arguments.html#--error-format-control-how-errors-are-produced
1173fn add_error_format_and_color(build_runner: &BuildRunner<'_, '_>, cmd: &mut ProcessBuilder) {
1174    let enable_timings =
1175        build_runner.bcx.gctx.cli_unstable().section_timings && build_runner.bcx.logger.is_some();
1176    if enable_timings {
1177        cmd.arg("-Zunstable-options");
1178    }
1179
1180    cmd.arg("--error-format=json");
1181    let mut json = String::from("--json=diagnostic-rendered-ansi,artifacts,future-incompat");
1182
1183    if let MessageFormat::Short | MessageFormat::Json { short: true, .. } =
1184        build_runner.bcx.build_config.message_format
1185    {
1186        json.push_str(",diagnostic-short");
1187    } else if build_runner.bcx.gctx.shell().err_unicode()
1188        && build_runner.bcx.gctx.cli_unstable().rustc_unicode
1189    {
1190        json.push_str(",diagnostic-unicode");
1191    }
1192
1193    if enable_timings {
1194        json.push_str(",timings");
1195    }
1196
1197    cmd.arg(json);
1198
1199    let gctx = build_runner.bcx.gctx;
1200    if let Some(width) = gctx.shell().err_width().diagnostic_terminal_width() {
1201        cmd.arg(format!("--diagnostic-width={width}"));
1202    }
1203}
1204
/// Adds essential rustc flags and environment variables to the command to execute.
///
/// This covers everything derivable from the unit's profile and target:
/// crate name/type, `--emit` kinds, opt level, panic strategy, debuginfo,
/// overflow checks, metadata, output dir, target, linker, and so on.
fn build_base_args(
    build_runner: &BuildRunner<'_, '_>,
    cmd: &mut ProcessBuilder,
    unit: &Unit,
) -> CargoResult<()> {
    // Build-script execution units are handled elsewhere, never here.
    assert!(!unit.mode.is_run_custom_build());

    let bcx = build_runner.bcx;
    // Destructure the profile so any newly-added field must be consciously
    // handled (or explicitly ignored via `..`).
    let Profile {
        ref opt_level,
        codegen_backend,
        codegen_units,
        debuginfo,
        debug_assertions,
        split_debuginfo,
        overflow_checks,
        rpath,
        ref panic,
        incremental,
        strip,
        rustflags: profile_rustflags,
        trim_paths,
        hint_mostly_unused: profile_hint_mostly_unused,
        ..
    } = unit.profile.clone();
    let hints = unit.pkg.hints().cloned().unwrap_or_default();
    let test = unit.mode.is_any_test();

    // Warning helper prefixed with `name@version:` for this package.
    let warn = |msg: &str| {
        bcx.gctx.shell().warn(format!(
            "{}@{}: {msg}",
            unit.pkg.package_id().name(),
            unit.pkg.package_id().version()
        ))
    };
    // Like `warn`, but silenced when this unit's warnings are capped off.
    let unit_capped_warn = |msg: &str| {
        if unit.show_warnings(bcx.gctx) {
            warn(msg)
        } else {
            Ok(())
        }
    };

    cmd.arg("--crate-name").arg(&unit.target.crate_name());

    let edition = unit.target.edition();
    edition.cmd_edition_arg(cmd);

    add_path_args(bcx.ws, unit, cmd);
    add_error_format_and_color(build_runner, cmd);
    add_allow_features(build_runner, cmd);

    // Emit one `--crate-type` per type; remember if any is a dylib since that
    // influences `-C prefer-dynamic` below. Tests are always compiled as bins.
    let mut contains_dy_lib = false;
    if !test {
        for crate_type in &unit.target.rustc_crate_types() {
            cmd.arg("--crate-type").arg(crate_type.as_str());
            contains_dy_lib |= crate_type == &CrateType::Dylib;
        }
    }

    if unit.mode.is_check() {
        cmd.arg("--emit=dep-info,metadata");
    } else if build_runner.bcx.gctx.cli_unstable().no_embed_metadata {
        // Nightly rustc supports the -Zembed-metadata=no flag, which tells it to avoid including
        // full metadata in rlib/dylib artifacts, to save space on disk. In this case, metadata
        // will only be stored in .rmeta files.
        // When we use this flag, we should also pass --emit=metadata to all artifacts that
        // contain useful metadata (rlib/dylib/proc macros), so that a .rmeta file is actually
        // generated. If we didn't do this, the full metadata would not get written anywhere.
        // However, we do not want to pass --emit=metadata to artifacts that never produce useful
        // metadata, such as binaries, because that would just unnecessarily create empty .rmeta
        // files on disk.
        if unit.benefits_from_no_embed_metadata() {
            cmd.arg("--emit=dep-info,metadata,link");
            cmd.args(&["-Z", "embed-metadata=no"]);
        } else {
            cmd.arg("--emit=dep-info,link");
        }
    } else {
        // If we don't use -Zembed-metadata=no, we emit .rmeta files only for rlib outputs.
        // This metadata may be used in this session for a pipelined compilation, or it may
        // be used in a future Cargo session as part of a pipelined compile.
        if !unit.requires_upstream_objects() {
            cmd.arg("--emit=dep-info,metadata,link");
        } else {
            cmd.arg("--emit=dep-info,link");
        }
    }

    // Host artifacts (proc-macros/plugins) and non-primary dylib deps link
    // dynamically against the standard library.
    let prefer_dynamic = (unit.target.for_host() && !unit.target.is_custom_build())
        || (contains_dy_lib && !build_runner.is_primary_package(unit));
    if prefer_dynamic {
        cmd.arg("-C").arg("prefer-dynamic");
    }

    // "0" is rustc's default; omit the flag to keep command lines short.
    if opt_level.as_str() != "0" {
        cmd.arg("-C").arg(&format!("opt-level={}", opt_level));
    }

    // "unwind" is rustc's default panic strategy; only pass the others.
    if *panic != PanicStrategy::Unwind {
        cmd.arg("-C").arg(format!("panic={}", panic));
    }
    if *panic == PanicStrategy::ImmediateAbort {
        cmd.arg("-Z").arg("unstable-options");
    }

    cmd.args(&lto_args(build_runner, unit));

    if let Some(backend) = codegen_backend {
        cmd.arg("-Z").arg(&format!("codegen-backend={}", backend));
    }

    if let Some(n) = codegen_units {
        cmd.arg("-C").arg(&format!("codegen-units={}", n));
    }

    let debuginfo = debuginfo.into_inner();
    // Shorten the number of arguments if possible.
    if debuginfo != TomlDebugInfo::None {
        cmd.arg("-C").arg(format!("debuginfo={debuginfo}"));
        // This is generally just an optimization on build time so if we don't
        // pass it then it's ok. The values for the flag (off, packed, unpacked)
        // may be supported or not depending on the platform, so availability is
        // checked per-value. For example, at the time of writing this code, on
        // Windows the only stable valid value for split-debuginfo is "packed",
        // while on Linux "unpacked" is also stable.
        if let Some(split) = split_debuginfo {
            if build_runner
                .bcx
                .target_data
                .info(unit.kind)
                .supports_debuginfo_split(split)
            {
                cmd.arg("-C").arg(format!("split-debuginfo={split}"));
            }
        }
    }

    if let Some(trim_paths) = trim_paths {
        trim_paths_args(cmd, build_runner, unit, &trim_paths)?;
    }

    cmd.args(unit.pkg.manifest().lint_rustflags());
    cmd.args(&profile_rustflags);

    // `-C overflow-checks` is implied by the setting of `-C debug-assertions`,
    // so we only need to provide `-C overflow-checks` if it differs from
    // the value of `-C debug-assertions` we would provide.
    if opt_level.as_str() != "0" {
        if debug_assertions {
            cmd.args(&["-C", "debug-assertions=on"]);
            if !overflow_checks {
                cmd.args(&["-C", "overflow-checks=off"]);
            }
        } else if overflow_checks {
            cmd.args(&["-C", "overflow-checks=on"]);
        }
    } else if !debug_assertions {
        cmd.args(&["-C", "debug-assertions=off"]);
        if overflow_checks {
            cmd.args(&["-C", "overflow-checks=on"]);
        }
    } else if !overflow_checks {
        cmd.args(&["-C", "overflow-checks=off"]);
    }

    if test && unit.target.harness() {
        cmd.arg("--test");

        // Cargo has historically never compiled `--test` binaries with
        // `panic=abort` because the `test` crate itself didn't support it.
        // Support is now upstream, however, but requires an unstable flag to be
        // passed when compiling the test. We require, in Cargo, an unstable
        // flag to pass to rustc, so register that here. Eventually this flag
        // will simply not be needed when the behavior is stabilized in the Rust
        // compiler itself.
        if *panic == PanicStrategy::Abort || *panic == PanicStrategy::ImmediateAbort {
            cmd.arg("-Z").arg("panic-abort-tests");
        }
    } else if test {
        // `harness = false` tests only get `--cfg test`, not the harness.
        cmd.arg("--cfg").arg("test");
    }

    cmd.args(&features_args(unit));
    cmd.args(&check_cfg_args(unit));

    let meta = build_runner.files().metadata(unit);
    cmd.arg("-C")
        .arg(&format!("metadata={}", meta.c_metadata()));
    if let Some(c_extra_filename) = meta.c_extra_filename() {
        cmd.arg("-C")
            .arg(&format!("extra-filename=-{c_extra_filename}"));
    }

    if rpath {
        cmd.arg("-C").arg("rpath");
    }

    cmd.arg("--out-dir")
        .arg(&build_runner.files().output_dir(unit));

    unit.kind.add_target_arg(cmd);

    add_codegen_linker(cmd, build_runner, unit, bcx.gctx.target_applies_to_host()?);

    if incremental {
        add_codegen_incremental(cmd, build_runner, unit)
    }

    // `hints.mostly-unused` can come from either the profile or the package
    // manifest; both are gated behind `-Zprofile-hint-mostly-unused`.
    let pkg_hint_mostly_unused = match hints.mostly_unused {
        None => None,
        Some(toml::Value::Boolean(b)) => Some(b),
        Some(v) => {
            unit_capped_warn(&format!(
                "ignoring unsupported value type ({}) for 'hints.mostly-unused', which expects a boolean",
                v.type_str()
            ))?;
            None
        }
    };
    if profile_hint_mostly_unused
        .or(pkg_hint_mostly_unused)
        .unwrap_or(false)
    {
        if bcx.gctx.cli_unstable().profile_hint_mostly_unused {
            cmd.arg("-Zhint-mostly-unused");
        } else {
            if profile_hint_mostly_unused.is_some() {
                // Profiles come from the top-level unit, so we don't use `unit_capped_warn` here.
                warn(
                    "ignoring 'hint-mostly-unused' profile option, pass `-Zprofile-hint-mostly-unused` to enable it",
                )?;
            } else if pkg_hint_mostly_unused.is_some() {
                unit_capped_warn(
                    "ignoring 'hints.mostly-unused', pass `-Zprofile-hint-mostly-unused` to enable it",
                )?;
            }
        }
    }

    let strip = strip.into_inner();
    if strip != StripInner::None {
        cmd.arg("-C").arg(format!("strip={}", strip));
    }

    if unit.is_std {
        // -Zforce-unstable-if-unmarked prevents the accidental use of
        // unstable crates within the sysroot (such as "extern crate libc" or
        // any non-public crate in the sysroot).
        //
        // RUSTC_BOOTSTRAP allows unstable features on stable.
        cmd.arg("-Z")
            .arg("force-unstable-if-unmarked")
            .env("RUSTC_BOOTSTRAP", "1");
    }

    Ok(())
}
1464
1465/// All active features for the unit passed as `--cfg features=<feature-name>`.
1466fn features_args(unit: &Unit) -> Vec<OsString> {
1467    let mut args = Vec::with_capacity(unit.features.len() * 2);
1468
1469    for feat in &unit.features {
1470        args.push(OsString::from("--cfg"));
1471        args.push(OsString::from(format!("feature=\"{}\"", feat)));
1472    }
1473
1474    args
1475}
1476
1477/// Like [`trim_paths_args`] but for rustdoc invocations.
1478fn trim_paths_args_rustdoc(
1479    cmd: &mut ProcessBuilder,
1480    build_runner: &BuildRunner<'_, '_>,
1481    unit: &Unit,
1482    trim_paths: &TomlTrimPaths,
1483) -> CargoResult<()> {
1484    match trim_paths {
1485        // rustdoc supports diagnostics trimming only.
1486        TomlTrimPaths::Values(values) if !values.contains(&TomlTrimPathsValue::Diagnostics) => {
1487            return Ok(());
1488        }
1489        _ => {}
1490    }
1491
1492    // feature gate was checked during manifest/config parsing.
1493    cmd.arg("-Zunstable-options");
1494
1495    // Order of `--remap-path-prefix` flags is important for `-Zbuild-std`.
1496    // We want to show `/rustc/<hash>/library/std` instead of `std-0.0.0`.
1497    cmd.arg(package_remap(build_runner, unit));
1498    cmd.arg(build_dir_remap(build_runner));
1499    cmd.arg(sysroot_remap(build_runner, unit));
1500
1501    Ok(())
1502}
1503
1504/// Generates the `--remap-path-scope` and `--remap-path-prefix` for [RFC 3127].
1505/// See also unstable feature [`-Ztrim-paths`].
1506///
1507/// [RFC 3127]: https://rust-lang.github.io/rfcs/3127-trim-paths.html
1508/// [`-Ztrim-paths`]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#profile-trim-paths-option
1509fn trim_paths_args(
1510    cmd: &mut ProcessBuilder,
1511    build_runner: &BuildRunner<'_, '_>,
1512    unit: &Unit,
1513    trim_paths: &TomlTrimPaths,
1514) -> CargoResult<()> {
1515    if trim_paths.is_none() {
1516        return Ok(());
1517    }
1518
1519    // feature gate was checked during manifest/config parsing.
1520    cmd.arg(format!("--remap-path-scope={trim_paths}"));
1521
1522    // Order of `--remap-path-prefix` flags is important for `-Zbuild-std`.
1523    // We want to show `/rustc/<hash>/library/std` instead of `std-0.0.0`.
1524    cmd.arg(package_remap(build_runner, unit));
1525    cmd.arg(build_dir_remap(build_runner));
1526    cmd.arg(sysroot_remap(build_runner, unit));
1527
1528    Ok(())
1529}
1530
1531/// Path prefix remap rules for sysroot.
1532///
1533/// This remap logic aligns with rustc:
1534/// <https://github.com/rust-lang/rust/blob/c2ef3516/src/bootstrap/src/lib.rs#L1113-L1116>
1535fn sysroot_remap(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> OsString {
1536    let mut remap = OsString::from("--remap-path-prefix=");
1537    remap.push({
1538        // See also `detect_sysroot_src_path()`.
1539        let mut sysroot = build_runner.bcx.target_data.info(unit.kind).sysroot.clone();
1540        sysroot.push("lib");
1541        sysroot.push("rustlib");
1542        sysroot.push("src");
1543        sysroot.push("rust");
1544        sysroot
1545    });
1546    remap.push("=");
1547    remap.push("/rustc/");
1548    if let Some(commit_hash) = build_runner.bcx.rustc().commit_hash.as_ref() {
1549        remap.push(commit_hash);
1550    } else {
1551        remap.push(build_runner.bcx.rustc().version.to_string());
1552    }
1553    remap
1554}
1555
1556/// Path prefix remap rules for dependencies.
1557///
1558/// * Git dependencies: remove `~/.cargo/git/checkouts` prefix.
1559/// * Registry dependencies: remove `~/.cargo/registry/src` prefix.
1560/// * Others (e.g. path dependencies):
1561///     * relative paths to workspace root if inside the workspace directory.
1562///     * otherwise remapped to `<pkg>-<version>`.
1563fn package_remap(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> OsString {
1564    let pkg_root = unit.pkg.root();
1565    let ws_root = build_runner.bcx.ws.root();
1566    let mut remap = OsString::from("--remap-path-prefix=");
1567    let source_id = unit.pkg.package_id().source_id();
1568    if source_id.is_git() {
1569        remap.push(
1570            build_runner
1571                .bcx
1572                .gctx
1573                .git_checkouts_path()
1574                .as_path_unlocked(),
1575        );
1576        remap.push("=");
1577    } else if source_id.is_registry() {
1578        remap.push(
1579            build_runner
1580                .bcx
1581                .gctx
1582                .registry_source_path()
1583                .as_path_unlocked(),
1584        );
1585        remap.push("=");
1586    } else if pkg_root.strip_prefix(ws_root).is_ok() {
1587        remap.push(ws_root);
1588        remap.push("=."); // remap to relative rustc work dir explicitly
1589    } else {
1590        remap.push(pkg_root);
1591        remap.push("=");
1592        remap.push(unit.pkg.name());
1593        remap.push("-");
1594        remap.push(unit.pkg.version().to_string());
1595    }
1596    remap
1597}
1598
1599/// Remap all paths pointing to `build.build-dir`,
1600/// i.e., `[BUILD_DIR]/debug/deps/foo-[HASH].dwo` would be remapped to
1601/// `/cargo/build-dir/debug/deps/foo-[HASH].dwo`
1602/// (note the `/cargo/build-dir` prefix).
1603///
1604/// This covers scenarios like:
1605///
1606/// * Build script generated code. For example, a build script may call `file!`
1607///   macros, and the associated crate uses [`include!`] to include the expanded
1608///   [`file!`] macro in-place via the `OUT_DIR` environment.
1609/// * On Linux, `DW_AT_GNU_dwo_name` that contains paths to split debuginfo
1610///   files (dwp and dwo).
1611fn build_dir_remap(build_runner: &BuildRunner<'_, '_>) -> OsString {
1612    let build_dir = build_runner.bcx.ws.build_dir();
1613    let mut remap = OsString::from("--remap-path-prefix=");
1614    remap.push(build_dir.as_path_unlocked());
1615    remap.push("=/cargo/build-dir");
1616    remap
1617}
1618
1619/// Generates the `--check-cfg` arguments for the `unit`.
1620fn check_cfg_args(unit: &Unit) -> Vec<OsString> {
1621    // The routine below generates the --check-cfg arguments. Our goals here are to
1622    // enable the checking of conditionals and pass the list of declared features.
1623    //
1624    // In the simplified case, it would resemble something like this:
1625    //
1626    //   --check-cfg=cfg() --check-cfg=cfg(feature, values(...))
1627    //
1628    // but having `cfg()` is redundant with the second argument (as well-known names
1629    // and values are implicitly enabled when one or more `--check-cfg` argument is
1630    // passed) so we don't emit it and just pass:
1631    //
1632    //   --check-cfg=cfg(feature, values(...))
1633    //
1634    // This way, even if there are no declared features, the config `feature` will
1635    // still be expected, meaning users would get "unexpected value" instead of name.
1636    // This wasn't always the case, see rust-lang#119930 for some details.
1637
1638    let gross_cap_estimation = unit.pkg.summary().features().len() * 7 + 25;
1639    let mut arg_feature = OsString::with_capacity(gross_cap_estimation);
1640
1641    arg_feature.push("cfg(feature, values(");
1642    for (i, feature) in unit.pkg.summary().features().keys().enumerate() {
1643        if i != 0 {
1644            arg_feature.push(", ");
1645        }
1646        arg_feature.push("\"");
1647        arg_feature.push(feature);
1648        arg_feature.push("\"");
1649    }
1650    arg_feature.push("))");
1651
1652    // In addition to the package features, we also include the `test` cfg (since
1653    // compiler-team#785, as to be able to someday apply it conditionally), as well
1654    // the `docsrs` cfg from the docs.rs service.
1655    //
1656    // We include `docsrs` here (in Cargo) instead of rustc, since there is a much closer
1657    // relationship between Cargo and docs.rs than rustc and docs.rs. In particular, all
1658    // users of docs.rs use Cargo, but not all users of rustc (like Rust-for-Linux) use docs.rs.
1659
1660    vec![
1661        OsString::from("--check-cfg"),
1662        OsString::from("cfg(docsrs,test)"),
1663        OsString::from("--check-cfg"),
1664        arg_feature,
1665    ]
1666}
1667
1668/// Adds LTO related codegen flags.
1669fn lto_args(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> Vec<OsString> {
1670    let mut result = Vec::new();
1671    let mut push = |arg: &str| {
1672        result.push(OsString::from("-C"));
1673        result.push(OsString::from(arg));
1674    };
1675    match build_runner.lto[unit] {
1676        lto::Lto::Run(None) => push("lto"),
1677        lto::Lto::Run(Some(s)) => push(&format!("lto={}", s)),
1678        lto::Lto::Off => {
1679            push("lto=off");
1680            push("embed-bitcode=no");
1681        }
1682        lto::Lto::ObjectAndBitcode => {} // this is rustc's default
1683        lto::Lto::OnlyBitcode => push("linker-plugin-lto"),
1684        lto::Lto::OnlyObject => push("embed-bitcode=no"),
1685    }
1686    result
1687}
1688
/// Adds dependency-relevant rustc flags and environment variables
/// to the command to execute, such as [`-L`] and [`--extern`].
///
/// Also sets `OUT_DIR` (and per-script `<NAME>_OUT_DIR` when the
/// `multiple-build-scripts` unstable feature is enabled) for units that
/// depend on build-script runs, plus artifact-dependency env vars.
///
/// [`-L`]: https://doc.rust-lang.org/nightly/rustc/command-line-arguments.html#-l-add-a-directory-to-the-library-search-path
/// [`--extern`]: https://doc.rust-lang.org/nightly/rustc/command-line-arguments.html#--extern-specify-where-an-external-library-is-located
fn build_deps_args(
    cmd: &mut ProcessBuilder,
    build_runner: &BuildRunner<'_, '_>,
    unit: &Unit,
) -> CargoResult<()> {
    let bcx = build_runner.bcx;

    // `-L dependency=<dir>` search paths covering this unit's deps.
    for arg in lib_search_paths(build_runner, unit)? {
        cmd.arg(arg);
    }

    let deps = build_runner.unit_deps(unit);

    // If there is not one linkable target but should, rustc fails later
    // on if there is an `extern crate` for it. This may turn into a hard
    // error in the future (see PR #4797).
    if !deps
        .iter()
        .any(|dep| !dep.unit.mode.is_doc() && dep.unit.target.is_linkable())
    {
        // Warn about the first non-doc lib dependency (excluding artifact
        // deps) that provides no linkable target.
        if let Some(dep) = deps.iter().find(|dep| {
            !dep.unit.mode.is_doc() && dep.unit.target.is_lib() && !dep.unit.artifact.is_true()
        }) {
            let dep_name = dep.unit.target.crate_name();
            let name = unit.target.crate_name();
            bcx.gctx.shell().print_report(&[
                Level::WARNING.secondary_title(format!("the package `{dep_name}` provides no linkable target"))
                    .elements([
                        Level::NOTE.message(format!("this might cause `{name}` to fail compilation")),
                        Level::NOTE.message("this warning might turn into a hard error in the future"),
                        Level::HELP.message(format!("consider adding 'dylib' or 'rlib' to key 'crate-type' in `{dep_name}`'s Cargo.toml"))
                    ])
            ], false)?;
        }
    }

    // Set to true by `extern_args` below when any `--extern` option requires
    // nightly-only syntax.
    let mut unstable_opts = false;

    // Add `OUT_DIR` environment variables for build scripts
    let first_custom_build_dep = deps.iter().find(|dep| dep.unit.mode.is_run_custom_build());
    if let Some(dep) = first_custom_build_dep {
        // The output directory location depends on the unstable
        // `build-dir-new-layout` flag.
        let out_dir = if bcx.gctx.cli_unstable().build_dir_new_layout {
            build_runner.files().out_dir_new_layout(&dep.unit)
        } else {
            build_runner.files().build_script_out_dir(&dep.unit)
        };
        cmd.env("OUT_DIR", &out_dir);
    }

    // Adding output directory for each build script
    let is_multiple_build_scripts_enabled = unit
        .pkg
        .manifest()
        .unstable_features()
        .require(Feature::multiple_build_scripts())
        .is_ok();

    if is_multiple_build_scripts_enabled {
        for dep in deps {
            if dep.unit.mode.is_run_custom_build() {
                let out_dir = if bcx.gctx.cli_unstable().build_dir_new_layout {
                    build_runner.files().out_dir_new_layout(&dep.unit)
                } else {
                    build_runner.files().build_script_out_dir(&dep.unit)
                };
                // Env var is named `<TARGET>_OUT_DIR` with any leading
                // `build-script-` prefix stripped from the target name.
                let target_name = dep.unit.target.name();
                let out_dir_prefix = target_name
                    .strip_prefix("build-script-")
                    .unwrap_or(target_name);
                let out_dir_name = format!("{out_dir_prefix}_OUT_DIR");
                cmd.env(&out_dir_name, &out_dir);
            }
        }
    }
    // `--extern` flags for each linkable dependency.
    for arg in extern_args(build_runner, unit, &mut unstable_opts)? {
        cmd.arg(arg);
    }

    // Environment variables for artifact dependencies; see `artifact::get_env`.
    for (var, env) in artifact::get_env(build_runner, unit, deps)? {
        cmd.env(&var, env);
    }

    // This will only be set if we're already using a feature
    // requiring nightly rust
    if unstable_opts {
        cmd.arg("-Z").arg("unstable-options");
    }

    Ok(())
}
1784
1785fn add_dep_arg<'a, 'b: 'a>(
1786    map: &mut BTreeMap<&'a Unit, PathBuf>,
1787    build_runner: &'b BuildRunner<'b, '_>,
1788    unit: &'a Unit,
1789) {
1790    if map.contains_key(&unit) {
1791        return;
1792    }
1793    map.insert(&unit, build_runner.files().deps_dir(&unit));
1794
1795    for dep in build_runner.unit_deps(unit) {
1796        add_dep_arg(map, build_runner, &dep.unit);
1797    }
1798}
1799
1800/// Adds extra rustc flags and environment variables collected from the output
1801/// of a build-script to the command to execute, include custom environment
1802/// variables and `cfg`.
1803fn add_custom_flags(
1804    cmd: &mut ProcessBuilder,
1805    build_script_outputs: &BuildScriptOutputs,
1806    metadata_vec: Option<Vec<UnitHash>>,
1807) -> CargoResult<()> {
1808    if let Some(metadata_vec) = metadata_vec {
1809        for metadata in metadata_vec {
1810            if let Some(output) = build_script_outputs.get(metadata) {
1811                for cfg in output.cfgs.iter() {
1812                    cmd.arg("--cfg").arg(cfg);
1813                }
1814                for check_cfg in &output.check_cfgs {
1815                    cmd.arg("--check-cfg").arg(check_cfg);
1816                }
1817                for (name, value) in output.env.iter() {
1818                    cmd.env(name, value);
1819                }
1820            }
1821        }
1822    }
1823
1824    Ok(())
1825}
1826
1827/// Generate a list of `-L` arguments
1828pub fn lib_search_paths(
1829    build_runner: &BuildRunner<'_, '_>,
1830    unit: &Unit,
1831) -> CargoResult<Vec<OsString>> {
1832    let mut lib_search_paths = Vec::new();
1833    if build_runner.bcx.gctx.cli_unstable().build_dir_new_layout {
1834        let mut map = BTreeMap::new();
1835
1836        // Recursively add all dependency args to rustc process
1837        add_dep_arg(&mut map, build_runner, unit);
1838
1839        let paths = map.into_iter().map(|(_, path)| path).sorted_unstable();
1840
1841        for path in paths {
1842            let mut deps = OsString::from("dependency=");
1843            deps.push(path);
1844            lib_search_paths.extend(["-L".into(), deps]);
1845        }
1846    } else {
1847        let mut deps = OsString::from("dependency=");
1848        deps.push(build_runner.files().deps_dir(unit));
1849        lib_search_paths.extend(["-L".into(), deps]);
1850    }
1851
1852    // Be sure that the host path is also listed. This'll ensure that proc macro
1853    // dependencies are correctly found (for reexported macros).
1854    if !unit.kind.is_host() {
1855        let mut deps = OsString::from("dependency=");
1856        deps.push(build_runner.files().host_deps(unit));
1857        lib_search_paths.extend(["-L".into(), deps]);
1858    }
1859
1860    Ok(lib_search_paths)
1861}
1862
/// Generates a list of `--extern` arguments.
///
/// Emits one `--extern [opts:]name=path` per linkable, non-doc dependency
/// (possibly several paths per dependency when both a linkable artifact and
/// an rmeta are needed), plus a bare `--extern proc_macro` for proc-macro
/// targets. Sets `*unstable_opts` when any nightly-only option syntax
/// (`priv`, `noprelude`) was used.
pub fn extern_args(
    build_runner: &BuildRunner<'_, '_>,
    unit: &Unit,
    unstable_opts: &mut bool,
) -> CargoResult<Vec<OsString>> {
    let mut result = Vec::new();
    let deps = build_runner.unit_deps(unit);

    let no_embed_metadata = build_runner.bcx.gctx.cli_unstable().no_embed_metadata;

    // Closure to add one dependency to `result`.
    let mut link_to =
        |dep: &UnitDep, extern_crate_name: InternedString, noprelude: bool| -> CargoResult<()> {
            let mut value = OsString::new();
            let mut opts = Vec::new();
            // `public-dependency` may be enabled per-manifest or globally via `-Z`.
            let is_public_dependency_enabled = unit
                .pkg
                .manifest()
                .unstable_features()
                .require(Feature::public_dependency())
                .is_ok()
                || build_runner.bcx.gctx.cli_unstable().public_dependency;
            // Mark non-public deps of libs as `priv` so rustc can lint
            // exported private dependencies.
            if !dep.public && unit.target.is_lib() && is_public_dependency_enabled {
                opts.push("priv");
                *unstable_opts = true;
            }
            if noprelude {
                opts.push("noprelude");
                *unstable_opts = true;
            }
            // Option prefix is `opt1,opt2:` before the crate name.
            if !opts.is_empty() {
                value.push(opts.join(","));
                value.push(":");
            }
            value.push(extern_crate_name.as_str());
            value.push("=");

            // Emits `--extern <value><file>` for one output file.
            let mut pass = |file| {
                let mut value = value.clone();
                value.push(file);
                result.push(OsString::from("--extern"));
                result.push(value);
            };

            let outputs = build_runner.outputs(&dep.unit)?;

            if build_runner.only_requires_rmeta(unit, &dep.unit) || dep.unit.mode.is_check() {
                // Example: rlib dependency for an rlib, rmeta is all that is required.
                let output = outputs
                    .iter()
                    .find(|output| output.flavor == FileFlavor::Rmeta)
                    .expect("failed to find rmeta dep for pipelined dep");
                pass(&output.path);
            } else {
                // Example: a bin needs `rlib` for dependencies, it cannot use rmeta.
                for output in outputs.iter() {
                    if output.flavor == FileFlavor::Linkable {
                        pass(&output.path);
                    }
                    // If we use -Zembed-metadata=no, we also need to pass the path to the
                    // corresponding .rmeta file to the linkable artifact, because the
                    // normal dependency (rlib) doesn't contain the full metadata.
                    else if no_embed_metadata && output.flavor == FileFlavor::Rmeta {
                        pass(&output.path);
                    }
                }
            }
            Ok(())
        };

    for dep in deps {
        if dep.unit.target.is_linkable() && !dep.unit.mode.is_doc() {
            link_to(dep, dep.extern_crate_name, dep.noprelude)?;
        }
    }
    if unit.target.proc_macro() {
        // Automatically import `proc_macro`.
        result.push(OsString::from("--extern"));
        result.push(OsString::from("proc_macro"));
    }

    Ok(result)
}
1947
1948/// Adds `-C linker=<path>` if specified.
1949fn add_codegen_linker(
1950    cmd: &mut ProcessBuilder,
1951    build_runner: &BuildRunner<'_, '_>,
1952    unit: &Unit,
1953    target_applies_to_host: bool,
1954) {
1955    let linker = if unit.target.for_host() && !target_applies_to_host {
1956        build_runner
1957            .compilation
1958            .host_linker()
1959            .map(|s| s.as_os_str())
1960    } else {
1961        build_runner
1962            .compilation
1963            .target_linker(unit.kind)
1964            .map(|s| s.as_os_str())
1965    };
1966
1967    if let Some(linker) = linker {
1968        let mut arg = OsString::from("linker=");
1969        arg.push(linker);
1970        cmd.arg("-C").arg(arg);
1971    }
1972}
1973
1974/// Adds `-C incremental=<path>`.
1975fn add_codegen_incremental(
1976    cmd: &mut ProcessBuilder,
1977    build_runner: &BuildRunner<'_, '_>,
1978    unit: &Unit,
1979) {
1980    let dir = build_runner.files().incremental_dir(&unit);
1981    let mut arg = OsString::from("incremental=");
1982    arg.push(dir.as_os_str());
1983    cmd.arg("-C").arg(arg);
1984}
1985
/// Converts a name into environment-variable form:
/// upper-cases every character and replaces `-` with `_`
/// (e.g. `my-pkg` becomes `MY_PKG`).
fn envify(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for c in s.chars() {
        if c == '-' {
            out.push('_');
        } else {
            // `to_uppercase` may expand to multiple chars (e.g. `ß` -> `SS`).
            out.extend(c.to_uppercase());
        }
    }
    out
}
1992
/// Configuration of the display of messages emitted by the compiler,
/// e.g. diagnostics, warnings, errors, and message caching.
///
/// Constructed per unit via [`OutputOptions::new`].
struct OutputOptions {
    /// What format we're emitting from Cargo itself.
    format: MessageFormat,
    /// Where to write the JSON messages to support playback later if the unit
    /// is fresh. The file is created lazily so that in the normal case, lots
    /// of empty files are not created. If this is None, the output will not
    /// be cached (such as when replaying cached messages).
    cache_cell: Option<(PathBuf, OnceCell<File>)>,
    /// If `true`, display any diagnostics.
    /// Other types of JSON messages are processed regardless
    /// of the value of this flag.
    ///
    /// This is used primarily for cache replay. If you build with `-vv`, the
    /// cache will be filled with diagnostics from dependencies. When the
    /// cache is replayed without `-vv`, we don't want to show them.
    show_diagnostics: bool,
    /// Tracks the number of warnings we've seen so far.
    warnings_seen: usize,
    /// Tracks the number of errors we've seen so far.
    errors_seen: usize,
}
2016
2017impl OutputOptions {
2018    fn new(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> OutputOptions {
2019        let path = build_runner.files().message_cache_path(unit);
2020        // Remove old cache, ignore ENOENT, which is the common case.
2021        drop(fs::remove_file(&path));
2022        let cache_cell = Some((path, OnceCell::new()));
2023        let show_diagnostics =
2024            build_runner.bcx.gctx.warning_handling().unwrap_or_default() != WarningHandling::Allow;
2025        OutputOptions {
2026            format: build_runner.bcx.build_config.message_format,
2027            cache_cell,
2028            show_diagnostics,
2029            warnings_seen: 0,
2030            errors_seen: 0,
2031        }
2032    }
2033}
2034
/// Cloned and sendable context about the manifest file.
///
/// Sometimes we enrich rustc's errors with some locations in the manifest file; this
/// contains a `Send`-able copy of the manifest information that we need for the
/// enriched errors.
struct ManifestErrorContext {
    /// The path to the manifest.
    path: PathBuf,
    /// The locations of various spans within the manifest.
    spans: Option<toml::Spanned<toml::de::DeTable<'static>>>,
    /// The raw manifest contents.
    contents: Option<String>,
    /// A lookup for all the unambiguous renamings, mapping from the original package
    /// name to the renamed one.
    rename_table: HashMap<InternedString, InternedString>,
    /// A list of targets we're compiling for, to determine which of the `[target.<something>.dependencies]`
    /// tables might be of interest.
    requested_kinds: Vec<CompileKind>,
    /// A list of all the collections of cfg values, one collection for each target, to determine
    /// which of the `[target.'cfg(...)'.dependencies]` tables might be of interest.
    cfgs: Vec<Vec<Cfg>>,
    /// The host target name — presumably used when matching
    /// `[target.<triple>.dependencies]` tables for the host; TODO confirm at usage sites.
    host_name: InternedString,
    /// Cargo's working directory (for printing out a more friendly manifest path).
    cwd: PathBuf,
    /// Terminal width for formatting diagnostics.
    term_width: usize,
}
2062
2063fn on_stdout_line(
2064    state: &JobState<'_, '_>,
2065    line: &str,
2066    _package_id: PackageId,
2067    _target: &Target,
2068) -> CargoResult<()> {
2069    state.stdout(line.to_string())?;
2070    Ok(())
2071}
2072
2073fn on_stderr_line(
2074    state: &JobState<'_, '_>,
2075    line: &str,
2076    package_id: PackageId,
2077    manifest: &ManifestErrorContext,
2078    target: &Target,
2079    options: &mut OutputOptions,
2080) -> CargoResult<()> {
2081    if on_stderr_line_inner(state, line, package_id, manifest, target, options)? {
2082        // Check if caching is enabled.
2083        if let Some((path, cell)) = &mut options.cache_cell {
2084            // Cache the output, which will be replayed later when Fresh.
2085            let f = cell.try_borrow_mut_with(|| paths::create(path))?;
2086            debug_assert!(!line.contains('\n'));
2087            f.write_all(line.as_bytes())?;
2088            f.write_all(&[b'\n'])?;
2089        }
2090    }
2091    Ok(())
2092}
2093
2094/// Returns true if the line should be cached.
2095fn on_stderr_line_inner(
2096    state: &JobState<'_, '_>,
2097    line: &str,
2098    package_id: PackageId,
2099    manifest: &ManifestErrorContext,
2100    target: &Target,
2101    options: &mut OutputOptions,
2102) -> CargoResult<bool> {
2103    // We primarily want to use this function to process JSON messages from
2104    // rustc. The compiler should always print one JSON message per line, and
2105    // otherwise it may have other output intermingled (think RUST_LOG or
2106    // something like that), so skip over everything that doesn't look like a
2107    // JSON message.
2108    if !line.starts_with('{') {
2109        state.stderr(line.to_string())?;
2110        return Ok(true);
2111    }
2112
2113    let mut compiler_message: Box<serde_json::value::RawValue> = match serde_json::from_str(line) {
2114        Ok(msg) => msg,
2115
2116        // If the compiler produced a line that started with `{` but it wasn't
2117        // valid JSON, maybe it wasn't JSON in the first place! Forward it along
2118        // to stderr.
2119        Err(e) => {
2120            debug!("failed to parse json: {:?}", e);
2121            state.stderr(line.to_string())?;
2122            return Ok(true);
2123        }
2124    };
2125
2126    let count_diagnostic = |level, options: &mut OutputOptions| {
2127        if level == "warning" {
2128            options.warnings_seen += 1;
2129        } else if level == "error" {
2130            options.errors_seen += 1;
2131        }
2132    };
2133
2134    if let Ok(report) = serde_json::from_str::<FutureIncompatReport>(compiler_message.get()) {
2135        for item in &report.future_incompat_report {
2136            count_diagnostic(&*item.diagnostic.level, options);
2137        }
2138        state.future_incompat_report(report.future_incompat_report);
2139        return Ok(true);
2140    }
2141
2142    let res = serde_json::from_str::<SectionTiming>(compiler_message.get());
2143    if let Ok(timing_record) = res {
2144        state.on_section_timing_emitted(timing_record);
2145        return Ok(false);
2146    }
2147
2148    // Returns `true` if the diagnostic was modified.
2149    let add_pub_in_priv_diagnostic = |diag: &mut String| -> bool {
2150        // We are parsing the compiler diagnostic here, as this information isn't
2151        // currently exposed elsewhere.
2152        // At the time of writing this comment, rustc emits two different
2153        // "exported_private_dependencies" errors:
2154        //  - type `FromPriv` from private dependency 'priv_dep' in public interface
2155        //  - struct `FromPriv` from private dependency 'priv_dep' is re-exported
2156        // This regex matches them both. To see if it needs to be updated, grep the rust
2157        // source for "EXPORTED_PRIVATE_DEPENDENCIES".
2158        static PRIV_DEP_REGEX: LazyLock<Regex> =
2159            LazyLock::new(|| Regex::new("from private dependency '([A-Za-z0-9-_]+)'").unwrap());
2160        if let Some(crate_name) = PRIV_DEP_REGEX.captures(diag).and_then(|m| m.get(1))
2161            && let Some(ref contents) = manifest.contents
2162            && let Some(span) = manifest.find_crate_span(crate_name.as_str())
2163        {
2164            let rel_path = pathdiff::diff_paths(&manifest.path, &manifest.cwd)
2165                .unwrap_or_else(|| manifest.path.clone())
2166                .display()
2167                .to_string();
2168            let report = [Group::with_title(Level::NOTE.secondary_title(format!(
2169                "dependency `{}` declared here",
2170                crate_name.as_str()
2171            )))
2172            .element(
2173                Snippet::source(contents)
2174                    .path(rel_path)
2175                    .annotation(AnnotationKind::Context.span(span)),
2176            )];
2177
2178            let rendered = Renderer::styled()
2179                .term_width(manifest.term_width)
2180                .render(&report);
2181            diag.push_str(&rendered);
2182            diag.push('\n');
2183            return true;
2184        }
2185        false
2186    };
2187
2188    // Depending on what we're emitting from Cargo itself, we figure out what to
2189    // do with this JSON message.
2190    match options.format {
2191        // In the "human" output formats (human/short) or if diagnostic messages
2192        // from rustc aren't being included in the output of Cargo's JSON
2193        // messages then we extract the diagnostic (if present) here and handle
2194        // it ourselves.
2195        MessageFormat::Human
2196        | MessageFormat::Short
2197        | MessageFormat::Json {
2198            render_diagnostics: true,
2199            ..
2200        } => {
2201            #[derive(serde::Deserialize)]
2202            struct CompilerMessage<'a> {
2203                // `rendered` contains escape sequences, which can't be
2204                // zero-copy deserialized by serde_json.
2205                // See https://github.com/serde-rs/json/issues/742
2206                rendered: String,
2207                #[serde(borrow)]
2208                message: Cow<'a, str>,
2209                #[serde(borrow)]
2210                level: Cow<'a, str>,
2211                children: Vec<PartialDiagnostic>,
2212                code: Option<DiagnosticCode>,
2213            }
2214
2215            // A partial rustfix::diagnostics::Diagnostic. We deserialize only a
2216            // subset of the fields because rustc's output can be extremely
2217            // deeply nested JSON in pathological cases involving macro
2218            // expansion. Rustfix's Diagnostic struct is recursive containing a
2219            // field `children: Vec<Self>`, and it can cause deserialization to
2220            // hit serde_json's default recursion limit, or overflow the stack
2221            // if we turn that off. Cargo only cares about the 1 field listed
2222            // here.
2223            #[derive(serde::Deserialize)]
2224            struct PartialDiagnostic {
2225                spans: Vec<PartialDiagnosticSpan>,
2226            }
2227
2228            // A partial rustfix::diagnostics::DiagnosticSpan.
2229            #[derive(serde::Deserialize)]
2230            struct PartialDiagnosticSpan {
2231                suggestion_applicability: Option<Applicability>,
2232            }
2233
2234            #[derive(serde::Deserialize)]
2235            struct DiagnosticCode {
2236                code: String,
2237            }
2238
2239            if let Ok(mut msg) = serde_json::from_str::<CompilerMessage<'_>>(compiler_message.get())
2240            {
2241                if msg.message.starts_with("aborting due to")
2242                    || msg.message.ends_with("warning emitted")
2243                    || msg.message.ends_with("warnings emitted")
2244                {
2245                    // Skip this line; we'll print our own summary at the end.
2246                    return Ok(true);
2247                }
2248                // state.stderr will add a newline
2249                if msg.rendered.ends_with('\n') {
2250                    msg.rendered.pop();
2251                }
2252                let mut rendered = msg.rendered;
2253                if options.show_diagnostics {
2254                    let machine_applicable: bool = msg
2255                        .children
2256                        .iter()
2257                        .map(|child| {
2258                            child
2259                                .spans
2260                                .iter()
2261                                .filter_map(|span| span.suggestion_applicability)
2262                                .any(|app| app == Applicability::MachineApplicable)
2263                        })
2264                        .any(|b| b);
2265                    count_diagnostic(&msg.level, options);
2266                    if msg
2267                        .code
2268                        .as_ref()
2269                        .is_some_and(|c| c.code == "exported_private_dependencies")
2270                        && options.format != MessageFormat::Short
2271                    {
2272                        add_pub_in_priv_diagnostic(&mut rendered);
2273                    }
2274                    let lint = msg.code.is_some();
2275                    state.emit_diag(&msg.level, rendered, lint, machine_applicable)?;
2276                }
2277                return Ok(true);
2278            }
2279        }
2280
2281        MessageFormat::Json { ansi, .. } => {
2282            #[derive(serde::Deserialize, serde::Serialize)]
2283            struct CompilerMessage<'a> {
2284                rendered: String,
2285                #[serde(flatten, borrow)]
2286                other: std::collections::BTreeMap<Cow<'a, str>, serde_json::Value>,
2287                code: Option<DiagnosticCode<'a>>,
2288            }
2289
2290            #[derive(serde::Deserialize, serde::Serialize)]
2291            struct DiagnosticCode<'a> {
2292                code: String,
2293                #[serde(flatten, borrow)]
2294                other: std::collections::BTreeMap<Cow<'a, str>, serde_json::Value>,
2295            }
2296
2297            if let Ok(mut error) =
2298                serde_json::from_str::<CompilerMessage<'_>>(compiler_message.get())
2299            {
2300                let modified_diag = if error
2301                    .code
2302                    .as_ref()
2303                    .is_some_and(|c| c.code == "exported_private_dependencies")
2304                {
2305                    add_pub_in_priv_diagnostic(&mut error.rendered)
2306                } else {
2307                    false
2308                };
2309
2310                // Remove color information from the rendered string if color is not
2311                // enabled. Cargo always asks for ANSI colors from rustc. This allows
2312                // cached replay to enable/disable colors without re-invoking rustc.
2313                if !ansi {
2314                    error.rendered = anstream::adapter::strip_str(&error.rendered).to_string();
2315                }
2316                if !ansi || modified_diag {
2317                    let new_line = serde_json::to_string(&error)?;
2318                    compiler_message = serde_json::value::RawValue::from_string(new_line)?;
2319                }
2320            }
2321        }
2322    }
2323
2324    // We always tell rustc to emit messages about artifacts being produced.
2325    // These messages feed into pipelined compilation, as well as timing
2326    // information.
2327    //
2328    // Look for a matching directive and inform Cargo internally that a
2329    // metadata file has been produced.
2330    #[derive(serde::Deserialize)]
2331    struct ArtifactNotification<'a> {
2332        #[serde(borrow)]
2333        artifact: Cow<'a, str>,
2334    }
2335
2336    if let Ok(artifact) = serde_json::from_str::<ArtifactNotification<'_>>(compiler_message.get()) {
2337        trace!("found directive from rustc: `{}`", artifact.artifact);
2338        if artifact.artifact.ends_with(".rmeta") {
2339            debug!("looks like metadata finished early!");
2340            state.rmeta_produced();
2341        }
2342        return Ok(false);
2343    }
2344
2345    // And failing all that above we should have a legitimate JSON diagnostic
2346    // from the compiler, so wrap it in an external Cargo JSON message
2347    // indicating which package it came from and then emit it.
2348
2349    if !options.show_diagnostics {
2350        return Ok(true);
2351    }
2352
2353    #[derive(serde::Deserialize)]
2354    struct CompilerMessage<'a> {
2355        #[serde(borrow)]
2356        message: Cow<'a, str>,
2357        #[serde(borrow)]
2358        level: Cow<'a, str>,
2359    }
2360
2361    if let Ok(msg) = serde_json::from_str::<CompilerMessage<'_>>(compiler_message.get()) {
2362        if msg.message.starts_with("aborting due to")
2363            || msg.message.ends_with("warning emitted")
2364            || msg.message.ends_with("warnings emitted")
2365        {
2366            // Skip this line; we'll print our own summary at the end.
2367            return Ok(true);
2368        }
2369        count_diagnostic(&msg.level, options);
2370    }
2371
2372    let msg = machine_message::FromCompiler {
2373        package_id: package_id.to_spec(),
2374        manifest_path: &manifest.path,
2375        target,
2376        message: compiler_message,
2377    }
2378    .to_json_string();
2379
2380    // Switch json lines from rustc/rustdoc that appear on stderr to stdout
2381    // instead. We want the stdout of Cargo to always be machine parseable as
2382    // stderr has our colorized human-readable messages.
2383    state.stdout(msg)?;
2384    Ok(true)
2385}
2386
impl ManifestErrorContext {
    /// Collects the manifest-related information needed to map certain rustc
    /// diagnostics (e.g. the public-in-private lint) back to spans inside
    /// this unit's `Cargo.toml`.
    fn new(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> ManifestErrorContext {
        // `rename_table` maps a dependency's un-renamed package name to the
        // extern crate name it is used under in this unit. If two
        // dependencies share the same package name the mapping is ambiguous,
        // so the entry is removed and the name parked in `duplicates` so it
        // can never be re-inserted.
        let mut duplicates = HashSet::new();
        let mut rename_table = HashMap::new();

        for dep in build_runner.unit_deps(unit) {
            let unrenamed_id = dep.unit.pkg.package_id().name();
            if duplicates.contains(&unrenamed_id) {
                continue;
            }
            match rename_table.entry(unrenamed_id) {
                std::collections::hash_map::Entry::Occupied(occ) => {
                    // Second dependency with this package name: drop it as
                    // ambiguous.
                    occ.remove_entry();
                    duplicates.insert(unrenamed_id);
                }
                std::collections::hash_map::Entry::Vacant(vac) => {
                    vac.insert(dep.extern_crate_name);
                }
            }
        }

        let bcx = build_runner.bcx;
        ManifestErrorContext {
            path: unit.pkg.manifest_path().to_owned(),
            spans: unit.pkg.manifest().document().cloned(),
            contents: unit.pkg.manifest().contents().map(String::from),
            requested_kinds: bcx.target_data.requested_kinds().to_owned(),
            host_name: bcx.rustc().host,
            rename_table,
            cwd: path_args(build_runner.bcx.ws, unit).1,
            // One cfg set per requested kind; used by `find_crate_span` to
            // decide whether `[target.'cfg(...)'.dependencies]` tables apply.
            cfgs: bcx
                .target_data
                .requested_kinds()
                .iter()
                .map(|k| bcx.target_data.cfg(*k).to_owned())
                .collect(),
            term_width: bcx
                .gctx
                .shell()
                .err_width()
                .diagnostic_terminal_width()
                .unwrap_or(annotate_snippets::renderer::DEFAULT_TERM_WIDTH),
        }
    }

    /// Short names of all requested compile targets, with [`CompileKind::Host`]
    /// resolved to the host triple reported by rustc.
    fn requested_target_names(&self) -> impl Iterator<Item = &str> {
        self.requested_kinds.iter().map(|kind| match kind {
            CompileKind::Host => &self.host_name,
            CompileKind::Target(target) => target.short_name(),
        })
    }

    /// Find a span for the dependency that specifies this unrenamed crate, if it's unique.
    ///
    /// rustc diagnostics (at least for public-in-private) mention the un-renamed
    /// crate: if you have `foo = { package = "bar" }`, the rustc diagnostic will
    /// say "bar".
    ///
    /// This function does its best to find a span for "bar", but it could fail if
    /// there are multiple candidates:
    ///
    /// ```toml
    /// foo = { package = "bar" }
    /// baz = { path = "../bar", package = "bar" }
    /// ```
    fn find_crate_span(&self, unrenamed: &str) -> Option<Range<usize>> {
        // No parsed manifest document available (e.g. manifest not from disk),
        // so there is nothing to point at.
        let Some(ref spans) = self.spans else {
            return None;
        };

        // Returns `None` if the package name was ambiguous (see `new`).
        let orig_name = self.rename_table.get(unrenamed)?.as_str();

        if let Some((k, v)) = get_key_value(&spans, &["dependencies", orig_name]) {
            // We make some effort to find the unrenamed text: in
            //
            // ```
            // foo = { package = "bar" }
            // ```
            //
            // we try to find the "bar", but fall back to "foo" if we can't (which might
            // happen if the renaming took place in the workspace, for example).
            if let Some(package) = v.get_ref().as_table().and_then(|t| t.get("package")) {
                return Some(package.span());
            } else {
                return Some(k.span());
            }
        }

        // The dependency could also be in a target-specific table, like
        // [target.x86_64-unknown-linux-gnu.dependencies] or
        // [target.'cfg(something)'.dependencies]. We filter out target tables
        // that don't match a requested target or a requested cfg.
        if let Some(target) = spans
            .as_ref()
            .get("target")
            .and_then(|t| t.as_ref().as_table())
        {
            for (platform, platform_table) in target.iter() {
                match platform.as_ref().parse::<Platform>() {
                    Ok(Platform::Name(name)) => {
                        if !self.requested_target_names().any(|n| n == name) {
                            continue;
                        }
                    }
                    Ok(Platform::Cfg(cfg_expr)) => {
                        if !self.cfgs.iter().any(|cfgs| cfg_expr.matches(cfgs)) {
                            continue;
                        }
                    }
                    // Unparseable platform key: skip rather than guess.
                    Err(_) => continue,
                }

                let Some(platform_table) = platform_table.as_ref().as_table() else {
                    continue;
                };

                if let Some(deps) = platform_table
                    .get("dependencies")
                    .and_then(|d| d.as_ref().as_table())
                {
                    if let Some((k, v)) = deps.get_key_value(orig_name) {
                        // Same preference as above: the `package = "..."`
                        // value if present, otherwise the dependency key.
                        if let Some(package) = v.get_ref().as_table().and_then(|t| t.get("package"))
                        {
                            return Some(package.span());
                        } else {
                            return Some(k.span());
                        }
                    }
                }
            }
        }
        None
    }
}
2521
2522/// Creates a unit of work that replays the cached compiler message.
2523///
2524/// Usually used when a job is fresh and doesn't need to recompile.
2525fn replay_output_cache(
2526    package_id: PackageId,
2527    manifest: ManifestErrorContext,
2528    target: &Target,
2529    path: PathBuf,
2530    format: MessageFormat,
2531    show_diagnostics: bool,
2532) -> Work {
2533    let target = target.clone();
2534    let mut options = OutputOptions {
2535        format,
2536        cache_cell: None,
2537        show_diagnostics,
2538        warnings_seen: 0,
2539        errors_seen: 0,
2540    };
2541    Work::new(move |state| {
2542        if !path.exists() {
2543            // No cached output, probably didn't emit anything.
2544            return Ok(());
2545        }
2546        // We sometimes have gigabytes of output from the compiler, so avoid
2547        // loading it all into memory at once, as that can cause OOM where
2548        // otherwise there would be none.
2549        let file = paths::open(&path)?;
2550        let mut reader = std::io::BufReader::new(file);
2551        let mut line = String::new();
2552        loop {
2553            let length = reader.read_line(&mut line)?;
2554            if length == 0 {
2555                break;
2556            }
2557            let trimmed = line.trim_end_matches(&['\n', '\r'][..]);
2558            on_stderr_line(state, trimmed, package_id, &manifest, &target, &mut options)?;
2559            line.clear();
2560        }
2561        Ok(())
2562    })
2563}
2564
2565/// Provides a package name with descriptive target information,
2566/// e.g., '`foo` (bin "bar" test)', '`foo` (lib doctest)'.
2567fn descriptive_pkg_name(name: &str, target: &Target, mode: &CompileMode) -> String {
2568    let desc_name = target.description_named();
2569    let mode = if mode.is_rustc_test() && !(target.is_test() || target.is_bench()) {
2570        " test"
2571    } else if mode.is_doc_test() {
2572        " doctest"
2573    } else if mode.is_doc() {
2574        " doc"
2575    } else {
2576        ""
2577    };
2578    format!("`{name}` ({desc_name}{mode})")
2579}
2580
2581/// Applies environment variables from config `[env]` to [`ProcessBuilder`].
2582pub(crate) fn apply_env_config(
2583    gctx: &crate::GlobalContext,
2584    cmd: &mut ProcessBuilder,
2585) -> CargoResult<()> {
2586    for (key, value) in gctx.env_config()?.iter() {
2587        // never override a value that has already been set by cargo
2588        if cmd.get_envs().contains_key(key) {
2589            continue;
2590        }
2591        cmd.env(key, value);
2592    }
2593    Ok(())
2594}
2595
2596/// Checks if there are some scrape units waiting to be processed.
2597fn should_include_scrape_units(bcx: &BuildContext<'_, '_>, unit: &Unit) -> bool {
2598    unit.mode.is_doc() && bcx.scrape_units.len() > 0 && bcx.ws.unit_needs_doc_scrape(unit)
2599}
2600
2601/// Gets the file path of function call information output from `rustdoc`.
2602fn scrape_output_path(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> CargoResult<PathBuf> {
2603    assert!(unit.mode.is_doc() || unit.mode.is_doc_scrape());
2604    build_runner
2605        .outputs(unit)
2606        .map(|outputs| outputs[0].path.clone())
2607}
2608
2609/// Gets the dep-info file emitted by rustdoc.
2610fn rustdoc_dep_info_loc(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> PathBuf {
2611    let mut loc = build_runner.files().fingerprint_file_path(unit, "");
2612    loc.set_extension("d");
2613    loc
2614}