// cargo/ops/cargo_compile/mod.rs

//! The entry point for starting the compilation process for commands like
//! `build`, `test`, `doc`, `rustc`, etc.
//!
//! The [`compile`] function will do all the work to compile a workspace. A
//! rough outline is:
//!
//! 1. Resolve the dependency graph (see [`ops::resolve`]).
//! 2. Download any packages needed (see [`PackageSet`]).
//! 3. Generate a list of top-level "units" of work for the targets the user
//!   requested on the command-line. Each [`Unit`] corresponds to a compiler
//!   invocation. This is done in this module ([`UnitGenerator::generate_root_units`]).
//! 4. Starting from the root [`Unit`]s, generate the [`UnitGraph`] by walking the dependency graph
//!   from the resolver.  See also [`unit_dependencies`].
//! 5. Construct the [`BuildContext`] with all of the information collected so
//!   far. This is the end of the "front end" of compilation.
//! 6. Create a [`BuildRunner`] which coordinates the compilation process
//!   and will perform the following steps:
//!     1. Prepare the `target` directory (see [`Layout`]).
//!     2. Create a [`JobQueue`]. The queue checks the
//!       fingerprint of each `Unit` to determine if it should run or be
//!       skipped.
//!     3. Execute the queue via [`drain_the_queue`]. Each leaf in the queue's dependency graph is
//!        executed, and then removed from the graph when finished. This repeats until the queue is
//!        empty.  Note that this is the only point in cargo that currently uses threads.
//! 7. The result of the compilation is stored in the [`Compilation`] struct. This can be used for
//!    various things, such as running tests after the compilation has finished.
//!
//! **Note**: "target" inside this module generally refers to ["Cargo Target"],
//! which corresponds to an artifact that will be built in a package. Not to
//! be confused with a target triple or target architecture.
//!
//! [`unit_dependencies`]: crate::core::compiler::unit_dependencies
//! [`Layout`]: crate::core::compiler::Layout
//! [`JobQueue`]: crate::core::compiler::job_queue
//! [`drain_the_queue`]: crate::core::compiler::job_queue
//! ["Cargo Target"]: https://doc.rust-lang.org/nightly/cargo/reference/cargo-targets.html

use std::collections::{HashMap, HashSet};
use std::hash::{Hash, Hasher};
use std::sync::Arc;

use crate::core::compiler::unit_dependencies::build_unit_dependencies;
use crate::core::compiler::unit_graph::{self, UnitDep, UnitGraph};
use crate::core::compiler::{standard_lib, CrateType, TargetInfo};
use crate::core::compiler::{BuildConfig, BuildContext, BuildRunner, Compilation};
use crate::core::compiler::{CompileKind, CompileMode, CompileTarget, RustcTargetData, Unit};
use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner};
use crate::core::profiles::Profiles;
use crate::core::resolver::features::{self, CliFeatures, FeaturesFor};
use crate::core::resolver::{HasDevUnits, Resolve};
use crate::core::{PackageId, PackageSet, SourceId, TargetKind, Workspace};
use crate::drop_println;
use crate::ops;
use crate::ops::resolve::WorkspaceResolve;
use crate::util::context::GlobalContext;
use crate::util::interning::InternedString;
use crate::util::{CargoResult, StableHasher};

mod compile_filter;
pub use compile_filter::{CompileFilter, FilterRule, LibRule};

mod unit_generator;
use unit_generator::UnitGenerator;

mod packages;

pub use packages::Packages;

/// Contains information about how a package should be compiled.
///
/// Note on the distinction between `CompileOptions` and [`BuildConfig`]:
/// `BuildConfig` contains values that need to be retained after the
/// [`BuildContext`] is created; the other fields here are only needed while
/// it is being constructed. Think of it as `CompileOptions` are the
/// high-level settings requested on the command-line, and `BuildConfig` are
/// the low-level settings for actually driving `rustc`.
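///
/// A minimal sketch of customizing the options (the package name `my-crate`
/// and the surrounding `gctx` are hypothetical placeholders):
///
/// ```ignore
/// let mut opts = CompileOptions::new(&gctx, CompileMode::Build)?;
/// // Restrict the build to one package instead of the whole workspace.
/// opts.spec = Packages::Packages(vec!["my-crate".to_string()]);
/// // Only build the library target.
/// opts.filter = CompileFilter::lib_only();
/// ```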
#[derive(Debug, Clone)]
pub struct CompileOptions {
    /// Configuration information for a rustc build
    pub build_config: BuildConfig,
    /// Feature flags requested by the user.
    pub cli_features: CliFeatures,
    /// A set of packages to build.
    pub spec: Packages,
    /// Filter to apply to the root package to select which targets will be
    /// built.
    pub filter: CompileFilter,
    /// Extra arguments to be passed to rustdoc (single target only)
    pub target_rustdoc_args: Option<Vec<String>>,
    /// Extra arguments to be passed to rustc (single target only). Note that
    /// these apply only to the *final* invocation of rustc.
    pub target_rustc_args: Option<Vec<String>>,
    /// Crate types to be passed to rustc (single target only)
    pub target_rustc_crate_types: Option<Vec<String>>,
    /// Whether the `--document-private-items` flag was specified and should
    /// be forwarded to `rustdoc`.
    pub rustdoc_document_private_items: bool,
    /// Whether the build process should check the minimum Rust version
    /// defined in the cargo metadata for a crate.
    pub honor_rust_version: Option<bool>,
}

impl CompileOptions {
    pub fn new(gctx: &GlobalContext, mode: CompileMode) -> CargoResult<CompileOptions> {
        let jobs = None;
        let keep_going = false;
        Ok(CompileOptions {
            build_config: BuildConfig::new(gctx, jobs, keep_going, &[], mode)?,
            cli_features: CliFeatures::new_all(false),
            spec: ops::Packages::Packages(Vec::new()),
            filter: CompileFilter::Default {
                required_features_filterable: false,
            },
            target_rustdoc_args: None,
            target_rustc_args: None,
            target_rustc_crate_types: None,
            rustdoc_document_private_items: false,
            honor_rust_version: None,
        })
    }
}

/// Compiles!
///
/// This uses the [`DefaultExecutor`]. To use a custom [`Executor`], see [`compile_with_exec`].
pub fn compile<'a>(ws: &Workspace<'a>, options: &CompileOptions) -> CargoResult<Compilation<'a>> {
    let exec: Arc<dyn Executor> = Arc::new(DefaultExecutor);
    compile_with_exec(ws, options, &exec)
}

/// Like [`compile`] but allows specifying a custom [`Executor`]
/// that will be able to intercept build calls and add custom logic.
///
/// [`compile`] uses [`DefaultExecutor`] which just passes calls through.
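///
/// A hedged sketch of a custom executor that logs each compiler invocation
/// and then delegates to [`DefaultExecutor`]; the `exec` signature shown is
/// assumed to match the current [`Executor`] trait:
///
/// ```ignore
/// struct LoggingExecutor;
///
/// impl Executor for LoggingExecutor {
///     fn exec(
///         &self,
///         cmd: &ProcessBuilder,
///         id: PackageId,
///         target: &Target,
///         mode: CompileMode,
///         on_stdout_line: &mut dyn FnMut(&str) -> CargoResult<()>,
///         on_stderr_line: &mut dyn FnMut(&str) -> CargoResult<()>,
///     ) -> CargoResult<()> {
///         // Log the rustc invocation, then pass the call straight through.
///         eprintln!("compiling {id}: {cmd}");
///         DefaultExecutor.exec(cmd, id, target, mode, on_stdout_line, on_stderr_line)
///     }
/// }
///
/// let exec: Arc<dyn Executor> = Arc::new(LoggingExecutor);
/// compile_with_exec(&ws, &options, &exec)?;
/// ```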
pub fn compile_with_exec<'a>(
    ws: &Workspace<'a>,
    options: &CompileOptions,
    exec: &Arc<dyn Executor>,
) -> CargoResult<Compilation<'a>> {
    ws.emit_warnings()?;
    compile_ws(ws, options, exec)
}

/// Like [`compile_with_exec`] but without warnings from manifest parsing.
#[tracing::instrument(skip_all)]
pub fn compile_ws<'a>(
    ws: &Workspace<'a>,
    options: &CompileOptions,
    exec: &Arc<dyn Executor>,
) -> CargoResult<Compilation<'a>> {
    let interner = UnitInterner::new();
    let bcx = create_bcx(ws, options, &interner)?;
    if options.build_config.unit_graph {
        unit_graph::emit_serialized_unit_graph(&bcx.roots, &bcx.unit_graph, ws.gctx())?;
        return Compilation::new(&bcx);
    }
    crate::core::gc::auto_gc(bcx.gctx);
    let build_runner = BuildRunner::new(&bcx)?;
    if options.build_config.dry_run {
        build_runner.dry_run()
    } else {
        build_runner.compile(exec)
    }
}

/// Executes `rustc --print <VALUE>`.
///
/// * `print_opt_value` is the VALUE passed through.
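///
/// For example, a sketch of asking rustc for its cfg values (workspace and
/// options assumed to already exist):
///
/// ```ignore
/// // Equivalent to running `rustc --print cfg`, once per requested kind.
/// ops::print(&ws, &options, "cfg")?;
/// ```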
pub fn print<'a>(
    ws: &Workspace<'a>,
    options: &CompileOptions,
    print_opt_value: &str,
) -> CargoResult<()> {
    let CompileOptions {
        ref build_config,
        ref target_rustc_args,
        ..
    } = *options;
    let gctx = ws.gctx();
    let rustc = gctx.load_global_rustc(Some(ws))?;
    for (index, kind) in build_config.requested_kinds.iter().enumerate() {
        if index != 0 {
            drop_println!(gctx);
        }
        let target_info = TargetInfo::new(gctx, &build_config.requested_kinds, &rustc, *kind)?;
        let mut process = rustc.process();
        process.args(&target_info.rustflags);
        if let Some(args) = target_rustc_args {
            process.args(args);
        }
        if let CompileKind::Target(t) = kind {
            process.arg("--target").arg(t.rustc_target());
        }
        process.arg("--print").arg(print_opt_value);
        process.exec()?;
    }
    Ok(())
}

/// Prepares all required information for the actual compilation.
///
/// For how it works and what data it collects,
/// please see the [module-level documentation](self).
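///
/// A minimal usage sketch (workspace and options assumed to exist):
///
/// ```ignore
/// let interner = UnitInterner::new();
/// let bcx = create_bcx(&ws, &options, &interner)?;
/// // The front end is done: `bcx.roots` and `bcx.unit_graph` describe
/// // everything that would be compiled.
/// ```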
#[tracing::instrument(skip_all)]
pub fn create_bcx<'a, 'gctx>(
    ws: &'a Workspace<'gctx>,
    options: &'a CompileOptions,
    interner: &'a UnitInterner,
) -> CargoResult<BuildContext<'a, 'gctx>> {
    let CompileOptions {
        ref build_config,
        ref spec,
        ref cli_features,
        ref filter,
        ref target_rustdoc_args,
        ref target_rustc_args,
        ref target_rustc_crate_types,
        rustdoc_document_private_items,
        honor_rust_version,
    } = *options;
    let gctx = ws.gctx();

    // Perform some pre-flight validation.
    match build_config.mode {
        CompileMode::Test
        | CompileMode::Build
        | CompileMode::Check { .. }
        | CompileMode::Bench
        | CompileMode::RunCustomBuild => {
            if ws.gctx().get_env("RUST_FLAGS").is_ok() {
                gctx.shell().warn(
                    "Cargo does not read `RUST_FLAGS` environment variable. Did you mean `RUSTFLAGS`?",
                )?;
            }
        }
        CompileMode::Doc { .. } | CompileMode::Doctest | CompileMode::Docscrape => {
            if ws.gctx().get_env("RUSTDOC_FLAGS").is_ok() {
                gctx.shell().warn(
                    "Cargo does not read `RUSTDOC_FLAGS` environment variable. Did you mean `RUSTDOCFLAGS`?"
                )?;
            }
        }
    }
    gctx.validate_term_config()?;

    let mut target_data = RustcTargetData::new(ws, &build_config.requested_kinds)?;

    let specs = spec.to_package_id_specs(ws)?;
    let has_dev_units = {
        // Rustdoc itself doesn't need dev-dependencies. But to scrape examples from
        // packages in the workspace, if any of those packages need dev-dependencies,
        // then we need to include dev-dependencies so those packages can be scraped.
        let any_pkg_has_scrape_enabled = ws
            .members_with_features(&specs, cli_features)?
            .iter()
            .any(|(pkg, _)| {
                pkg.targets()
                    .iter()
                    .any(|target| target.is_example() && target.doc_scrape_examples().is_enabled())
            });

        if filter.need_dev_deps(build_config.mode)
            || (build_config.mode.is_doc() && any_pkg_has_scrape_enabled)
        {
            HasDevUnits::Yes
        } else {
            HasDevUnits::No
        }
    };
    let dry_run = false;
    let resolve = ops::resolve_ws_with_opts(
        ws,
        &mut target_data,
        &build_config.requested_kinds,
        cli_features,
        &specs,
        has_dev_units,
        crate::core::resolver::features::ForceAllTargets::No,
        dry_run,
    )?;
    let WorkspaceResolve {
        mut pkg_set,
        workspace_resolve,
        targeted_resolve: resolve,
        resolved_features,
    } = resolve;

    let std_resolve_features = if let Some(crates) = &gctx.cli_unstable().build_std {
        let (std_package_set, std_resolve, std_features) =
            standard_lib::resolve_std(ws, &mut target_data, &build_config, crates)?;
        pkg_set.add_set(std_package_set);
        Some((std_resolve, std_features))
    } else {
        None
    };

    // Find the packages in the resolver that the user wants to build (those
    // passed in with `-p` or the defaults from the workspace), and convert
    // Vec<PackageIdSpec> to a Vec<PackageId>.
    let to_build_ids = resolve.specs_to_ids(&specs)?;
    // Now get the `Package` for each `PackageId`. This may trigger a download
    // if the user specified `-p` for a dependency that is not downloaded.
    // Dependencies will be downloaded during build_unit_dependencies.
    let mut to_builds = pkg_set.get_many(to_build_ids)?;

    // The ordering here affects some error messages coming out of cargo, so
    // let's be test- and CLI-friendly by always printing in the same order if
    // there's an error.
    to_builds.sort_by_key(|p| p.package_id());

    for pkg in to_builds.iter() {
        pkg.manifest().print_teapot(gctx);

        if build_config.mode.is_any_test()
            && !ws.is_member(pkg)
            && pkg.dependencies().iter().any(|dep| !dep.is_transitive())
        {
            anyhow::bail!(
                "package `{}` cannot be tested because it requires dev-dependencies \
                 and is not a member of the workspace",
                pkg.name()
            );
        }
    }

    let (extra_args, extra_args_name) = match (target_rustc_args, target_rustdoc_args) {
        (Some(args), _) => (Some(args.clone()), "rustc"),
        (_, Some(args)) => (Some(args.clone()), "rustdoc"),
        _ => (None, ""),
    };

    if extra_args.is_some() && to_builds.len() != 1 {
        panic!(
            "`{}` should not accept multiple `-p` flags",
            extra_args_name
        );
    }

    let profiles = Profiles::new(ws, build_config.requested_profile)?;
    profiles.validate_packages(
        ws.profiles(),
        &mut gctx.shell(),
        workspace_resolve.as_ref().unwrap_or(&resolve),
    )?;

    // If `--target` has not been specified, then the unit graph is built
    // assuming `--target $HOST` was specified. See
    // `rebuild_unit_graph_shared` for more on why this is done.
    let explicit_host_kind = CompileKind::Target(CompileTarget::new(&target_data.rustc.host)?);
    let explicit_host_kinds: Vec<_> = build_config
        .requested_kinds
        .iter()
        .map(|kind| match kind {
            CompileKind::Host => explicit_host_kind,
            CompileKind::Target(t) => CompileKind::Target(*t),
        })
        .collect();

    // Passing `build_config.requested_kinds` instead of
    // `explicit_host_kinds` here so that `generate_root_units` can do
    // its own special handling of `CompileKind::Host`. It will
    // internally replace the host kind with the `explicit_host_kind`
    // before creating the units.
    let generator = UnitGenerator {
        ws,
        packages: &to_builds,
        target_data: &target_data,
        filter,
        requested_kinds: &build_config.requested_kinds,
        explicit_host_kind,
        mode: build_config.mode,
        resolve: &resolve,
        workspace_resolve: &workspace_resolve,
        resolved_features: &resolved_features,
        package_set: &pkg_set,
        profiles: &profiles,
        interner,
        has_dev_units,
    };
    let mut units = generator.generate_root_units()?;

    if let Some(args) = target_rustc_crate_types {
        override_rustc_crate_types(&mut units, args, interner)?;
    }

    let should_scrape = build_config.mode.is_doc() && gctx.cli_unstable().rustdoc_scrape_examples;
    let mut scrape_units = if should_scrape {
        UnitGenerator {
            mode: CompileMode::Docscrape,
            ..generator
        }
        .generate_scrape_units(&units)?
    } else {
        Vec::new()
    };

    let std_roots = if let Some(crates) = standard_lib::std_crates(gctx, Some(&units)) {
        let (std_resolve, std_features) = std_resolve_features.as_ref().unwrap();
        standard_lib::generate_std_roots(
            &crates,
            std_resolve,
            std_features,
            &explicit_host_kinds,
            &pkg_set,
            interner,
            &profiles,
            &target_data,
        )?
    } else {
        Default::default()
    };

    let mut unit_graph = build_unit_dependencies(
        ws,
        &pkg_set,
        &resolve,
        &resolved_features,
        std_resolve_features.as_ref(),
        &units,
        &scrape_units,
        &std_roots,
        build_config.mode,
        &target_data,
        &profiles,
        interner,
    )?;

    // TODO: In theory, Cargo should also dedupe the roots, but I'm uncertain
    // what heuristics to use in that case.
    if matches!(build_config.mode, CompileMode::Doc { deps: true, .. }) {
        remove_duplicate_doc(build_config, &units, &mut unit_graph);
    }

    let host_kind_requested = build_config
        .requested_kinds
        .iter()
        .any(CompileKind::is_host);
    // Rebuild the unit graph, replacing the explicit host targets with
    // CompileKind::Host, removing `artifact_target_for_features` and merging any dependencies
    // shared with build and artifact dependencies.
    (units, scrape_units, unit_graph) = rebuild_unit_graph_shared(
        interner,
        unit_graph,
        &units,
        &scrape_units,
        host_kind_requested.then_some(explicit_host_kind),
    );

    let mut extra_compiler_args = HashMap::new();
    if let Some(args) = extra_args {
        if units.len() != 1 {
            anyhow::bail!(
                "extra arguments to `{}` can only be passed to one \
                 target, consider filtering\nthe package by passing, \
                 e.g., `--lib` or `--bin NAME` to specify a single target",
                extra_args_name
            );
        }
        extra_compiler_args.insert(units[0].clone(), args);
    }

    for unit in units
        .iter()
        .filter(|unit| unit.mode.is_doc() || unit.mode.is_doc_test())
        .filter(|unit| rustdoc_document_private_items || unit.target.is_bin())
    {
        // Add `--document-private-items` rustdoc flag if requested or if
        // the target is a binary. Binary crates get their private items
        // documented by default.
        let mut args = vec!["--document-private-items".into()];
        if unit.target.is_bin() {
            // This warning only makes sense if it's possible to document private items
            // sometimes and ignore them at other times. But cargo consistently passes
            // `--document-private-items`, so the warning isn't useful.
            args.push("-Arustdoc::private-intra-doc-links".into());
        }
        extra_compiler_args
            .entry(unit.clone())
            .or_default()
            .extend(args);
    }

    if honor_rust_version.unwrap_or(true) {
        let rustc_version = target_data.rustc.version.clone().into();

        let mut incompatible = Vec::new();
        let mut local_incompatible = false;
        for unit in unit_graph.keys() {
            let Some(pkg_msrv) = unit.pkg.rust_version() else {
                continue;
            };

            if pkg_msrv.is_compatible_with(&rustc_version) {
                continue;
            }

            local_incompatible |= unit.is_local();
            incompatible.push((unit, pkg_msrv));
        }
        if !incompatible.is_empty() {
            use std::fmt::Write as _;

            let plural = if incompatible.len() == 1 { "" } else { "s" };
            let mut message = format!(
                "rustc {rustc_version} is not supported by the following package{plural}:\n"
            );
            incompatible.sort_by_key(|(unit, _)| (unit.pkg.name(), unit.pkg.version()));
            for (unit, msrv) in incompatible {
                let name = &unit.pkg.name();
                let version = &unit.pkg.version();
                writeln!(&mut message, "  {name}@{version} requires rustc {msrv}").unwrap();
            }
            if ws.is_ephemeral() {
                if ws.ignore_lock() {
                    writeln!(
                        &mut message,
                        "Try re-running `cargo install` with `--locked`"
                    )
                    .unwrap();
                }
            } else if !local_incompatible {
                writeln!(
                    &mut message,
                    "Either upgrade rustc or select compatible dependency versions with
`cargo update <name>@<current-ver> --precise <compatible-ver>`
where `<compatible-ver>` is the latest version supporting rustc {rustc_version}",
                )
                .unwrap();
            }
            return Err(anyhow::Error::msg(message));
        }
    }

    let bcx = BuildContext::new(
        ws,
        pkg_set,
        build_config,
        profiles,
        extra_compiler_args,
        target_data,
        units,
        unit_graph,
        scrape_units,
    )?;

    Ok(bcx)
}

/// This is used to rebuild the unit graph, sharing host dependencies if possible,
/// and applying other unit adjustments based on the whole graph.
///
/// This will translate any unit's `CompileKind::Target(host)` to
/// `CompileKind::Host` if `to_host` is not `None` and the kind is equal to `to_host`.
/// This also handles generating the unit `dep_hash`, and merging shared units if possible.
///
/// This is necessary because if normal dependencies used `CompileKind::Host`,
/// there would be no way to distinguish those units from build-dependency
/// units or artifact dependency units.
/// This can cause a problem if a shared normal/build/artifact dependency needs
/// to link to another dependency whose features differ based on whether or
/// not it is a normal, build or artifact dependency. If all units used
/// `CompileKind::Host`, then they would end up being identical, causing a
/// collision in the `UnitGraph`, and Cargo would end up randomly choosing one
/// value or the other.
///
/// The solution is to keep normal, build and artifact dependencies separate when
/// building the unit graph, and then run this second pass which will try to
/// combine shared dependencies safely. By adding a hash of the dependencies
/// to the `Unit`, this allows the `CompileKind` to be changed back to `Host`
/// and `artifact_target_for_features` to be removed without fear of an unwanted
/// collision for build or artifact dependencies.
///
/// This is also responsible for adjusting the `strip` profile option to
/// opportunistically strip if debug is 0 for all dependencies. This helps
/// remove debuginfo added by the standard library.
///
/// This is also responsible for adjusting the `debug` setting for host
/// dependencies, turning off debug if the user has not explicitly enabled it,
/// and the unit is not shared with a target unit.
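///
/// Schematically (unit fields elided; `log` is just an illustrative package):
///
/// ```text
/// before: Unit { pkg: log, kind: Target(<host triple>), .. }   // normal dep
///         Unit { pkg: log, kind: Host, .. }                    // build dep
/// after:  Unit { pkg: log, kind: Host, dep_hash: <deps hash>, .. }  // shared
/// ```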
fn rebuild_unit_graph_shared(
    interner: &UnitInterner,
    unit_graph: UnitGraph,
    roots: &[Unit],
    scrape_units: &[Unit],
    to_host: Option<CompileKind>,
) -> (Vec<Unit>, Vec<Unit>, UnitGraph) {
    let mut result = UnitGraph::new();
    // Map of the old unit to the new unit, used to avoid recursing into units
    // that have already been computed to improve performance.
    let mut memo = HashMap::new();
    let new_roots = roots
        .iter()
        .map(|root| {
            traverse_and_share(
                interner,
                &mut memo,
                &mut result,
                &unit_graph,
                root,
                false,
                to_host,
            )
        })
        .collect();
    // If no unit in the unit graph ended up having scrape units attached as dependencies,
    // then they won't have been discovered in traverse_and_share and hence won't be in
    // memo. So we filter out any scrape units missing from memo.
    let new_scrape_units = scrape_units
        .iter()
        .filter_map(|unit| memo.get(unit).cloned())
        .collect();
    (new_roots, new_scrape_units, result)
}

/// Recursive function for rebuilding the graph.
///
/// This walks `unit_graph`, starting at the given `unit`. It inserts the new
/// units into `new_graph`, and returns a new updated version of the given
/// unit (`dep_hash` is filled in, and `kind` switched if necessary).
fn traverse_and_share(
    interner: &UnitInterner,
    memo: &mut HashMap<Unit, Unit>,
    new_graph: &mut UnitGraph,
    unit_graph: &UnitGraph,
    unit: &Unit,
    unit_is_for_host: bool,
    to_host: Option<CompileKind>,
) -> Unit {
    if let Some(new_unit) = memo.get(unit) {
        // Already computed, no need to recompute.
        return new_unit.clone();
    }
    let mut dep_hash = StableHasher::new();
    let new_deps: Vec<_> = unit_graph[unit]
        .iter()
        .map(|dep| {
            let new_dep_unit = traverse_and_share(
                interner,
                memo,
                new_graph,
                unit_graph,
                &dep.unit,
                dep.unit_for.is_for_host(),
                to_host,
            );
            new_dep_unit.hash(&mut dep_hash);
            UnitDep {
                unit: new_dep_unit,
                ..dep.clone()
            }
        })
        .collect();
    // Here, we have recursively traversed this unit's dependencies, and hashed them: we can
    // finalize the dep hash.
    let new_dep_hash = dep_hash.finish();

    // This is the key part of the sharing process: if the unit is a runtime dependency whose
    // target is the same as the host, we canonicalize the compile kind to `CompileKind::Host`.
    // A possible host dependency counterpart to this unit would have that kind, and if such a unit
    // exists in the current `unit_graph`, they will unify in the new unit graph map `new_graph`.
    // The resulting unit graph will be optimized with fewer units, thanks to sharing these host
    // dependencies.
    let canonical_kind = match to_host {
        Some(to_host) if to_host == unit.kind => CompileKind::Host,
        _ => unit.kind,
    };

    let mut profile = unit.profile.clone();
    if profile.strip.is_deferred() {
        // If strip was not manually set, and all dependencies of this unit together
        // with this unit have debuginfo turned off, we enable debuginfo stripping.
        // This will remove pre-existing debug symbols coming from the standard library.
        if !profile.debuginfo.is_turned_on()
            && new_deps
                .iter()
                .all(|dep| !dep.unit.profile.debuginfo.is_turned_on())
        {
            profile.strip = profile.strip.strip_debuginfo();
        }
    }

    // If this is a build dependency, and it's not shared with runtime dependencies, we can weaken
    // its debuginfo level to optimize build times. We do nothing if it's an artifact dependency,
    // as it and its debuginfo may end up embedded in the main program.
    if unit_is_for_host
        && to_host.is_some()
        && profile.debuginfo.is_deferred()
        && !unit.artifact.is_true()
    {
        // We create a "probe" test to see if a unit with the same explicit debuginfo level exists
        // in the graph. This is the level we'd expect if it was set manually or the default value
        // set by a profile for a runtime dependency: its canonical value.
        let canonical_debuginfo = profile.debuginfo.finalize();
        let mut canonical_profile = profile.clone();
        canonical_profile.debuginfo = canonical_debuginfo;
        let unit_probe = interner.intern(
            &unit.pkg,
            &unit.target,
            canonical_profile,
            to_host.unwrap(),
            unit.mode,
            unit.features.clone(),
            unit.rustflags.clone(),
            unit.rustdocflags.clone(),
            unit.links_overrides.clone(),
            unit.is_std,
            unit.dep_hash,
            unit.artifact,
            unit.artifact_target_for_features,
        );

        // We can now turn the deferred value into its actual final value.
        profile.debuginfo = if unit_graph.contains_key(&unit_probe) {
            // The unit is present in both build time and runtime subgraphs: we canonicalize its
            // level to the other unit's, thus ensuring reuse between the two to optimize build times.
            canonical_debuginfo
        } else {
            // The unit is only present in the build time subgraph, we can weaken its debuginfo
            // level to optimize build times.
            canonical_debuginfo.weaken()
        }
    }

    let new_unit = interner.intern(
        &unit.pkg,
        &unit.target,
        profile,
        canonical_kind,
        unit.mode,
        unit.features.clone(),
        unit.rustflags.clone(),
        unit.rustdocflags.clone(),
        unit.links_overrides.clone(),
        unit.is_std,
        new_dep_hash,
        unit.artifact,
        // Since `dep_hash` is now filled in, there's no need to specify the artifact target
        // for target-dependent feature resolution
        None,
    );
    assert!(memo.insert(unit.clone(), new_unit.clone()).is_none());
    new_graph.entry(new_unit.clone()).or_insert(new_deps);
    new_unit
}

/// Removes duplicate CompileMode::Doc units that would cause problems with
/// filename collisions.
///
/// Rustdoc only separates units by crate name in the file directory
/// structure. If any two units with the same crate name exist, this would
/// cause a filename collision, causing different rustdoc invocations to stomp
/// on one another's files.
///
/// Unfortunately this does not remove all duplicates, as some of them are
/// either user error or difficult to remove. Cases that I can think of:
///
/// - Same target name in different packages. See the `collision_doc` test.
/// - Different sources. See `collision_doc_sources` test.
///
/// Ideally this would not be necessary.
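///
/// For example (hypothetical versions), if the graph documents both
/// `rand@0.7` and `rand@0.8`, both units would write to `doc/rand/`; the
/// "older version" heuristic below keeps only `rand@0.8`.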
fn remove_duplicate_doc(
    build_config: &BuildConfig,
    root_units: &[Unit],
    unit_graph: &mut UnitGraph,
) {
    // First, create a mapping of crate_name -> Unit so we can see where the
    // duplicates are.
    let mut all_docs: HashMap<String, Vec<Unit>> = HashMap::new();
    for unit in unit_graph.keys() {
        if unit.mode.is_doc() {
            all_docs
                .entry(unit.target.crate_name())
                .or_default()
                .push(unit.clone());
        }
    }
    // Keep track of units to remove so that they can be efficiently removed
    // from the unit_deps.
    let mut removed_units: HashSet<Unit> = HashSet::new();
    let mut remove = |units: Vec<Unit>, reason: &str, cb: &dyn Fn(&Unit) -> bool| -> Vec<Unit> {
        let (to_remove, remaining_units): (Vec<Unit>, Vec<Unit>) = units
            .into_iter()
            .partition(|unit| cb(unit) && !root_units.contains(unit));
        for unit in to_remove {
            tracing::debug!(
                "removing duplicate doc due to {} for package {} target `{}`",
                reason,
                unit.pkg,
                unit.target.name()
            );
            unit_graph.remove(&unit);
            removed_units.insert(unit);
        }
        remaining_units
    };
    // Iterate over the duplicates and try to remove them from unit_graph.
    for (_crate_name, mut units) in all_docs {
        if units.len() == 1 {
            continue;
        }
        // Prefer target over host if --target was not specified.
        if build_config
            .requested_kinds
            .iter()
            .all(CompileKind::is_host)
        {
            // Note these duplicates may not be real duplicates, since they
            // might get merged in rebuild_unit_graph_shared. Either way, it
            // shouldn't hurt to remove them early (although the report in the
            // log might be confusing).
            units = remove(units, "host/target merger", &|unit| unit.kind.is_host());
            if units.len() == 1 {
                continue;
            }
        }
        // Prefer newer versions over older.
        let mut source_map: HashMap<(InternedString, SourceId, CompileKind), Vec<Unit>> =
            HashMap::new();
        for unit in units {
            let pkg_id = unit.pkg.package_id();
            // Note, this does not detect duplicates from different sources.
            source_map
                .entry((pkg_id.name(), pkg_id.source_id(), unit.kind))
                .or_default()
                .push(unit);
        }
        let mut remaining_units = Vec::new();
        for (_key, mut units) in source_map {
            if units.len() > 1 {
                units.sort_by(|a, b| a.pkg.version().cmp(b.pkg.version()));
                // Remove any entries with version < newest.
                let newest_version = units.last().unwrap().pkg.version().clone();
                let keep_units = remove(units, "older version", &|unit| {
                    unit.pkg.version() < &newest_version
                });
                remaining_units.extend(keep_units);
            } else {
                remaining_units.extend(units);
            }
        }
        if remaining_units.len() == 1 {
            continue;
        }
        // Are there other heuristics to remove duplicates that would make
        // sense? Maybe prefer path sources over all others?
    }
    // Also remove units from the unit_deps so there aren't any dangling edges.
    for unit_deps in unit_graph.values_mut() {
        unit_deps.retain(|unit_dep| !removed_units.contains(&unit_dep.unit));
    }
    // Remove any orphan units that were detached from the graph.
    let mut visited = HashSet::new();
    fn visit(unit: &Unit, graph: &UnitGraph, visited: &mut HashSet<Unit>) {
        if !visited.insert(unit.clone()) {
            return;
        }
        for dep in &graph[unit] {
            visit(&dep.unit, graph, visited);
        }
    }
    for unit in root_units {
        visit(unit, unit_graph, &mut visited);
    }
    unit_graph.retain(|unit, _| visited.contains(unit));
}

/// Override crate types for given units.
///
/// This is primarily used by `cargo rustc --crate-type`.
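///
/// For example, `cargo rustc --lib --crate-type cdylib` rebuilds the selected
/// library unit with its target kind replaced by
/// `TargetKind::Lib(vec![CrateType::Cdylib])`.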
fn override_rustc_crate_types(
    units: &mut [Unit],
    args: &[String],
    interner: &UnitInterner,
) -> CargoResult<()> {
    if units.len() != 1 {
        anyhow::bail!(
            "crate types to rustc can only be passed to one \
            target, consider filtering\nthe package by passing, \
            e.g., `--lib` or `--example` to specify a single target"
        );
    }

    let unit = &units[0];
    let override_unit = |f: fn(Vec<CrateType>) -> TargetKind| {
        let crate_types = args.iter().map(|s| s.into()).collect();
        let mut target = unit.target.clone();
        target.set_kind(f(crate_types));
        interner.intern(
            &unit.pkg,
            &target,
            unit.profile.clone(),
            unit.kind,
            unit.mode,
            unit.features.clone(),
            unit.rustflags.clone(),
            unit.rustdocflags.clone(),
            unit.links_overrides.clone(),
            unit.is_std,
            unit.dep_hash,
            unit.artifact,
            unit.artifact_target_for_features,
        )
    };
    units[0] = match unit.target.kind() {
        TargetKind::Lib(_) => override_unit(TargetKind::Lib),
        TargetKind::ExampleLib(_) => override_unit(TargetKind::ExampleLib),
        _ => {
            anyhow::bail!(
                "crate types can only be specified for libraries and example libraries.\n\
                Binaries, tests, and benchmarks are always the `bin` crate type"
            );
        }
    };

    Ok(())
}

/// Gets all of the features enabled for a package, plus its dependencies'
/// features.
///
/// Dependencies are added as `dep_name/feat_name` because `required-features`
/// wants to support that syntax.
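///
/// A hedged sketch of the result shape (`std` and `serde`/`derive` are
/// illustrative names only):
///
/// ```ignore
/// let features = resolve_all_features(&resolve, &resolved_features, &pkg_set, pkg_id);
/// // Own features come through plainly, dependency features as `dep/feat`.
/// assert!(features.contains("std"));
/// assert!(features.contains("serde/derive"));
/// ```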
pub fn resolve_all_features(
    resolve_with_overrides: &Resolve,
    resolved_features: &features::ResolvedFeatures,
    package_set: &PackageSet<'_>,
    package_id: PackageId,
) -> HashSet<String> {
    let mut features: HashSet<String> = resolved_features
        .activated_features(package_id, FeaturesFor::NormalOrDev)
        .iter()
        .map(|s| s.to_string())
        .collect();

    // Include features enabled for use by dependencies so targets can also use them with the
    // required-features field when deciding whether to be built or skipped.
    for (dep_id, deps) in resolve_with_overrides.deps(package_id) {
        let is_proc_macro = package_set
            .get_one(dep_id)
            .expect("packages downloaded")
            .proc_macro();
        for dep in deps {
            let features_for = FeaturesFor::from_for_host(is_proc_macro || dep.is_build());
            for feature in resolved_features
                .activated_features_unverified(dep_id, features_for)
                .unwrap_or_default()
            {
                features.insert(format!("{}/{}", dep.name_in_toml(), feature));
            }
        }
    }

    features
}