// bootstrap/core/build_steps/perf.rs

1use std::fmt::{Display, Formatter};
2
3use crate::core::build_steps::compile::{Std, Sysroot};
4use crate::core::build_steps::tool::{RustcPerf, Rustdoc};
5use crate::core::builder::Builder;
6use crate::core::config::DebuginfoLevel;
7use crate::utils::exec::{BootstrapCommand, command};
8
// Top-level CLI arguments for the bootstrap `perf` command. The actual work is
// selected via the `PerfCommand` subcommand and dispatched by `perf()` below.
// (Plain `//` comments are used here so the generated `--help` text is unchanged.)
#[derive(Debug, Clone, clap::Parser)]
pub struct PerfArgs {
    #[clap(subcommand)]
    cmd: PerfCommand,
}
14
// Subcommands of `perf`. The first four wrap the `rustc-perf` collector's
// `profile_local`/`bench_local` modes (see `perf()` below); `Compare` only reads
// previously recorded results. All `///` doc comments double as clap help text,
// so they are left untouched.
#[derive(Debug, Clone, clap::Parser)]
enum PerfCommand {
    /// Run `profile_local eprintln`.
    /// This executes the compiler on the given benchmarks and stores its stderr output.
    Eprintln {
        #[clap(flatten)]
        opts: SharedOpts,
    },
    /// Run `profile_local samply`
    /// This executes the compiler on the given benchmarks and profiles it with `samply`.
    /// You need to install `samply`, e.g. using `cargo install samply`.
    Samply {
        #[clap(flatten)]
        opts: SharedOpts,
    },
    /// Run `profile_local cachegrind`.
    /// This executes the compiler on the given benchmarks under `Cachegrind`.
    Cachegrind {
        #[clap(flatten)]
        opts: SharedOpts,
    },
    /// Run compile benchmarks with a locally built compiler.
    Benchmark {
        /// Identifier to associate benchmark results with
        #[clap(name = "benchmark-id")]
        id: String,

        #[clap(flatten)]
        opts: SharedOpts,
    },
    /// Compare the results of two previously executed benchmark runs.
    Compare {
        /// The name of the base artifact to be compared.
        base: String,

        /// The name of the modified artifact to be compared.
        modified: String,
    },
}
54
55impl PerfCommand {
56    fn shared_opts(&self) -> Option<&SharedOpts> {
57        match self {
58            PerfCommand::Eprintln { opts, .. }
59            | PerfCommand::Samply { opts, .. }
60            | PerfCommand::Cachegrind { opts, .. }
61            | PerfCommand::Benchmark { opts, .. } => Some(opts),
62            PerfCommand::Compare { .. } => None,
63        }
64    }
65}
66
// Benchmark-selection options shared by every subcommand except `Compare`.
// They are forwarded to the `rustc-perf` collector as `--include`, `--exclude`,
// `--profiles` and `--scenarios` flags by `apply_shared_opts` below.
// (The `///` lines are clap help text and are left byte-identical.)
#[derive(Debug, Clone, clap::Parser)]
struct SharedOpts {
    /// Select the benchmarks that you want to run (separated by commas).
    /// If unspecified, all benchmarks will be executed.
    #[clap(long, global = true, value_delimiter = ',')]
    include: Vec<String>,

    /// Select the benchmarks matching a prefix in this comma-separated list that you don't want to run.
    #[clap(long, global = true, value_delimiter = ',')]
    exclude: Vec<String>,

    /// Select the scenarios that should be benchmarked.
    #[clap(
        long,
        global = true,
        value_delimiter = ',',
        default_value = "Full,IncrFull,IncrUnchanged,IncrPatched"
    )]
    scenarios: Vec<Scenario>,
    /// Select the profiles that should be benchmarked.
    #[clap(long, global = true, value_delimiter = ',', default_value = "Check,Debug,Opt")]
    profiles: Vec<Profile>,
}
90
// Compilation profiles that can be benchmarked/profiled. The PascalCase names
// are what `--profiles` parses on the CLI, and the `Display` impl below renders
// the exact same strings when the values are passed on to the collector.
#[derive(Clone, Copy, Debug, PartialEq, clap::ValueEnum)]
#[value(rename_all = "PascalCase")]
pub enum Profile {
    Check,
    Debug,
    Doc,
    Opt,
    Clippy,
}
100
101impl Display for Profile {
102    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
103        let name = match self {
104            Profile::Check => "Check",
105            Profile::Debug => "Debug",
106            Profile::Doc => "Doc",
107            Profile::Opt => "Opt",
108            Profile::Clippy => "Clippy",
109        };
110        f.write_str(name)
111    }
112}
113
// Incremental-compilation scenarios that can be benchmarked (parsed from the
// `--scenarios` flag). As with `Profile`, the PascalCase names are rendered
// back verbatim by the `Display` impl below.
#[derive(Clone, Copy, Debug, clap::ValueEnum)]
#[value(rename_all = "PascalCase")]
pub enum Scenario {
    Full,
    IncrFull,
    IncrUnchanged,
    IncrPatched,
}
122
123impl Display for Scenario {
124    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
125        let name = match self {
126            Scenario::Full => "Full",
127            Scenario::IncrFull => "IncrFull",
128            Scenario::IncrUnchanged => "IncrUnchanged",
129            Scenario::IncrPatched => "IncrPatched",
130        };
131        f.write_str(name)
132    }
133}
134
135/// Performs profiling using `rustc-perf` on a built version of the compiler.
/// Performs profiling using `rustc-perf` on a built version of the compiler.
pub fn perf(builder: &Builder<'_>, args: &PerfArgs) {
    // Build the `rustc-perf` collector binary with the stage 0 compiler; every
    // subcommand below is driven through this executable.
    let collector = builder.ensure(RustcPerf {
        compiler: builder.compiler(0, builder.config.build),
        target: builder.config.build,
    });

    // The three profiling subcommands inspect the compiler itself and benefit
    // from debuginfo; `Benchmark`/`Compare` only record and compare results.
    let is_profiling = match &args.cmd {
        PerfCommand::Eprintln { .. }
        | PerfCommand::Samply { .. }
        | PerfCommand::Cachegrind { .. } => true,
        PerfCommand::Benchmark { .. } | PerfCommand::Compare { .. } => false,
    };
    if is_profiling && builder.build.config.rust_debuginfo_level_rustc == DebuginfoLevel::None {
        builder.info(r#"WARNING: You are compiling rustc without debuginfo, this will make profiling less useful.
Consider setting `rust.debuginfo-level = 1` in `config.toml`."#);
    }

    // The compiler under test: the one produced at the requested top stage,
    // together with a standard library for it to compile benchmarks against.
    let compiler = builder.compiler(builder.top_stage, builder.config.build);
    builder.ensure(Std::new(compiler, builder.config.build));

    // The `Doc` profile invokes rustdoc, so make sure it has been built too.
    if let Some(opts) = args.cmd.shared_opts() {
        if opts.profiles.contains(&Profile::Doc) {
            builder.ensure(Rustdoc { compiler });
        }
    }

    let sysroot = builder.ensure(Sysroot::new(compiler));
    let rustc = sysroot.join("bin/rustc");

    // Keep all collector output (profiles and the results database) under
    // bootstrap's temp directory.
    let rustc_perf_dir = builder.build.tempdir().join("rustc-perf");
    let results_dir = rustc_perf_dir.join("results");
    builder.create_dir(&results_dir);

    let mut cmd = command(collector);

    // We need to set the working directory to `src/tools/rustc-perf`, so that it can find the directory
    // with compile-time benchmarks.
    cmd.current_dir(builder.src.join("src/tools/rustc-perf"));

    let db_path = results_dir.join("results.db");

    match &args.cmd {
        PerfCommand::Eprintln { opts }
        | PerfCommand::Samply { opts }
        | PerfCommand::Cachegrind { opts } => {
            cmd.arg("profile_local");
            // Pick the `profile_local` backend matching the subcommand. The
            // wildcard arm cannot be hit: the outer match already restricted
            // `args.cmd` to these three variants.
            cmd.arg(match &args.cmd {
                PerfCommand::Eprintln { .. } => "eprintln",
                PerfCommand::Samply { .. } => "samply",
                PerfCommand::Cachegrind { .. } => "cachegrind",
                _ => unreachable!(),
            });

            cmd.arg("--out-dir").arg(&results_dir);
            cmd.arg(rustc);

            apply_shared_opts(&mut cmd, opts);
            cmd.run(builder);

            println!("You can find the results at `{}`", &results_dir.display());
        }
        PerfCommand::Benchmark { id, opts } => {
            // Record benchmark timings under the given identifier in the
            // results database.
            cmd.arg("bench_local");
            cmd.arg("--db").arg(&db_path);
            cmd.arg("--id").arg(id);
            cmd.arg(rustc);

            apply_shared_opts(&mut cmd, opts);
            cmd.run(builder);
        }
        PerfCommand::Compare { base, modified } => {
            // Compare two artifact ids previously recorded by `Benchmark`.
            cmd.arg("bench_cmp");
            cmd.arg("--db").arg(&db_path);
            cmd.arg(base).arg(modified);

            cmd.run(builder);
        }
    }
}
215
216fn apply_shared_opts(cmd: &mut BootstrapCommand, opts: &SharedOpts) {
217    if !opts.include.is_empty() {
218        cmd.arg("--include").arg(opts.include.join(","));
219    }
220    if !opts.exclude.is_empty() {
221        cmd.arg("--exclude").arg(opts.exclude.join(","));
222    }
223    if !opts.profiles.is_empty() {
224        cmd.arg("--profiles")
225            .arg(opts.profiles.iter().map(|p| p.to_string()).collect::<Vec<_>>().join(","));
226    }
227    if !opts.scenarios.is_empty() {
228        cmd.arg("--scenarios")
229            .arg(opts.scenarios.iter().map(|p| p.to_string()).collect::<Vec<_>>().join(","));
230    }
231}