use std::ffi::OsStr;
use std::fs::File;
use std::io::BufReader;
use std::path::Path;
use std::path::PathBuf;

use annotate_snippets::Level;
use anyhow::Context as _;
use cargo_util::paths;
use indexmap::IndexMap;
use indexmap::map::Entry;
use itertools::Itertools as _;
use tempfile::TempDir;

use crate::AlreadyPrintedError;
use crate::CargoResult;
use crate::GlobalContext;
use crate::core::Workspace;
use crate::core::compiler::CompileMode;
use crate::core::compiler::timings::CompilationSection;
use crate::core::compiler::timings::UnitData;
use crate::core::compiler::timings::report::RenderContext;
use crate::core::compiler::timings::report::aggregate_sections;
use crate::core::compiler::timings::report::compute_concurrency;
use crate::core::compiler::timings::report::round_to_centisecond;
use crate::core::compiler::timings::report::write_html;
use crate::util::BuildLogger;
use crate::util::important_paths::find_root_manifest_for_wd;
use crate::util::log_message::LogMessage;
use crate::util::logger::RunId;
use crate::util::style;

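/// Options for [`report_timings`].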
pub struct ReportTimingsOptions<'gctx> {
    pub open_result: bool,
    pub gctx: &'gctx GlobalContext,
}

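/// Per-unit state accumulated while replaying a build log.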
struct UnitEntry {
    data: UnitData,
    sections: IndexMap<String, CompilationSection>,
    rmeta_time: Option<f64>,
}

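/// Renders an HTML timing report from a previously recorded build log, prints
/// where the report was saved, and optionally opens it.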
pub fn report_timings(gctx: &GlobalContext, opts: ReportTimingsOptions<'_>) -> CargoResult<()> {
    let ws = find_root_manifest_for_wd(gctx.cwd())
        .ok()
        .and_then(|manifest_path| Workspace::new(&manifest_path, gctx).ok());
    let Some((log, run_id)) = select_log_file(gctx, ws.as_ref())? else {
        let title_extra = if let Some(ws) = ws {
            format!(" for workspace at `{}`", ws.root().display())
        } else {
            String::new()
        };
        let title = format!("no build log files found{title_extra}");
        let note = "run a command with `-Z build-analysis` to generate log files";
        let report = [Level::ERROR
            .primary_title(title)
            .element(Level::NOTE.message(note))];
        gctx.shell().print_report(&report, false)?;
        return Err(AlreadyPrintedError::new(anyhow::anyhow!("")).into());
    };

    let ctx = prepare_context(&log, &run_id)
        .with_context(|| format!("failed to analyze log at `{}`", log.display()))?;

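    // Pick where to write the report: the workspace's `target/cargo-timings`
    // directory when a workspace was found, otherwise the directory named by
    // `__CARGO_TEST_REPORT_TIMINGS_TEMPDIR` (if set) or a kept temporary directory.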
    let reports_dir = if let Some(ws) = ws {
        let target_dir = ws.target_dir();
        let target_dir = target_dir.as_path_unlocked();
        paths::create_dir_all_excluded_from_backups_atomic(target_dir)?;
        let timings_dir = target_dir.join("cargo-timings");
        paths::create_dir_all(&timings_dir)?;
        timings_dir
    } else if let Ok(path) = gctx.get_env("__CARGO_TEST_REPORT_TIMINGS_TEMPDIR") {
        PathBuf::from(path.to_owned())
    } else {
        TempDir::with_prefix("cargo-timings-")?.keep()
    };

    let timing_path = reports_dir.join(format!("cargo-timing-{run_id}.html"));

    let mut out_file = std::fs::OpenOptions::new()
        .write(true)
        .create(true)
        .truncate(true)
        .open(&timing_path)
        .with_context(|| format!("failed to open `{}`", timing_path.display()))?;

    write_html(ctx, &mut out_file)?;

    let link = gctx.shell().err_file_hyperlink(&timing_path);
    let msg = format!("report saved to {link}{}{link:#}", timing_path.display());
    gctx.shell()
        .status_with_color("Timing", msg, &style::NOTE)?;

    if opts.open_result {
        crate::util::open::open(&timing_path, gctx)?;
    }

    Ok(())
}

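/// Finds a build log to analyze in the `log` directory under cargo home.
///
/// Returns the log path and its parsed [`RunId`]; when a workspace is given,
/// only runs recorded for that workspace are considered. Returns `Ok(None)` if
/// no matching log exists.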
fn select_log_file(
    gctx: &GlobalContext,
    ws: Option<&Workspace<'_>>,
) -> CargoResult<Option<(PathBuf, RunId)>> {
    let log_dir = gctx.home().join("log");
    let log_dir = log_dir.as_path_unlocked();

    if !log_dir.exists() {
        return Ok(None);
    }

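    // Walk the log directory in reverse file-name order, keeping only `*.jsonl`
    // files whose stem parses as a run id.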
    let mut walk = walkdir::WalkDir::new(log_dir)
        .follow_links(true)
        .sort_by(|a, b| a.file_name().cmp(b.file_name()).reverse())
        .min_depth(1)
        .max_depth(1)
        .into_iter()
        .filter_map(|entry| {
            let entry = entry.ok()?;
            let path = entry.path();

            if !entry.file_type().is_file() {
                return None;
            }
            if entry.path().extension() != Some(OsStr::new("jsonl")) {
                return None;
            }

            let run_id = path.file_stem()?.to_str()?.parse::<RunId>().ok()?;
            Some((entry, run_id))
        });

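    // Take the first log in the (reverse-sorted) listing; when a workspace is
    // known, skip ahead to the first run that belongs to that workspace.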
    let item = if let Some(ws) = ws {
        let ws_run_id = BuildLogger::generate_run_id(ws);
        walk.skip_while(|(_, run_id)| !run_id.same_workspace(&ws_run_id))
            .next()
    } else {
        walk.next()
    };

    Ok(item.map(|(entry, run_id)| (entry.into_path(), run_id)))
}

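/// Builds a [`RenderContext`] by replaying the JSON-lines build log at `log`.
///
/// Fails if the log cannot be read or contains no usable timing data.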
fn prepare_context(log: &Path, run_id: &RunId) -> CargoResult<RenderContext<'static>> {
    let reader = BufReader::new(File::open(&log)?);

    let mut ctx = RenderContext {
        start_str: run_id.timestamp().to_string(),
        root_units: Default::default(),
        profile: Default::default(),
        total_fresh: Default::default(),
        total_dirty: Default::default(),
        unit_data: Default::default(),
        concurrency: Default::default(),
        cpu_usage: Default::default(),
        rustc_version: Default::default(),
        host: Default::default(),
        requested_targets: Default::default(),
        jobs: 0,
        num_cpus: None,
        error: &None,
    };
    let mut units: IndexMap<_, UnitEntry> = IndexMap::new();

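    // Replay the log one JSON-lines message at a time; malformed lines are
    // logged and skipped instead of failing the whole report.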
    for (log_index, result) in serde_json::Deserializer::from_reader(reader)
        .into_iter::<LogMessage>()
        .enumerate()
    {
        let msg = match result {
            Ok(msg) => msg,
            Err(e) => {
                tracing::warn!("failed to parse log message at index {log_index}: {e}");
                continue;
            }
        };

        match msg {
            LogMessage::BuildStarted {
                cwd: _,
                host,
                jobs,
                num_cpus,
                profile,
                rustc_version,
                rustc_version_verbose: _,
                target_dir: _,
                workspace_root: _,
            } => {
                ctx.host = host;
                ctx.jobs = jobs;
                ctx.num_cpus = num_cpus;
                ctx.profile = profile;
                ctx.rustc_version = rustc_version;
            }
            LogMessage::UnitStarted {
                package_id,
                target,
                mode,
                index,
                elapsed,
            } => {
                let version = package_id
                    .version()
                    .map(|v| v.to_string())
                    .unwrap_or_else(|| "N/A".into());

                let mut target_str = if target.kind == "lib" && mode == CompileMode::Build {
                    "".to_string()
                } else if target.kind == "build-script" {
                    " build-script".to_string()
                } else {
                    format!(r#" {} "{}""#, target.name, target.kind)
                };

                match mode {
                    CompileMode::Test => target_str.push_str(" (test)"),
                    CompileMode::Build => {}
                    CompileMode::Check { test: true } => target_str.push_str(" (check-test)"),
                    CompileMode::Check { test: false } => target_str.push_str(" (check)"),
                    CompileMode::Doc { .. } => target_str.push_str(" (doc)"),
                    CompileMode::Doctest => target_str.push_str(" (doc test)"),
                    CompileMode::Docscrape => target_str.push_str(" (doc scrape)"),
                    CompileMode::RunCustomBuild => target_str.push_str(" (run)"),
                }

                let mode_str = if mode.is_run_custom_build() {
                    "run-custom-build"
                } else {
                    "todo"
                };

                let data = UnitData {
                    i: index,
                    name: package_id.name().to_string(),
                    version,
                    mode: mode_str.to_owned(),
                    target: target_str,
                    features: Vec::new(),
                    start: elapsed,
                    duration: 0.0,
                    unblocked_units: Vec::new(),
                    unblocked_rmeta_units: Vec::new(),
                    sections: None,
                };

                units.insert(
                    index,
                    UnitEntry {
                        data,
                        sections: IndexMap::new(),
                        rmeta_time: None,
                    },
                );
            }
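            // The unit's metadata (`.rmeta`) is finished: record when that
            // happened and which units it unblocked. `duration` is provisional
            // here and is overwritten again when a `UnitFinished` message arrives.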
            LogMessage::UnitRmetaFinished {
                index,
                elapsed,
                unblocked,
            } => match units.entry(index) {
                Entry::Occupied(mut e) => {
                    let elapsed = f64::max(elapsed - e.get().data.start, 0.0);
                    e.get_mut().data.unblocked_rmeta_units = unblocked;
                    e.get_mut().data.duration = elapsed;
                    e.get_mut().rmeta_time = Some(elapsed);
                }
                Entry::Vacant(_) => {
                    tracing::warn!(
                        "section `frontend` ended, but unit {index} has no start recorded"
                    )
                }
            },
            LogMessage::UnitSectionStarted {
                index,
                elapsed,
                section,
            } => match units.entry(index) {
                Entry::Occupied(mut e) => {
                    let elapsed = f64::max(elapsed - e.get().data.start, 0.0);
                    if e.get_mut()
                        .sections
                        .insert(
                            section.clone(),
                            CompilationSection {
                                start: elapsed,
                                end: None,
                            },
                        )
                        .is_some()
                    {
                        tracing::warn!(
                            "section `{section}` for unit {index} started more than once",
                        );
                    }
                }
                Entry::Vacant(_) => {
                    tracing::warn!(
                        "section `{section}` started, but unit {index} has no start recorded"
                    )
                }
            },
            LogMessage::UnitSectionFinished {
                index,
                elapsed,
                section,
            } => match units.entry(index) {
                Entry::Occupied(mut e) => {
                    let elapsed = f64::max(elapsed - e.get().data.start, 0.0);
                    if let Some(section) = e.get_mut().sections.get_mut(&section) {
                        section.end = Some(elapsed);
                    } else {
                        tracing::warn!(
                            "section `{section}` for unit {index} ended, but it has no start recorded"
                        );
                    }
                }
                Entry::Vacant(_) => {
                    tracing::warn!(
                        "section `{section}` ended, but unit {index} has no start recorded"
                    )
                }
            },
            LogMessage::UnitFinished {
                index,
                elapsed,
                unblocked,
            } => match units.entry(index) {
                Entry::Occupied(mut e) => {
                    let elapsed = f64::max(elapsed - e.get().data.start, 0.0);
                    e.get_mut().data.duration = elapsed;
                    e.get_mut().data.unblocked_units = unblocked;
                }
                Entry::Vacant(_) => {
                    tracing::warn!("unit {index} ended, but it has no start recorded");
                }
            },
            _ => {}
        }
    }

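    // Fold the accumulated per-unit state into render data: aggregate section
    // timings, round times to centiseconds, and sort units by start time.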
    let unit_data: Vec<_> = units
        .into_values()
        .map(
            |UnitEntry {
                 mut data,
                 sections,
                 rmeta_time,
             }| {
                data.sections = aggregate_sections(sections, data.duration, rmeta_time);
                data.start = round_to_centisecond(data.start);
                data.duration = round_to_centisecond(data.duration);
                data
            },
        )
        .sorted_unstable_by(|a, b| a.start.partial_cmp(&b.start).unwrap())
        .collect();

    if unit_data.is_empty() {
        anyhow::bail!("no timing data found in log");
    }

    ctx.unit_data = unit_data;
    ctx.concurrency = compute_concurrency(&ctx.unit_data);

    Ok(ctx)
}