use std::ffi::{CStr, CString};
use std::io::{self, Write};
use std::path::{Path, PathBuf};
use std::ptr::null_mut;
use std::sync::Arc;
use std::{fs, slice, str};

use libc::{c_char, c_int, c_void, size_t};
use rustc_codegen_ssa::back::link::ensure_removed;
use rustc_codegen_ssa::back::versioned_llvm_target;
use rustc_codegen_ssa::back::write::{
    BitcodeSection, CodegenContext, EmitObj, InlineAsmError, ModuleConfig,
    TargetMachineFactoryConfig, TargetMachineFactoryFn,
};
use rustc_codegen_ssa::base::wants_wasm_eh;
use rustc_codegen_ssa::traits::*;
use rustc_codegen_ssa::{CompiledModule, ModuleCodegen, ModuleKind};
use rustc_data_structures::profiling::SelfProfilerRef;
use rustc_data_structures::small_c_str::SmallCStr;
use rustc_errors::{DiagCtxtHandle, Level};
use rustc_fs_util::{link_or_copy, path_to_c_string};
use rustc_middle::ty::TyCtxt;
use rustc_session::Session;
use rustc_session::config::{self, Lto, OutputType, Passes, SplitDwarfKind, SwitchWithOptPath};
use rustc_span::{BytePos, InnerSpan, Pos, RemapPathScopeComponents, SpanData, SyntaxContext, sym};
use rustc_target::spec::{
    Arch, CodeModel, FloatAbi, RelocModel, SanitizerSet, SplitDebuginfo, TlsModel,
};
use tracing::{debug, trace};

use crate::back::lto::ThinBuffer;
use crate::back::owned_target_machine::OwnedTargetMachine;
use crate::back::profiling::{
    LlvmSelfProfiler, selfprofile_after_pass_callback, selfprofile_before_pass_callback,
};
use crate::common::AsCCharPtr;
use crate::errors::{
    CopyBitcode, FromLlvmDiag, FromLlvmOptimizationDiag, LlvmError, UnknownCompression,
    WithLlvmError, WriteBytecode,
};
use crate::llvm::diagnostic::OptimizationDiagnosticKind::*;
use crate::llvm::{self, DiagnosticInfo};
use crate::type_::llvm_type_ptr;
use crate::{LlvmCodegenBackend, ModuleLlvm, SimpleCx, attributes, base, common, llvm_util};

pub(crate) fn llvm_err<'a>(dcx: DiagCtxtHandle<'_>, err: LlvmError<'a>) -> ! {
    match llvm::last_error() {
        Some(llvm_err) => dcx.emit_fatal(WithLlvmError(err, llvm_err)),
        None => dcx.emit_fatal(err),
    }
}

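/// Invokes the `TargetMachine` to emit `m` as an object or assembly file at `output`
/// (plus an optional split-DWARF companion at `dwo_output`), records the resulting
/// artifact sizes for self-profiling, and reports any LLVM failure through `dcx`.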
fn write_output_file<'ll>(
    dcx: DiagCtxtHandle<'_>,
    target: &'ll llvm::TargetMachine,
    no_builtins: bool,
    m: &'ll llvm::Module,
    output: &Path,
    dwo_output: Option<&Path>,
    file_type: llvm::FileType,
    self_profiler_ref: &SelfProfilerRef,
    verify_llvm_ir: bool,
) {
    debug!("write_output_file output={:?} dwo_output={:?}", output, dwo_output);
    let output_c = path_to_c_string(output);
    let dwo_output_c;
    let dwo_output_ptr = if let Some(dwo_output) = dwo_output {
        dwo_output_c = path_to_c_string(dwo_output);
        dwo_output_c.as_ptr()
    } else {
        std::ptr::null()
    };
    let result = unsafe {
        let pm = llvm::LLVMCreatePassManager();
        llvm::LLVMAddAnalysisPasses(target, pm);
        llvm::LLVMRustAddLibraryInfo(target, pm, m, no_builtins);
        llvm::LLVMRustWriteOutputFile(
            target,
            pm,
            m,
            output_c.as_ptr(),
            dwo_output_ptr,
            file_type,
            verify_llvm_ir,
        )
    };

    if result == llvm::LLVMRustResult::Success {
        let artifact_kind = match file_type {
            llvm::FileType::ObjectFile => "object_file",
            llvm::FileType::AssemblyFile => "assembly_file",
        };
        record_artifact_size(self_profiler_ref, artifact_kind, output);
        if let Some(dwo_file) = dwo_output {
            record_artifact_size(self_profiler_ref, "dwo_file", dwo_file);
        }
    }

    result.into_result().unwrap_or_else(|()| llvm_err(dcx, LlvmError::WriteOutput { path: output }))
}

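/// Creates a target machine that is not tied to any codegen unit: it carries no
/// output paths and uses `OptLevel::No`, so it is suited to querying target
/// properties rather than emitting code.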
pub(crate) fn create_informational_target_machine(
    sess: &Session,
    only_base_features: bool,
) -> OwnedTargetMachine {
    let config = TargetMachineFactoryConfig { split_dwarf_file: None, output_obj_file: None };
    let features = llvm_util::global_llvm_features(sess, only_base_features);
    target_machine_factory(sess, config::OptLevel::No, &features)(config)
        .unwrap_or_else(|err| llvm_err(sess.dcx(), err))
}

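/// Creates the target machine used to compile a single codegen unit, wiring up the
/// per-CGU object path and, when the target supports it, the split-DWARF output path.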
pub(crate) fn create_target_machine(tcx: TyCtxt<'_>, mod_name: &str) -> OwnedTargetMachine {
    let split_dwarf_file = if tcx.sess.target_can_use_split_dwarf() {
        tcx.output_filenames(()).split_dwarf_path(
            tcx.sess.split_debuginfo(),
            tcx.sess.opts.unstable_opts.split_dwarf_kind,
            mod_name,
            tcx.sess.invocation_temp.as_deref(),
        )
    } else {
        None
    };

    let output_obj_file = Some(tcx.output_filenames(()).temp_path_for_cgu(
        OutputType::Object,
        mod_name,
        tcx.sess.invocation_temp.as_deref(),
    ));
    let config = TargetMachineFactoryConfig { split_dwarf_file, output_obj_file };

    target_machine_factory(
        tcx.sess,
        tcx.backend_optimization_level(()),
        tcx.global_backend_features(()),
    )(config)
    .unwrap_or_else(|err| llvm_err(tcx.dcx(), err))
}

fn to_llvm_opt_settings(cfg: config::OptLevel) -> (llvm::CodeGenOptLevel, llvm::CodeGenOptSize) {
    use self::config::OptLevel::*;
    match cfg {
        No => (llvm::CodeGenOptLevel::None, llvm::CodeGenOptSizeNone),
        Less => (llvm::CodeGenOptLevel::Less, llvm::CodeGenOptSizeNone),
        More => (llvm::CodeGenOptLevel::Default, llvm::CodeGenOptSizeNone),
        Aggressive => (llvm::CodeGenOptLevel::Aggressive, llvm::CodeGenOptSizeNone),
        Size => (llvm::CodeGenOptLevel::Default, llvm::CodeGenOptSizeDefault),
        SizeMin => (llvm::CodeGenOptLevel::Default, llvm::CodeGenOptSizeAggressive),
    }
}

fn to_pass_builder_opt_level(cfg: config::OptLevel) -> llvm::PassBuilderOptLevel {
    use config::OptLevel::*;
    match cfg {
        No => llvm::PassBuilderOptLevel::O0,
        Less => llvm::PassBuilderOptLevel::O1,
        More => llvm::PassBuilderOptLevel::O2,
        Aggressive => llvm::PassBuilderOptLevel::O3,
        Size => llvm::PassBuilderOptLevel::Os,
        SizeMin => llvm::PassBuilderOptLevel::Oz,
    }
}

fn to_llvm_relocation_model(relocation_model: RelocModel) -> llvm::RelocModel {
    match relocation_model {
        RelocModel::Static => llvm::RelocModel::Static,
        RelocModel::Pic | RelocModel::Pie => llvm::RelocModel::PIC,
        RelocModel::DynamicNoPic => llvm::RelocModel::DynamicNoPic,
        RelocModel::Ropi => llvm::RelocModel::ROPI,
        RelocModel::Rwpi => llvm::RelocModel::RWPI,
        RelocModel::RopiRwpi => llvm::RelocModel::ROPI_RWPI,
    }
}

pub(crate) fn to_llvm_code_model(code_model: Option<CodeModel>) -> llvm::CodeModel {
    match code_model {
        Some(CodeModel::Tiny) => llvm::CodeModel::Tiny,
        Some(CodeModel::Small) => llvm::CodeModel::Small,
        Some(CodeModel::Kernel) => llvm::CodeModel::Kernel,
        Some(CodeModel::Medium) => llvm::CodeModel::Medium,
        Some(CodeModel::Large) => llvm::CodeModel::Large,
        None => llvm::CodeModel::None,
    }
}

fn to_llvm_float_abi(float_abi: Option<FloatAbi>) -> llvm::FloatAbi {
    match float_abi {
        None => llvm::FloatAbi::Default,
        Some(FloatAbi::Soft) => llvm::FloatAbi::Soft,
        Some(FloatAbi::Hard) => llvm::FloatAbi::Hard,
    }
}

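/// Captures everything target-machine creation needs from the `Session` up front and
/// returns a closure that can later be called (including off the main thread) with
/// per-module paths to produce an `OwnedTargetMachine`.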
pub(crate) fn target_machine_factory(
    sess: &Session,
    optlvl: config::OptLevel,
    target_features: &[String],
) -> TargetMachineFactoryFn<LlvmCodegenBackend> {
    let _prof_timer = sess.prof.generic_activity("target_machine_factory");

    let reloc_model = to_llvm_relocation_model(sess.relocation_model());

    let (opt_level, _) = to_llvm_opt_settings(optlvl);
    let float_abi = if sess.target.arch == Arch::Arm && sess.opts.cg.soft_float {
        llvm::FloatAbi::Soft
    } else {
        to_llvm_float_abi(sess.target.llvm_floatabi)
    };

    let ffunction_sections =
        sess.opts.unstable_opts.function_sections.unwrap_or(sess.target.function_sections);
    let fdata_sections = ffunction_sections;
    let funique_section_names = !sess.opts.unstable_opts.no_unique_section_names;

    let code_model = to_llvm_code_model(sess.code_model());

    let mut singlethread = sess.target.singlethread;

    if singlethread && sess.target.is_like_wasm && sess.target_features.contains(&sym::atomics) {
        singlethread = false;
    }

    let triple = SmallCStr::new(&versioned_llvm_target(sess));
    let cpu = SmallCStr::new(llvm_util::target_cpu(sess));
    let features = CString::new(target_features.join(",")).unwrap();
    let abi = SmallCStr::new(&sess.target.llvm_abiname);
    let trap_unreachable =
        sess.opts.unstable_opts.trap_unreachable.unwrap_or(sess.target.trap_unreachable);
    let emit_stack_size_section = sess.opts.unstable_opts.emit_stack_sizes;

    let verbose_asm = sess.opts.unstable_opts.verbose_asm;
    let relax_elf_relocations =
        sess.opts.unstable_opts.relax_elf_relocations.unwrap_or(sess.target.relax_elf_relocations);

    let use_init_array =
        !sess.opts.unstable_opts.use_ctors_section.unwrap_or(sess.target.use_ctors_section);

    let path_mapping = sess.source_map().path_mapping().clone();
    let working_dir = sess.source_map().working_dir().clone();

    let use_emulated_tls = matches!(sess.tls_model(), TlsModel::Emulated);

    let debuginfo_compression = match sess.opts.debuginfo_compression {
        config::DebugInfoCompression::None => llvm::CompressionKind::None,
        config::DebugInfoCompression::Zlib => {
            if llvm::LLVMRustLLVMHasZlibCompression() {
                llvm::CompressionKind::Zlib
            } else {
                sess.dcx().emit_warn(UnknownCompression { algorithm: "zlib" });
                llvm::CompressionKind::None
            }
        }
        config::DebugInfoCompression::Zstd => {
            if llvm::LLVMRustLLVMHasZstdCompression() {
                llvm::CompressionKind::Zstd
            } else {
                sess.dcx().emit_warn(UnknownCompression { algorithm: "zstd" });
                llvm::CompressionKind::None
            }
        }
    };

    let use_wasm_eh = wants_wasm_eh(sess);

    let prof = SelfProfilerRef::clone(&sess.prof);
    Arc::new(move |config: TargetMachineFactoryConfig| {
        let _prof_timer = prof.generic_activity("target_machine_factory_inner");

        let path_to_cstring_helper = |path: Option<PathBuf>| -> CString {
            let path = path.unwrap_or_default();
            let path = path_mapping
                .to_real_filename(&working_dir, path)
                .path(RemapPathScopeComponents::DEBUGINFO)
                .to_string_lossy()
                .into_owned();
            CString::new(path).unwrap()
        };

        let split_dwarf_file = path_to_cstring_helper(config.split_dwarf_file);
        let output_obj_file = path_to_cstring_helper(config.output_obj_file);

        OwnedTargetMachine::new(
            &triple,
            &cpu,
            &features,
            &abi,
            code_model,
            reloc_model,
            opt_level,
            float_abi,
            ffunction_sections,
            fdata_sections,
            funique_section_names,
            trap_unreachable,
            singlethread,
            verbose_asm,
            emit_stack_size_section,
            relax_elf_relocations,
            use_init_array,
            &split_dwarf_file,
            &output_obj_file,
            debuginfo_compression,
            use_emulated_tls,
            use_wasm_eh,
        )
    })
}

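/// When `-C save-temps` is enabled, snapshots the module's bitcode to a
/// `<name>.bc` temp file for this codegen unit; otherwise does nothing.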
pub(crate) fn save_temp_bitcode(
    cgcx: &CodegenContext<LlvmCodegenBackend>,
    module: &ModuleCodegen<ModuleLlvm>,
    name: &str,
) {
    if !cgcx.save_temps {
        return;
    }
    let ext = format!("{name}.bc");
    let path = cgcx.output_filenames.temp_path_ext_for_cgu(
        &ext,
        &module.name,
        cgcx.invocation_temp.as_deref(),
    );
    write_bitcode_to_file(module, &path)
}

fn write_bitcode_to_file(module: &ModuleCodegen<ModuleLlvm>, path: &Path) {
    unsafe {
        let path = path_to_c_string(&path);
        let llmod = module.module_llvm.llmod();
        llvm::LLVMWriteBitcodeToFile(llmod, path.as_ptr());
    }
}

pub(crate) enum CodegenDiagnosticsStage {
    Opt,
    LTO,
    Codegen,
}

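/// Installs a diagnostic handler on the given LLVM context for the duration of a
/// codegen step and restores the previous handler on drop. `data` is a raw pointer
/// to the `(CodegenContext, DiagCtxtHandle)` pair handed to the C callback below.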
pub(crate) struct DiagnosticHandlers<'a> {
    data: *mut (&'a CodegenContext<LlvmCodegenBackend>, DiagCtxtHandle<'a>),
    llcx: &'a llvm::Context,
    old_handler: Option<&'a llvm::DiagnosticHandler>,
}

impl<'a> DiagnosticHandlers<'a> {
    pub(crate) fn new(
        cgcx: &'a CodegenContext<LlvmCodegenBackend>,
        dcx: DiagCtxtHandle<'a>,
        llcx: &'a llvm::Context,
        module: &ModuleCodegen<ModuleLlvm>,
        stage: CodegenDiagnosticsStage,
    ) -> Self {
        let remark_passes_all: bool;
        let remark_passes: Vec<CString>;
        match &cgcx.remark {
            Passes::All => {
                remark_passes_all = true;
                remark_passes = Vec::new();
            }
            Passes::Some(passes) => {
                remark_passes_all = false;
                remark_passes =
                    passes.iter().map(|name| CString::new(name.as_str()).unwrap()).collect();
            }
        };
        let remark_passes: Vec<*const c_char> =
            remark_passes.iter().map(|name: &CString| name.as_ptr()).collect();
        let remark_file = cgcx
            .remark_dir
            .as_ref()
            .map(|dir| {
                let stage_suffix = match stage {
                    CodegenDiagnosticsStage::Codegen => "codegen",
                    CodegenDiagnosticsStage::Opt => "opt",
                    CodegenDiagnosticsStage::LTO => "lto",
                };
                dir.join(format!("{}.{stage_suffix}.opt.yaml", module.name))
            })
            .and_then(|dir| dir.to_str().and_then(|p| CString::new(p).ok()));

        let pgo_available = cgcx.opts.cg.profile_use.is_some();
        let data = Box::into_raw(Box::new((cgcx, dcx)));
        unsafe {
            let old_handler = llvm::LLVMRustContextGetDiagnosticHandler(llcx);
            llvm::LLVMRustContextConfigureDiagnosticHandler(
                llcx,
                diagnostic_handler,
                data.cast(),
                remark_passes_all,
                remark_passes.as_ptr(),
                remark_passes.len(),
                remark_file.as_ref().map(|dir| dir.as_ptr()).unwrap_or(std::ptr::null()),
                pgo_available,
            );
            DiagnosticHandlers { data, llcx, old_handler }
        }
    }
}

impl<'a> Drop for DiagnosticHandlers<'a> {
    fn drop(&mut self) {
        unsafe {
            llvm::LLVMRustContextSetDiagnosticHandler(self.llcx, self.old_handler);
            drop(Box::from_raw(self.data));
        }
    }
}

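/// Converts an LLVM inline-asm diagnostic into an `InlineAsmError`. The 64-bit cookie
/// packs the span's lo/hi byte positions; a zero cookie, or any fat/thin LTO mode
/// (where byte offsets from another crate's source map would not line up), falls back
/// to a dummy span.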
fn report_inline_asm(
    cgcx: &CodegenContext<LlvmCodegenBackend>,
    msg: String,
    level: llvm::DiagnosticLevel,
    cookie: u64,
    source: Option<(String, Vec<InnerSpan>)>,
) -> InlineAsmError {
    let span = if cookie == 0 || matches!(cgcx.lto, Lto::Fat | Lto::Thin) {
        SpanData::default()
    } else {
        SpanData {
            lo: BytePos::from_u32(cookie as u32),
            hi: BytePos::from_u32((cookie >> 32) as u32),
            ctxt: SyntaxContext::root(),
            parent: None,
        }
    };
    let level = match level {
        llvm::DiagnosticLevel::Error => Level::Error,
        llvm::DiagnosticLevel::Warning => Level::Warning,
        llvm::DiagnosticLevel::Note | llvm::DiagnosticLevel::Remark => Level::Note,
    };
    let msg = msg.trim_prefix("error: ").to_string();
    InlineAsmError { span, msg, level, source }
}

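/// C-ABI callback registered through `LLVMRustContextConfigureDiagnosticHandler`.
/// Recovers the `(CodegenContext, DiagCtxtHandle)` pair from the user pointer and
/// routes each LLVM diagnostic to the corresponding rustc diagnostic.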
unsafe extern "C" fn diagnostic_handler(info: &DiagnosticInfo, user: *mut c_void) {
    if user.is_null() {
        return;
    }
    let (cgcx, dcx) =
        unsafe { *(user as *const (&CodegenContext<LlvmCodegenBackend>, DiagCtxtHandle<'_>)) };

    match unsafe { llvm::diagnostic::Diagnostic::unpack(info) } {
        llvm::diagnostic::InlineAsm(inline) => {
            cgcx.diag_emitter.inline_asm_error(report_inline_asm(
                cgcx,
                inline.message,
                inline.level,
                inline.cookie,
                inline.source,
            ));
        }

        llvm::diagnostic::Optimization(opt) => {
            dcx.emit_note(FromLlvmOptimizationDiag {
                filename: &opt.filename,
                line: opt.line,
                column: opt.column,
                pass_name: &opt.pass_name,
                kind: match opt.kind {
                    OptimizationRemark => "success",
                    OptimizationMissed | OptimizationFailure => "missed",
                    OptimizationAnalysis
                    | OptimizationAnalysisFPCommute
                    | OptimizationAnalysisAliasing => "analysis",
                    OptimizationRemarkOther => "other",
                },
                message: &opt.message,
            });
        }
        llvm::diagnostic::PGO(diagnostic_ref) | llvm::diagnostic::Linker(diagnostic_ref) => {
            let message = llvm::build_string(|s| unsafe {
                llvm::LLVMRustWriteDiagnosticInfoToString(diagnostic_ref, s)
            })
            .expect("non-UTF8 diagnostic");
            dcx.emit_warn(FromLlvmDiag { message });
        }
        llvm::diagnostic::Unsupported(diagnostic_ref) => {
            let message = llvm::build_string(|s| unsafe {
                llvm::LLVMRustWriteDiagnosticInfoToString(diagnostic_ref, s)
            })
            .expect("non-UTF8 diagnostic");
            dcx.emit_err(FromLlvmDiag { message });
        }
        llvm::diagnostic::UnknownDiagnostic(..) => {}
    }
}

fn get_pgo_gen_path(config: &ModuleConfig) -> Option<CString> {
    match config.pgo_gen {
        SwitchWithOptPath::Enabled(ref opt_dir_path) => {
            let path = if let Some(dir_path) = opt_dir_path {
                dir_path.join("default_%m.profraw")
            } else {
                PathBuf::from("default_%m.profraw")
            };

            Some(CString::new(format!("{}", path.display())).unwrap())
        }
        SwitchWithOptPath::Disabled => None,
    }
}

fn get_pgo_use_path(config: &ModuleConfig) -> Option<CString> {
    config
        .pgo_use
        .as_ref()
        .map(|path_buf| CString::new(path_buf.to_string_lossy().as_bytes()).unwrap())
}

fn get_pgo_sample_use_path(config: &ModuleConfig) -> Option<CString> {
    config
        .pgo_sample_use
        .as_ref()
        .map(|path_buf| CString::new(path_buf.to_string_lossy().as_bytes()).unwrap())
}

fn get_instr_profile_output_path(config: &ModuleConfig) -> Option<CString> {
    config.instrument_coverage.then(|| c"default_%m_%p.profraw".to_owned())
}

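/// Which phase of the autodiff (Enzyme) pipeline `llvm_optimize` is running in; used
/// to decide whether Enzyme itself should run and which optimizations to hold back
/// until differentiation has happened.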
#[derive(Debug, Eq, PartialEq)]
pub(crate) enum AutodiffStage {
    PreAD,
    DuringAD,
    PostAD,
}

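/// Runs the LLVM pass pipeline (`LLVMRustOptimize`) over `module`, threading through
/// the optimization level, LTO stage, sanitizer and PGO settings, autodiff/Enzyme
/// hooks, and optionally capturing a ThinLTO buffer for later embedding. On GPU-like
/// targets with device offload enabled it also rewrites offload kernels and bundles
/// the device images; on the host side it can embed a previously produced device
/// image into a separate `host.o` object.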
pub(crate) unsafe fn llvm_optimize(
    cgcx: &CodegenContext<LlvmCodegenBackend>,
    dcx: DiagCtxtHandle<'_>,
    module: &ModuleCodegen<ModuleLlvm>,
    thin_lto_buffer: Option<&mut *mut llvm::ThinLTOBuffer>,
    config: &ModuleConfig,
    opt_level: config::OptLevel,
    opt_stage: llvm::OptStage,
    autodiff_stage: AutodiffStage,
) {
    let consider_ad = config.autodiff.contains(&config::AutoDiff::Enable);
    let run_enzyme = autodiff_stage == AutodiffStage::DuringAD;
    let print_before_enzyme = config.autodiff.contains(&config::AutoDiff::PrintModBefore);
    let print_after_enzyme = config.autodiff.contains(&config::AutoDiff::PrintModAfter);
    let print_passes = config.autodiff.contains(&config::AutoDiff::PrintPasses);
    let merge_functions;
    let unroll_loops;
    let vectorize_slp;
    let vectorize_loop;

    if consider_ad && autodiff_stage != AutodiffStage::PostAD {
        merge_functions = false;
        unroll_loops = false;
        vectorize_slp = false;
        vectorize_loop = false;
    } else {
        unroll_loops =
            opt_level != config::OptLevel::Size && opt_level != config::OptLevel::SizeMin;
        merge_functions = config.merge_functions;
        vectorize_slp = config.vectorize_slp;
        vectorize_loop = config.vectorize_loop;
    }
    trace!(?unroll_loops, ?vectorize_slp, ?vectorize_loop, ?run_enzyme);
    if thin_lto_buffer.is_some() {
        assert!(
            matches!(
                opt_stage,
                llvm::OptStage::PreLinkNoLTO
                    | llvm::OptStage::PreLinkFatLTO
                    | llvm::OptStage::PreLinkThinLTO
            ),
            "the bitcode for LTO can only be obtained at the pre-link stage"
        );
    }
    let pgo_gen_path = get_pgo_gen_path(config);
    let pgo_use_path = get_pgo_use_path(config);
    let pgo_sample_use_path = get_pgo_sample_use_path(config);
    let is_lto = opt_stage == llvm::OptStage::ThinLTO || opt_stage == llvm::OptStage::FatLTO;
    let instr_profile_output_path = get_instr_profile_output_path(config);
    let sanitize_dataflow_abilist: Vec<_> = config
        .sanitizer_dataflow_abilist
        .iter()
        .map(|file| CString::new(file.as_str()).unwrap())
        .collect();
    let sanitize_dataflow_abilist_ptrs: Vec<_> =
        sanitize_dataflow_abilist.iter().map(|file| file.as_ptr()).collect();
    let sanitizer_options = if !is_lto {
        Some(llvm::SanitizerOptions {
            sanitize_address: config.sanitizer.contains(SanitizerSet::ADDRESS),
            sanitize_address_recover: config.sanitizer_recover.contains(SanitizerSet::ADDRESS),
            sanitize_cfi: config.sanitizer.contains(SanitizerSet::CFI),
            sanitize_dataflow: config.sanitizer.contains(SanitizerSet::DATAFLOW),
            sanitize_dataflow_abilist: sanitize_dataflow_abilist_ptrs.as_ptr(),
            sanitize_dataflow_abilist_len: sanitize_dataflow_abilist_ptrs.len(),
            sanitize_kcfi: config.sanitizer.contains(SanitizerSet::KCFI),
            sanitize_memory: config.sanitizer.contains(SanitizerSet::MEMORY),
            sanitize_memory_recover: config.sanitizer_recover.contains(SanitizerSet::MEMORY),
            sanitize_memory_track_origins: config.sanitizer_memory_track_origins as c_int,
            sanitize_realtime: config.sanitizer.contains(SanitizerSet::REALTIME),
            sanitize_thread: config.sanitizer.contains(SanitizerSet::THREAD),
            sanitize_hwaddress: config.sanitizer.contains(SanitizerSet::HWADDRESS),
            sanitize_hwaddress_recover: config.sanitizer_recover.contains(SanitizerSet::HWADDRESS),
            sanitize_kernel_address: config.sanitizer.contains(SanitizerSet::KERNELADDRESS),
            sanitize_kernel_address_recover: config
                .sanitizer_recover
                .contains(SanitizerSet::KERNELADDRESS),
        })
    } else {
        None
    };

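    // Rebuilds an offload kernel so that it takes an extra leading pointer parameter
    // named `dyn_ptr` (skipped when the first parameter already has that name): a new
    // function with the widened signature is created, the body is transferred via
    // `LLVMRustOffloadMapper`, and the old function is replaced and deleted so the
    // new one ends up under the original name.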
    fn handle_offload<'ll>(cx: &'ll SimpleCx<'_>, old_fn: &llvm::Value) {
        let old_fn_ty = cx.get_type_of_global(old_fn);
        let old_param_types = cx.func_params_types(old_fn_ty);
        let old_param_count = old_param_types.len();
        if old_param_count == 0 {
            return;
        }

        let first_param = llvm::get_param(old_fn, 0);
        let c_name = llvm::get_value_name(first_param);
        let first_arg_name = str::from_utf8(&c_name).unwrap();
        if first_arg_name == "dyn_ptr" {
            return;
        }

        let mut new_param_types = Vec::with_capacity(old_param_count as usize + 1);
        new_param_types.push(cx.type_ptr());
        new_param_types.extend(old_param_types);

        let ret_ty = unsafe { llvm::LLVMGetReturnType(old_fn_ty) };
        let new_fn_ty = cx.type_func(&new_param_types, ret_ty);

        let old_fn_name = String::from_utf8(llvm::get_value_name(old_fn)).unwrap();
        let new_fn_name = format!("{}.offload", &old_fn_name);
        let new_fn = cx.add_func(&new_fn_name, new_fn_ty);
        let a0 = llvm::get_param(new_fn, 0);
        llvm::set_value_name(a0, CString::new("dyn_ptr").unwrap().as_bytes());

        unsafe {
            llvm::LLVMRustOffloadMapper(old_fn, new_fn);
        }

        llvm::set_linkage(new_fn, llvm::get_linkage(old_fn));
        llvm::set_visibility(new_fn, llvm::get_visibility(old_fn));

        unsafe {
            llvm::LLVMReplaceAllUsesWith(old_fn, new_fn);
        }
        let name = llvm::get_value_name(old_fn);
        unsafe {
            llvm::LLVMDeleteFunction(old_fn);
        }
        llvm::set_value_name(new_fn, &name);
    }

    if cgcx.target_is_like_gpu && config.offload.contains(&config::Offload::Device) {
        let cx =
            SimpleCx::new(module.module_llvm.llmod(), module.module_llvm.llcx, cgcx.pointer_size);
        for func in cx.get_functions() {
            let offload_kernel = "offload-kernel";
            if attributes::has_string_attr(func, offload_kernel) {
                handle_offload(&cx, func);
            }
            attributes::remove_string_attr_from_llfn(func, offload_kernel);
        }
    }

    let mut llvm_profiler = cgcx
        .prof
        .llvm_recording_enabled()
        .then(|| LlvmSelfProfiler::new(cgcx.prof.get_self_profiler().unwrap()));

    let llvm_selfprofiler =
        llvm_profiler.as_mut().map(|s| s as *mut _ as *mut c_void).unwrap_or(std::ptr::null_mut());

    let extra_passes = if !is_lto { config.passes.join(",") } else { "".to_string() };

    let llvm_plugins = config.llvm_plugins.join(",");

    let enzyme_fn = if consider_ad {
        let wrapper = llvm::EnzymeWrapper::get_instance();
        wrapper.registerEnzymeAndPassPipeline
    } else {
        std::ptr::null()
    };

    let result = unsafe {
        llvm::LLVMRustOptimize(
            module.module_llvm.llmod(),
            &*module.module_llvm.tm.raw(),
            to_pass_builder_opt_level(opt_level),
            opt_stage,
            cgcx.opts.cg.linker_plugin_lto.enabled(),
            config.no_prepopulate_passes,
            config.verify_llvm_ir,
            config.lint_llvm_ir,
            thin_lto_buffer,
            config.emit_thin_lto,
            config.emit_thin_lto_summary,
            merge_functions,
            unroll_loops,
            vectorize_slp,
            vectorize_loop,
            config.no_builtins,
            config.emit_lifetime_markers,
            enzyme_fn,
            print_before_enzyme,
            print_after_enzyme,
            print_passes,
            sanitizer_options.as_ref(),
            pgo_gen_path.as_ref().map_or(std::ptr::null(), |s| s.as_ptr()),
            pgo_use_path.as_ref().map_or(std::ptr::null(), |s| s.as_ptr()),
            config.instrument_coverage,
            instr_profile_output_path.as_ref().map_or(std::ptr::null(), |s| s.as_ptr()),
            pgo_sample_use_path.as_ref().map_or(std::ptr::null(), |s| s.as_ptr()),
            config.debug_info_for_profiling,
            llvm_selfprofiler,
            selfprofile_before_pass_callback,
            selfprofile_after_pass_callback,
            extra_passes.as_c_char_ptr(),
            extra_passes.len(),
            llvm_plugins.as_c_char_ptr(),
            llvm_plugins.len(),
        )
    };

    if cgcx.target_is_like_gpu && config.offload.contains(&config::Offload::Device) {
        let device_path = cgcx.output_filenames.path(OutputType::Object);
        let device_dir = device_path.parent().unwrap();
        let device_out = device_dir.join("host.out");
        let device_out_c = path_to_c_string(device_out.as_path());
        unsafe {
            let ok = llvm::LLVMRustBundleImages(
                module.module_llvm.llmod(),
                module.module_llvm.tm.raw(),
                device_out_c.as_ptr(),
            );
            if !ok || !device_out.exists() {
                dcx.emit_err(crate::errors::OffloadBundleImagesFailed);
            }
        }
    }

    if !cgcx.target_is_like_gpu {
        if let Some(device_path) = config
            .offload
            .iter()
            .find_map(|o| if let config::Offload::Host(path) = o { Some(path) } else { None })
        {
            let device_pathbuf = PathBuf::from(device_path);
            if device_pathbuf.is_relative() {
                dcx.emit_err(crate::errors::OffloadWithoutAbsPath);
            } else if device_pathbuf
                .file_name()
                .and_then(|n| n.to_str())
                .is_some_and(|n| n != "host.out")
            {
                dcx.emit_err(crate::errors::OffloadWrongFileName);
            } else if !device_pathbuf.exists() {
                dcx.emit_err(crate::errors::OffloadNonexistingPath);
            }
            let host_path = cgcx.output_filenames.path(OutputType::Object);
            let host_dir = host_path.parent().unwrap();
            let out_obj = host_dir.join("host.o");
            let host_out_c = path_to_c_string(device_pathbuf.as_path());

            let llmod2 = llvm::LLVMCloneModule(module.module_llvm.llmod());
            let ok =
                unsafe { llvm::LLVMRustOffloadEmbedBufferInModule(llmod2, host_out_c.as_ptr()) };
            if !ok {
                dcx.emit_err(crate::errors::OffloadEmbedFailed);
            }
            write_output_file(
                dcx,
                module.module_llvm.tm.raw(),
                config.no_builtins,
                llmod2,
                &out_obj,
                None,
                llvm::FileType::ObjectFile,
                &cgcx.prof,
                true,
            );
        }
    }
    result.into_result().unwrap_or_else(|()| llvm_err(dcx, LlvmError::RunLlvmPasses))
}

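/// Per-module optimization entry point: installs diagnostics, optionally dumps the
/// unoptimized bitcode (`no-opt.bc`), picks the pre-link LTO stage, and calls
/// `llvm_optimize`. If a ThinLTO buffer was requested, it is stashed on the module
/// and, when `emit_thin_lto_summary` is set, the thin-link summary is written out.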
pub(crate) fn optimize(
    cgcx: &CodegenContext<LlvmCodegenBackend>,
    dcx: DiagCtxtHandle<'_>,
    module: &mut ModuleCodegen<ModuleLlvm>,
    config: &ModuleConfig,
) {
    let _timer = cgcx.prof.generic_activity_with_arg("LLVM_module_optimize", &*module.name);

    let llcx = &*module.module_llvm.llcx;
    let _handlers = DiagnosticHandlers::new(cgcx, dcx, llcx, module, CodegenDiagnosticsStage::Opt);

    if config.emit_no_opt_bc {
        let out = cgcx.output_filenames.temp_path_ext_for_cgu(
            "no-opt.bc",
            &module.name,
            cgcx.invocation_temp.as_deref(),
        );
        write_bitcode_to_file(module, &out)
    }

    if let Some(opt_level) = config.opt_level {
        let opt_stage = match cgcx.lto {
            Lto::Fat => llvm::OptStage::PreLinkFatLTO,
            Lto::Thin | Lto::ThinLocal => llvm::OptStage::PreLinkThinLTO,
            _ if cgcx.opts.cg.linker_plugin_lto.enabled() => llvm::OptStage::PreLinkThinLTO,
            _ => llvm::OptStage::PreLinkNoLTO,
        };

        let consider_ad = config.autodiff.contains(&config::AutoDiff::Enable);
        let autodiff_stage = if consider_ad { AutodiffStage::PreAD } else { AutodiffStage::PostAD };
        let mut thin_lto_buffer = if (module.kind == ModuleKind::Regular
            && config.emit_obj == EmitObj::ObjectCode(BitcodeSection::Full))
            || config.emit_thin_lto_summary
        {
            Some(null_mut())
        } else {
            None
        };
        unsafe {
            llvm_optimize(
                cgcx,
                dcx,
                module,
                thin_lto_buffer.as_mut(),
                config,
                opt_level,
                opt_stage,
                autodiff_stage,
            )
        };
        if let Some(thin_lto_buffer) = thin_lto_buffer {
            let thin_lto_buffer = unsafe { ThinBuffer::from_raw_ptr(thin_lto_buffer) };
            module.thin_lto_buffer = Some(thin_lto_buffer.data().to_vec());
            let bc_summary_out = cgcx.output_filenames.temp_path_for_cgu(
                OutputType::ThinLinkBitcode,
                &module.name,
                cgcx.invocation_temp.as_deref(),
            );
            if config.emit_thin_lto_summary
                && let Some(thin_link_bitcode_filename) = bc_summary_out.file_name()
            {
                let summary_data = thin_lto_buffer.thin_link_data();
                cgcx.prof.artifact_size(
                    "llvm_bitcode_summary",
                    thin_link_bitcode_filename.to_string_lossy(),
                    summary_data.len() as u64,
                );
                let _timer = cgcx.prof.generic_activity_with_arg(
                    "LLVM_module_codegen_emit_bitcode_summary",
                    &*module.name,
                );
                if let Err(err) = fs::write(&bc_summary_out, summary_data) {
                    dcx.emit_err(WriteBytecode { path: &bc_summary_out, err });
                }
            }
        }
    }
}

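/// Turns an optimized module into the artifacts requested by `config`: bitcode
/// (written and/or embedded), LLVM IR, assembly, and the object file (with an
/// optional split-DWARF `.dwo`), recording artifact sizes and per-CGU instruction
/// statistics along the way.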
pub(crate) fn codegen(
    cgcx: &CodegenContext<LlvmCodegenBackend>,
    module: ModuleCodegen<ModuleLlvm>,
    config: &ModuleConfig,
) -> CompiledModule {
    let dcx = cgcx.create_dcx();
    let dcx = dcx.handle();

    let _timer = cgcx.prof.generic_activity_with_arg("LLVM_module_codegen", &*module.name);
    {
        let llmod = module.module_llvm.llmod();
        let llcx = &*module.module_llvm.llcx;
        let tm = &*module.module_llvm.tm;
        let _handlers =
            DiagnosticHandlers::new(cgcx, dcx, llcx, &module, CodegenDiagnosticsStage::Codegen);

        if cgcx.msvc_imps_needed {
            create_msvc_imps(cgcx, llcx, llmod);
        }

        let bc_out = cgcx.output_filenames.temp_path_for_cgu(
            OutputType::Bitcode,
            &module.name,
            cgcx.invocation_temp.as_deref(),
        );
        let obj_out = cgcx.output_filenames.temp_path_for_cgu(
            OutputType::Object,
            &module.name,
            cgcx.invocation_temp.as_deref(),
        );

        if config.bitcode_needed() {
            if config.emit_bc || config.emit_obj == EmitObj::Bitcode {
                let thin = {
                    let _timer = cgcx.prof.generic_activity_with_arg(
                        "LLVM_module_codegen_make_bitcode",
                        &*module.name,
                    );
                    ThinBuffer::new(llmod, config.emit_thin_lto)
                };
                let data = thin.data();
                let _timer = cgcx
                    .prof
                    .generic_activity_with_arg("LLVM_module_codegen_emit_bitcode", &*module.name);
                if let Some(bitcode_filename) = bc_out.file_name() {
                    cgcx.prof.artifact_size(
                        "llvm_bitcode",
                        bitcode_filename.to_string_lossy(),
                        data.len() as u64,
                    );
                }
                if let Err(err) = fs::write(&bc_out, data) {
                    dcx.emit_err(WriteBytecode { path: &bc_out, err });
                }
            }

            if config.embed_bitcode() && module.kind == ModuleKind::Regular {
                let _timer = cgcx
                    .prof
                    .generic_activity_with_arg("LLVM_module_codegen_embed_bitcode", &*module.name);
                let thin_bc =
                    module.thin_lto_buffer.as_deref().expect("cannot find embedded bitcode");
                embed_bitcode(cgcx, llcx, llmod, &thin_bc);
            }
        }

        if config.emit_ir {
            let _timer =
                cgcx.prof.generic_activity_with_arg("LLVM_module_codegen_emit_ir", &*module.name);
            let out = cgcx.output_filenames.temp_path_for_cgu(
                OutputType::LlvmAssembly,
                &module.name,
                cgcx.invocation_temp.as_deref(),
            );
            let out_c = path_to_c_string(&out);

            extern "C" fn demangle_callback(
                input_ptr: *const c_char,
                input_len: size_t,
                output_ptr: *mut c_char,
                output_len: size_t,
            ) -> size_t {
                let input =
                    unsafe { slice::from_raw_parts(input_ptr as *const u8, input_len as usize) };

                let Ok(input) = str::from_utf8(input) else { return 0 };

                let output = unsafe {
                    slice::from_raw_parts_mut(output_ptr as *mut u8, output_len as usize)
                };
                let mut cursor = io::Cursor::new(output);

                let Ok(demangled) = rustc_demangle::try_demangle(input) else { return 0 };

                if write!(cursor, "{demangled:#}").is_err() {
                    return 0;
                }

                cursor.position() as size_t
            }

            let result =
                unsafe { llvm::LLVMRustPrintModule(llmod, out_c.as_ptr(), demangle_callback) };

            if result == llvm::LLVMRustResult::Success {
                record_artifact_size(&cgcx.prof, "llvm_ir", &out);
            }

            result
                .into_result()
                .unwrap_or_else(|()| llvm_err(dcx, LlvmError::WriteIr { path: &out }));
        }

        if config.emit_asm {
            let _timer =
                cgcx.prof.generic_activity_with_arg("LLVM_module_codegen_emit_asm", &*module.name);
            let path = cgcx.output_filenames.temp_path_for_cgu(
                OutputType::Assembly,
                &module.name,
                cgcx.invocation_temp.as_deref(),
            );

            let llmod = if let EmitObj::ObjectCode(_) = config.emit_obj {
                llvm::LLVMCloneModule(llmod)
            } else {
                llmod
            };
            write_output_file(
                dcx,
                tm.raw(),
                config.no_builtins,
                llmod,
                &path,
                None,
                llvm::FileType::AssemblyFile,
                &cgcx.prof,
                config.verify_llvm_ir,
            );
        }

        match config.emit_obj {
            EmitObj::ObjectCode(_) => {
                let _timer = cgcx
                    .prof
                    .generic_activity_with_arg("LLVM_module_codegen_emit_obj", &*module.name);

                let dwo_out = cgcx
                    .output_filenames
                    .temp_path_dwo_for_cgu(&module.name, cgcx.invocation_temp.as_deref());
                let dwo_out = match (cgcx.split_debuginfo, cgcx.split_dwarf_kind) {
                    (SplitDebuginfo::Off, _) => None,
                    _ if !cgcx.target_can_use_split_dwarf => None,
                    (_, SplitDwarfKind::Single) => None,
                    (_, SplitDwarfKind::Split) => Some(dwo_out.as_path()),
                };

                write_output_file(
                    dcx,
                    tm.raw(),
                    config.no_builtins,
                    llmod,
                    &obj_out,
                    dwo_out,
                    llvm::FileType::ObjectFile,
                    &cgcx.prof,
                    config.verify_llvm_ir,
                );
            }

            EmitObj::Bitcode => {
                debug!("copying bitcode {:?} to obj {:?}", bc_out, obj_out);
                if let Err(err) = link_or_copy(&bc_out, &obj_out) {
                    dcx.emit_err(CopyBitcode { err });
                }

                if !config.emit_bc {
                    debug!("removing_bitcode {:?}", bc_out);
                    ensure_removed(dcx, &bc_out);
                }
            }

            EmitObj::None => {}
        }

        record_llvm_cgu_instructions_stats(&cgcx.prof, llmod);
    }

    let dwarf_object_emitted = matches!(config.emit_obj, EmitObj::ObjectCode(_))
        && cgcx.target_can_use_split_dwarf
        && cgcx.split_debuginfo != SplitDebuginfo::Off
        && cgcx.split_dwarf_kind == SplitDwarfKind::Split;
    module.into_compiled_module(
        config.emit_obj != EmitObj::None,
        dwarf_object_emitted,
        config.emit_bc,
        config.emit_asm,
        config.emit_ir,
        &cgcx.output_filenames,
        cgcx.invocation_temp.as_deref(),
    )
}

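/// Builds a module-level inline-assembly snippet that places `data` in `section_name`
/// with the given section flags, escaping quotes, backslashes, and non-printable
/// bytes as octal so they survive the assembler's `.ascii` directive.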
fn create_section_with_flags_asm(section_name: &str, section_flags: &str, data: &[u8]) -> Vec<u8> {
    let mut asm = format!(".section {section_name},\"{section_flags}\"\n").into_bytes();
    asm.extend_from_slice(b".ascii \"");
    asm.reserve(data.len());
    for &byte in data {
        if byte == b'\\' || byte == b'"' {
            asm.push(b'\\');
            asm.push(byte);
        } else if byte < 0x20 || byte >= 0x80 {
            asm.push(b'\\');
            asm.push(b'0' + ((byte >> 6) & 0x7));
            asm.push(b'0' + ((byte >> 3) & 0x7));
            asm.push(b'0' + ((byte >> 0) & 0x7));
        } else {
            asm.push(byte);
        }
    }
    asm.extend_from_slice(b"\"\n");
    asm
}

pub(crate) fn bitcode_section_name(cgcx: &CodegenContext<LlvmCodegenBackend>) -> &'static CStr {
    if cgcx.target_is_like_darwin {
        c"__LLVM,__bitcode"
    } else if cgcx.target_is_like_aix {
        c".ipa"
    } else {
        c".llvmbc"
    }
}

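/// Embeds the module's own bitcode plus an (empty) command-line blob into the module.
/// Mach-O, XCOFF, and wasm targets get real globals in the well-known bitcode and
/// cmdline sections; other targets append the bytes as module inline assembly so the
/// section flags can be set explicitly ("n" for PE/COFF, "e" elsewhere).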
fn embed_bitcode(
    cgcx: &CodegenContext<LlvmCodegenBackend>,
    llcx: &llvm::Context,
    llmod: &llvm::Module,
    bitcode: &[u8],
) {
    if cgcx.target_is_like_darwin
        || cgcx.target_is_like_aix
        || cgcx.target_arch == "wasm32"
        || cgcx.target_arch == "wasm64"
    {
        let llconst = common::bytes_in_context(llcx, bitcode);
        let llglobal = llvm::add_global(llmod, common::val_ty(llconst), c"rustc.embedded.module");
        llvm::set_initializer(llglobal, llconst);

        llvm::set_section(llglobal, bitcode_section_name(cgcx));
        llvm::set_linkage(llglobal, llvm::Linkage::PrivateLinkage);
        llvm::LLVMSetGlobalConstant(llglobal, llvm::TRUE);

        let llconst = common::bytes_in_context(llcx, &[]);
        let llglobal = llvm::add_global(llmod, common::val_ty(llconst), c"rustc.embedded.cmdline");
        llvm::set_initializer(llglobal, llconst);
        let section = if cgcx.target_is_like_darwin {
            c"__LLVM,__cmdline"
        } else if cgcx.target_is_like_aix {
            c".info"
        } else {
            c".llvmcmd"
        };
        llvm::set_section(llglobal, section);
        llvm::set_linkage(llglobal, llvm::Linkage::PrivateLinkage);
    } else {
        let section_flags = if cgcx.is_pe_coff { "n" } else { "e" };
        let asm = create_section_with_flags_asm(".llvmbc", section_flags, bitcode);
        llvm::append_module_inline_asm(llmod, &asm);
        let asm = create_section_with_flags_asm(".llvmcmd", section_flags, &[]);
        llvm::append_module_inline_asm(llmod, &asm);
    }
}

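/// Synthesizes a `__imp_<name>` pointer global for every defined, externally visible
/// global in the module (with an extra leading underscore on 32-bit x86). These
/// satisfy `dllimport`-style references when linking Rust rlibs with the MSVC linker.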
fn create_msvc_imps(
    cgcx: &CodegenContext<LlvmCodegenBackend>,
    llcx: &llvm::Context,
    llmod: &llvm::Module,
) {
    if !cgcx.msvc_imps_needed {
        return;
    }
    let prefix = if cgcx.target_arch == "x86" { "\x01__imp__" } else { "\x01__imp_" };

    let ptr_ty = llvm_type_ptr(llcx);
    let globals = base::iter_globals(llmod)
        .filter(|&val| {
            llvm::get_linkage(val) == llvm::Linkage::ExternalLinkage && !llvm::is_declaration(val)
        })
        .filter_map(|val| {
            let name = llvm::get_value_name(val);
            if ignored(&name) { None } else { Some((val, name)) }
        })
        .map(move |(val, name)| {
            let mut imp_name = prefix.as_bytes().to_vec();
            imp_name.extend(name);
            let imp_name = CString::new(imp_name).unwrap();
            (imp_name, val)
        })
        .collect::<Vec<_>>();

    for (imp_name, val) in globals {
        let imp = llvm::add_global(llmod, ptr_ty, &imp_name);

        llvm::set_initializer(imp, val);
        llvm::set_linkage(imp, llvm::Linkage::ExternalLinkage);
    }

    fn ignored(symbol_name: &[u8]) -> bool {
        symbol_name.starts_with(b"__llvm_profile_")
    }
}

fn record_artifact_size(
    self_profiler_ref: &SelfProfilerRef,
    artifact_kind: &'static str,
    path: &Path,
) {
    if !self_profiler_ref.enabled() {
        return;
    }

    if let Some(artifact_name) = path.file_name() {
        let file_size = std::fs::metadata(path).map(|m| m.len()).unwrap_or(0);
        self_profiler_ref.artifact_size(artifact_kind, artifact_name.to_string_lossy(), file_size);
    }
}

fn record_llvm_cgu_instructions_stats(prof: &SelfProfilerRef, llmod: &llvm::Module) {
    if !prof.enabled() {
        return;
    }

    let raw_stats =
        llvm::build_string(|s| unsafe { llvm::LLVMRustModuleInstructionStats(llmod, s) })
            .expect("cannot get module instruction stats");

    #[derive(serde::Deserialize)]
    struct InstructionsStats {
        module: String,
        total: u64,
    }

    let InstructionsStats { module, total } =
        serde_json::from_str(&raw_stats).expect("cannot parse llvm cgu instructions stats");
    prof.artifact_size("cgu_instructions", module, total);
}