use std::ffi::{CStr, CString};
use std::io::{self, Write};
use std::path::{Path, PathBuf};
use std::ptr::null_mut;
use std::sync::Arc;
use std::{fs, slice, str};

use libc::{c_char, c_int, c_void, size_t};
use rustc_codegen_ssa::back::link::ensure_removed;
use rustc_codegen_ssa::back::versioned_llvm_target;
use rustc_codegen_ssa::back::write::{
    BitcodeSection, CodegenContext, EmitObj, InlineAsmError, ModuleConfig, SharedEmitter,
    TargetMachineFactoryConfig, TargetMachineFactoryFn,
};
use rustc_codegen_ssa::base::wants_wasm_eh;
use rustc_codegen_ssa::common::TypeKind;
use rustc_codegen_ssa::traits::*;
use rustc_codegen_ssa::{CompiledModule, ModuleCodegen, ModuleKind};
use rustc_data_structures::profiling::SelfProfilerRef;
use rustc_data_structures::small_c_str::SmallCStr;
use rustc_errors::{DiagCtxt, DiagCtxtHandle, Level};
use rustc_fs_util::{link_or_copy, path_to_c_string};
use rustc_middle::ty::TyCtxt;
use rustc_session::Session;
use rustc_session::config::{self, Lto, OutputType, Passes, SplitDwarfKind, SwitchWithOptPath};
use rustc_span::{BytePos, InnerSpan, Pos, RemapPathScopeComponents, SpanData, SyntaxContext, sym};
use rustc_target::spec::{
    Arch, CodeModel, FloatAbi, RelocModel, SanitizerSet, SplitDebuginfo, TlsModel,
};
use tracing::{debug, trace};

use crate::back::lto::ThinBuffer;
use crate::back::owned_target_machine::OwnedTargetMachine;
use crate::back::profiling::{
    LlvmSelfProfiler, selfprofile_after_pass_callback, selfprofile_before_pass_callback,
};
use crate::builder::SBuilder;
use crate::builder::gpu_offload::scalar_width;
use crate::common::AsCCharPtr;
use crate::errors::{
    CopyBitcode, FromLlvmDiag, FromLlvmOptimizationDiag, LlvmError, UnknownCompression,
    WithLlvmError, WriteBytecode,
};
use crate::llvm::diagnostic::OptimizationDiagnosticKind::*;
use crate::llvm::{self, DiagnosticInfo};
use crate::type_::llvm_type_ptr;
use crate::{LlvmCodegenBackend, ModuleLlvm, SimpleCx, attributes, base, common, llvm_util};

pub(crate) fn llvm_err<'a>(dcx: DiagCtxtHandle<'_>, err: LlvmError<'a>) -> ! {
    match llvm::last_error() {
        Some(llvm_err) => dcx.emit_fatal(WithLlvmError(err, llvm_err)),
        None => dcx.emit_fatal(err),
    }
}

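/// Runs LLVM's codegen pass manager over `m` and writes the result to `output`
/// (and, when split DWARF is used, to `dwo_output`). Artifact sizes are recorded
/// in the self-profiler, and a failure is reported as a fatal `WriteOutput` error.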
fn write_output_file<'ll>(
    dcx: DiagCtxtHandle<'_>,
    target: &'ll llvm::TargetMachine,
    no_builtins: bool,
    m: &'ll llvm::Module,
    output: &Path,
    dwo_output: Option<&Path>,
    file_type: llvm::FileType,
    self_profiler_ref: &SelfProfilerRef,
    verify_llvm_ir: bool,
) {
    debug!("write_output_file output={:?} dwo_output={:?}", output, dwo_output);
    let output_c = path_to_c_string(output);
    let dwo_output_c;
    let dwo_output_ptr = if let Some(dwo_output) = dwo_output {
        dwo_output_c = path_to_c_string(dwo_output);
        dwo_output_c.as_ptr()
    } else {
        std::ptr::null()
    };
    let result = unsafe {
        let pm = llvm::LLVMCreatePassManager();
        llvm::LLVMAddAnalysisPasses(target, pm);
        llvm::LLVMRustAddLibraryInfo(target, pm, m, no_builtins);
        llvm::LLVMRustWriteOutputFile(
            target,
            pm,
            m,
            output_c.as_ptr(),
            dwo_output_ptr,
            file_type,
            verify_llvm_ir,
        )
    };

    if result == llvm::LLVMRustResult::Success {
        let artifact_kind = match file_type {
            llvm::FileType::ObjectFile => "object_file",
            llvm::FileType::AssemblyFile => "assembly_file",
        };
        record_artifact_size(self_profiler_ref, artifact_kind, output);
        if let Some(dwo_file) = dwo_output {
            record_artifact_size(self_profiler_ref, "dwo_file", dwo_file);
        }
    }

    result.into_result().unwrap_or_else(|()| llvm_err(dcx, LlvmError::WriteOutput { path: output }))
}

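/// Creates a target machine intended only for querying target information
/// (e.g. supported features): it uses `OptLevel::No` and carries no per-CGU
/// output paths.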
pub(crate) fn create_informational_target_machine(
    sess: &Session,
    only_base_features: bool,
) -> OwnedTargetMachine {
    let config = TargetMachineFactoryConfig { split_dwarf_file: None, output_obj_file: None };
    let features = llvm_util::global_llvm_features(sess, only_base_features);
    target_machine_factory(sess, config::OptLevel::No, &features)(config)
        .unwrap_or_else(|err| llvm_err(sess.dcx(), err))
}

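/// Creates the target machine used to compile a single codegen unit, wiring up
/// the per-CGU split-DWARF path and temporary object-file path.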
pub(crate) fn create_target_machine(tcx: TyCtxt<'_>, mod_name: &str) -> OwnedTargetMachine {
    let split_dwarf_file = if tcx.sess.target_can_use_split_dwarf() {
        tcx.output_filenames(()).split_dwarf_path(
            tcx.sess.split_debuginfo(),
            tcx.sess.opts.unstable_opts.split_dwarf_kind,
            mod_name,
            tcx.sess.invocation_temp.as_deref(),
        )
    } else {
        None
    };

    let output_obj_file = Some(tcx.output_filenames(()).temp_path_for_cgu(
        OutputType::Object,
        mod_name,
        tcx.sess.invocation_temp.as_deref(),
    ));
    let config = TargetMachineFactoryConfig { split_dwarf_file, output_obj_file };

    target_machine_factory(
        tcx.sess,
        tcx.backend_optimization_level(()),
        tcx.global_backend_features(()),
    )(config)
    .unwrap_or_else(|err| llvm_err(tcx.dcx(), err))
}

fn to_llvm_opt_settings(cfg: config::OptLevel) -> (llvm::CodeGenOptLevel, llvm::CodeGenOptSize) {
    use self::config::OptLevel::*;
    match cfg {
        No => (llvm::CodeGenOptLevel::None, llvm::CodeGenOptSizeNone),
        Less => (llvm::CodeGenOptLevel::Less, llvm::CodeGenOptSizeNone),
        More => (llvm::CodeGenOptLevel::Default, llvm::CodeGenOptSizeNone),
        Aggressive => (llvm::CodeGenOptLevel::Aggressive, llvm::CodeGenOptSizeNone),
        Size => (llvm::CodeGenOptLevel::Default, llvm::CodeGenOptSizeDefault),
        SizeMin => (llvm::CodeGenOptLevel::Default, llvm::CodeGenOptSizeAggressive),
    }
}

fn to_pass_builder_opt_level(cfg: config::OptLevel) -> llvm::PassBuilderOptLevel {
    use config::OptLevel::*;
    match cfg {
        No => llvm::PassBuilderOptLevel::O0,
        Less => llvm::PassBuilderOptLevel::O1,
        More => llvm::PassBuilderOptLevel::O2,
        Aggressive => llvm::PassBuilderOptLevel::O3,
        Size => llvm::PassBuilderOptLevel::Os,
        SizeMin => llvm::PassBuilderOptLevel::Oz,
    }
}

fn to_llvm_relocation_model(relocation_model: RelocModel) -> llvm::RelocModel {
    match relocation_model {
        RelocModel::Static => llvm::RelocModel::Static,
        RelocModel::Pic | RelocModel::Pie => llvm::RelocModel::PIC,
        RelocModel::DynamicNoPic => llvm::RelocModel::DynamicNoPic,
        RelocModel::Ropi => llvm::RelocModel::ROPI,
        RelocModel::Rwpi => llvm::RelocModel::RWPI,
        RelocModel::RopiRwpi => llvm::RelocModel::ROPI_RWPI,
    }
}

pub(crate) fn to_llvm_code_model(code_model: Option<CodeModel>) -> llvm::CodeModel {
    match code_model {
        Some(CodeModel::Tiny) => llvm::CodeModel::Tiny,
        Some(CodeModel::Small) => llvm::CodeModel::Small,
        Some(CodeModel::Kernel) => llvm::CodeModel::Kernel,
        Some(CodeModel::Medium) => llvm::CodeModel::Medium,
        Some(CodeModel::Large) => llvm::CodeModel::Large,
        None => llvm::CodeModel::None,
    }
}

fn to_llvm_float_abi(float_abi: Option<FloatAbi>) -> llvm::FloatAbi {
    match float_abi {
        None => llvm::FloatAbi::Default,
        Some(FloatAbi::Soft) => llvm::FloatAbi::Soft,
        Some(FloatAbi::Hard) => llvm::FloatAbi::Hard,
    }
}

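/// Builds the factory closure used to create `OwnedTargetMachine`s. All
/// session-derived settings (reloc and code model, CPU, features, section
/// options, debuginfo compression, TLS model, ...) are captured once up front,
/// so each call only needs the per-module `TargetMachineFactoryConfig`.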
pub(crate) fn target_machine_factory(
    sess: &Session,
    optlvl: config::OptLevel,
    target_features: &[String],
) -> TargetMachineFactoryFn<LlvmCodegenBackend> {
    let _prof_timer = sess.prof.generic_activity("target_machine_factory");

    let reloc_model = to_llvm_relocation_model(sess.relocation_model());

    let (opt_level, _) = to_llvm_opt_settings(optlvl);
    let float_abi = if sess.target.arch == Arch::Arm && sess.opts.cg.soft_float {
        llvm::FloatAbi::Soft
    } else {
        to_llvm_float_abi(sess.target.llvm_floatabi)
    };

    let ffunction_sections =
        sess.opts.unstable_opts.function_sections.unwrap_or(sess.target.function_sections);
    let fdata_sections = ffunction_sections;
    let funique_section_names = !sess.opts.unstable_opts.no_unique_section_names;

    let code_model = to_llvm_code_model(sess.code_model());

    let mut singlethread = sess.target.singlethread;

    if singlethread && sess.target.is_like_wasm && sess.target_features.contains(&sym::atomics) {
        singlethread = false;
    }

    let triple = SmallCStr::new(&versioned_llvm_target(sess));
    let cpu = SmallCStr::new(llvm_util::target_cpu(sess));
    let features = CString::new(target_features.join(",")).unwrap();
    let abi = SmallCStr::new(&sess.target.llvm_abiname);
    let trap_unreachable =
        sess.opts.unstable_opts.trap_unreachable.unwrap_or(sess.target.trap_unreachable);
    let emit_stack_size_section = sess.opts.unstable_opts.emit_stack_sizes;

    let verbose_asm = sess.opts.unstable_opts.verbose_asm;
    let relax_elf_relocations =
        sess.opts.unstable_opts.relax_elf_relocations.unwrap_or(sess.target.relax_elf_relocations);

    let use_init_array =
        !sess.opts.unstable_opts.use_ctors_section.unwrap_or(sess.target.use_ctors_section);

    let path_mapping = sess.source_map().path_mapping().clone();
    let working_dir = sess.source_map().working_dir().clone();

    let use_emulated_tls = matches!(sess.tls_model(), TlsModel::Emulated);

    let debuginfo_compression = match sess.opts.unstable_opts.debuginfo_compression {
        config::DebugInfoCompression::None => llvm::CompressionKind::None,
        config::DebugInfoCompression::Zlib => {
            if llvm::LLVMRustLLVMHasZlibCompression() {
                llvm::CompressionKind::Zlib
            } else {
                sess.dcx().emit_warn(UnknownCompression { algorithm: "zlib" });
                llvm::CompressionKind::None
            }
        }
        config::DebugInfoCompression::Zstd => {
            if llvm::LLVMRustLLVMHasZstdCompression() {
                llvm::CompressionKind::Zstd
            } else {
                sess.dcx().emit_warn(UnknownCompression { algorithm: "zstd" });
                llvm::CompressionKind::None
            }
        }
    };

    let use_wasm_eh = wants_wasm_eh(sess);

    let large_data_threshold = sess.opts.unstable_opts.large_data_threshold.unwrap_or(0);

    let prof = SelfProfilerRef::clone(&sess.prof);
    Arc::new(move |config: TargetMachineFactoryConfig| {
        let _prof_timer = prof.generic_activity("target_machine_factory_inner");

        let path_to_cstring_helper = |path: Option<PathBuf>| -> CString {
            let path = path.unwrap_or_default();
            let path = path_mapping
                .to_real_filename(&working_dir, path)
                .path(RemapPathScopeComponents::DEBUGINFO)
                .to_string_lossy()
                .into_owned();
            CString::new(path).unwrap()
        };

        let split_dwarf_file = path_to_cstring_helper(config.split_dwarf_file);
        let output_obj_file = path_to_cstring_helper(config.output_obj_file);

        OwnedTargetMachine::new(
            &triple,
            &cpu,
            &features,
            &abi,
            code_model,
            reloc_model,
            opt_level,
            float_abi,
            ffunction_sections,
            fdata_sections,
            funique_section_names,
            trap_unreachable,
            singlethread,
            verbose_asm,
            emit_stack_size_section,
            relax_elf_relocations,
            use_init_array,
            &split_dwarf_file,
            &output_obj_file,
            debuginfo_compression,
            use_emulated_tls,
            use_wasm_eh,
            large_data_threshold,
        )
    })
}

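/// Writes the module's bitcode to a `<name>.bc` temp file when `save_temps`
/// (`-C save-temps`) is enabled; otherwise this is a no-op.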
pub(crate) fn save_temp_bitcode(
    cgcx: &CodegenContext<LlvmCodegenBackend>,
    module: &ModuleCodegen<ModuleLlvm>,
    name: &str,
) {
    if !cgcx.save_temps {
        return;
    }
    let ext = format!("{name}.bc");
    let path = cgcx.output_filenames.temp_path_ext_for_cgu(
        &ext,
        &module.name,
        cgcx.invocation_temp.as_deref(),
    );
    write_bitcode_to_file(module, &path)
}

fn write_bitcode_to_file(module: &ModuleCodegen<ModuleLlvm>, path: &Path) {
    unsafe {
        let path = path_to_c_string(&path);
        let llmod = module.module_llvm.llmod();
        llvm::LLVMWriteBitcodeToFile(llmod, path.as_ptr());
    }
}

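/// The compilation stage for which LLVM diagnostics are being collected; it is
/// used to pick the suffix of the per-module remark file (`opt`, `lto`, or
/// `codegen`).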
pub(crate) enum CodegenDiagnosticsStage {
    Opt,
    LTO,
    Codegen,
}

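/// RAII guard that installs `diagnostic_handler` on the LLVM context and, on
/// drop, restores the previous handler and frees the boxed callback data.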
pub(crate) struct DiagnosticHandlers<'a> {
    data: *mut (&'a CodegenContext<LlvmCodegenBackend>, &'a SharedEmitter),
    llcx: &'a llvm::Context,
    old_handler: Option<&'a llvm::DiagnosticHandler>,
}

impl<'a> DiagnosticHandlers<'a> {
    pub(crate) fn new(
        cgcx: &'a CodegenContext<LlvmCodegenBackend>,
        shared_emitter: &'a SharedEmitter,
        llcx: &'a llvm::Context,
        module: &ModuleCodegen<ModuleLlvm>,
        stage: CodegenDiagnosticsStage,
    ) -> Self {
        let remark_passes_all: bool;
        let remark_passes: Vec<CString>;
        match &cgcx.remark {
            Passes::All => {
                remark_passes_all = true;
                remark_passes = Vec::new();
            }
            Passes::Some(passes) => {
                remark_passes_all = false;
                remark_passes =
                    passes.iter().map(|name| CString::new(name.as_str()).unwrap()).collect();
            }
        };
        let remark_passes: Vec<*const c_char> =
            remark_passes.iter().map(|name: &CString| name.as_ptr()).collect();
        let remark_file = cgcx
            .remark_dir
            .as_ref()
            .map(|dir| {
                let stage_suffix = match stage {
                    CodegenDiagnosticsStage::Codegen => "codegen",
                    CodegenDiagnosticsStage::Opt => "opt",
                    CodegenDiagnosticsStage::LTO => "lto",
                };
                dir.join(format!("{}.{stage_suffix}.opt.yaml", module.name))
            })
            .and_then(|dir| dir.to_str().and_then(|p| CString::new(p).ok()));

        let pgo_available = cgcx.module_config.pgo_use.is_some();
        let data = Box::into_raw(Box::new((cgcx, shared_emitter)));
        unsafe {
            let old_handler = llvm::LLVMRustContextGetDiagnosticHandler(llcx);
            llvm::LLVMRustContextConfigureDiagnosticHandler(
                llcx,
                diagnostic_handler,
                data.cast(),
                remark_passes_all,
                remark_passes.as_ptr(),
                remark_passes.len(),
                remark_file.as_ref().map(|dir| dir.as_ptr()).unwrap_or(std::ptr::null()),
                pgo_available,
            );
            DiagnosticHandlers { data, llcx, old_handler }
        }
    }
}

impl<'a> Drop for DiagnosticHandlers<'a> {
    fn drop(&mut self) {
        unsafe {
            llvm::LLVMRustContextSetDiagnosticHandler(self.llcx, self.old_handler);
            drop(Box::from_raw(self.data));
        }
    }
}

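/// Converts an LLVM inline-assembly diagnostic into an `InlineAsmError`,
/// recovering the original span from the srcloc cookie when one is available
/// (the cookie is not used under fat/thin LTO, where a default span is
/// reported instead).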
fn report_inline_asm(
    cgcx: &CodegenContext<LlvmCodegenBackend>,
    msg: String,
    level: llvm::DiagnosticLevel,
    cookie: u64,
    source: Option<(String, Vec<InnerSpan>)>,
) -> InlineAsmError {
    let span = if cookie == 0 || matches!(cgcx.lto, Lto::Fat | Lto::Thin) {
        SpanData::default()
    } else {
        SpanData {
            lo: BytePos::from_u32(cookie as u32),
            hi: BytePos::from_u32((cookie >> 32) as u32),
            ctxt: SyntaxContext::root(),
            parent: None,
        }
    };
    let level = match level {
        llvm::DiagnosticLevel::Error => Level::Error,
        llvm::DiagnosticLevel::Warning => Level::Warning,
        llvm::DiagnosticLevel::Note | llvm::DiagnosticLevel::Remark => Level::Note,
    };
    let msg = msg.trim_prefix("error: ").to_string();
    InlineAsmError { span, msg, level, source }
}

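/// C callback registered with LLVM via `DiagnosticHandlers`: it unpacks the
/// diagnostic and routes it to the Rust side, sending inline-asm errors through
/// the shared emitter and surfacing other diagnostics as notes, warnings, or
/// errors.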
unsafe extern "C" fn diagnostic_handler(info: &DiagnosticInfo, user: *mut c_void) {
    if user.is_null() {
        return;
    }
    let (cgcx, shared_emitter) =
        unsafe { *(user as *const (&CodegenContext<LlvmCodegenBackend>, &SharedEmitter)) };

    let dcx = DiagCtxt::new(Box::new(shared_emitter.clone()));
    let dcx = dcx.handle();

    match unsafe { llvm::diagnostic::Diagnostic::unpack(info) } {
        llvm::diagnostic::InlineAsm(inline) => {
            shared_emitter.inline_asm_error(report_inline_asm(
                cgcx,
                inline.message,
                inline.level,
                inline.cookie,
                inline.source,
            ));
        }

        llvm::diagnostic::Optimization(opt) => {
            dcx.emit_note(FromLlvmOptimizationDiag {
                filename: &opt.filename,
                line: opt.line,
                column: opt.column,
                pass_name: &opt.pass_name,
                kind: match opt.kind {
                    OptimizationRemark => "success",
                    OptimizationMissed | OptimizationFailure => "missed",
                    OptimizationAnalysis
                    | OptimizationAnalysisFPCommute
                    | OptimizationAnalysisAliasing => "analysis",
                    OptimizationRemarkOther => "other",
                },
                message: &opt.message,
            });
        }
        llvm::diagnostic::PGO(diagnostic_ref) | llvm::diagnostic::Linker(diagnostic_ref) => {
            let message = llvm::build_string(|s| unsafe {
                llvm::LLVMRustWriteDiagnosticInfoToString(diagnostic_ref, s)
            })
            .expect("non-UTF8 diagnostic");
            dcx.emit_warn(FromLlvmDiag { message });
        }
        llvm::diagnostic::Unsupported(diagnostic_ref) => {
            let message = llvm::build_string(|s| unsafe {
                llvm::LLVMRustWriteDiagnosticInfoToString(diagnostic_ref, s)
            })
            .expect("non-UTF8 diagnostic");
            dcx.emit_err(FromLlvmDiag { message });
        }
        llvm::diagnostic::UnknownDiagnostic(..) => {}
    }
}

fn get_pgo_gen_path(config: &ModuleConfig) -> Option<CString> {
    match config.pgo_gen {
        SwitchWithOptPath::Enabled(ref opt_dir_path) => {
            let path = if let Some(dir_path) = opt_dir_path {
                dir_path.join("default_%m.profraw")
            } else {
                PathBuf::from("default_%m.profraw")
            };

            Some(CString::new(format!("{}", path.display())).unwrap())
        }
        SwitchWithOptPath::Disabled => None,
    }
}

fn get_pgo_use_path(config: &ModuleConfig) -> Option<CString> {
    config
        .pgo_use
        .as_ref()
        .map(|path_buf| CString::new(path_buf.to_string_lossy().as_bytes()).unwrap())
}

fn get_pgo_sample_use_path(config: &ModuleConfig) -> Option<CString> {
    config
        .pgo_sample_use
        .as_ref()
        .map(|path_buf| CString::new(path_buf.to_string_lossy().as_bytes()).unwrap())
}

fn get_instr_profile_output_path(config: &ModuleConfig) -> Option<CString> {
    config.instrument_coverage.then(|| c"default_%m_%p.profraw".to_owned())
}

#[derive(Debug, Eq, PartialEq)]
pub(crate) enum AutodiffStage {
    PreAD,
    DuringAD,
    PostAD,
}

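/// Runs the LLVM pass pipeline (`LLVMRustOptimize`) over the module, honoring
/// the optimization level, sanitizer and PGO settings, and the autodiff stage.
/// When `thin_lto_buffer` is provided, the pre-link bitcode is captured into it,
/// and on GPU-like targets the offload kernels are rewritten and bundled.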
pub(crate) unsafe fn llvm_optimize(
    cgcx: &CodegenContext<LlvmCodegenBackend>,
    dcx: DiagCtxtHandle<'_>,
    module: &ModuleCodegen<ModuleLlvm>,
    thin_lto_buffer: Option<&mut *mut llvm::ThinLTOBuffer>,
    config: &ModuleConfig,
    opt_level: config::OptLevel,
    opt_stage: llvm::OptStage,
    autodiff_stage: AutodiffStage,
) {
    let consider_ad = config.autodiff.contains(&config::AutoDiff::Enable);
    let run_enzyme = autodiff_stage == AutodiffStage::DuringAD;
    let print_before_enzyme = config.autodiff.contains(&config::AutoDiff::PrintModBefore);
    let print_after_enzyme = config.autodiff.contains(&config::AutoDiff::PrintModAfter);
    let print_passes = config.autodiff.contains(&config::AutoDiff::PrintPasses);
    let merge_functions;
    let unroll_loops;
    let vectorize_slp;
    let vectorize_loop;

    if consider_ad && autodiff_stage != AutodiffStage::PostAD {
        merge_functions = false;
        unroll_loops = false;
        vectorize_slp = false;
        vectorize_loop = false;
    } else {
        unroll_loops =
            opt_level != config::OptLevel::Size && opt_level != config::OptLevel::SizeMin;
        merge_functions = config.merge_functions;
        vectorize_slp = config.vectorize_slp;
        vectorize_loop = config.vectorize_loop;
    }
    trace!(?unroll_loops, ?vectorize_slp, ?vectorize_loop, ?run_enzyme);
    if thin_lto_buffer.is_some() {
        assert!(
            matches!(
                opt_stage,
                llvm::OptStage::PreLinkNoLTO
                    | llvm::OptStage::PreLinkFatLTO
                    | llvm::OptStage::PreLinkThinLTO
            ),
            "the bitcode for LTO can only be obtained at the pre-link stage"
        );
    }
    let pgo_gen_path = get_pgo_gen_path(config);
    let pgo_use_path = get_pgo_use_path(config);
    let pgo_sample_use_path = get_pgo_sample_use_path(config);
    let is_lto = opt_stage == llvm::OptStage::ThinLTO || opt_stage == llvm::OptStage::FatLTO;
    let instr_profile_output_path = get_instr_profile_output_path(config);
    let sanitize_dataflow_abilist: Vec<_> = config
        .sanitizer_dataflow_abilist
        .iter()
        .map(|file| CString::new(file.as_str()).unwrap())
        .collect();
    let sanitize_dataflow_abilist_ptrs: Vec<_> =
        sanitize_dataflow_abilist.iter().map(|file| file.as_ptr()).collect();
    let sanitizer_options = if !is_lto {
        Some(llvm::SanitizerOptions {
            sanitize_address: config.sanitizer.contains(SanitizerSet::ADDRESS),
            sanitize_address_recover: config.sanitizer_recover.contains(SanitizerSet::ADDRESS),
            sanitize_cfi: config.sanitizer.contains(SanitizerSet::CFI),
            sanitize_dataflow: config.sanitizer.contains(SanitizerSet::DATAFLOW),
            sanitize_dataflow_abilist: sanitize_dataflow_abilist_ptrs.as_ptr(),
            sanitize_dataflow_abilist_len: sanitize_dataflow_abilist_ptrs.len(),
            sanitize_kcfi: config.sanitizer.contains(SanitizerSet::KCFI),
            sanitize_memory: config.sanitizer.contains(SanitizerSet::MEMORY),
            sanitize_memory_recover: config.sanitizer_recover.contains(SanitizerSet::MEMORY),
            sanitize_memory_track_origins: config.sanitizer_memory_track_origins as c_int,
            sanitize_realtime: config.sanitizer.contains(SanitizerSet::REALTIME),
            sanitize_thread: config.sanitizer.contains(SanitizerSet::THREAD),
            sanitize_hwaddress: config.sanitizer.contains(SanitizerSet::HWADDRESS),
            sanitize_hwaddress_recover: config.sanitizer_recover.contains(SanitizerSet::HWADDRESS),
            sanitize_kernel_address: config.sanitizer.contains(SanitizerSet::KERNELADDRESS),
            sanitize_kernel_address_recover: config
                .sanitizer_recover
                .contains(SanitizerSet::KERNELADDRESS),
        })
    } else {
        None
    };

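    /// Rewrites a function tagged as an offload kernel: the new signature gains
    /// a leading `dyn_ptr` pointer and widens scalar parameters to `i64`, the
    /// original-width arguments are rebuilt via trunc/bitcast and handed to
    /// `LLVMRustOffloadMapper`, and the new function then replaces the old one
    /// under its original name.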
    fn handle_offload<'ll>(cx: &'ll SimpleCx<'_>, old_fn: &llvm::Value) {
        let old_fn_ty = cx.get_type_of_global(old_fn);
        let old_param_types = cx.func_params_types(old_fn_ty);
        let old_param_count = old_param_types.len();
        if old_param_count == 0 {
            return;
        }

        let first_param = llvm::get_param(old_fn, 0);
        let c_name = llvm::get_value_name(first_param);
        let first_arg_name = str::from_utf8(&c_name).unwrap();
        if first_arg_name == "dyn_ptr" {
            return;
        }

        let mut new_param_types = Vec::with_capacity(old_param_count as usize + 1);
        new_param_types.push(cx.type_ptr());

        for &old_ty in &old_param_types {
            let new_ty = match cx.type_kind(old_ty) {
                TypeKind::Half | TypeKind::Float | TypeKind::Double | TypeKind::Integer => {
                    cx.type_i64()
                }
                _ => old_ty,
            };
            new_param_types.push(new_ty);
        }

        let ret_ty = unsafe { llvm::LLVMGetReturnType(old_fn_ty) };
        let new_fn_ty = cx.type_func(&new_param_types, ret_ty);

        let old_fn_name = String::from_utf8(llvm::get_value_name(old_fn)).unwrap();
        let new_fn_name = format!("{}.offload", &old_fn_name);
        let new_fn = cx.add_func(&new_fn_name, new_fn_ty);
        let a0 = llvm::get_param(new_fn, 0);
        llvm::set_value_name(a0, CString::new("dyn_ptr").unwrap().as_bytes());

        let bb = SBuilder::append_block(cx, new_fn, "entry");
        let mut builder = SBuilder::build(cx, bb);

        let mut old_args_rebuilt = Vec::with_capacity(old_param_types.len());

        for (i, &old_ty) in old_param_types.iter().enumerate() {
            let new_arg = llvm::get_param(new_fn, (i + 1) as u32);

            let rebuilt = match cx.type_kind(old_ty) {
                TypeKind::Half | TypeKind::Float | TypeKind::Double | TypeKind::Integer => {
                    let num_bits = scalar_width(cx, old_ty);

                    let trunc = builder.trunc(new_arg, cx.type_ix(num_bits));
                    builder.bitcast(trunc, old_ty)
                }
                _ => new_arg,
            };

            old_args_rebuilt.push(rebuilt);
        }

        builder.ret_void();

        unsafe {
            llvm::LLVMRustOffloadMapper(old_fn, new_fn, old_args_rebuilt.as_ptr());
        }

        llvm::set_linkage(new_fn, llvm::get_linkage(old_fn));
        llvm::set_visibility(new_fn, llvm::get_visibility(old_fn));

        unsafe {
            llvm::LLVMReplaceAllUsesWith(old_fn, new_fn);
        }
        let name = llvm::get_value_name(old_fn);
        unsafe {
            llvm::LLVMDeleteFunction(old_fn);
        }
        llvm::set_value_name(new_fn, &name);
    }

    if cgcx.target_is_like_gpu && config.offload.contains(&config::Offload::Device) {
        let cx =
            SimpleCx::new(module.module_llvm.llmod(), module.module_llvm.llcx, cgcx.pointer_size);
        for func in cx.get_functions() {
            let offload_kernel = "offload-kernel";
            if attributes::has_string_attr(func, offload_kernel) {
                handle_offload(&cx, func);
            }
            attributes::remove_string_attr_from_llfn(func, offload_kernel);
        }
    }

    let mut llvm_profiler = cgcx
        .prof
        .llvm_recording_enabled()
        .then(|| LlvmSelfProfiler::new(cgcx.prof.get_self_profiler().unwrap()));

    let llvm_selfprofiler =
        llvm_profiler.as_mut().map(|s| s as *mut _ as *mut c_void).unwrap_or(std::ptr::null_mut());

    let extra_passes = if !is_lto { config.passes.join(",") } else { "".to_string() };

    let llvm_plugins = config.llvm_plugins.join(",");

    let enzyme_fn = if consider_ad {
        let wrapper = llvm::EnzymeWrapper::get_instance();
        wrapper.registerEnzymeAndPassPipeline
    } else {
        std::ptr::null()
    };

    let result = unsafe {
        llvm::LLVMRustOptimize(
            module.module_llvm.llmod(),
            &*module.module_llvm.tm.raw(),
            to_pass_builder_opt_level(opt_level),
            opt_stage,
            cgcx.use_linker_plugin_lto,
            config.no_prepopulate_passes,
            config.verify_llvm_ir,
            config.lint_llvm_ir,
            thin_lto_buffer,
            config.emit_thin_lto,
            config.emit_thin_lto_summary,
            merge_functions,
            unroll_loops,
            vectorize_slp,
            vectorize_loop,
            config.no_builtins,
            config.emit_lifetime_markers,
            enzyme_fn,
            print_before_enzyme,
            print_after_enzyme,
            print_passes,
            sanitizer_options.as_ref(),
            pgo_gen_path.as_ref().map_or(std::ptr::null(), |s| s.as_ptr()),
            pgo_use_path.as_ref().map_or(std::ptr::null(), |s| s.as_ptr()),
            config.instrument_coverage,
            instr_profile_output_path.as_ref().map_or(std::ptr::null(), |s| s.as_ptr()),
            pgo_sample_use_path.as_ref().map_or(std::ptr::null(), |s| s.as_ptr()),
            config.debug_info_for_profiling,
            llvm_selfprofiler,
            selfprofile_before_pass_callback,
            selfprofile_after_pass_callback,
            extra_passes.as_c_char_ptr(),
            extra_passes.len(),
            llvm_plugins.as_c_char_ptr(),
            llvm_plugins.len(),
        )
    };

    if cgcx.target_is_like_gpu && config.offload.contains(&config::Offload::Device) {
        let device_path = cgcx.output_filenames.path(OutputType::Object);
        let device_dir = device_path.parent().unwrap();
        let device_out = device_dir.join("host.out");
        let device_out_c = path_to_c_string(device_out.as_path());
        unsafe {
            let ok = llvm::LLVMRustBundleImages(
                module.module_llvm.llmod(),
                module.module_llvm.tm.raw(),
                device_out_c.as_ptr(),
            );
            if !ok || !device_out.exists() {
                dcx.emit_err(crate::errors::OffloadBundleImagesFailed);
            }
        }
    }

    if !cgcx.target_is_like_gpu {
        if let Some(device_path) = config
            .offload
            .iter()
            .find_map(|o| if let config::Offload::Host(path) = o { Some(path) } else { None })
        {
            let device_pathbuf = PathBuf::from(device_path);
            if device_pathbuf.is_relative() {
                dcx.emit_err(crate::errors::OffloadWithoutAbsPath);
            } else if device_pathbuf
                .file_name()
                .and_then(|n| n.to_str())
                .is_some_and(|n| n != "host.out")
            {
                dcx.emit_err(crate::errors::OffloadWrongFileName);
            } else if !device_pathbuf.exists() {
                dcx.emit_err(crate::errors::OffloadNonexistingPath);
            }
            let host_path = cgcx.output_filenames.path(OutputType::Object);
            let host_dir = host_path.parent().unwrap();
            let out_obj = host_dir.join("host.o");
            let host_out_c = path_to_c_string(device_pathbuf.as_path());

            let llmod2 = llvm::LLVMCloneModule(module.module_llvm.llmod());
            let ok =
                unsafe { llvm::LLVMRustOffloadEmbedBufferInModule(llmod2, host_out_c.as_ptr()) };
            if !ok {
                dcx.emit_err(crate::errors::OffloadEmbedFailed);
            }
            write_output_file(
                dcx,
                module.module_llvm.tm.raw(),
                config.no_builtins,
                llmod2,
                &out_obj,
                None,
                llvm::FileType::ObjectFile,
                &cgcx.prof,
                true,
            );
        }
    }
    result.into_result().unwrap_or_else(|()| llvm_err(dcx, LlvmError::RunLlvmPasses))
}

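/// Per-module optimization entry point: installs diagnostic handlers, optionally
/// dumps the unoptimized bitcode, picks the pre-link `OptStage` from the LTO
/// mode, runs `llvm_optimize`, and stores the resulting ThinLTO buffer and
/// summary when one was requested.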
pub(crate) fn optimize(
    cgcx: &CodegenContext<LlvmCodegenBackend>,
    shared_emitter: &SharedEmitter,
    module: &mut ModuleCodegen<ModuleLlvm>,
    config: &ModuleConfig,
) {
    let _timer = cgcx.prof.generic_activity_with_arg("LLVM_module_optimize", &*module.name);

    let dcx = DiagCtxt::new(Box::new(shared_emitter.clone()));
    let dcx = dcx.handle();

    let llcx = &*module.module_llvm.llcx;
    let _handlers =
        DiagnosticHandlers::new(cgcx, shared_emitter, llcx, module, CodegenDiagnosticsStage::Opt);

    if config.emit_no_opt_bc {
        let out = cgcx.output_filenames.temp_path_ext_for_cgu(
            "no-opt.bc",
            &module.name,
            cgcx.invocation_temp.as_deref(),
        );
        write_bitcode_to_file(module, &out)
    }

    if let Some(opt_level) = config.opt_level {
        let opt_stage = match cgcx.lto {
            Lto::Fat => llvm::OptStage::PreLinkFatLTO,
            Lto::Thin | Lto::ThinLocal => llvm::OptStage::PreLinkThinLTO,
            _ if cgcx.use_linker_plugin_lto => llvm::OptStage::PreLinkThinLTO,
            _ => llvm::OptStage::PreLinkNoLTO,
        };

        let consider_ad = config.autodiff.contains(&config::AutoDiff::Enable);
        let autodiff_stage = if consider_ad { AutodiffStage::PreAD } else { AutodiffStage::PostAD };
        let mut thin_lto_buffer = if (module.kind == ModuleKind::Regular
            && config.emit_obj == EmitObj::ObjectCode(BitcodeSection::Full))
            || config.emit_thin_lto_summary
        {
            Some(null_mut())
        } else {
            None
        };
        unsafe {
            llvm_optimize(
                cgcx,
                dcx,
                module,
                thin_lto_buffer.as_mut(),
                config,
                opt_level,
                opt_stage,
                autodiff_stage,
            )
        };
        if let Some(thin_lto_buffer) = thin_lto_buffer {
            let thin_lto_buffer = unsafe { ThinBuffer::from_raw_ptr(thin_lto_buffer) };
            module.thin_lto_buffer = Some(thin_lto_buffer.data().to_vec());
            let bc_summary_out = cgcx.output_filenames.temp_path_for_cgu(
                OutputType::ThinLinkBitcode,
                &module.name,
                cgcx.invocation_temp.as_deref(),
            );
            if config.emit_thin_lto_summary
                && let Some(thin_link_bitcode_filename) = bc_summary_out.file_name()
            {
                let summary_data = thin_lto_buffer.thin_link_data();
                cgcx.prof.artifact_size(
                    "llvm_bitcode_summary",
                    thin_link_bitcode_filename.to_string_lossy(),
                    summary_data.len() as u64,
                );
                let _timer = cgcx.prof.generic_activity_with_arg(
                    "LLVM_module_codegen_emit_bitcode_summary",
                    &*module.name,
                );
                if let Err(err) = fs::write(&bc_summary_out, summary_data) {
                    dcx.emit_err(WriteBytecode { path: &bc_summary_out, err });
                }
            }
        }
    }
}

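/// Turns an optimized module into its final artifacts: bitcode (emitted and/or
/// embedded), LLVM IR, assembly, and the object file (or a bitcode "object" for
/// `EmitObj::Bitcode`), recording artifact sizes and instruction stats along
/// the way.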
pub(crate) fn codegen(
    cgcx: &CodegenContext<LlvmCodegenBackend>,
    shared_emitter: &SharedEmitter,
    module: ModuleCodegen<ModuleLlvm>,
    config: &ModuleConfig,
) -> CompiledModule {
    let _timer = cgcx.prof.generic_activity_with_arg("LLVM_module_codegen", &*module.name);

    let dcx = DiagCtxt::new(Box::new(shared_emitter.clone()));
    let dcx = dcx.handle();

    {
        let llmod = module.module_llvm.llmod();
        let llcx = &*module.module_llvm.llcx;
        let tm = &*module.module_llvm.tm;
        let _handlers = DiagnosticHandlers::new(
            cgcx,
            shared_emitter,
            llcx,
            &module,
            CodegenDiagnosticsStage::Codegen,
        );

        if cgcx.msvc_imps_needed {
            create_msvc_imps(cgcx, llcx, llmod);
        }

        let bc_out = cgcx.output_filenames.temp_path_for_cgu(
            OutputType::Bitcode,
            &module.name,
            cgcx.invocation_temp.as_deref(),
        );
        let obj_out = cgcx.output_filenames.temp_path_for_cgu(
            OutputType::Object,
            &module.name,
            cgcx.invocation_temp.as_deref(),
        );

        if config.bitcode_needed() {
            if config.emit_bc || config.emit_obj == EmitObj::Bitcode {
                let thin = {
                    let _timer = cgcx.prof.generic_activity_with_arg(
                        "LLVM_module_codegen_make_bitcode",
                        &*module.name,
                    );
                    ThinBuffer::new(llmod, config.emit_thin_lto)
                };
                let data = thin.data();
                let _timer = cgcx
                    .prof
                    .generic_activity_with_arg("LLVM_module_codegen_emit_bitcode", &*module.name);
                if let Some(bitcode_filename) = bc_out.file_name() {
                    cgcx.prof.artifact_size(
                        "llvm_bitcode",
                        bitcode_filename.to_string_lossy(),
                        data.len() as u64,
                    );
                }
                if let Err(err) = fs::write(&bc_out, data) {
                    dcx.emit_err(WriteBytecode { path: &bc_out, err });
                }
            }

            if config.embed_bitcode() && module.kind == ModuleKind::Regular {
                let _timer = cgcx
                    .prof
                    .generic_activity_with_arg("LLVM_module_codegen_embed_bitcode", &*module.name);
                let thin_bc =
                    module.thin_lto_buffer.as_deref().expect("cannot find embedded bitcode");
                embed_bitcode(cgcx, llcx, llmod, &thin_bc);
            }
        }

        if config.emit_ir {
            let _timer =
                cgcx.prof.generic_activity_with_arg("LLVM_module_codegen_emit_ir", &*module.name);
            let out = cgcx.output_filenames.temp_path_for_cgu(
                OutputType::LlvmAssembly,
                &module.name,
                cgcx.invocation_temp.as_deref(),
            );
            let out_c = path_to_c_string(&out);

            extern "C" fn demangle_callback(
                input_ptr: *const c_char,
                input_len: size_t,
                output_ptr: *mut c_char,
                output_len: size_t,
            ) -> size_t {
                let input =
                    unsafe { slice::from_raw_parts(input_ptr as *const u8, input_len as usize) };

                let Ok(input) = str::from_utf8(input) else { return 0 };

                let output = unsafe {
                    slice::from_raw_parts_mut(output_ptr as *mut u8, output_len as usize)
                };
                let mut cursor = io::Cursor::new(output);

                let Ok(demangled) = rustc_demangle::try_demangle(input) else { return 0 };

                if write!(cursor, "{demangled:#}").is_err() {
                    return 0;
                }

                cursor.position() as size_t
            }

            let result =
                unsafe { llvm::LLVMRustPrintModule(llmod, out_c.as_ptr(), demangle_callback) };

            if result == llvm::LLVMRustResult::Success {
                record_artifact_size(&cgcx.prof, "llvm_ir", &out);
            }

            result
                .into_result()
                .unwrap_or_else(|()| llvm_err(dcx, LlvmError::WriteIr { path: &out }));
        }

        if config.emit_asm {
            let _timer =
                cgcx.prof.generic_activity_with_arg("LLVM_module_codegen_emit_asm", &*module.name);
            let path = cgcx.output_filenames.temp_path_for_cgu(
                OutputType::Assembly,
                &module.name,
                cgcx.invocation_temp.as_deref(),
            );

            let llmod = if let EmitObj::ObjectCode(_) = config.emit_obj {
                llvm::LLVMCloneModule(llmod)
            } else {
                llmod
            };
            write_output_file(
                dcx,
                tm.raw(),
                config.no_builtins,
                llmod,
                &path,
                None,
                llvm::FileType::AssemblyFile,
                &cgcx.prof,
                config.verify_llvm_ir,
            );
        }

        match config.emit_obj {
            EmitObj::ObjectCode(_) => {
                let _timer = cgcx
                    .prof
                    .generic_activity_with_arg("LLVM_module_codegen_emit_obj", &*module.name);

                let dwo_out = cgcx
                    .output_filenames
                    .temp_path_dwo_for_cgu(&module.name, cgcx.invocation_temp.as_deref());
                let dwo_out = match (cgcx.split_debuginfo, cgcx.split_dwarf_kind) {
                    (SplitDebuginfo::Off, _) => None,
                    _ if !cgcx.target_can_use_split_dwarf => None,
                    (_, SplitDwarfKind::Single) => None,
                    (_, SplitDwarfKind::Split) => Some(dwo_out.as_path()),
                };

                write_output_file(
                    dcx,
                    tm.raw(),
                    config.no_builtins,
                    llmod,
                    &obj_out,
                    dwo_out,
                    llvm::FileType::ObjectFile,
                    &cgcx.prof,
                    config.verify_llvm_ir,
                );
            }

            EmitObj::Bitcode => {
                debug!("copying bitcode {:?} to obj {:?}", bc_out, obj_out);
                if let Err(err) = link_or_copy(&bc_out, &obj_out) {
                    dcx.emit_err(CopyBitcode { err });
                }

                if !config.emit_bc {
                    debug!("removing_bitcode {:?}", bc_out);
                    ensure_removed(dcx, &bc_out);
                }
            }

            EmitObj::None => {}
        }

        record_llvm_cgu_instructions_stats(&cgcx.prof, &module.name, llmod);
    }

    let dwarf_object_emitted = matches!(config.emit_obj, EmitObj::ObjectCode(_))
        && cgcx.target_can_use_split_dwarf
        && cgcx.split_debuginfo != SplitDebuginfo::Off
        && cgcx.split_dwarf_kind == SplitDwarfKind::Split;
    module.into_compiled_module(
        config.emit_obj != EmitObj::None,
        dwarf_object_emitted,
        config.emit_bc,
        config.emit_asm,
        config.emit_ir,
        &cgcx.output_filenames,
        cgcx.invocation_temp.as_deref(),
    )
}

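/// Builds a module-level assembly snippet that places `data` into
/// `section_name` with the given section flags, escaping bytes so they survive
/// the `.ascii` string quoting.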
fn create_section_with_flags_asm(section_name: &str, section_flags: &str, data: &[u8]) -> Vec<u8> {
    let mut asm = format!(".section {section_name},\"{section_flags}\"\n").into_bytes();
    asm.extend_from_slice(b".ascii \"");
    asm.reserve(data.len());
    for &byte in data {
        if byte == b'\\' || byte == b'"' {
            asm.push(b'\\');
            asm.push(byte);
        } else if byte < 0x20 || byte >= 0x80 {
            asm.push(b'\\');
            asm.push(b'0' + ((byte >> 6) & 0x7));
            asm.push(b'0' + ((byte >> 3) & 0x7));
            asm.push(b'0' + ((byte >> 0) & 0x7));
        } else {
            asm.push(byte);
        }
    }
    asm.extend_from_slice(b"\"\n");
    asm
}

pub(crate) fn bitcode_section_name(cgcx: &CodegenContext<LlvmCodegenBackend>) -> &'static CStr {
    if cgcx.target_is_like_darwin {
        c"__LLVM,__bitcode"
    } else if cgcx.target_is_like_aix {
        c".ipa"
    } else {
        c".llvmbc"
    }
}

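/// Embeds the given bitcode (plus an empty command-line section) into the
/// module, either as explicit globals in the platform's bitcode sections
/// (Mach-O, AIX, wasm) or via module-level inline assembly on other targets.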
fn embed_bitcode(
    cgcx: &CodegenContext<LlvmCodegenBackend>,
    llcx: &llvm::Context,
    llmod: &llvm::Module,
    bitcode: &[u8],
) {
    if cgcx.target_is_like_darwin
        || cgcx.target_is_like_aix
        || cgcx.target_arch == "wasm32"
        || cgcx.target_arch == "wasm64"
    {
        let llconst = common::bytes_in_context(llcx, bitcode);
        let llglobal = llvm::add_global(llmod, common::val_ty(llconst), c"rustc.embedded.module");
        llvm::set_initializer(llglobal, llconst);

        llvm::set_section(llglobal, bitcode_section_name(cgcx));
        llvm::set_linkage(llglobal, llvm::Linkage::PrivateLinkage);
        llvm::LLVMSetGlobalConstant(llglobal, llvm::TRUE);

        let llconst = common::bytes_in_context(llcx, &[]);
        let llglobal = llvm::add_global(llmod, common::val_ty(llconst), c"rustc.embedded.cmdline");
        llvm::set_initializer(llglobal, llconst);
        let section = if cgcx.target_is_like_darwin {
            c"__LLVM,__cmdline"
        } else if cgcx.target_is_like_aix {
            c".info"
        } else {
            c".llvmcmd"
        };
        llvm::set_section(llglobal, section);
        llvm::set_linkage(llglobal, llvm::Linkage::PrivateLinkage);
    } else {
        let section_flags = if cgcx.is_pe_coff { "n" } else { "e" };
        let asm = create_section_with_flags_asm(".llvmbc", section_flags, bitcode);
        llvm::append_module_inline_asm(llmod, &asm);
        let asm = create_section_with_flags_asm(".llvmcmd", section_flags, &[]);
        llvm::append_module_inline_asm(llmod, &asm);
    }
}

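/// For MSVC-like targets, creates `__imp_`-prefixed pointer globals aliasing
/// each defined, externally linked global; this presumably lets object code
/// compiled with dllimport-style `__imp_` references still link against
/// definitions provided in the same artifact.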
fn create_msvc_imps(
    cgcx: &CodegenContext<LlvmCodegenBackend>,
    llcx: &llvm::Context,
    llmod: &llvm::Module,
) {
    if !cgcx.msvc_imps_needed {
        return;
    }
    let prefix = if cgcx.target_arch == "x86" { "\x01__imp__" } else { "\x01__imp_" };

    let ptr_ty = llvm_type_ptr(llcx);
    let globals = base::iter_globals(llmod)
        .filter(|&val| {
            llvm::get_linkage(val) == llvm::Linkage::ExternalLinkage && !llvm::is_declaration(val)
        })
        .filter_map(|val| {
            let name = llvm::get_value_name(val);
            if ignored(&name) { None } else { Some((val, name)) }
        })
        .map(move |(val, name)| {
            let mut imp_name = prefix.as_bytes().to_vec();
            imp_name.extend(name);
            let imp_name = CString::new(imp_name).unwrap();
            (imp_name, val)
        })
        .collect::<Vec<_>>();

    for (imp_name, val) in globals {
        let imp = llvm::add_global(llmod, ptr_ty, &imp_name);

        llvm::set_initializer(imp, val);
        llvm::set_linkage(imp, llvm::Linkage::ExternalLinkage);
    }

    fn ignored(symbol_name: &[u8]) -> bool {
        symbol_name.starts_with(b"__llvm_profile_")
    }
}

fn record_artifact_size(
    self_profiler_ref: &SelfProfilerRef,
    artifact_kind: &'static str,
    path: &Path,
) {
    if !self_profiler_ref.enabled() {
        return;
    }

    if let Some(artifact_name) = path.file_name() {
        let file_size = std::fs::metadata(path).map(|m| m.len()).unwrap_or(0);
        self_profiler_ref.artifact_size(artifact_kind, artifact_name.to_string_lossy(), file_size);
    }
}

fn record_llvm_cgu_instructions_stats(prof: &SelfProfilerRef, name: &str, llmod: &llvm::Module) {
    if !prof.enabled() {
        return;
    }

    let total = unsafe { llvm::LLVMRustModuleInstructionStats(llmod) };
    prof.artifact_size("cgu_instructions", name, total);
}