1use std::ffi::{CStr, CString};
2use std::io::{self, Write};
3use std::path::{Path, PathBuf};
4use std::sync::Arc;
5use std::{fs, slice, str};
6
7use libc::{c_char, c_int, c_void, size_t};
8use rustc_codegen_ssa::back::link::ensure_removed;
9use rustc_codegen_ssa::back::versioned_llvm_target;
10use rustc_codegen_ssa::back::write::{
11 BitcodeSection, CodegenContext, EmitObj, InlineAsmError, ModuleConfig, SharedEmitter,
12 TargetMachineFactoryConfig, TargetMachineFactoryFn,
13};
14use rustc_codegen_ssa::base::wants_wasm_eh;
15use rustc_codegen_ssa::common::TypeKind;
16use rustc_codegen_ssa::traits::*;
17use rustc_codegen_ssa::{CompiledModule, ModuleCodegen, ModuleKind};
18use rustc_data_structures::profiling::SelfProfilerRef;
19use rustc_data_structures::small_c_str::SmallCStr;
20use rustc_errors::{DiagCtxt, DiagCtxtHandle, Level};
21use rustc_fs_util::{link_or_copy, path_to_c_string};
22use rustc_middle::ty::TyCtxt;
23use rustc_session::Session;
24use rustc_session::config::{self, Lto, OutputType, Passes, SplitDwarfKind, SwitchWithOptPath};
25use rustc_span::{BytePos, InnerSpan, Pos, RemapPathScopeComponents, SpanData, SyntaxContext, sym};
26use rustc_target::spec::{CodeModel, FloatAbi, RelocModel, SanitizerSet, SplitDebuginfo, TlsModel};
27use tracing::{debug, trace};
28
29use crate::back::lto::{Buffer, ModuleBuffer};
30use crate::back::owned_target_machine::OwnedTargetMachine;
31use crate::back::profiling::{
32 LlvmSelfProfiler, selfprofile_after_pass_callback, selfprofile_before_pass_callback,
33};
34use crate::builder::SBuilder;
35use crate::builder::gpu_offload::scalar_width;
36use crate::common::AsCCharPtr;
37use crate::errors::{
38 CopyBitcode, FromLlvmDiag, FromLlvmOptimizationDiag, LlvmError, ParseTargetMachineConfig,
39 UnsupportedCompression, WithLlvmError, WriteBytecode,
40};
41use crate::llvm::diagnostic::OptimizationDiagnosticKind::*;
42use crate::llvm::{self, DiagnosticInfo};
43use crate::type_::llvm_type_ptr;
44use crate::{LlvmCodegenBackend, ModuleLlvm, SimpleCx, attributes, base, common, llvm_util};
45
46pub(crate) fn llvm_err<'a>(dcx: DiagCtxtHandle<'_>, err: LlvmError<'a>) -> ! {
47 match llvm::last_error() {
48 Some(llvm_err) => dcx.emit_fatal(WithLlvmError(err, llvm_err)),
49 None => dcx.emit_fatal(err),
50 }
51}
52
53fn write_output_file<'ll>(
54 dcx: DiagCtxtHandle<'_>,
55 target: &'ll llvm::TargetMachine,
56 no_builtins: bool,
57 m: &'ll llvm::Module,
58 output: &Path,
59 dwo_output: Option<&Path>,
60 file_type: llvm::FileType,
61 self_profiler_ref: &SelfProfilerRef,
62 verify_llvm_ir: bool,
63) {
64 {
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_llvm/src/back/write.rs:64",
"rustc_codegen_llvm::back::write", ::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_llvm/src/back/write.rs"),
::tracing_core::__macro_support::Option::Some(64u32),
::tracing_core::__macro_support::Option::Some("rustc_codegen_llvm::back::write"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("write_output_file output={0:?} dwo_output={1:?}",
output, dwo_output) as &dyn Value))])
});
} else { ; }
};debug!("write_output_file output={:?} dwo_output={:?}", output, dwo_output);
65 let output_c = path_to_c_string(output);
66 let dwo_output_c;
67 let dwo_output_ptr = if let Some(dwo_output) = dwo_output {
68 dwo_output_c = path_to_c_string(dwo_output);
69 dwo_output_c.as_ptr()
70 } else {
71 std::ptr::null()
72 };
73 let result = unsafe {
74 let pm = llvm::LLVMCreatePassManager();
75 llvm::LLVMAddAnalysisPasses(target, pm);
76 llvm::LLVMRustAddLibraryInfo(target, pm, m, no_builtins);
77 llvm::LLVMRustWriteOutputFile(
78 target,
79 pm,
80 m,
81 output_c.as_ptr(),
82 dwo_output_ptr,
83 file_type,
84 verify_llvm_ir,
85 )
86 };
87
88 if result == llvm::LLVMRustResult::Success {
90 let artifact_kind = match file_type {
91 llvm::FileType::ObjectFile => "object_file",
92 llvm::FileType::AssemblyFile => "assembly_file",
93 };
94 record_artifact_size(self_profiler_ref, artifact_kind, output);
95 if let Some(dwo_file) = dwo_output {
96 record_artifact_size(self_profiler_ref, "dwo_file", dwo_file);
97 }
98 }
99
100 result.into_result().unwrap_or_else(|()| llvm_err(dcx, LlvmError::WriteOutput { path: output }))
101}
102
103pub(crate) fn create_informational_target_machine(
104 sess: &Session,
105 only_base_features: bool,
106) -> OwnedTargetMachine {
107 let config = TargetMachineFactoryConfig { split_dwarf_file: None, output_obj_file: None };
108 let features = llvm_util::global_llvm_features(sess, only_base_features);
111 target_machine_factory(sess, config::OptLevel::No, &features)(sess.dcx(), config)
112}
113
114pub(crate) fn create_target_machine(tcx: TyCtxt<'_>, mod_name: &str) -> OwnedTargetMachine {
115 let split_dwarf_file = if tcx.sess.target_can_use_split_dwarf() {
116 tcx.output_filenames(()).split_dwarf_path(
117 tcx.sess.split_debuginfo(),
118 tcx.sess.opts.unstable_opts.split_dwarf_kind,
119 mod_name,
120 tcx.sess.invocation_temp.as_deref(),
121 )
122 } else {
123 None
124 };
125
126 let output_obj_file = Some(tcx.output_filenames(()).temp_path_for_cgu(
127 OutputType::Object,
128 mod_name,
129 tcx.sess.invocation_temp.as_deref(),
130 ));
131 let config = TargetMachineFactoryConfig { split_dwarf_file, output_obj_file };
132
133 target_machine_factory(
134 tcx.sess,
135 tcx.backend_optimization_level(()),
136 tcx.global_backend_features(()),
137 )(tcx.dcx(), config)
138}
139
140fn to_llvm_opt_settings(cfg: config::OptLevel) -> (llvm::CodeGenOptLevel, llvm::CodeGenOptSize) {
141 use self::config::OptLevel::*;
142 match cfg {
143 No => (llvm::CodeGenOptLevel::None, llvm::CodeGenOptSizeNone),
144 Less => (llvm::CodeGenOptLevel::Less, llvm::CodeGenOptSizeNone),
145 More => (llvm::CodeGenOptLevel::Default, llvm::CodeGenOptSizeNone),
146 Aggressive => (llvm::CodeGenOptLevel::Aggressive, llvm::CodeGenOptSizeNone),
147 Size => (llvm::CodeGenOptLevel::Default, llvm::CodeGenOptSizeDefault),
148 SizeMin => (llvm::CodeGenOptLevel::Default, llvm::CodeGenOptSizeAggressive),
149 }
150}
151
152fn to_pass_builder_opt_level(cfg: config::OptLevel) -> llvm::PassBuilderOptLevel {
153 use config::OptLevel::*;
154 match cfg {
155 No => llvm::PassBuilderOptLevel::O0,
156 Less => llvm::PassBuilderOptLevel::O1,
157 More => llvm::PassBuilderOptLevel::O2,
158 Aggressive => llvm::PassBuilderOptLevel::O3,
159 Size => llvm::PassBuilderOptLevel::Os,
160 SizeMin => llvm::PassBuilderOptLevel::Oz,
161 }
162}
163
164fn to_llvm_relocation_model(relocation_model: RelocModel) -> llvm::RelocModel {
165 match relocation_model {
166 RelocModel::Static => llvm::RelocModel::Static,
167 RelocModel::Pic | RelocModel::Pie => llvm::RelocModel::PIC,
170 RelocModel::DynamicNoPic => llvm::RelocModel::DynamicNoPic,
171 RelocModel::Ropi => llvm::RelocModel::ROPI,
172 RelocModel::Rwpi => llvm::RelocModel::RWPI,
173 RelocModel::RopiRwpi => llvm::RelocModel::ROPI_RWPI,
174 }
175}
176
177pub(crate) fn to_llvm_code_model(code_model: Option<CodeModel>) -> llvm::CodeModel {
178 match code_model {
179 Some(CodeModel::Tiny) => llvm::CodeModel::Tiny,
180 Some(CodeModel::Small) => llvm::CodeModel::Small,
181 Some(CodeModel::Kernel) => llvm::CodeModel::Kernel,
182 Some(CodeModel::Medium) => llvm::CodeModel::Medium,
183 Some(CodeModel::Large) => llvm::CodeModel::Large,
184 None => llvm::CodeModel::None,
185 }
186}
187
188fn to_llvm_float_abi(float_abi: Option<FloatAbi>) -> llvm::FloatAbi {
189 match float_abi {
190 None => llvm::FloatAbi::Default,
191 Some(FloatAbi::Soft) => llvm::FloatAbi::Soft,
192 Some(FloatAbi::Hard) => llvm::FloatAbi::Hard,
193 }
194}
195
196pub(crate) fn target_machine_factory(
197 sess: &Session,
198 optlvl: config::OptLevel,
199 target_features: &[String],
200) -> TargetMachineFactoryFn<LlvmCodegenBackend> {
201 let _prof_timer = sess.prof.generic_activity("target_machine_factory");
203
204 let reloc_model = to_llvm_relocation_model(sess.relocation_model());
205
206 let (opt_level, _) = to_llvm_opt_settings(optlvl);
207 let float_abi = to_llvm_float_abi(sess.target.llvm_floatabi);
208
209 let ffunction_sections =
210 sess.opts.unstable_opts.function_sections.unwrap_or(sess.target.function_sections);
211 let fdata_sections = ffunction_sections;
212 let funique_section_names = !sess.opts.unstable_opts.no_unique_section_names;
213
214 let code_model = to_llvm_code_model(sess.code_model());
215
216 let mut singlethread = sess.target.singlethread;
217
218 if singlethread && sess.target.is_like_wasm && sess.target_features.contains(&sym::atomics) {
222 singlethread = false;
223 }
224
225 let triple = SmallCStr::new(&versioned_llvm_target(sess));
226 let cpu = SmallCStr::new(llvm_util::target_cpu(sess));
227 let features = CString::new(target_features.join(",")).unwrap();
228 let abi = SmallCStr::new(sess.target.llvm_abiname.desc());
229 let trap_unreachable =
230 sess.opts.unstable_opts.trap_unreachable.unwrap_or(sess.target.trap_unreachable);
231 let emit_stack_size_section = sess.opts.unstable_opts.emit_stack_sizes;
232
233 let verbose_asm = sess.opts.unstable_opts.verbose_asm;
234 let relax_elf_relocations =
235 sess.opts.unstable_opts.relax_elf_relocations.unwrap_or(sess.target.relax_elf_relocations);
236
237 let use_init_array =
238 !sess.opts.unstable_opts.use_ctors_section.unwrap_or(sess.target.use_ctors_section);
239
240 let path_mapping = sess.source_map().path_mapping().clone();
241 let working_dir = sess.source_map().working_dir().clone();
242
243 let use_emulated_tls = #[allow(non_exhaustive_omitted_patterns)] match sess.tls_model() {
TlsModel::Emulated => true,
_ => false,
}matches!(sess.tls_model(), TlsModel::Emulated);
244
245 let debuginfo_compression = match sess.opts.unstable_opts.debuginfo_compression {
246 config::DebugInfoCompression::None => llvm::CompressionKind::None,
247 config::DebugInfoCompression::Zlib => {
248 if llvm::LLVMRustLLVMHasZlibCompression() {
249 llvm::CompressionKind::Zlib
250 } else {
251 sess.dcx().emit_warn(UnsupportedCompression { algorithm: "zlib" });
252 llvm::CompressionKind::None
253 }
254 }
255 config::DebugInfoCompression::Zstd => {
256 if llvm::LLVMRustLLVMHasZstdCompression() {
257 llvm::CompressionKind::Zstd
258 } else {
259 sess.dcx().emit_warn(UnsupportedCompression { algorithm: "zstd" });
260 llvm::CompressionKind::None
261 }
262 }
263 };
264
265 let use_wasm_eh = wants_wasm_eh(sess);
266
267 let large_data_threshold = sess.opts.unstable_opts.large_data_threshold.unwrap_or(0);
268
269 let prof = SelfProfilerRef::clone(&sess.prof);
270 Arc::new(move |dcx: DiagCtxtHandle<'_>, config: TargetMachineFactoryConfig| {
271 let _prof_timer = prof.generic_activity("target_machine_factory_inner");
273
274 let path_to_cstring_helper = |path: Option<PathBuf>| -> CString {
275 let path = path.unwrap_or_default();
276 let path = path_mapping
277 .to_real_filename(&working_dir, path)
278 .path(RemapPathScopeComponents::DEBUGINFO)
279 .to_string_lossy()
280 .into_owned();
281 CString::new(path).unwrap()
282 };
283
284 let split_dwarf_file = path_to_cstring_helper(config.split_dwarf_file);
285 let output_obj_file = path_to_cstring_helper(config.output_obj_file);
286
287 OwnedTargetMachine::new(
288 &triple,
289 &cpu,
290 &features,
291 &abi,
292 code_model,
293 reloc_model,
294 opt_level,
295 float_abi,
296 ffunction_sections,
297 fdata_sections,
298 funique_section_names,
299 trap_unreachable,
300 singlethread,
301 verbose_asm,
302 emit_stack_size_section,
303 relax_elf_relocations,
304 use_init_array,
305 &split_dwarf_file,
306 &output_obj_file,
307 debuginfo_compression,
308 use_emulated_tls,
309 use_wasm_eh,
310 large_data_threshold,
311 )
312 .unwrap_or_else(|err| dcx.emit_fatal(ParseTargetMachineConfig(err)))
313 })
314}
315
316pub(crate) fn save_temp_bitcode(
317 cgcx: &CodegenContext,
318 module: &ModuleCodegen<ModuleLlvm>,
319 name: &str,
320) {
321 if !cgcx.save_temps {
322 return;
323 }
324 let ext = ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0}.bc", name))
})format!("{name}.bc");
325 let path = cgcx.output_filenames.temp_path_ext_for_cgu(
326 &ext,
327 &module.name,
328 cgcx.invocation_temp.as_deref(),
329 );
330 write_bitcode_to_file(&module.module_llvm, &path)
331}
332
333fn write_bitcode_to_file(module: &ModuleLlvm, path: &Path) {
334 unsafe {
335 let path = path_to_c_string(&path);
336 let llmod = module.llmod();
337 llvm::LLVMWriteBitcodeToFile(llmod, path.as_ptr());
338 }
339}
340
/// The compilation stage on whose behalf LLVM diagnostics are being captured.
/// Used to pick the suffix of the per-module remark file (see
/// `DiagnosticHandlers::new`).
pub(crate) enum CodegenDiagnosticsStage {
    /// Module optimization (remark files get the `opt` suffix).
    Opt,
    /// Link-time optimization (remark files get the `lto` suffix).
    LTO,
    /// Machine-code generation (remark files get the `codegen` suffix).
    Codegen,
}
350
/// RAII guard that installs a diagnostic handler on an LLVM context and
/// restores the previous one (plus frees the callback payload) on drop.
pub(crate) struct DiagnosticHandlers<'a> {
    // Heap-allocated payload handed to the C diagnostic callback; created
    // with `Box::into_raw` in `new` and reclaimed in `Drop`.
    data: *mut (&'a CodegenContext, &'a SharedEmitter),
    // The context our handler was installed on.
    llcx: &'a llvm::Context,
    // Handler that was active before ours; reinstalled on drop.
    old_handler: Option<&'a llvm::DiagnosticHandler>,
}
356
357impl<'a> DiagnosticHandlers<'a> {
358 pub(crate) fn new(
359 cgcx: &'a CodegenContext,
360 shared_emitter: &'a SharedEmitter,
361 llcx: &'a llvm::Context,
362 module: &ModuleCodegen<ModuleLlvm>,
363 stage: CodegenDiagnosticsStage,
364 ) -> Self {
365 let remark_passes_all: bool;
366 let remark_passes: Vec<CString>;
367 match &cgcx.remark {
368 Passes::All => {
369 remark_passes_all = true;
370 remark_passes = Vec::new();
371 }
372 Passes::Some(passes) => {
373 remark_passes_all = false;
374 remark_passes =
375 passes.iter().map(|name| CString::new(name.as_str()).unwrap()).collect();
376 }
377 };
378 let remark_passes: Vec<*const c_char> =
379 remark_passes.iter().map(|name: &CString| name.as_ptr()).collect();
380 let remark_file = cgcx
381 .remark_dir
382 .as_ref()
383 .map(|dir| {
385 let stage_suffix = match stage {
386 CodegenDiagnosticsStage::Codegen => "codegen",
387 CodegenDiagnosticsStage::Opt => "opt",
388 CodegenDiagnosticsStage::LTO => "lto",
389 };
390 dir.join(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0}.{1}.opt.yaml", module.name,
stage_suffix))
})format!("{}.{stage_suffix}.opt.yaml", module.name))
391 })
392 .and_then(|dir| dir.to_str().and_then(|p| CString::new(p).ok()));
393
394 let pgo_available = cgcx.module_config.pgo_use.is_some();
395 let data = Box::into_raw(Box::new((cgcx, shared_emitter)));
396 unsafe {
397 let old_handler = llvm::LLVMRustContextGetDiagnosticHandler(llcx);
398 llvm::LLVMRustContextConfigureDiagnosticHandler(
399 llcx,
400 diagnostic_handler,
401 data.cast(),
402 remark_passes_all,
403 remark_passes.as_ptr(),
404 remark_passes.len(),
405 remark_file.as_ref().map(|dir| dir.as_ptr()).unwrap_or(std::ptr::null()),
408 pgo_available,
409 );
410 DiagnosticHandlers { data, llcx, old_handler }
411 }
412 }
413}
414
impl<'a> Drop for DiagnosticHandlers<'a> {
    // Restores the diagnostic handler that was active before `new` installed
    // ours, then frees the callback payload.
    fn drop(&mut self) {
        unsafe {
            // SAFETY: `old_handler` was read from this same `llcx` in `new`,
            // so reinstalling it is valid.
            llvm::LLVMRustContextSetDiagnosticHandler(self.llcx, self.old_handler);
            // SAFETY: `data` came from `Box::into_raw` in `new` and is
            // reclaimed exactly once, here, after LLVM stops using it.
            drop(Box::from_raw(self.data));
        }
    }
}
423
424fn report_inline_asm(
425 cgcx: &CodegenContext,
426 msg: String,
427 level: llvm::DiagnosticLevel,
428 cookie: u64,
429 source: Option<(String, Vec<InnerSpan>)>,
430) -> InlineAsmError {
431 let span = if cookie == 0 || #[allow(non_exhaustive_omitted_patterns)] match cgcx.lto {
Lto::Fat | Lto::Thin => true,
_ => false,
}matches!(cgcx.lto, Lto::Fat | Lto::Thin) {
435 SpanData::default()
436 } else {
437 SpanData {
438 lo: BytePos::from_u32(cookie as u32),
439 hi: BytePos::from_u32((cookie >> 32) as u32),
440 ctxt: SyntaxContext::root(),
441 parent: None,
442 }
443 };
444 let level = match level {
445 llvm::DiagnosticLevel::Error => Level::Error,
446 llvm::DiagnosticLevel::Warning => Level::Warning,
447 llvm::DiagnosticLevel::Note | llvm::DiagnosticLevel::Remark => Level::Note,
448 };
449 let msg = msg.trim_prefix("error: ").to_string();
450 InlineAsmError { span, msg, level, source }
451}
452
/// C callback installed by `DiagnosticHandlers::new`: unpacks an LLVM
/// diagnostic and routes it to rustc's diagnostic machinery. `user` is the
/// `(cgcx, shared_emitter)` payload created in `new`.
unsafe extern "C" fn diagnostic_handler(info: &DiagnosticInfo, user: *mut c_void) {
    if user.is_null() {
        return;
    }
    // SAFETY (caller contract): `user` points at the live
    // `(&CodegenContext, &SharedEmitter)` pair boxed in
    // `DiagnosticHandlers::new`, which outlives the handler installation.
    let (cgcx, shared_emitter) = unsafe { *(user as *const (&CodegenContext, &SharedEmitter)) };

    // A throwaway diagnostic context that forwards to the shared emitter.
    let dcx = DiagCtxt::new(Box::new(shared_emitter.clone()));
    let dcx = dcx.handle();

    match unsafe { llvm::diagnostic::Diagnostic::unpack(info) } {
        llvm::diagnostic::InlineAsm(inline) => {
            // Inline-asm problems are routed specially so the frontend can
            // attach the original `asm!` span.
            shared_emitter.inline_asm_error(report_inline_asm(
                cgcx,
                inline.message,
                inline.level,
                inline.cookie,
                inline.source,
            ));
        }

        llvm::diagnostic::Optimization(opt) => {
            // Optimization remarks are surfaced as notes, tagged by kind.
            dcx.emit_note(FromLlvmOptimizationDiag {
                filename: &opt.filename,
                line: opt.line,
                column: opt.column,
                pass_name: &opt.pass_name,
                kind: match opt.kind {
                    OptimizationRemark => "success",
                    OptimizationMissed | OptimizationFailure => "missed",
                    OptimizationAnalysis
                    | OptimizationAnalysisFPCommute
                    | OptimizationAnalysisAliasing => "analysis",
                    OptimizationRemarkOther => "other",
                },
                message: &opt.message,
            });
        }
        // PGO/linker diagnostics are non-fatal: report as warnings.
        llvm::diagnostic::PGO(diagnostic_ref) | llvm::diagnostic::Linker(diagnostic_ref) => {
            let message = llvm::build_string(|s| unsafe {
                llvm::LLVMRustWriteDiagnosticInfoToString(diagnostic_ref, s)
            })
            .expect("non-UTF8 diagnostic");
            dcx.emit_warn(FromLlvmDiag { message });
        }
        // "Unsupported" diagnostics indicate something codegen cannot do:
        // report as hard errors.
        llvm::diagnostic::Unsupported(diagnostic_ref) => {
            let message = llvm::build_string(|s| unsafe {
                llvm::LLVMRustWriteDiagnosticInfoToString(diagnostic_ref, s)
            })
            .expect("non-UTF8 diagnostic");
            dcx.emit_err(FromLlvmDiag { message });
        }
        // Anything we do not recognize is silently ignored.
        llvm::diagnostic::UnknownDiagnostic(..) => {}
    }
}
508
509fn get_pgo_gen_path(config: &ModuleConfig) -> Option<CString> {
510 match config.pgo_gen {
511 SwitchWithOptPath::Enabled(ref opt_dir_path) => {
512 let path = if let Some(dir_path) = opt_dir_path {
513 dir_path.join("default_%m.profraw")
514 } else {
515 PathBuf::from("default_%m.profraw")
516 };
517
518 Some(CString::new(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0}", path.display()))
})format!("{}", path.display())).unwrap())
519 }
520 SwitchWithOptPath::Disabled => None,
521 }
522}
523
524fn get_pgo_use_path(config: &ModuleConfig) -> Option<CString> {
525 config
526 .pgo_use
527 .as_ref()
528 .map(|path_buf| CString::new(path_buf.to_string_lossy().as_bytes()).unwrap())
529}
530
531fn get_pgo_sample_use_path(config: &ModuleConfig) -> Option<CString> {
532 config
533 .pgo_sample_use
534 .as_ref()
535 .map(|path_buf| CString::new(path_buf.to_string_lossy().as_bytes()).unwrap())
536}
537
538fn get_instr_profile_output_path(config: &ModuleConfig) -> Option<CString> {
539 config.instrument_coverage.then(|| c"default_%m_%p.profraw".to_owned())
540}
541
/// Where a module currently sits relative to the autodiff (Enzyme) pass,
/// used by `llvm_optimize` to decide which optimizations are safe to run.
#[derive(Debug, Eq, PartialEq)]
pub(crate) enum AutodiffStage {
    /// Autodiff has not run yet.
    PreAD,
    /// Enzyme runs during this optimization invocation.
    DuringAD,
    /// Autodiff already ran; all optimizations may proceed normally.
    PostAD,
}
551
552pub(crate) unsafe fn llvm_optimize(
553 cgcx: &CodegenContext,
554 prof: &SelfProfilerRef,
555 dcx: DiagCtxtHandle<'_>,
556 module: &ModuleCodegen<ModuleLlvm>,
557 thin_lto_buffer: Option<&mut Option<Buffer>>,
558 thin_lto_summary_buffer: Option<&mut Option<Buffer>>,
559 config: &ModuleConfig,
560 opt_level: config::OptLevel,
561 opt_stage: llvm::OptStage,
562 autodiff_stage: AutodiffStage,
563) {
564 let consider_ad = config.autodiff.contains(&config::AutoDiff::Enable);
573 let run_enzyme = autodiff_stage == AutodiffStage::DuringAD;
574 let print_before_enzyme = config.autodiff.contains(&config::AutoDiff::PrintModBefore);
575 let print_after_enzyme = config.autodiff.contains(&config::AutoDiff::PrintModAfter);
576 let print_passes = config.autodiff.contains(&config::AutoDiff::PrintPasses);
577 let merge_functions;
578 let unroll_loops;
579 let vectorize_slp;
580 let vectorize_loop;
581
582 if consider_ad && autodiff_stage != AutodiffStage::PostAD {
591 merge_functions = false;
592 unroll_loops = false;
593 vectorize_slp = false;
594 vectorize_loop = false;
595 } else {
596 unroll_loops =
597 opt_level != config::OptLevel::Size && opt_level != config::OptLevel::SizeMin;
598 merge_functions = config.merge_functions;
599 vectorize_slp = config.vectorize_slp;
600 vectorize_loop = config.vectorize_loop;
601 }
602 {
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_llvm/src/back/write.rs:602",
"rustc_codegen_llvm::back::write", ::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_llvm/src/back/write.rs"),
::tracing_core::__macro_support::Option::Some(602u32),
::tracing_core::__macro_support::Option::Some("rustc_codegen_llvm::back::write"),
::tracing_core::field::FieldSet::new(&["unroll_loops",
"vectorize_slp", "vectorize_loop", "run_enzyme"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&debug(&unroll_loops)
as &dyn Value)),
(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&debug(&vectorize_slp)
as &dyn Value)),
(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&debug(&vectorize_loop)
as &dyn Value)),
(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&debug(&run_enzyme)
as &dyn Value))])
});
} else { ; }
};trace!(?unroll_loops, ?vectorize_slp, ?vectorize_loop, ?run_enzyme);
603 if thin_lto_buffer.is_some() {
604 if !#[allow(non_exhaustive_omitted_patterns)] match opt_stage {
llvm::OptStage::PreLinkNoLTO | llvm::OptStage::PreLinkFatLTO |
llvm::OptStage::PreLinkThinLTO => true,
_ => false,
} {
{
::core::panicking::panic_fmt(format_args!("the bitcode for LTO can only be obtained at the pre-link stage"));
}
};assert!(
605 matches!(
606 opt_stage,
607 llvm::OptStage::PreLinkNoLTO
608 | llvm::OptStage::PreLinkFatLTO
609 | llvm::OptStage::PreLinkThinLTO
610 ),
611 "the bitcode for LTO can only be obtained at the pre-link stage"
612 );
613 }
614 let pgo_gen_path = get_pgo_gen_path(config);
615 let pgo_use_path = get_pgo_use_path(config);
616 let pgo_sample_use_path = get_pgo_sample_use_path(config);
617 let is_lto = opt_stage == llvm::OptStage::ThinLTO || opt_stage == llvm::OptStage::FatLTO;
618 let instr_profile_output_path = get_instr_profile_output_path(config);
619 let sanitize_dataflow_abilist: Vec<_> = config
620 .sanitizer_dataflow_abilist
621 .iter()
622 .map(|file| CString::new(file.as_str()).unwrap())
623 .collect();
624 let sanitize_dataflow_abilist_ptrs: Vec<_> =
625 sanitize_dataflow_abilist.iter().map(|file| file.as_ptr()).collect();
626 let sanitizer_options = if !is_lto {
628 Some(llvm::SanitizerOptions {
629 sanitize_address: config.sanitizer.contains(SanitizerSet::ADDRESS),
630 sanitize_address_recover: config.sanitizer_recover.contains(SanitizerSet::ADDRESS),
631 sanitize_cfi: config.sanitizer.contains(SanitizerSet::CFI),
632 sanitize_dataflow: config.sanitizer.contains(SanitizerSet::DATAFLOW),
633 sanitize_dataflow_abilist: sanitize_dataflow_abilist_ptrs.as_ptr(),
634 sanitize_dataflow_abilist_len: sanitize_dataflow_abilist_ptrs.len(),
635 sanitize_kcfi: config.sanitizer.contains(SanitizerSet::KCFI),
636 sanitize_memory: config.sanitizer.contains(SanitizerSet::MEMORY),
637 sanitize_memory_recover: config.sanitizer_recover.contains(SanitizerSet::MEMORY),
638 sanitize_memory_track_origins: config.sanitizer_memory_track_origins as c_int,
639 sanitize_realtime: config.sanitizer.contains(SanitizerSet::REALTIME),
640 sanitize_thread: config.sanitizer.contains(SanitizerSet::THREAD),
641 sanitize_hwaddress: config.sanitizer.contains(SanitizerSet::HWADDRESS),
642 sanitize_hwaddress_recover: config.sanitizer_recover.contains(SanitizerSet::HWADDRESS),
643 sanitize_kernel_address: config.sanitizer.contains(SanitizerSet::KERNELADDRESS),
644 sanitize_kernel_address_recover: config
645 .sanitizer_recover
646 .contains(SanitizerSet::KERNELADDRESS),
647 sanitize_kernel_hwaddress: config.sanitizer.contains(SanitizerSet::KERNELHWADDRESS),
648 sanitize_kernel_hwaddress_recover: config
649 .sanitizer_recover
650 .contains(SanitizerSet::KERNELHWADDRESS),
651 })
652 } else {
653 None
654 };
655
656 fn handle_offload<'ll>(cx: &'ll SimpleCx<'_>, old_fn: &llvm::Value) {
657 let old_fn_ty = cx.get_type_of_global(old_fn);
658 let old_param_types = cx.func_params_types(old_fn_ty);
659 let old_param_count = old_param_types.len();
660 if old_param_count == 0 {
661 return;
662 }
663
664 let first_param = llvm::get_param(old_fn, 0);
665 let c_name = llvm::get_value_name(first_param);
666 let first_arg_name = str::from_utf8(&c_name).unwrap();
667 if first_arg_name == "dyn_ptr" {
671 return;
672 }
673
674 let mut new_param_types = Vec::with_capacity(old_param_count as usize + 1);
676 new_param_types.push(cx.type_ptr());
677
678 for &old_ty in &old_param_types {
680 let new_ty = match cx.type_kind(old_ty) {
681 TypeKind::Half | TypeKind::Float | TypeKind::Double | TypeKind::Integer => {
682 cx.type_i64()
683 }
684 _ => old_ty,
685 };
686 new_param_types.push(new_ty);
687 }
688
689 let ret_ty = unsafe { llvm::LLVMGetReturnType(old_fn_ty) };
691 let new_fn_ty = cx.type_func(&new_param_types, ret_ty);
692
693 let old_fn_name = String::from_utf8(llvm::get_value_name(old_fn)).unwrap();
695 let new_fn_name = ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0}.offload", &old_fn_name))
})format!("{}.offload", &old_fn_name);
696 let new_fn = cx.add_func(&new_fn_name, new_fn_ty);
697 let a0 = llvm::get_param(new_fn, 0);
698 llvm::set_value_name(a0, CString::new("dyn_ptr").unwrap().as_bytes());
699
700 let bb = SBuilder::append_block(cx, new_fn, "entry");
701 let mut builder = SBuilder::build(cx, bb);
702
703 let mut old_args_rebuilt = Vec::with_capacity(old_param_types.len());
704
705 for (i, &old_ty) in old_param_types.iter().enumerate() {
706 let new_arg = llvm::get_param(new_fn, (i + 1) as u32);
707
708 let rebuilt = match cx.type_kind(old_ty) {
709 TypeKind::Half | TypeKind::Float | TypeKind::Double | TypeKind::Integer => {
710 let num_bits = scalar_width(cx, old_ty);
711
712 let trunc = builder.trunc(new_arg, cx.type_ix(num_bits));
713 builder.bitcast(trunc, old_ty)
714 }
715 _ => new_arg,
716 };
717
718 old_args_rebuilt.push(rebuilt);
719 }
720
721 builder.ret_void();
722
723 unsafe {
726 llvm::LLVMRustOffloadMapper(old_fn, new_fn, old_args_rebuilt.as_ptr());
727 }
728
729 llvm::set_linkage(new_fn, llvm::get_linkage(old_fn));
730 llvm::set_visibility(new_fn, llvm::get_visibility(old_fn));
731
732 unsafe {
734 llvm::LLVMReplaceAllUsesWith(old_fn, new_fn);
735 }
736 let name = llvm::get_value_name(old_fn);
737 unsafe {
738 llvm::LLVMDeleteFunction(old_fn);
739 }
740 llvm::set_value_name(new_fn, &name);
742 }
743
744 if cgcx.target_is_like_gpu && config.offload.contains(&config::Offload::Device) {
745 let cx =
746 SimpleCx::new(module.module_llvm.llmod(), module.module_llvm.llcx, cgcx.pointer_size);
747 for func in cx.get_functions() {
748 let offload_kernel = "offload-kernel";
749 if attributes::has_string_attr(func, offload_kernel) {
750 handle_offload(&cx, func);
751 }
752 attributes::remove_string_attr_from_llfn(func, offload_kernel);
753 }
754 }
755
756 let mut llvm_profiler = prof
757 .llvm_recording_enabled()
758 .then(|| LlvmSelfProfiler::new(prof.get_self_profiler().unwrap()));
759
760 let llvm_selfprofiler =
761 llvm_profiler.as_mut().map(|s| s as *mut _ as *mut c_void).unwrap_or(std::ptr::null_mut());
762
763 let extra_passes = if !is_lto { config.passes.join(",") } else { "".to_string() };
764
765 let llvm_plugins = config.llvm_plugins.join(",");
766
767 let enzyme_fn = if consider_ad {
768 let wrapper = llvm::EnzymeWrapper::get_instance();
769 wrapper.registerEnzymeAndPassPipeline
770 } else {
771 std::ptr::null()
772 };
773
774 let result = unsafe {
775 llvm::LLVMRustOptimize(
776 module.module_llvm.llmod(),
777 &*module.module_llvm.tm.raw(),
778 to_pass_builder_opt_level(opt_level),
779 opt_stage,
780 cgcx.use_linker_plugin_lto,
781 config.no_prepopulate_passes,
782 config.verify_llvm_ir,
783 config.lint_llvm_ir,
784 thin_lto_buffer,
785 thin_lto_summary_buffer,
786 merge_functions,
787 unroll_loops,
788 vectorize_slp,
789 vectorize_loop,
790 config.no_builtins,
791 config.emit_lifetime_markers,
792 enzyme_fn,
793 print_before_enzyme,
794 print_after_enzyme,
795 print_passes,
796 sanitizer_options.as_ref(),
797 pgo_gen_path.as_ref().map_or(std::ptr::null(), |s| s.as_ptr()),
798 pgo_use_path.as_ref().map_or(std::ptr::null(), |s| s.as_ptr()),
799 config.instrument_coverage,
800 instr_profile_output_path.as_ref().map_or(std::ptr::null(), |s| s.as_ptr()),
801 pgo_sample_use_path.as_ref().map_or(std::ptr::null(), |s| s.as_ptr()),
802 config.debug_info_for_profiling,
803 llvm_selfprofiler,
804 selfprofile_before_pass_callback,
805 selfprofile_after_pass_callback,
806 extra_passes.as_c_char_ptr(),
807 extra_passes.len(),
808 llvm_plugins.as_c_char_ptr(),
809 llvm_plugins.len(),
810 )
811 };
812
813 if cgcx.target_is_like_gpu && config.offload.contains(&config::Offload::Device) {
814 let device_path = cgcx.output_filenames.path(OutputType::Object);
815 let device_dir = device_path.parent().unwrap();
816 let device_out = device_dir.join("host.out");
817 let device_out_c = path_to_c_string(device_out.as_path());
818 unsafe {
819 let ok = llvm::LLVMRustBundleImages(
821 module.module_llvm.llmod(),
822 module.module_llvm.tm.raw(),
823 device_out_c.as_ptr(),
824 );
825 if !ok || !device_out.exists() {
826 dcx.emit_err(crate::errors::OffloadBundleImagesFailed);
827 }
828 }
829 }
830
831 if !cgcx.target_is_like_gpu {
837 if let Some(device_path) = config
838 .offload
839 .iter()
840 .find_map(|o| if let config::Offload::Host(path) = o { Some(path) } else { None })
841 {
842 let device_pathbuf = PathBuf::from(device_path);
843 if device_pathbuf.is_relative() {
844 dcx.emit_err(crate::errors::OffloadWithoutAbsPath);
845 } else if device_pathbuf
846 .file_name()
847 .and_then(|n| n.to_str())
848 .is_some_and(|n| n != "host.out")
849 {
850 dcx.emit_err(crate::errors::OffloadWrongFileName);
851 } else if !device_pathbuf.exists() {
852 dcx.emit_err(crate::errors::OffloadNonexistingPath);
853 }
854 let host_path = cgcx.output_filenames.path(OutputType::Object);
855 let host_dir = host_path.parent().unwrap();
856 let out_obj = host_dir.join("host.o");
857 let host_out_c = path_to_c_string(device_pathbuf.as_path());
858
859 let llmod2 = llvm::LLVMCloneModule(module.module_llvm.llmod());
863 let ok =
864 unsafe { llvm::LLVMRustOffloadEmbedBufferInModule(llmod2, host_out_c.as_ptr()) };
865 if !ok {
866 dcx.emit_err(crate::errors::OffloadEmbedFailed);
867 }
868 write_output_file(
869 dcx,
870 module.module_llvm.tm.raw(),
871 config.no_builtins,
872 llmod2,
873 &out_obj,
874 None,
875 llvm::FileType::ObjectFile,
876 prof,
877 true,
878 );
879 }
883 }
884 result.into_result().unwrap_or_else(|()| llvm_err(dcx, LlvmError::RunLlvmPasses))
885}
886
/// Runs the LLVM optimization pipeline over `module` (unless `config.opt_level`
/// is `None`), choosing the pre-link LTO stage from the session's LTO mode.
///
/// As a side effect, when a full-bitcode object or a ThinLTO summary was
/// requested, the produced bitcode buffer is stashed into
/// `module.thin_lto_buffer` and the summary is written to the CGU's
/// thin-link-bitcode temp path. Diagnostics are routed through a fresh
/// `DiagCtxt` backed by `shared_emitter`.
pub(crate) fn optimize(
    cgcx: &CodegenContext,
    prof: &SelfProfilerRef,
    shared_emitter: &SharedEmitter,
    module: &mut ModuleCodegen<ModuleLlvm>,
    config: &ModuleConfig,
) {
    let _timer = prof.generic_activity_with_arg("LLVM_module_optimize", &*module.name);

    // Diagnostics emitted during optimization are forwarded to the shared emitter.
    let dcx = DiagCtxt::new(Box::new(shared_emitter.clone()));
    let dcx = dcx.handle();

    // Install LLVM diagnostic handlers for the duration of this function.
    let llcx = &*module.module_llvm.llcx;
    let _handlers =
        DiagnosticHandlers::new(cgcx, shared_emitter, llcx, module, CodegenDiagnosticsStage::Opt);

    // Snapshot the pre-optimization IR for `--emit=llvm-bc`-style temp debugging.
    if module.kind == ModuleKind::Regular {
        save_temp_bitcode(cgcx, module, "no-opt");
    }

    if let Some(opt_level) = config.opt_level {
        // Pick the pipeline stage: linker-plugin LTO is treated like pre-link ThinLTO.
        let opt_stage = match cgcx.lto {
            Lto::Fat => llvm::OptStage::PreLinkFatLTO,
            Lto::Thin | Lto::ThinLocal => llvm::OptStage::PreLinkThinLTO,
            _ if cgcx.use_linker_plugin_lto => llvm::OptStage::PreLinkThinLTO,
            _ => llvm::OptStage::PreLinkNoLTO,
        };

        // When autodiff (Enzyme) is enabled we run the pre-AD pipeline here;
        // otherwise the module is treated as already post-AD.
        let consider_ad = config.autodiff.contains(&config::AutoDiff::Enable);
        let autodiff_stage = if consider_ad { AutodiffStage::PreAD } else { AutodiffStage::PostAD };
        // `Some(None)` means "please fill this buffer in"; the inner `Option` is
        // the out-slot that `llvm_optimize` populates. Buffers are only requested
        // when a full embedded-bitcode object or a ThinLTO summary is needed.
        let (mut thin_lto_buffer, mut thin_lto_summary_buffer) = if (module.kind
            == ModuleKind::Regular
            && config.emit_obj == EmitObj::ObjectCode(BitcodeSection::Full))
            || config.emit_thin_lto_summary
        {
            (Some(None), config.emit_thin_lto_summary.then_some(None))
        } else {
            (None, None)
        };
        unsafe {
            llvm_optimize(
                cgcx,
                prof,
                dcx,
                module,
                thin_lto_buffer.as_mut(),
                thin_lto_summary_buffer.as_mut(),
                config,
                opt_level,
                opt_stage,
                autodiff_stage,
            )
        };
        if let Some(thin_lto_buffer) = thin_lto_buffer {
            // The out-slot must have been filled by `llvm_optimize` above.
            let thin_lto_buffer = thin_lto_buffer.unwrap();
            module.thin_lto_buffer = Some(thin_lto_buffer.data().to_vec());
            let bc_summary_out = cgcx.output_filenames.temp_path_for_cgu(
                OutputType::ThinLinkBitcode,
                &module.name,
                cgcx.invocation_temp.as_deref(),
            );
            if let Some(thin_lto_summary_buffer) = thin_lto_summary_buffer
                && let Some(thin_link_bitcode_filename) = bc_summary_out.file_name()
            {
                let thin_lto_summary_buffer = thin_lto_summary_buffer.unwrap();
                let summary_data = thin_lto_summary_buffer.data();
                prof.artifact_size(
                    "llvm_bitcode_summary",
                    thin_link_bitcode_filename.to_string_lossy(),
                    summary_data.len() as u64,
                );
                let _timer = prof.generic_activity_with_arg(
                    "LLVM_module_codegen_emit_bitcode_summary",
                    &*module.name,
                );
                if let Err(err) = fs::write(&bc_summary_out, summary_data) {
                    dcx.emit_err(WriteBytecode { path: &bc_summary_out, err });
                }
            }
        }
    }
}
978
979pub(crate) fn codegen(
980 cgcx: &CodegenContext,
981 prof: &SelfProfilerRef,
982 shared_emitter: &SharedEmitter,
983 module: ModuleCodegen<ModuleLlvm>,
984 config: &ModuleConfig,
985) -> CompiledModule {
986 let _timer = prof.generic_activity_with_arg("LLVM_module_codegen", &*module.name);
987
988 let dcx = DiagCtxt::new(Box::new(shared_emitter.clone()));
989 let dcx = dcx.handle();
990
991 {
992 let llmod = module.module_llvm.llmod();
993 let llcx = &*module.module_llvm.llcx;
994 let tm = &*module.module_llvm.tm;
995 let _handlers = DiagnosticHandlers::new(
996 cgcx,
997 shared_emitter,
998 llcx,
999 &module,
1000 CodegenDiagnosticsStage::Codegen,
1001 );
1002
1003 if cgcx.msvc_imps_needed {
1004 create_msvc_imps(cgcx, llcx, llmod);
1005 }
1006
1007 let bc_out = cgcx.output_filenames.temp_path_for_cgu(
1012 OutputType::Bitcode,
1013 &module.name,
1014 cgcx.invocation_temp.as_deref(),
1015 );
1016 let obj_out = cgcx.output_filenames.temp_path_for_cgu(
1017 OutputType::Object,
1018 &module.name,
1019 cgcx.invocation_temp.as_deref(),
1020 );
1021
1022 if config.bitcode_needed() {
1023 if config.emit_bc || config.emit_obj == EmitObj::Bitcode {
1024 let thin = {
1025 let _timer = prof.generic_activity_with_arg(
1026 "LLVM_module_codegen_make_bitcode",
1027 &*module.name,
1028 );
1029 ModuleBuffer::new(llmod, cgcx.lto != Lto::Fat)
1030 };
1031 let data = thin.data();
1032 let _timer = prof
1033 .generic_activity_with_arg("LLVM_module_codegen_emit_bitcode", &*module.name);
1034 if let Some(bitcode_filename) = bc_out.file_name() {
1035 prof.artifact_size(
1036 "llvm_bitcode",
1037 bitcode_filename.to_string_lossy(),
1038 data.len() as u64,
1039 );
1040 }
1041 if let Err(err) = fs::write(&bc_out, data) {
1042 dcx.emit_err(WriteBytecode { path: &bc_out, err });
1043 }
1044 }
1045
1046 if config.embed_bitcode() && module.kind == ModuleKind::Regular {
1047 let _timer = prof
1048 .generic_activity_with_arg("LLVM_module_codegen_embed_bitcode", &*module.name);
1049 let thin_bc =
1050 module.thin_lto_buffer.as_deref().expect("cannot find embedded bitcode");
1051 embed_bitcode(cgcx, llcx, llmod, &thin_bc);
1052 }
1053 }
1054
1055 if config.emit_ir {
1056 let _timer =
1057 prof.generic_activity_with_arg("LLVM_module_codegen_emit_ir", &*module.name);
1058 let out = cgcx.output_filenames.temp_path_for_cgu(
1059 OutputType::LlvmAssembly,
1060 &module.name,
1061 cgcx.invocation_temp.as_deref(),
1062 );
1063 let out_c = path_to_c_string(&out);
1064
1065 extern "C" fn demangle_callback(
1066 input_ptr: *const c_char,
1067 input_len: size_t,
1068 output_ptr: *mut c_char,
1069 output_len: size_t,
1070 ) -> size_t {
1071 let input =
1072 unsafe { slice::from_raw_parts(input_ptr as *const u8, input_len as usize) };
1073
1074 let Ok(input) = str::from_utf8(input) else { return 0 };
1075
1076 let output = unsafe {
1077 slice::from_raw_parts_mut(output_ptr as *mut u8, output_len as usize)
1078 };
1079 let mut cursor = io::Cursor::new(output);
1080
1081 let Ok(demangled) = rustc_demangle::try_demangle(input) else { return 0 };
1082
1083 if cursor.write_fmt(format_args!("{0:#}", demangled))write!(cursor, "{demangled:#}").is_err() {
1084 return 0;
1086 }
1087
1088 cursor.position() as size_t
1089 }
1090
1091 let result =
1092 unsafe { llvm::LLVMRustPrintModule(llmod, out_c.as_ptr(), demangle_callback) };
1093
1094 if result == llvm::LLVMRustResult::Success {
1095 record_artifact_size(prof, "llvm_ir", &out);
1096 }
1097
1098 result
1099 .into_result()
1100 .unwrap_or_else(|()| llvm_err(dcx, LlvmError::WriteIr { path: &out }));
1101 }
1102
1103 if config.emit_asm {
1104 let _timer =
1105 prof.generic_activity_with_arg("LLVM_module_codegen_emit_asm", &*module.name);
1106 let path = cgcx.output_filenames.temp_path_for_cgu(
1107 OutputType::Assembly,
1108 &module.name,
1109 cgcx.invocation_temp.as_deref(),
1110 );
1111
1112 let llmod = if let EmitObj::ObjectCode(_) = config.emit_obj {
1117 llvm::LLVMCloneModule(llmod)
1118 } else {
1119 llmod
1120 };
1121 write_output_file(
1122 dcx,
1123 tm.raw(),
1124 config.no_builtins,
1125 llmod,
1126 &path,
1127 None,
1128 llvm::FileType::AssemblyFile,
1129 prof,
1130 config.verify_llvm_ir,
1131 );
1132 }
1133
1134 match config.emit_obj {
1135 EmitObj::ObjectCode(_) => {
1136 let _timer =
1137 prof.generic_activity_with_arg("LLVM_module_codegen_emit_obj", &*module.name);
1138
1139 let dwo_out = cgcx
1140 .output_filenames
1141 .temp_path_dwo_for_cgu(&module.name, cgcx.invocation_temp.as_deref());
1142 let dwo_out = match (cgcx.split_debuginfo, cgcx.split_dwarf_kind) {
1143 (SplitDebuginfo::Off, _) => None,
1145 _ if !cgcx.target_can_use_split_dwarf => None,
1148 (_, SplitDwarfKind::Single) => None,
1151 (_, SplitDwarfKind::Split) => Some(dwo_out.as_path()),
1154 };
1155
1156 write_output_file(
1157 dcx,
1158 tm.raw(),
1159 config.no_builtins,
1160 llmod,
1161 &obj_out,
1162 dwo_out,
1163 llvm::FileType::ObjectFile,
1164 prof,
1165 config.verify_llvm_ir,
1166 );
1167 }
1168
1169 EmitObj::Bitcode => {
1170 {
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_llvm/src/back/write.rs:1170",
"rustc_codegen_llvm::back::write", ::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_llvm/src/back/write.rs"),
::tracing_core::__macro_support::Option::Some(1170u32),
::tracing_core::__macro_support::Option::Some("rustc_codegen_llvm::back::write"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("copying bitcode {0:?} to obj {1:?}",
bc_out, obj_out) as &dyn Value))])
});
} else { ; }
};debug!("copying bitcode {:?} to obj {:?}", bc_out, obj_out);
1171 if let Err(err) = link_or_copy(&bc_out, &obj_out) {
1172 dcx.emit_err(CopyBitcode { err });
1173 }
1174
1175 if !config.emit_bc {
1176 {
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_llvm/src/back/write.rs:1176",
"rustc_codegen_llvm::back::write", ::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_llvm/src/back/write.rs"),
::tracing_core::__macro_support::Option::Some(1176u32),
::tracing_core::__macro_support::Option::Some("rustc_codegen_llvm::back::write"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("removing_bitcode {0:?}",
bc_out) as &dyn Value))])
});
} else { ; }
};debug!("removing_bitcode {:?}", bc_out);
1177 ensure_removed(dcx, &bc_out);
1178 }
1179 }
1180
1181 EmitObj::None => {}
1182 }
1183
1184 record_llvm_cgu_instructions_stats(prof, &module.name, llmod);
1185 }
1186
1187 let dwarf_object_emitted = #[allow(non_exhaustive_omitted_patterns)] match config.emit_obj {
EmitObj::ObjectCode(_) => true,
_ => false,
}matches!(config.emit_obj, EmitObj::ObjectCode(_))
1196 && cgcx.target_can_use_split_dwarf
1197 && cgcx.split_debuginfo != SplitDebuginfo::Off
1198 && cgcx.split_dwarf_kind == SplitDwarfKind::Split;
1199 module.into_compiled_module(
1200 config.emit_obj != EmitObj::None,
1201 dwarf_object_emitted,
1202 config.emit_bc,
1203 config.emit_asm,
1204 config.emit_ir,
1205 &cgcx.output_filenames,
1206 cgcx.invocation_temp.as_deref(),
1207 )
1208}
1209
/// Builds a module-level inline-asm snippet that places `data` into
/// `section_name` with the given section flags, escaping the bytes for a
/// `.ascii` directive (backslash/quote escaped; non-printables as octal).
///
/// Fix in this revision: the `format!` invocation had its macro expansion
/// fused onto the line (invalid syntax); restored to the plain macro call.
fn create_section_with_flags_asm(section_name: &str, section_flags: &str, data: &[u8]) -> Vec<u8> {
    let mut asm = format!(".section {section_name},\"{section_flags}\"\n").into_bytes();
    asm.extend_from_slice(b".ascii \"");
    asm.reserve(data.len());
    for &byte in data {
        if byte == b'\\' || byte == b'"' {
            // Backslash and double-quote must be backslash-escaped inside `.ascii`.
            asm.push(b'\\');
            asm.push(byte);
        } else if byte < 0x20 || byte >= 0x80 {
            // Non-printable / non-ASCII bytes are emitted as 3-digit octal escapes.
            asm.push(b'\\');
            asm.push(b'0' + ((byte >> 6) & 0x7));
            asm.push(b'0' + ((byte >> 3) & 0x7));
            asm.push(b'0' + ((byte >> 0) & 0x7));
        } else {
            asm.push(byte);
        }
    }
    asm.extend_from_slice(b"\"\n");
    asm
}
1232
1233pub(crate) fn bitcode_section_name(cgcx: &CodegenContext) -> &'static CStr {
1234 if cgcx.target_is_like_darwin {
1235 c"__LLVM,__bitcode"
1236 } else if cgcx.target_is_like_aix {
1237 c".ipa"
1238 } else {
1239 c".llvmbc"
1240 }
1241}
1242
/// Embeds `bitcode` into `llmod` so it ends up in the emitted object file.
///
/// Two strategies are used depending on the target:
/// - Darwin / AIX / wasm: create private global constants placed into the
///   platform's bitcode and cmdline sections.
/// - Everything else: append module-level inline assembly that defines the
///   `.llvmbc` / `.llvmcmd` sections directly, with flags depending on
///   whether the target is PE/COFF.
fn embed_bitcode(
    cgcx: &CodegenContext,
    llcx: &llvm::Context,
    llmod: &llvm::Module,
    bitcode: &[u8],
) {
    if cgcx.target_is_like_darwin
        || cgcx.target_is_like_aix
        || cgcx.target_arch == "wasm32"
        || cgcx.target_arch == "wasm64"
    {
        // Global holding the bitcode itself, placed in the platform-specific
        // bitcode section (see `bitcode_section_name`).
        let llconst = common::bytes_in_context(llcx, bitcode);
        let llglobal = llvm::add_global(llmod, common::val_ty(llconst), c"rustc.embedded.module");
        llvm::set_initializer(llglobal, llconst);

        llvm::set_section(llglobal, bitcode_section_name(cgcx));
        llvm::set_linkage(llglobal, llvm::Linkage::PrivateLinkage);
        llvm::LLVMSetGlobalConstant(llglobal, llvm::TRUE);

        // Empty companion global for the cmdline section; here the payload is
        // intentionally empty but the section must still exist.
        let llconst = common::bytes_in_context(llcx, &[]);
        let llglobal = llvm::add_global(llmod, common::val_ty(llconst), c"rustc.embedded.cmdline");
        llvm::set_initializer(llglobal, llconst);
        let section = if cgcx.target_is_like_darwin {
            c"__LLVM,__cmdline"
        } else if cgcx.target_is_like_aix {
            c".info"
        } else {
            c".llvmcmd"
        };
        llvm::set_section(llglobal, section);
        llvm::set_linkage(llglobal, llvm::Linkage::PrivateLinkage);
    } else {
        // Non-Mach-O/XCOFF targets: emit the sections via module inline asm.
        // "n" (PE/COFF) vs "e" (ELF) section flags — presumably chosen so the
        // sections are excluded from loading; confirm against the assembler docs.
        let section_flags = if cgcx.is_pe_coff { "n" } else { "e" };
        let asm = create_section_with_flags_asm(".llvmbc", section_flags, bitcode);
        llvm::append_module_inline_asm(llmod, &asm);
        let asm = create_section_with_flags_asm(".llvmcmd", section_flags, &[]);
        llvm::append_module_inline_asm(llmod, &asm);
    }
}
1323
1324fn create_msvc_imps(cgcx: &CodegenContext, llcx: &llvm::Context, llmod: &llvm::Module) {
1330 if !cgcx.msvc_imps_needed {
1331 return;
1332 }
1333 let prefix = if cgcx.target_arch == "x86" { "\x01__imp__" } else { "\x01__imp_" };
1338
1339 let ptr_ty = llvm_type_ptr(llcx);
1340 let globals = base::iter_globals(llmod)
1341 .filter(|&val| {
1342 llvm::get_linkage(val) == llvm::Linkage::ExternalLinkage && !llvm::is_declaration(val)
1343 })
1344 .filter_map(|val| {
1345 let name = llvm::get_value_name(val);
1347 if ignored(&name) { None } else { Some((val, name)) }
1348 })
1349 .map(move |(val, name)| {
1350 let mut imp_name = prefix.as_bytes().to_vec();
1351 imp_name.extend(name);
1352 let imp_name = CString::new(imp_name).unwrap();
1353 (imp_name, val)
1354 })
1355 .collect::<Vec<_>>();
1356
1357 for (imp_name, val) in globals {
1358 let imp = llvm::add_global(llmod, ptr_ty, &imp_name);
1359
1360 llvm::set_initializer(imp, val);
1361 llvm::set_linkage(imp, llvm::Linkage::ExternalLinkage);
1362 }
1363
1364 fn ignored(symbol_name: &[u8]) -> bool {
1366 symbol_name.starts_with(b"__llvm_profile_")
1368 }
1369}
1370
1371fn record_artifact_size(
1372 self_profiler_ref: &SelfProfilerRef,
1373 artifact_kind: &'static str,
1374 path: &Path,
1375) {
1376 if !self_profiler_ref.enabled() {
1378 return;
1379 }
1380
1381 if let Some(artifact_name) = path.file_name() {
1382 let file_size = std::fs::metadata(path).map(|m| m.len()).unwrap_or(0);
1383 self_profiler_ref.artifact_size(artifact_kind, artifact_name.to_string_lossy(), file_size);
1384 }
1385}
1386
1387fn record_llvm_cgu_instructions_stats(prof: &SelfProfilerRef, name: &str, llmod: &llvm::Module) {
1388 if !prof.enabled() {
1389 return;
1390 }
1391
1392 let total = unsafe { llvm::LLVMRustModuleInstructionStats(llmod) };
1393 prof.artifact_size("cgu_instructions", name, total);
1394}