use std::collections::BTreeMap;
use std::ffi::{CStr, CString};
use std::fs::File;
use std::path::{Path, PathBuf};
use std::ptr::NonNull;
use std::sync::Arc;
use std::{io, iter, slice};

use object::read::archive::ArchiveFile;
use object::{Object, ObjectSection};
use rustc_codegen_ssa::back::lto::{SerializedModule, ThinModule, ThinShared};
use rustc_codegen_ssa::back::write::{CodegenContext, FatLtoInput, SharedEmitter};
use rustc_codegen_ssa::traits::*;
use rustc_codegen_ssa::{ModuleCodegen, ModuleKind, looks_like_rust_object_file};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::memmap::Mmap;
use rustc_errors::{DiagCtxt, DiagCtxtHandle};
use rustc_hir::attrs::SanitizerSet;
use rustc_middle::bug;
use rustc_middle::dep_graph::WorkProduct;
use rustc_session::config::{self, Lto};
use tracing::{debug, info};

use crate::back::write::{
    self, CodegenDiagnosticsStage, DiagnosticHandlers, bitcode_section_name, save_temp_bitcode,
};
use crate::errors::{LlvmError, LtoBitcodeFromRlib};
use crate::llvm::{self, build_string};
use crate::{LlvmCodegenBackend, ModuleLlvm};

/// We keep track of the computed LTO cache keys from the previous
/// session to determine which CGUs we can reuse.
const THIN_LTO_KEYS_INCR_COMP_FILE_NAME: &str = "thin-lto-past-keys.bin";

fn prepare_lto(
    cgcx: &CodegenContext<LlvmCodegenBackend>,
    exported_symbols_for_lto: &[String],
    each_linked_rlib_for_lto: &[PathBuf],
    dcx: DiagCtxtHandle<'_>,
) -> (Vec<CString>, Vec<(SerializedModule<ModuleBuffer>, CString)>) {
    let mut symbols_below_threshold = exported_symbols_for_lto
        .iter()
        .map(|symbol| CString::new(symbol.to_owned()).unwrap())
        .collect::<Vec<CString>>();

    if cgcx.module_config.instrument_coverage || cgcx.module_config.pgo_gen.enabled() {
        // These are weak symbols that point to the profile version and the
        // profile name, which need to be treated as exported so LTO doesn't nix
        // them.
        const PROFILER_WEAK_SYMBOLS: [&CStr; 2] =
            [c"__llvm_profile_raw_version", c"__llvm_profile_filename"];

        symbols_below_threshold.extend(PROFILER_WEAK_SYMBOLS.iter().map(|&sym| sym.to_owned()));
    }

    if cgcx.module_config.sanitizer.contains(SanitizerSet::MEMORY) {
        let mut msan_weak_symbols = Vec::new();

        // Similar to profiling, preserve weak msan symbol during LTO.
        if cgcx.module_config.sanitizer_recover.contains(SanitizerSet::MEMORY) {
            msan_weak_symbols.push(c"__msan_keep_going");
        }

        if cgcx.module_config.sanitizer_memory_track_origins != 0 {
            msan_weak_symbols.push(c"__msan_track_origins");
        }

        symbols_below_threshold.extend(msan_weak_symbols.into_iter().map(|sym| sym.to_owned()));
    }

    // Preserve LLVM-injected, ASAN-related symbols.
    // See also https://github.com/rust-lang/rust/issues/113404.
    symbols_below_threshold.push(c"___asan_globals_registered".to_owned());

    // __llvm_profile_counter_bias is pulled in at link time by an undefined reference to
    // __llvm_profile_runtime, therefore we won't know until link time if this symbol
    // should have default visibility.
    symbols_below_threshold.push(c"__llvm_profile_counter_bias".to_owned());

    // LTO seems to discard this otherwise under certain circumstances.
    symbols_below_threshold.push(c"rust_eh_personality".to_owned());

    // If we're performing LTO for the entire crate graph, then for each of our
    // upstream dependencies, find the corresponding rlib and load the bitcode
    // from the archive.
    //
    // We save off all the bytecode and LLVM module ids for later processing
    // with either fat or thin LTO
    let mut upstream_modules = Vec::new();
    if cgcx.lto != Lto::ThinLocal {
        for path in each_linked_rlib_for_lto {
            let archive_data = unsafe {
                Mmap::map(std::fs::File::open(&path).expect("couldn't open rlib"))
                    .expect("couldn't map rlib")
            };
            let archive = ArchiveFile::parse(&*archive_data).expect("wanted an rlib");
            let obj_files = archive
                .members()
                .filter_map(|child| {
                    child.ok().and_then(|c| {
                        std::str::from_utf8(c.name()).ok().map(|name| (name.trim(), c))
                    })
                })
                .filter(|&(name, _)| looks_like_rust_object_file(name));
            for (name, child) in obj_files {
                info!("adding bitcode from {}", name);
                match get_bitcode_slice_from_object_data(
                    child.data(&*archive_data).expect("corrupt rlib"),
                    cgcx,
                ) {
                    Ok(data) => {
                        let module = SerializedModule::FromRlib(data.to_vec());
                        upstream_modules.push((module, CString::new(name).unwrap()));
                    }
                    Err(e) => dcx.emit_fatal(e),
                }
            }
        }
    }

    (symbols_below_threshold, upstream_modules)
}

fn get_bitcode_slice_from_object_data<'a>(
    obj: &'a [u8],
    cgcx: &CodegenContext<LlvmCodegenBackend>,
) -> Result<&'a [u8], LtoBitcodeFromRlib> {
    // We're about to assume the data here is an object file with sections, but if it's raw LLVM IR
    // that won't work. Fortunately, if that's what we have we can just return the object directly,
    // so we sniff the relevant magic strings here and return.
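    // (For reference: `\xDE\xC0\x17\x0B` is the little-endian encoding of 0x0B17C0DE, the magic
    // of LLVM's bitcode wrapper header, and `BC\xC0\xDE` is the magic of a raw bitcode stream.)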
    if obj.starts_with(b"\xDE\xC0\x17\x0B") || obj.starts_with(b"BC\xC0\xDE") {
        return Ok(obj);
    }
    // We drop the "__LLVM," prefix here because on Apple platforms there's a notion of "segment
    // name" which in the public API for sections gets treated as part of the section name, but
    // internally in MachOObjectFile.cpp gets treated separately.
    let section_name = bitcode_section_name(cgcx).to_str().unwrap().trim_start_matches("__LLVM,");

    let obj =
        object::File::parse(obj).map_err(|err| LtoBitcodeFromRlib { err: err.to_string() })?;

    let section = obj
        .section_by_name(section_name)
        .ok_or_else(|| LtoBitcodeFromRlib { err: format!("Can't find section {section_name}") })?;

    section.data().map_err(|err| LtoBitcodeFromRlib { err: err.to_string() })
}

/// Performs fat LTO by merging all modules into a single one and returning it
/// for further optimization.
pub(crate) fn run_fat(
    cgcx: &CodegenContext<LlvmCodegenBackend>,
    shared_emitter: &SharedEmitter,
    exported_symbols_for_lto: &[String],
    each_linked_rlib_for_lto: &[PathBuf],
    modules: Vec<FatLtoInput<LlvmCodegenBackend>>,
) -> ModuleCodegen<ModuleLlvm> {
    let dcx = DiagCtxt::new(Box::new(shared_emitter.clone()));
    let dcx = dcx.handle();
    let (symbols_below_threshold, upstream_modules) =
        prepare_lto(cgcx, exported_symbols_for_lto, each_linked_rlib_for_lto, dcx);
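    // Note: the raw pointers collected below borrow from the `CString` vector above, which is
    // shadowed (not dropped) and therefore stays alive for the duration of the `fat_lto` call.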
    let symbols_below_threshold =
        symbols_below_threshold.iter().map(|c| c.as_ptr()).collect::<Vec<_>>();
    fat_lto(cgcx, dcx, shared_emitter, modules, upstream_modules, &symbols_below_threshold)
}

/// Performs thin LTO by performing necessary global analysis and returning two
/// lists, one of the modules that need optimization and another for modules that
/// can simply be copied over from the incr. comp. cache.
pub(crate) fn run_thin(
    cgcx: &CodegenContext<LlvmCodegenBackend>,
    dcx: DiagCtxtHandle<'_>,
    exported_symbols_for_lto: &[String],
    each_linked_rlib_for_lto: &[PathBuf],
    modules: Vec<(String, ThinBuffer)>,
    cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
) -> (Vec<ThinModule<LlvmCodegenBackend>>, Vec<WorkProduct>) {
    let (symbols_below_threshold, upstream_modules) =
        prepare_lto(cgcx, exported_symbols_for_lto, each_linked_rlib_for_lto, dcx);
    let symbols_below_threshold =
        symbols_below_threshold.iter().map(|c| c.as_ptr()).collect::<Vec<_>>();
    if cgcx.use_linker_plugin_lto {
        unreachable!(
            "We should never reach this case if the LTO step \
             is deferred to the linker"
        );
    }
    thin_lto(cgcx, dcx, modules, upstream_modules, cached_modules, &symbols_below_threshold)
}

pub(crate) fn prepare_thin(module: ModuleCodegen<ModuleLlvm>) -> (String, ThinBuffer) {
    let name = module.name;
    let buffer = ThinBuffer::new(module.module_llvm.llmod(), true);
    (name, buffer)
}

fn fat_lto(
    cgcx: &CodegenContext<LlvmCodegenBackend>,
    dcx: DiagCtxtHandle<'_>,
    shared_emitter: &SharedEmitter,
    modules: Vec<FatLtoInput<LlvmCodegenBackend>>,
    mut serialized_modules: Vec<(SerializedModule<ModuleBuffer>, CString)>,
    symbols_below_threshold: &[*const libc::c_char],
) -> ModuleCodegen<ModuleLlvm> {
    let _timer = cgcx.prof.generic_activity("LLVM_fat_lto_build_monolithic_module");
    info!("going for a fat lto");

    // Sort out all our lists of incoming modules into two lists.
    //
    // * `serialized_modules` (also an argument to this function) contains all
    //   modules that are serialized in-memory.
    // * `in_memory` contains modules which are already parsed and in-memory,
    //   such as from multi-CGU builds.
    let mut in_memory = Vec::new();
    for module in modules {
        match module {
            FatLtoInput::InMemory(m) => in_memory.push(m),
            FatLtoInput::Serialized { name, buffer } => {
                info!("pushing serialized module {:?}", name);
                serialized_modules.push((buffer, CString::new(name).unwrap()));
            }
        }
    }

    // Find the "costliest" module and merge everything into that codegen unit.
    // All the other modules will be serialized and reparsed into the new
    // context, so this hopefully avoids serializing and parsing the largest
    // codegen unit.
    //
    // Additionally use a regular module as the base here to ensure that various
    // file copy operations in the backend work correctly. The only other kind
    // of module here should be an allocator one, and if your crate is smaller
    // than the allocator module then the size doesn't really matter anyway.
    let costliest_module = in_memory
        .iter()
        .enumerate()
        .filter(|&(_, module)| module.kind == ModuleKind::Regular)
        .map(|(i, module)| {
            let cost = unsafe { llvm::LLVMRustModuleCost(module.module_llvm.llmod()) };
            (cost, i)
        })
        .max();

    // If we found a costliest module, we're good to go. Otherwise all our
    // inputs were serialized which could happen in the case, for example, that
    // all our inputs were incrementally reread from the cache and we're just
    // re-executing the LTO passes. If that's the case deserialize the first
    // module and create a linker with it.
    let module: ModuleCodegen<ModuleLlvm> = match costliest_module {
        Some((_cost, i)) => in_memory.remove(i),
        None => {
            assert!(!serialized_modules.is_empty(), "must have at least one serialized module");
            let (buffer, name) = serialized_modules.remove(0);
            info!("no in-memory regular modules to choose from, parsing {:?}", name);
            let llvm_module = ModuleLlvm::parse(cgcx, &name, buffer.data(), dcx);
            ModuleCodegen::new_regular(name.into_string().unwrap(), llvm_module)
        }
    };
    {
        let (llcx, llmod) = {
            let llvm = &module.module_llvm;
            (&llvm.llcx, llvm.llmod())
        };
        info!("using {:?} as a base module", module.name);

        // The linking steps below may produce errors and diagnostics within LLVM
        // which we'd like to handle and print, so set up our diagnostic handlers
        // (which get unregistered when they go out of scope below).
        let _handler = DiagnosticHandlers::new(
            cgcx,
            shared_emitter,
            llcx,
            &module,
            CodegenDiagnosticsStage::LTO,
        );

        // For all other modules we codegened we'll need to link them into our own
        // bitcode. All modules were codegened in their own LLVM context, however,
        // and we want to move everything to the same LLVM context. Currently the
        // way we know of to do that is to serialize them to a string and then parse
        // them later. Not great but hey, that's why it's "fat" LTO, right?
        for module in in_memory {
            let buffer = ModuleBuffer::new(module.module_llvm.llmod());
            let llmod_id = CString::new(&module.name[..]).unwrap();
            serialized_modules.push((SerializedModule::Local(buffer), llmod_id));
        }
        // Sort the modules to ensure we produce deterministic results.
        serialized_modules.sort_by(|module1, module2| module1.1.cmp(&module2.1));

        // For all serialized bitcode files we parse them and link them in as we did
        // above, this is all mostly handled in C++.
        let mut linker = Linker::new(llmod);
        for (bc_decoded, name) in serialized_modules {
            let _timer = cgcx
                .prof
                .generic_activity_with_arg_recorder("LLVM_fat_lto_link_module", |recorder| {
                    recorder.record_arg(format!("{name:?}"))
                });
            info!("linking {:?}", name);
            let data = bc_decoded.data();
            linker
                .add(data)
                .unwrap_or_else(|()| write::llvm_err(dcx, LlvmError::LoadBitcode { name }));
        }
        drop(linker);
        save_temp_bitcode(cgcx, &module, "lto.input");

        // Internalize everything below threshold to help strip out more modules and such.
        unsafe {
            let ptr = symbols_below_threshold.as_ptr();
            llvm::LLVMRustRunRestrictionPass(
                llmod,
                ptr as *const *const libc::c_char,
                symbols_below_threshold.len() as libc::size_t,
            );
        }
        save_temp_bitcode(cgcx, &module, "lto.after-restriction");
    }

    module
}
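
/// RAII handle around an LLVM IR linker created via `LLVMRustLinkerNew`: `add` links a
/// serialized bitcode module into the base module, and the underlying linker is freed on drop.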
pub(crate) struct Linker<'a>(&'a mut llvm::Linker<'a>);

impl<'a> Linker<'a> {
    pub(crate) fn new(llmod: &'a llvm::Module) -> Self {
        unsafe { Linker(llvm::LLVMRustLinkerNew(llmod)) }
    }

    pub(crate) fn add(&mut self, bytecode: &[u8]) -> Result<(), ()> {
        unsafe {
            if llvm::LLVMRustLinkerAdd(
                self.0,
                bytecode.as_ptr() as *const libc::c_char,
                bytecode.len(),
            ) {
                Ok(())
            } else {
                Err(())
            }
        }
    }
}

impl Drop for Linker<'_> {
    fn drop(&mut self) {
        unsafe {
            llvm::LLVMRustLinkerFree(&mut *(self.0 as *mut _));
        }
    }
}

/// Prepare "thin" LTO to get run on these modules.
///
/// The general structure of ThinLTO is quite different from the structure of
/// "fat" LTO above. With "fat" LTO all LLVM modules in question are merged into
/// one giant LLVM module, and then we run more optimization passes over this
/// big module after internalizing most symbols. Thin LTO, on the other hand,
/// avoids this large bottleneck through more targeted optimization.
///
/// At a high level Thin LTO looks like:
///
/// 1. Prepare a "summary" of each LLVM module in question which describes
///    the values inside, cost of the values, etc.
/// 2. Merge the summaries of all modules in question into one "index"
/// 3. Perform some global analysis on this index
/// 4. For each module, use the index and analysis calculated previously to
///    perform local transformations on the module, for example inlining
///    small functions from other modules.
/// 5. Run thin-specific optimization passes over each module, and then code
///    generate everything at the end.
///
/// The summary for each module is intended to be quite cheap, and the global
/// index is relatively quite cheap to create as well. As a result, the goal of
/// ThinLTO is to reduce the bottleneck on LTO and enable LTO to be used in more
/// situations. For example one cheap optimization is that we can parallelize
/// all codegen modules, easily making use of all the cores on a machine.
///
/// With all that in mind, the function here is designed specifically to just
/// calculate the *index* for ThinLTO. This index will then be shared amongst
/// all of the `ThinModule` units returned below and destroyed once
/// they all go out of scope.
fn thin_lto(
    cgcx: &CodegenContext<LlvmCodegenBackend>,
    dcx: DiagCtxtHandle<'_>,
    modules: Vec<(String, ThinBuffer)>,
    serialized_modules: Vec<(SerializedModule<ModuleBuffer>, CString)>,
    cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
    symbols_below_threshold: &[*const libc::c_char],
) -> (Vec<ThinModule<LlvmCodegenBackend>>, Vec<WorkProduct>) {
    let _timer = cgcx.prof.generic_activity("LLVM_thin_lto_global_analysis");
    unsafe {
        info!("going for that thin, thin LTO");
        let green_modules: FxHashMap<_, _> =
            cached_modules.iter().map(|(_, wp)| (wp.cgu_name.clone(), wp.clone())).collect();

        let full_scope_len = modules.len() + serialized_modules.len() + cached_modules.len();
        let mut thin_buffers = Vec::with_capacity(modules.len());
        let mut module_names = Vec::with_capacity(full_scope_len);
        let mut thin_modules = Vec::with_capacity(full_scope_len);

        for (i, (name, buffer)) in modules.into_iter().enumerate() {
            info!("local module: {} - {}", i, name);
            let cname = CString::new(name.as_bytes()).unwrap();
            thin_modules.push(llvm::ThinLTOModule {
                identifier: cname.as_ptr(),
                data: buffer.data().as_ptr(),
                len: buffer.data().len(),
            });
            thin_buffers.push(buffer);
            module_names.push(cname);
        }

        // FIXME: All upstream crates are deserialized internally in the
        // function below to extract their summary and modules. Note that
        // unlike the loop above we *must* decode and/or read something
        // here as these are all just serialized files on disk. An
        // improvement, however, to make here would be to store the
        // module summary separately from the actual module itself. Right
        // now this is stored in one large bitcode file, and the entire
        // file is deflate-compressed. We could try to bypass some of the
        // decompression by storing the index uncompressed and only
        // lazily decompressing the bytecode if necessary.
        //
        // Note that truly taking advantage of this optimization will
        // likely be further down the road. We'd have to implement
        // incremental ThinLTO first where we could actually avoid
        // looking at upstream modules entirely sometimes (the contents,
        // we must always unconditionally look at the index).
        let mut serialized = Vec::with_capacity(serialized_modules.len() + cached_modules.len());

        let cached_modules =
            cached_modules.into_iter().map(|(sm, wp)| (sm, CString::new(wp.cgu_name).unwrap()));

        for (module, name) in serialized_modules.into_iter().chain(cached_modules) {
            info!("upstream or cached module {:?}", name);
            thin_modules.push(llvm::ThinLTOModule {
                identifier: name.as_ptr(),
                data: module.data().as_ptr(),
                len: module.data().len(),
            });
            serialized.push(module);
            module_names.push(name);
        }

        // Sanity check
        assert_eq!(thin_modules.len(), module_names.len());

        // Delegate to the C++ bindings to create some data here. Once this is a
        // tried-and-true interface we may wish to try to upstream some of this
        // to LLVM itself, right now we reimplement a lot of what they do
        // upstream...
        let data = llvm::LLVMRustCreateThinLTOData(
            thin_modules.as_ptr(),
            thin_modules.len(),
            symbols_below_threshold.as_ptr(),
            symbols_below_threshold.len(),
        )
        .unwrap_or_else(|| write::llvm_err(dcx, LlvmError::PrepareThinLtoContext));

        let data = ThinData(data);

        info!("thin LTO data created");

        let (key_map_path, prev_key_map, curr_key_map) = if let Some(ref incr_comp_session_dir) =
            cgcx.incr_comp_session_dir
        {
            let path = incr_comp_session_dir.join(THIN_LTO_KEYS_INCR_COMP_FILE_NAME);
            // If the previous file was deleted, or we get an IO error
            // reading the file, then we'll just use `None` as the
            // prev_key_map, which will force the code to be recompiled.
            let prev =
                if path.exists() { ThinLTOKeysMap::load_from_file(&path).ok() } else { None };
            let curr = ThinLTOKeysMap::from_thin_lto_modules(&data, &thin_modules, &module_names);
            (Some(path), prev, curr)
        } else {
            // If we don't compile incrementally, we don't need to load the
            // import data from LLVM.
            assert!(green_modules.is_empty());
            let curr = ThinLTOKeysMap::default();
            (None, None, curr)
        };
        info!("thin LTO cache key map loaded");
        info!("prev_key_map: {:#?}", prev_key_map);
        info!("curr_key_map: {:#?}", curr_key_map);

        // Throw our data in an `Arc` as we'll be sharing it across threads. We
        // also put all memory referenced by the C++ data (buffers, ids, etc)
        // into the arc as well. After this we'll create a thin module
        // codegen per module in this data.
        let shared = Arc::new(ThinShared {
            data,
            thin_buffers,
            serialized_modules: serialized,
            module_names,
        });

        let mut copy_jobs = vec![];
        let mut opt_jobs = vec![];

        info!("checking which modules can be re-used and which have to be re-optimized.");
        for (module_index, module_name) in shared.module_names.iter().enumerate() {
            let module_name = module_name_to_str(module_name);
            if let (Some(prev_key_map), true) =
                (prev_key_map.as_ref(), green_modules.contains_key(module_name))
            {
                assert!(cgcx.incr_comp_session_dir.is_some());

                // If a module exists in both the current and the previous session,
                // and has the same LTO cache key in both sessions, then we can re-use it
                if prev_key_map.keys.get(module_name) == curr_key_map.keys.get(module_name) {
                    let work_product = green_modules[module_name].clone();
                    copy_jobs.push(work_product);
                    info!(" - {}: re-used", module_name);
                    assert!(cgcx.incr_comp_session_dir.is_some());
                    continue;
                }
            }

            info!(" - {}: re-compiled", module_name);
            opt_jobs.push(ThinModule { shared: Arc::clone(&shared), idx: module_index });
        }

        // Save the current ThinLTO import information for the next compilation
        // session, overwriting the previous serialized data (if any).
        if let Some(path) = key_map_path
            && let Err(err) = curr_key_map.save_to_file(&path)
        {
            write::llvm_err(dcx, LlvmError::WriteThinLtoKey { err });
        }

        (opt_jobs, copy_jobs)
    }
}

pub(crate) fn enable_autodiff_settings(ad: &[config::AutoDiff]) {
    let mut enzyme = llvm::EnzymeWrapper::get_instance();

    for val in ad {
        // We intentionally don't use a wildcard, to not forget handling anything new.
        match val {
            config::AutoDiff::PrintPerf => {
                enzyme.set_print_perf(true);
            }
            config::AutoDiff::PrintAA => {
                enzyme.set_print_activity(true);
            }
            config::AutoDiff::PrintTA => {
                enzyme.set_print_type(true);
            }
            config::AutoDiff::PrintTAFn(fun) => {
                enzyme.set_print_type(true); // Enable general type printing
                enzyme.set_print_type_fun(&fun); // Set specific function to analyze
            }
            config::AutoDiff::Inline => {
                enzyme.set_inline(true);
            }
            config::AutoDiff::LooseTypes => {
                enzyme.set_loose_types(true);
            }
            config::AutoDiff::PrintSteps => {
                enzyme.set_print(true);
            }
            // We handle this in the PassWrapper.cpp
            config::AutoDiff::PrintPasses => {}
            // We handle this in the PassWrapper.cpp
            config::AutoDiff::PrintModBefore => {}
            // We handle this in the PassWrapper.cpp
            config::AutoDiff::PrintModAfter => {}
            // We handle this in the PassWrapper.cpp
            config::AutoDiff::PrintModFinal => {}
            // This is required and already checked
            config::AutoDiff::Enable => {}
            // We handle this below
            config::AutoDiff::NoPostopt => {}
            // Disables TypeTree generation
            config::AutoDiff::NoTT => {}
        }
    }
    // This helps with handling enums for now.
    enzyme.set_strict_aliasing(false);
    // FIXME(ZuseZ4): Test this, since it was added a long time ago.
    enzyme.set_rust_rules(true);
}

pub(crate) fn run_pass_manager(
    cgcx: &CodegenContext<LlvmCodegenBackend>,
    dcx: DiagCtxtHandle<'_>,
    module: &mut ModuleCodegen<ModuleLlvm>,
    thin: bool,
) {
    let _timer = cgcx.prof.generic_activity_with_arg("LLVM_lto_optimize", &*module.name);
    let config = &cgcx.module_config;

    // Now we have one massive module inside of llmod. Time to run the
    // LTO-specific optimization passes that LLVM provides.
    //
    // This code is based off the code found in llvm's LTO code generator:
    // llvm/lib/LTO/LTOCodeGenerator.cpp
    debug!("running the pass manager");
    let opt_stage = if thin { llvm::OptStage::ThinLTO } else { llvm::OptStage::FatLTO };
    let opt_level = config.opt_level.unwrap_or(config::OptLevel::No);

    // The PostAD behavior is the same that we would have if no autodiff was used.
    // It will run the default optimization pipeline. If AD is enabled we select
    // the DuringAD stage, which will disable vectorization and loop unrolling, and
    // schedule two autodiff optimization + differentiation passes.
    // We then run the llvm_optimize function a second time, to optimize the code which we generated
    // in the enzyme differentiation pass.
    let enable_ad = config.autodiff.contains(&config::AutoDiff::Enable);
    let stage = if thin {
        write::AutodiffStage::PreAD
    } else {
        if enable_ad { write::AutodiffStage::DuringAD } else { write::AutodiffStage::PostAD }
    };

    unsafe {
        write::llvm_optimize(cgcx, dcx, module, None, config, opt_level, opt_stage, stage);
    }

    if cfg!(feature = "llvm_enzyme") && enable_ad && !thin {
        let opt_stage = llvm::OptStage::FatLTO;
        let stage = write::AutodiffStage::PostAD;
        if !config.autodiff.contains(&config::AutoDiff::NoPostopt) {
            unsafe {
                write::llvm_optimize(cgcx, dcx, module, None, config, opt_level, opt_stage, stage);
            }
        }

        // This is the final IR, so people should be able to inspect the optimized autodiff output,
        // for manual inspection.
        if config.autodiff.contains(&config::AutoDiff::PrintModFinal) {
            unsafe { llvm::LLVMDumpModule(module.module_llvm.llmod()) };
        }
    }

    debug!("lto done");
}

pub struct ModuleBuffer(&'static mut llvm::ModuleBuffer);

unsafe impl Send for ModuleBuffer {}
unsafe impl Sync for ModuleBuffer {}

impl ModuleBuffer {
    pub(crate) fn new(m: &llvm::Module) -> ModuleBuffer {
        ModuleBuffer(unsafe { llvm::LLVMRustModuleBufferCreate(m) })
    }
}

impl ModuleBufferMethods for ModuleBuffer {
    fn data(&self) -> &[u8] {
        unsafe {
            let ptr = llvm::LLVMRustModuleBufferPtr(self.0);
            let len = llvm::LLVMRustModuleBufferLen(self.0);
            slice::from_raw_parts(ptr, len)
        }
    }
}

impl Drop for ModuleBuffer {
    fn drop(&mut self) {
        unsafe {
            llvm::LLVMRustModuleBufferFree(&mut *(self.0 as *mut _));
        }
    }
}
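
/// Owning wrapper around the ThinLTO analysis data created by `LLVMRustCreateThinLTOData`;
/// the underlying `llvm::ThinLTOData` is freed via `LLVMRustFreeThinLTOData` on drop.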
pub struct ThinData(&'static mut llvm::ThinLTOData);

unsafe impl Send for ThinData {}
unsafe impl Sync for ThinData {}

impl Drop for ThinData {
    fn drop(&mut self) {
        unsafe {
            llvm::LLVMRustFreeThinLTOData(&mut *(self.0 as *mut _));
        }
    }
}

pub struct ThinBuffer(&'static mut llvm::ThinLTOBuffer);

unsafe impl Send for ThinBuffer {}
unsafe impl Sync for ThinBuffer {}

impl ThinBuffer {
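    /// Serializes `m` into a bitcode buffer. Judging by the call sites, `is_thin` presumably
    /// selects between a ThinLTO-style buffer (carrying summary data, as used by `prepare_thin`)
    /// and a plain module buffer; the authoritative behavior lives on the C++ side of
    /// `LLVMRustThinLTOBufferCreate`.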
    pub(crate) fn new(m: &llvm::Module, is_thin: bool) -> ThinBuffer {
        unsafe {
            let buffer = llvm::LLVMRustThinLTOBufferCreate(m, is_thin);
            ThinBuffer(buffer)
        }
    }
695696pub(crate) unsafe fn from_raw_ptr(ptr: *mut llvm::ThinLTOBuffer) -> ThinBuffer {
697let mut ptr = NonNull::new(ptr).unwrap();
698ThinBuffer(unsafe { ptr.as_mut() })
699 }
700701pub(crate) fn thin_link_data(&self) -> &[u8] {
702unsafe {
703let ptr = llvm::LLVMRustThinLTOBufferThinLinkDataPtr(self.0) as *const _;
704let len = llvm::LLVMRustThinLTOBufferThinLinkDataLen(self.0);
705 slice::from_raw_parts(ptr, len)
706 }
707 }
708}

impl ThinBufferMethods for ThinBuffer {
    fn data(&self) -> &[u8] {
        unsafe {
            let ptr = llvm::LLVMRustThinLTOBufferPtr(self.0) as *const _;
            let len = llvm::LLVMRustThinLTOBufferLen(self.0);
            slice::from_raw_parts(ptr, len)
        }
    }
}
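
// A minimal usage sketch (illustrative only, not called from here): `ThinBuffer::new` writes a
// module out through the ThinLTO-aware bitcode writer; `data()` returns the full bitcode and,
// when built with `is_thin == true`, `thin_link_data()` returns the summary blob used for the
// thin link step. `llmod` is assumed to be an `&llvm::Module` already in scope.
//
//     let buffer = ThinBuffer::new(llmod, /* is_thin */ true);
//     let bitcode: &[u8] = buffer.data();
//     let summary: &[u8] = buffer.thin_link_data();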

impl Drop for ThinBuffer {
    fn drop(&mut self) {
        unsafe {
            llvm::LLVMRustThinLTOBufferFree(&mut *(self.0 as *mut _));
        }
    }
}

pub(crate) fn optimize_thin_module(
    cgcx: &CodegenContext<LlvmCodegenBackend>,
    shared_emitter: &SharedEmitter,
    thin_module: ThinModule<LlvmCodegenBackend>,
) -> ModuleCodegen<ModuleLlvm> {
    let dcx = DiagCtxt::new(Box::new(shared_emitter.clone()));
    let dcx = dcx.handle();

    let module_name = &thin_module.shared.module_names[thin_module.idx];

    // Right now the implementation we've got only works over serialized
    // modules, so we create a fresh new LLVM context and parse the module
    // into that context. One day, however, we may do this for upstream
    // crates but for locally codegened modules we may be able to reuse
    // that LLVM Context and Module.
    let module_llvm = ModuleLlvm::parse(cgcx, module_name, thin_module.data(), dcx);
    let mut module = ModuleCodegen::new_regular(thin_module.name(), module_llvm);
    // Given that the newly created module lacks a thinlto buffer for embedding, we need to
    // re-add it here.
    if cgcx.module_config.embed_bitcode() {
        module.thin_lto_buffer = Some(thin_module.data().to_vec());
    }
    {
        let target = &*module.module_llvm.tm;
        let llmod = module.module_llvm.llmod();
        save_temp_bitcode(cgcx, &module, "thin-lto-input");

        // Up next comes the per-module local analyses that we do for Thin LTO.
        // Each of these functions is basically copied from the LLVM
        // implementation and then tailored to suit this implementation. Ideally
        // each of these would be supported by upstream LLVM but that's perhaps
        // a patch for another day!
        //
        // You can find some more comments about these functions in the LLVM
        // bindings we've got (currently `PassWrapper.cpp`)
        {
            let _timer =
                cgcx.prof.generic_activity_with_arg("LLVM_thin_lto_rename", thin_module.name());
            unsafe {
                llvm::LLVMRustPrepareThinLTORename(thin_module.shared.data.0, llmod, target.raw())
            };
            save_temp_bitcode(cgcx, &module, "thin-lto-after-rename");
        }

        {
            let _timer = cgcx
                .prof
                .generic_activity_with_arg("LLVM_thin_lto_resolve_weak", thin_module.name());
            if unsafe { !llvm::LLVMRustPrepareThinLTOResolveWeak(thin_module.shared.data.0, llmod) }
            {
                write::llvm_err(dcx, LlvmError::PrepareThinLtoModule);
            }
            save_temp_bitcode(cgcx, &module, "thin-lto-after-resolve");
        }

        {
            let _timer = cgcx
                .prof
                .generic_activity_with_arg("LLVM_thin_lto_internalize", thin_module.name());
            if unsafe { !llvm::LLVMRustPrepareThinLTOInternalize(thin_module.shared.data.0, llmod) }
            {
                write::llvm_err(dcx, LlvmError::PrepareThinLtoModule);
            }
            save_temp_bitcode(cgcx, &module, "thin-lto-after-internalize");
        }

        {
            let _timer =
                cgcx.prof.generic_activity_with_arg("LLVM_thin_lto_import", thin_module.name());
            if unsafe {
                !llvm::LLVMRustPrepareThinLTOImport(thin_module.shared.data.0, llmod, target.raw())
            } {
                write::llvm_err(dcx, LlvmError::PrepareThinLtoModule);
            }
            save_temp_bitcode(cgcx, &module, "thin-lto-after-import");
        }

        // Alright now that we've done everything related to the ThinLTO
        // analysis it's time to run some optimizations! Here we use the same
        // `run_pass_manager` as the "fat" LTO above except that we tell it to
        // populate a thin-specific pass manager, which presumably LLVM treats a
        // little differently.
        {
            info!("running thin lto passes over {}", module.name);
            run_pass_manager(cgcx, dcx, &mut module, true);
            save_temp_bitcode(cgcx, &module, "thin-lto-after-pm");
        }
    }
    module
}
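
// Note on debugging: the `save_temp_bitcode` snapshots taken above ("thin-lto-input",
// "thin-lto-after-rename", "thin-lto-after-resolve", "thin-lto-after-internalize",
// "thin-lto-after-import", "thin-lto-after-pm") are only written when the session is
// configured to keep temporary files, so they can be diffed to see which ThinLTO stage
// changed a module.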

/// Maps LLVM module identifiers to their corresponding LLVM LTO cache keys
#[derive(Debug, Default)]
struct ThinLTOKeysMap {
    // key = llvm name of importing module, value = LLVM cache key
    keys: BTreeMap<String, String>,
}
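
// The on-disk format produced by `save_to_file` (and parsed back by `load_from_file`) is
// one space-separated pair per line, with placeholder values shown here:
//
//     <llvm module name> <lto cache key>
//     <llvm module name> <lto cache key>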

impl ThinLTOKeysMap {
    fn save_to_file(&self, path: &Path) -> io::Result<()> {
        use std::io::Write;
        let mut writer = File::create_buffered(path)?;
        // The entries are loaded back into a hash map in `load_from_file()`, so
        // the order in which we write them to file here does not matter.
        for (module, key) in &self.keys {
            writeln!(writer, "{module} {key}")?;
        }
        Ok(())
    }

    fn load_from_file(path: &Path) -> io::Result<Self> {
        use std::io::BufRead;
        let mut keys = BTreeMap::default();
        let file = File::open_buffered(path)?;
        for line in file.lines() {
            let line = line?;
            let mut split = line.split(' ');
            let module = split.next().unwrap();
            let key = split.next().unwrap();
            assert_eq!(split.next(), None, "Expected two space-separated values, found {line:?}");
            keys.insert(module.to_string(), key.to_string());
        }
        Ok(Self { keys })
    }

    fn from_thin_lto_modules(
        data: &ThinData,
        modules: &[llvm::ThinLTOModule],
        names: &[CString],
    ) -> Self {
        let keys = iter::zip(modules, names)
            .map(|(module, name)| {
                let key = build_string(|rust_str| unsafe {
                    llvm::LLVMRustComputeLTOCacheKey(rust_str, module.identifier, data.0);
                })
                .expect("Invalid ThinLTO module key");
                (module_name_to_str(name).to_string(), key)
            })
            .collect();
        Self { keys }
    }
}

fn module_name_to_str(c_str: &CStr) -> &str {
    c_str.to_str().unwrap_or_else(|e| {
        bug!("Encountered non-utf8 LLVM module name `{}`: {}", c_str.to_string_lossy(), e)
    })
}

pub(crate) fn parse_module<'a>(
    cx: &'a llvm::Context,
    name: &CStr,
    data: &[u8],
    dcx: DiagCtxtHandle<'_>,
) -> &'a llvm::Module {
    unsafe {
        llvm::LLVMRustParseBitcodeForLTO(cx, data.as_ptr(), data.len(), name.as_ptr())
            .unwrap_or_else(|| write::llvm_err(dcx, LlvmError::ParseBitcode))
    }
}
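
// A minimal usage sketch (illustrative only): rehydrating a serialized module into an existing
// LLVM context, roughly what `ModuleLlvm::parse` does for ThinLTO inputs. The bindings `cx`,
// `bitcode`, and `dcx` are assumed to already be in scope, and the module name is made up.
//
//     let name = CString::new("example.cgu-0").unwrap();
//     let llmod: &llvm::Module = parse_module(cx, &name, bitcode, dcx);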