1use std::collections::BTreeMap;
2use std::ffi::{CStr, CString};
3use std::fs::File;
4use std::path::{Path, PathBuf};
5use std::sync::Arc;
6use std::{io, iter, slice};
78use object::read::archive::ArchiveFile;
9use object::{Object, ObjectSection};
10use rustc_codegen_ssa::back::lto::{SerializedModule, ThinModule, ThinShared};
11use rustc_codegen_ssa::back::write::{
12 CodegenContext, FatLtoInput, SharedEmitter, TargetMachineFactoryFn,
13};
14use rustc_codegen_ssa::traits::*;
15use rustc_codegen_ssa::{ModuleCodegen, ModuleKind, looks_like_rust_object_file};
16use rustc_data_structures::fx::FxHashMap;
17use rustc_data_structures::memmap::Mmap;
18use rustc_data_structures::profiling::SelfProfilerRef;
19use rustc_errors::{DiagCtxt, DiagCtxtHandle};
20use rustc_hir::attrs::SanitizerSet;
21use rustc_middle::bug;
22use rustc_middle::dep_graph::WorkProduct;
23use rustc_session::config::{self, Lto};
24use tracing::{debug, info};
2526use crate::back::write::{
27self, CodegenDiagnosticsStage, DiagnosticHandlers, bitcode_section_name, save_temp_bitcode,
28};
29use crate::errors::{LlvmError, LtoBitcodeFromRlib};
30use crate::llvm::{self, build_string};
31use crate::{LlvmCodegenBackend, ModuleLlvm};
/// We keep track of the computed LTO cache keys from the previous
/// session to determine which CGUs we can reuse.
const THIN_LTO_KEYS_INCR_COMP_FILE_NAME: &str = "thin-lto-past-keys.bin";
3637fn prepare_lto(
38 cgcx: &CodegenContext,
39 exported_symbols_for_lto: &[String],
40 each_linked_rlib_for_lto: &[PathBuf],
41 dcx: DiagCtxtHandle<'_>,
42) -> (Vec<CString>, Vec<(SerializedModule<ModuleBuffer>, CString)>) {
43let mut symbols_below_threshold = exported_symbols_for_lto44 .iter()
45 .map(|symbol| CString::new(symbol.to_owned()).unwrap())
46 .collect::<Vec<CString>>();
4748if cgcx.module_config.instrument_coverage || cgcx.module_config.pgo_gen.enabled() {
49// These are weak symbols that point to the profile version and the
50 // profile name, which need to be treated as exported so LTO doesn't nix
51 // them.
52const PROFILER_WEAK_SYMBOLS: [&CStr; 2] =
53 [c"__llvm_profile_raw_version", c"__llvm_profile_filename"];
5455symbols_below_threshold.extend(PROFILER_WEAK_SYMBOLS.iter().map(|&sym| sym.to_owned()));
56 }
5758if cgcx.module_config.sanitizer.contains(SanitizerSet::MEMORY) {
59let mut msan_weak_symbols = Vec::new();
6061// Similar to profiling, preserve weak msan symbol during LTO.
62if cgcx.module_config.sanitizer_recover.contains(SanitizerSet::MEMORY) {
63msan_weak_symbols.push(c"__msan_keep_going");
64 }
6566if cgcx.module_config.sanitizer_memory_track_origins != 0 {
67msan_weak_symbols.push(c"__msan_track_origins");
68 }
6970symbols_below_threshold.extend(msan_weak_symbols.into_iter().map(|sym| sym.to_owned()));
71 }
7273// Preserve LLVM-injected, ASAN-related symbols.
74 // See also https://github.com/rust-lang/rust/issues/113404.
75symbols_below_threshold.push(c"___asan_globals_registered".to_owned());
7677// __llvm_profile_counter_bias is pulled in at link time by an undefined reference to
78 // __llvm_profile_runtime, therefore we won't know until link time if this symbol
79 // should have default visibility.
80symbols_below_threshold.push(c"__llvm_profile_counter_bias".to_owned());
8182// LTO seems to discard this otherwise under certain circumstances.
83symbols_below_threshold.push(c"rust_eh_personality".to_owned());
8485// If we're performing LTO for the entire crate graph, then for each of our
86 // upstream dependencies, find the corresponding rlib and load the bitcode
87 // from the archive.
88 //
89 // We save off all the bytecode and LLVM module ids for later processing
90 // with either fat or thin LTO
91let mut upstream_modules = Vec::new();
92if cgcx.lto != Lto::ThinLocal {
93for path in each_linked_rlib_for_lto {
94let archive_data = unsafe {
95 Mmap::map(std::fs::File::open(&path).expect("couldn't open rlib"))
96 .expect("couldn't map rlib")
97 };
98let archive = ArchiveFile::parse(&*archive_data).expect("wanted an rlib");
99let obj_files = archive
100 .members()
101 .filter_map(|child| {
102 child.ok().and_then(|c| {
103 std::str::from_utf8(c.name()).ok().map(|name| (name.trim(), c))
104 })
105 })
106 .filter(|&(name, _)| looks_like_rust_object_file(name));
107for (name, child) in obj_files {
108{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_llvm/src/back/lto.rs:108",
"rustc_codegen_llvm::back::lto", ::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_llvm/src/back/lto.rs"),
::tracing_core::__macro_support::Option::Some(108u32),
::tracing_core::__macro_support::Option::Some("rustc_codegen_llvm::back::lto"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::INFO <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("adding bitcode from {0}",
name) as &dyn Value))])
});
} else { ; }
};info!("adding bitcode from {}", name);
109match get_bitcode_slice_from_object_data(
110 child.data(&*archive_data).expect("corrupt rlib"),
111 cgcx,
112 ) {
113Ok(data) => {
114let module = SerializedModule::FromRlib(data.to_vec());
115 upstream_modules.push((module, CString::new(name).unwrap()));
116 }
117Err(e) => dcx.emit_fatal(e),
118 }
119 }
120 }
121 }
122123 (symbols_below_threshold, upstream_modules)
124}
125126fn get_bitcode_slice_from_object_data<'a>(
127 obj: &'a [u8],
128 cgcx: &CodegenContext,
129) -> Result<&'a [u8], LtoBitcodeFromRlib> {
130// We're about to assume the data here is an object file with sections, but if it's raw LLVM IR
131 // that won't work. Fortunately, if that's what we have we can just return the object directly,
132 // so we sniff the relevant magic strings here and return.
133if obj.starts_with(b"\xDE\xC0\x17\x0B") || obj.starts_with(b"BC\xC0\xDE") {
134return Ok(obj);
135 }
136// We drop the "__LLVM," prefix here because on Apple platforms there's a notion of "segment
137 // name" which in the public API for sections gets treated as part of the section name, but
138 // internally in MachOObjectFile.cpp gets treated separately.
139let section_name = bitcode_section_name(cgcx).to_str().unwrap().trim_start_matches("__LLVM,");
140141let obj =
142 object::File::parse(obj).map_err(|err| LtoBitcodeFromRlib { err: err.to_string() })?;
143144let section = obj145 .section_by_name(section_name)
146 .ok_or_else(|| LtoBitcodeFromRlib { err: ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("Can\'t find section {0}",
section_name))
})format!("Can't find section {section_name}") })?;
147148section.data().map_err(|err| LtoBitcodeFromRlib { err: err.to_string() })
149}
150151/// Performs fat LTO by merging all modules into a single one and returning it
152/// for further optimization.
153pub(crate) fn run_fat(
154 cgcx: &CodegenContext,
155 prof: &SelfProfilerRef,
156 shared_emitter: &SharedEmitter,
157 tm_factory: TargetMachineFactoryFn<LlvmCodegenBackend>,
158 exported_symbols_for_lto: &[String],
159 each_linked_rlib_for_lto: &[PathBuf],
160 modules: Vec<FatLtoInput<LlvmCodegenBackend>>,
161) -> ModuleCodegen<ModuleLlvm> {
162let dcx = DiagCtxt::new(Box::new(shared_emitter.clone()));
163let dcx = dcx.handle();
164let (symbols_below_threshold, upstream_modules) =
165prepare_lto(cgcx, exported_symbols_for_lto, each_linked_rlib_for_lto, dcx);
166let symbols_below_threshold =
167symbols_below_threshold.iter().map(|c| c.as_ptr()).collect::<Vec<_>>();
168fat_lto(
169cgcx,
170prof,
171dcx,
172shared_emitter,
173tm_factory,
174modules,
175upstream_modules,
176&symbols_below_threshold,
177 )
178}
179180/// Performs thin LTO by performing necessary global analysis and returning two
181/// lists, one of the modules that need optimization and another for modules that
182/// can simply be copied over from the incr. comp. cache.
183pub(crate) fn run_thin(
184 cgcx: &CodegenContext,
185 prof: &SelfProfilerRef,
186 dcx: DiagCtxtHandle<'_>,
187 exported_symbols_for_lto: &[String],
188 each_linked_rlib_for_lto: &[PathBuf],
189 modules: Vec<(String, ModuleBuffer)>,
190 cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
191) -> (Vec<ThinModule<LlvmCodegenBackend>>, Vec<WorkProduct>) {
192let (symbols_below_threshold, upstream_modules) =
193prepare_lto(cgcx, exported_symbols_for_lto, each_linked_rlib_for_lto, dcx);
194let symbols_below_threshold =
195symbols_below_threshold.iter().map(|c| c.as_ptr()).collect::<Vec<_>>();
196if cgcx.use_linker_plugin_lto {
197{
::core::panicking::panic_fmt(format_args!("internal error: entered unreachable code: {0}",
format_args!("We should never reach this case if the LTO step is deferred to the linker")));
};unreachable!(
198"We should never reach this case if the LTO step \
199 is deferred to the linker"
200);
201 }
202thin_lto(cgcx, prof, dcx, modules, upstream_modules, cached_modules, &symbols_below_threshold)
203}
204205fn fat_lto(
206 cgcx: &CodegenContext,
207 prof: &SelfProfilerRef,
208 dcx: DiagCtxtHandle<'_>,
209 shared_emitter: &SharedEmitter,
210 tm_factory: TargetMachineFactoryFn<LlvmCodegenBackend>,
211 modules: Vec<FatLtoInput<LlvmCodegenBackend>>,
212mut serialized_modules: Vec<(SerializedModule<ModuleBuffer>, CString)>,
213 symbols_below_threshold: &[*const libc::c_char],
214) -> ModuleCodegen<ModuleLlvm> {
215let _timer = prof.generic_activity("LLVM_fat_lto_build_monolithic_module");
216{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_llvm/src/back/lto.rs:216",
"rustc_codegen_llvm::back::lto", ::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_llvm/src/back/lto.rs"),
::tracing_core::__macro_support::Option::Some(216u32),
::tracing_core::__macro_support::Option::Some("rustc_codegen_llvm::back::lto"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::INFO <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("going for a fat lto")
as &dyn Value))])
});
} else { ; }
};info!("going for a fat lto");
217218// Sort out all our lists of incoming modules into two lists.
219 //
220 // * `serialized_modules` (also and argument to this function) contains all
221 // modules that are serialized in-memory.
222 // * `in_memory` contains modules which are already parsed and in-memory,
223 // such as from multi-CGU builds.
224let mut in_memory = Vec::new();
225for module in modules {
226match module {
227 FatLtoInput::InMemory(m) => in_memory.push(m),
228 FatLtoInput::Serialized { name, buffer } => {
229{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_llvm/src/back/lto.rs:229",
"rustc_codegen_llvm::back::lto", ::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_llvm/src/back/lto.rs"),
::tracing_core::__macro_support::Option::Some(229u32),
::tracing_core::__macro_support::Option::Some("rustc_codegen_llvm::back::lto"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::INFO <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("pushing serialized module {0:?}",
name) as &dyn Value))])
});
} else { ; }
};info!("pushing serialized module {:?}", name);
230 serialized_modules.push((buffer, CString::new(name).unwrap()));
231 }
232 }
233 }
234235// Find the "costliest" module and merge everything into that codegen unit.
236 // All the other modules will be serialized and reparsed into the new
237 // context, so this hopefully avoids serializing and parsing the largest
238 // codegen unit.
239 //
240 // Additionally use a regular module as the base here to ensure that various
241 // file copy operations in the backend work correctly. The only other kind
242 // of module here should be an allocator one, and if your crate is smaller
243 // than the allocator module then the size doesn't really matter anyway.
244let costliest_module = in_memory245 .iter()
246 .enumerate()
247 .filter(|&(_, module)| module.kind == ModuleKind::Regular)
248 .map(|(i, module)| {
249let cost = unsafe { llvm::LLVMRustModuleCost(module.module_llvm.llmod()) };
250 (cost, i)
251 })
252 .max();
253254// If we found a costliest module, we're good to go. Otherwise all our
255 // inputs were serialized which could happen in the case, for example, that
256 // all our inputs were incrementally reread from the cache and we're just
257 // re-executing the LTO passes. If that's the case deserialize the first
258 // module and create a linker with it.
259let module: ModuleCodegen<ModuleLlvm> = match costliest_module {
260Some((_cost, i)) => in_memory.remove(i),
261None => {
262if !!serialized_modules.is_empty() {
{
::core::panicking::panic_fmt(format_args!("must have at least one serialized module"));
}
};assert!(!serialized_modules.is_empty(), "must have at least one serialized module");
263let (buffer, name) = serialized_modules.remove(0);
264{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_llvm/src/back/lto.rs:264",
"rustc_codegen_llvm::back::lto", ::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_llvm/src/back/lto.rs"),
::tracing_core::__macro_support::Option::Some(264u32),
::tracing_core::__macro_support::Option::Some("rustc_codegen_llvm::back::lto"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::INFO <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("no in-memory regular modules to choose from, parsing {0:?}",
name) as &dyn Value))])
});
} else { ; }
};info!("no in-memory regular modules to choose from, parsing {:?}", name);
265let llvm_module = ModuleLlvm::parse(cgcx, tm_factory, &name, buffer.data(), dcx);
266 ModuleCodegen::new_regular(name.into_string().unwrap(), llvm_module)
267 }
268 };
269 {
270let (llcx, llmod) = {
271let llvm = &module.module_llvm;
272 (&llvm.llcx, llvm.llmod())
273 };
274{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_llvm/src/back/lto.rs:274",
"rustc_codegen_llvm::back::lto", ::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_llvm/src/back/lto.rs"),
::tracing_core::__macro_support::Option::Some(274u32),
::tracing_core::__macro_support::Option::Some("rustc_codegen_llvm::back::lto"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::INFO <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("using {0:?} as a base module",
module.name) as &dyn Value))])
});
} else { ; }
};info!("using {:?} as a base module", module.name);
275276// The linking steps below may produce errors and diagnostics within LLVM
277 // which we'd like to handle and print, so set up our diagnostic handlers
278 // (which get unregistered when they go out of scope below).
279let _handler = DiagnosticHandlers::new(
280cgcx,
281shared_emitter,
282llcx,
283&module,
284 CodegenDiagnosticsStage::LTO,
285 );
286287// For all other modules we codegened we'll need to link them into our own
288 // bitcode. All modules were codegened in their own LLVM context, however,
289 // and we want to move everything to the same LLVM context. Currently the
290 // way we know of to do that is to serialize them to a string and them parse
291 // them later. Not great but hey, that's why it's "fat" LTO, right?
292for module in in_memory {
293let buffer = ModuleBuffer::new(module.module_llvm.llmod(), false);
294let llmod_id = CString::new(&module.name[..]).unwrap();
295 serialized_modules.push((SerializedModule::Local(buffer), llmod_id));
296 }
297// Sort the modules to ensure we produce deterministic results.
298serialized_modules.sort_by(|module1, module2| module1.1.cmp(&module2.1));
299300// For all serialized bitcode files we parse them and link them in as we did
301 // above, this is all mostly handled in C++.
302let mut linker = Linker::new(llmod);
303for (bc_decoded, name) in serialized_modules {
304let _timer = prof
305 .generic_activity_with_arg_recorder("LLVM_fat_lto_link_module", |recorder| {
306 recorder.record_arg(::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0:?}", name))
})format!("{name:?}"))
307 });
308{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_llvm/src/back/lto.rs:308",
"rustc_codegen_llvm::back::lto", ::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_llvm/src/back/lto.rs"),
::tracing_core::__macro_support::Option::Some(308u32),
::tracing_core::__macro_support::Option::Some("rustc_codegen_llvm::back::lto"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::INFO <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("linking {0:?}",
name) as &dyn Value))])
});
} else { ; }
};info!("linking {:?}", name);
309let data = bc_decoded.data();
310 linker
311 .add(data)
312 .unwrap_or_else(|()| write::llvm_err(dcx, LlvmError::LoadBitcode { name }));
313 }
314drop(linker);
315save_temp_bitcode(cgcx, &module, "lto.input");
316317// Internalize everything below threshold to help strip out more modules and such.
318unsafe {
319let ptr = symbols_below_threshold.as_ptr();
320 llvm::LLVMRustRunRestrictionPass(
321llmod,
322ptras *const *const libc::c_char,
323symbols_below_threshold.len() as libc::size_t,
324 );
325 }
326save_temp_bitcode(cgcx, &module, "lto.after-restriction");
327 }
328329module330}
331332pub(crate) struct Linker<'a>(&'a mut llvm::Linker<'a>);
333334impl<'a> Linker<'a> {
335pub(crate) fn new(llmod: &'a llvm::Module) -> Self {
336unsafe { Linker(llvm::LLVMRustLinkerNew(llmod)) }
337 }
338339pub(crate) fn add(&mut self, bytecode: &[u8]) -> Result<(), ()> {
340unsafe {
341if llvm::LLVMRustLinkerAdd(
342self.0,
343bytecode.as_ptr() as *const libc::c_char,
344bytecode.len(),
345 ) {
346Ok(())
347 } else {
348Err(())
349 }
350 }
351 }
352}
353354impl Dropfor Linker<'_> {
355fn drop(&mut self) {
356unsafe {
357 llvm::LLVMRustLinkerFree(&mut *(self.0 as *mut _));
358 }
359 }
360}
361362/// Prepare "thin" LTO to get run on these modules.
363///
364/// The general structure of ThinLTO is quite different from the structure of
365/// "fat" LTO above. With "fat" LTO all LLVM modules in question are merged into
366/// one giant LLVM module, and then we run more optimization passes over this
367/// big module after internalizing most symbols. Thin LTO, on the other hand,
368/// avoid this large bottleneck through more targeted optimization.
369///
370/// At a high level Thin LTO looks like:
371///
372/// 1. Prepare a "summary" of each LLVM module in question which describes
373/// the values inside, cost of the values, etc.
374/// 2. Merge the summaries of all modules in question into one "index"
375/// 3. Perform some global analysis on this index
376/// 4. For each module, use the index and analysis calculated previously to
377/// perform local transformations on the module, for example inlining
378/// small functions from other modules.
379/// 5. Run thin-specific optimization passes over each module, and then code
380/// generate everything at the end.
381///
382/// The summary for each module is intended to be quite cheap, and the global
383/// index is relatively quite cheap to create as well. As a result, the goal of
384/// ThinLTO is to reduce the bottleneck on LTO and enable LTO to be used in more
385/// situations. For example one cheap optimization is that we can parallelize
386/// all codegen modules, easily making use of all the cores on a machine.
387///
388/// With all that in mind, the function here is designed at specifically just
389/// calculating the *index* for ThinLTO. This index will then be shared amongst
390/// all of the `LtoModuleCodegen` units returned below and destroyed once
391/// they all go out of scope.
392fn thin_lto(
393 cgcx: &CodegenContext,
394 prof: &SelfProfilerRef,
395 dcx: DiagCtxtHandle<'_>,
396 modules: Vec<(String, ModuleBuffer)>,
397 serialized_modules: Vec<(SerializedModule<ModuleBuffer>, CString)>,
398 cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
399 symbols_below_threshold: &[*const libc::c_char],
400) -> (Vec<ThinModule<LlvmCodegenBackend>>, Vec<WorkProduct>) {
401let _timer = prof.generic_activity("LLVM_thin_lto_global_analysis");
402unsafe {
403{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_llvm/src/back/lto.rs:403",
"rustc_codegen_llvm::back::lto", ::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_llvm/src/back/lto.rs"),
::tracing_core::__macro_support::Option::Some(403u32),
::tracing_core::__macro_support::Option::Some("rustc_codegen_llvm::back::lto"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::INFO <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("going for that thin, thin LTO")
as &dyn Value))])
});
} else { ; }
};info!("going for that thin, thin LTO");
404405let green_modules: FxHashMap<_, _> =
406cached_modules.iter().map(|(_, wp)| (wp.cgu_name.clone(), wp.clone())).collect();
407408let full_scope_len = modules.len() + serialized_modules.len() + cached_modules.len();
409let mut thin_buffers = Vec::with_capacity(modules.len());
410let mut module_names = Vec::with_capacity(full_scope_len);
411let mut thin_modules = Vec::with_capacity(full_scope_len);
412413for (i, (name, buffer)) in modules.into_iter().enumerate() {
414{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_llvm/src/back/lto.rs:414",
"rustc_codegen_llvm::back::lto", ::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_llvm/src/back/lto.rs"),
::tracing_core::__macro_support::Option::Some(414u32),
::tracing_core::__macro_support::Option::Some("rustc_codegen_llvm::back::lto"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::INFO <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("local module: {0} - {1}",
i, name) as &dyn Value))])
});
} else { ; }
};info!("local module: {} - {}", i, name);
415let cname = CString::new(name.as_bytes()).unwrap();
416 thin_modules.push(llvm::ThinLTOModule {
417 identifier: cname.as_ptr(),
418 data: buffer.data().as_ptr(),
419 len: buffer.data().len(),
420 });
421 thin_buffers.push(buffer);
422 module_names.push(cname);
423 }
424425// FIXME: All upstream crates are deserialized internally in the
426 // function below to extract their summary and modules. Note that
427 // unlike the loop above we *must* decode and/or read something
428 // here as these are all just serialized files on disk. An
429 // improvement, however, to make here would be to store the
430 // module summary separately from the actual module itself. Right
431 // now this is store in one large bitcode file, and the entire
432 // file is deflate-compressed. We could try to bypass some of the
433 // decompression by storing the index uncompressed and only
434 // lazily decompressing the bytecode if necessary.
435 //
436 // Note that truly taking advantage of this optimization will
437 // likely be further down the road. We'd have to implement
438 // incremental ThinLTO first where we could actually avoid
439 // looking at upstream modules entirely sometimes (the contents,
440 // we must always unconditionally look at the index).
441let mut serialized = Vec::with_capacity(serialized_modules.len() + cached_modules.len());
442443let cached_modules =
444cached_modules.into_iter().map(|(sm, wp)| (sm, CString::new(wp.cgu_name).unwrap()));
445446for (module, name) in serialized_modules.into_iter().chain(cached_modules) {
447{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_llvm/src/back/lto.rs:447",
"rustc_codegen_llvm::back::lto", ::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_llvm/src/back/lto.rs"),
::tracing_core::__macro_support::Option::Some(447u32),
::tracing_core::__macro_support::Option::Some("rustc_codegen_llvm::back::lto"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::INFO <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("upstream or cached module {0:?}",
name) as &dyn Value))])
});
} else { ; }
};info!("upstream or cached module {:?}", name);
448 thin_modules.push(llvm::ThinLTOModule {
449 identifier: name.as_ptr(),
450 data: module.data().as_ptr(),
451 len: module.data().len(),
452 });
453 serialized.push(module);
454 module_names.push(name);
455 }
456457// Sanity check
458match (&thin_modules.len(), &module_names.len()) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val, &*right_val,
::core::option::Option::None);
}
}
};assert_eq!(thin_modules.len(), module_names.len());
459460// Delegate to the C++ bindings to create some data here. Once this is a
461 // tried-and-true interface we may wish to try to upstream some of this
462 // to LLVM itself, right now we reimplement a lot of what they do
463 // upstream...
464let data = llvm::LLVMRustCreateThinLTOData(
465thin_modules.as_ptr(),
466thin_modules.len(),
467symbols_below_threshold.as_ptr(),
468symbols_below_threshold.len(),
469 )
470 .unwrap_or_else(|| write::llvm_err(dcx, LlvmError::PrepareThinLtoContext));
471472let data = ThinData(data);
473474{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_llvm/src/back/lto.rs:474",
"rustc_codegen_llvm::back::lto", ::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_llvm/src/back/lto.rs"),
::tracing_core::__macro_support::Option::Some(474u32),
::tracing_core::__macro_support::Option::Some("rustc_codegen_llvm::back::lto"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::INFO <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("thin LTO data created")
as &dyn Value))])
});
} else { ; }
};info!("thin LTO data created");
475476let (key_map_path, prev_key_map, curr_key_map) = if let Some(ref incr_comp_session_dir) =
477cgcx.incr_comp_session_dir
478 {
479let path = incr_comp_session_dir.join(THIN_LTO_KEYS_INCR_COMP_FILE_NAME);
480// If the previous file was deleted, or we get an IO error
481 // reading the file, then we'll just use `None` as the
482 // prev_key_map, which will force the code to be recompiled.
483let prev =
484if path.exists() { ThinLTOKeysMap::load_from_file(&path).ok() } else { None };
485let curr = ThinLTOKeysMap::from_thin_lto_modules(&data, &thin_modules, &module_names);
486 (Some(path), prev, curr)
487 } else {
488// If we don't compile incrementally, we don't need to load the
489 // import data from LLVM.
490if !green_modules.is_empty() {
::core::panicking::panic("assertion failed: green_modules.is_empty()")
};assert!(green_modules.is_empty());
491let curr = ThinLTOKeysMap::default();
492 (None, None, curr)
493 };
494{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_llvm/src/back/lto.rs:494",
"rustc_codegen_llvm::back::lto", ::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_llvm/src/back/lto.rs"),
::tracing_core::__macro_support::Option::Some(494u32),
::tracing_core::__macro_support::Option::Some("rustc_codegen_llvm::back::lto"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::INFO <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("thin LTO cache key map loaded")
as &dyn Value))])
});
} else { ; }
};info!("thin LTO cache key map loaded");
495{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_llvm/src/back/lto.rs:495",
"rustc_codegen_llvm::back::lto", ::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_llvm/src/back/lto.rs"),
::tracing_core::__macro_support::Option::Some(495u32),
::tracing_core::__macro_support::Option::Some("rustc_codegen_llvm::back::lto"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::INFO <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("prev_key_map: {0:#?}",
prev_key_map) as &dyn Value))])
});
} else { ; }
};info!("prev_key_map: {:#?}", prev_key_map);
496{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_llvm/src/back/lto.rs:496",
"rustc_codegen_llvm::back::lto", ::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_llvm/src/back/lto.rs"),
::tracing_core::__macro_support::Option::Some(496u32),
::tracing_core::__macro_support::Option::Some("rustc_codegen_llvm::back::lto"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::INFO <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("curr_key_map: {0:#?}",
curr_key_map) as &dyn Value))])
});
} else { ; }
};info!("curr_key_map: {:#?}", curr_key_map);
497498// Throw our data in an `Arc` as we'll be sharing it across threads. We
499 // also put all memory referenced by the C++ data (buffers, ids, etc)
500 // into the arc as well. After this we'll create a thin module
501 // codegen per module in this data.
502let shared = Arc::new(ThinShared {
503data,
504thin_buffers,
505 serialized_modules: serialized,
506module_names,
507 });
508509let mut copy_jobs = ::alloc::vec::Vec::new()vec![];
510let mut opt_jobs = ::alloc::vec::Vec::new()vec![];
511512{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_llvm/src/back/lto.rs:512",
"rustc_codegen_llvm::back::lto", ::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_llvm/src/back/lto.rs"),
::tracing_core::__macro_support::Option::Some(512u32),
::tracing_core::__macro_support::Option::Some("rustc_codegen_llvm::back::lto"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::INFO <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("checking which modules can be-reused and which have to be re-optimized.")
as &dyn Value))])
});
} else { ; }
};info!("checking which modules can be-reused and which have to be re-optimized.");
513for (module_index, module_name) in shared.module_names.iter().enumerate() {
514let module_name = module_name_to_str(module_name);
515if let (Some(prev_key_map), true) =
516 (prev_key_map.as_ref(), green_modules.contains_key(module_name))
517 {
518if !cgcx.incr_comp_session_dir.is_some() {
::core::panicking::panic("assertion failed: cgcx.incr_comp_session_dir.is_some()")
};assert!(cgcx.incr_comp_session_dir.is_some());
519520// If a module exists in both the current and the previous session,
521 // and has the same LTO cache key in both sessions, then we can re-use it
522if prev_key_map.keys.get(module_name) == curr_key_map.keys.get(module_name) {
523let work_product = green_modules[module_name].clone();
524 copy_jobs.push(work_product);
525{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_llvm/src/back/lto.rs:525",
"rustc_codegen_llvm::back::lto", ::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_llvm/src/back/lto.rs"),
::tracing_core::__macro_support::Option::Some(525u32),
::tracing_core::__macro_support::Option::Some("rustc_codegen_llvm::back::lto"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::INFO <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!(" - {0}: re-used",
module_name) as &dyn Value))])
});
} else { ; }
};info!(" - {}: re-used", module_name);
526if !cgcx.incr_comp_session_dir.is_some() {
::core::panicking::panic("assertion failed: cgcx.incr_comp_session_dir.is_some()")
};assert!(cgcx.incr_comp_session_dir.is_some());
527continue;
528 }
529 }
530531{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_llvm/src/back/lto.rs:531",
"rustc_codegen_llvm::back::lto", ::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_llvm/src/back/lto.rs"),
::tracing_core::__macro_support::Option::Some(531u32),
::tracing_core::__macro_support::Option::Some("rustc_codegen_llvm::back::lto"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::INFO <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!(" - {0}: re-compiled",
module_name) as &dyn Value))])
});
} else { ; }
};info!(" - {}: re-compiled", module_name);
532 opt_jobs.push(ThinModule { shared: Arc::clone(&shared), idx: module_index });
533 }
534535// Save the current ThinLTO import information for the next compilation
536 // session, overwriting the previous serialized data (if any).
537if let Some(path) = key_map_path538 && let Err(err) = curr_key_map.save_to_file(&path)
539 {
540 write::llvm_err(dcx, LlvmError::WriteThinLtoKey { err });
541 }
542543 (opt_jobs, copy_jobs)
544 }
545}
546547pub(crate) fn enable_autodiff_settings(ad: &[config::AutoDiff]) {
548let mut enzyme = llvm::EnzymeWrapper::get_instance();
549550for val in ad {
551// We intentionally don't use a wildcard, to not forget handling anything new.
552match val {
553 config::AutoDiff::PrintPerf => {
554 enzyme.set_print_perf(true);
555 }
556 config::AutoDiff::PrintAA => {
557 enzyme.set_print_activity(true);
558 }
559 config::AutoDiff::PrintTA => {
560 enzyme.set_print_type(true);
561 }
562 config::AutoDiff::PrintTAFn(fun) => {
563 enzyme.set_print_type(true); // Enable general type printing
564enzyme.set_print_type_fun(&fun); // Set specific function to analyze
565}
566 config::AutoDiff::Inline => {
567 enzyme.set_inline(true);
568 }
569 config::AutoDiff::LooseTypes => {
570 enzyme.set_loose_types(true);
571 }
572 config::AutoDiff::PrintSteps => {
573 enzyme.set_print(true);
574 }
575// We handle this in the PassWrapper.cpp
576config::AutoDiff::PrintPasses => {}
577// We handle this in the PassWrapper.cpp
578config::AutoDiff::PrintModBefore => {}
579// We handle this in the PassWrapper.cpp
580config::AutoDiff::PrintModAfter => {}
581// We handle this in the PassWrapper.cpp
582config::AutoDiff::PrintModFinal => {}
583// This is required and already checked
584config::AutoDiff::Enable => {}
585// We handle this below
586config::AutoDiff::NoPostopt => {}
587// Disables TypeTree generation
588config::AutoDiff::NoTT => {}
589 }
590 }
591// This helps with handling enums for now.
592enzyme.set_strict_aliasing(false);
593// FIXME(ZuseZ4): Test this, since it was added a long time ago.
594enzyme.set_rust_rules(true);
595}
596597pub(crate) fn run_pass_manager(
598 cgcx: &CodegenContext,
599 prof: &SelfProfilerRef,
600 dcx: DiagCtxtHandle<'_>,
601 module: &mut ModuleCodegen<ModuleLlvm>,
602 thin: bool,
603) {
604let _timer = prof.generic_activity_with_arg("LLVM_lto_optimize", &*module.name);
605let config = &cgcx.module_config;
606607// Now we have one massive module inside of llmod. Time to run the
608 // LTO-specific optimization passes that LLVM provides.
609 //
610 // This code is based off the code found in llvm's LTO code generator:
611 // llvm/lib/LTO/LTOCodeGenerator.cpp
612{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_llvm/src/back/lto.rs:612",
"rustc_codegen_llvm::back::lto", ::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_llvm/src/back/lto.rs"),
::tracing_core::__macro_support::Option::Some(612u32),
::tracing_core::__macro_support::Option::Some("rustc_codegen_llvm::back::lto"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("running the pass manager")
as &dyn Value))])
});
} else { ; }
};debug!("running the pass manager");
613let opt_stage = if thin { llvm::OptStage::ThinLTO } else { llvm::OptStage::FatLTO };
614let opt_level = config.opt_level.unwrap_or(config::OptLevel::No);
615616// The PostAD behavior is the same that we would have if no autodiff was used.
617 // It will run the default optimization pipeline. If AD is enabled we select
618 // the DuringAD stage, which will disable vectorization and loop unrolling, and
619 // schedule two autodiff optimization + differentiation passes.
620 // We then run the llvm_optimize function a second time, to optimize the code which we generated
621 // in the enzyme differentiation pass.
622let enable_ad = config.autodiff.contains(&config::AutoDiff::Enable);
623let stage = if thin {
624 write::AutodiffStage::PreAD625 } else {
626if enable_ad { write::AutodiffStage::DuringAD } else { write::AutodiffStage::PostAD }
627 };
628629unsafe {
630 write::llvm_optimize(
631cgcx, prof, dcx, module, None, None, config, opt_level, opt_stage, stage,
632 );
633 }
634635if falsecfg!(feature = "llvm_enzyme") && enable_ad && !thin {
636let opt_stage = llvm::OptStage::FatLTO;
637let stage = write::AutodiffStage::PostAD;
638if !config.autodiff.contains(&config::AutoDiff::NoPostopt) {
639unsafe {
640 write::llvm_optimize(
641cgcx, prof, dcx, module, None, None, config, opt_level, opt_stage, stage,
642 );
643 }
644 }
645646// This is the final IR, so people should be able to inspect the optimized autodiff output,
647 // for manual inspection.
648if config.autodiff.contains(&config::AutoDiff::PrintModFinal) {
649unsafe { llvm::LLVMDumpModule(module.module_llvm.llmod()) };
650 }
651 }
652653{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_llvm/src/back/lto.rs:653",
"rustc_codegen_llvm::back::lto", ::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_llvm/src/back/lto.rs"),
::tracing_core::__macro_support::Option::Some(653u32),
::tracing_core::__macro_support::Option::Some("rustc_codegen_llvm::back::lto"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("lto done")
as &dyn Value))])
});
} else { ; }
};debug!("lto done");
654}
655656#[repr(transparent)]
657pub(crate) struct Buffer(&'static mut llvm::Buffer);
658659unsafe impl Sendfor Buffer {}
660unsafe impl Syncfor Buffer {}
661662impl Buffer {
663pub(crate) fn data(&self) -> &[u8] {
664unsafe {
665let ptr = llvm::LLVMRustBufferPtr(self.0);
666let len = llvm::LLVMRustBufferLen(self.0);
667 slice::from_raw_parts(ptr, len)
668 }
669 }
670}
671672impl Dropfor Buffer {
673fn drop(&mut self) {
674unsafe {
675 llvm::LLVMRustBufferFree(&mut *(self.0 as *mut _));
676 }
677 }
678}
679680pub struct ThinData(&'static mut llvm::ThinLTOData);
681682unsafe impl Sendfor ThinData {}
683unsafe impl Syncfor ThinData {}
684685impl Dropfor ThinData {
686fn drop(&mut self) {
687unsafe {
688 llvm::LLVMRustFreeThinLTOData(&mut *(self.0 as *mut _));
689 }
690 }
691}
692693pub struct ModuleBuffer {
694 data: Buffer,
695}
696697impl ModuleBuffer {
698pub(crate) fn new(m: &llvm::Module, is_thin: bool) -> ModuleBuffer {
699unsafe {
700let buffer = llvm::LLVMRustModuleSerialize(m, is_thin);
701ModuleBuffer { data: Buffer(buffer) }
702 }
703 }
704}
705706impl ModuleBufferMethods for ModuleBuffer {
707fn data(&self) -> &[u8] {
708self.data.data()
709 }
710}
711712pub(crate) fn optimize_thin_module(
713 cgcx: &CodegenContext,
714 prof: &SelfProfilerRef,
715 shared_emitter: &SharedEmitter,
716 tm_factory: TargetMachineFactoryFn<LlvmCodegenBackend>,
717 thin_module: ThinModule<LlvmCodegenBackend>,
718) -> ModuleCodegen<ModuleLlvm> {
719let dcx = DiagCtxt::new(Box::new(shared_emitter.clone()));
720let dcx = dcx.handle();
721722let module_name = &thin_module.shared.module_names[thin_module.idx];
723724// Right now the implementation we've got only works over serialized
725 // modules, so we create a fresh new LLVM context and parse the module
726 // into that context. One day, however, we may do this for upstream
727 // crates but for locally codegened modules we may be able to reuse
728 // that LLVM Context and Module.
729let module_llvm = ModuleLlvm::parse(cgcx, tm_factory, module_name, thin_module.data(), dcx);
730let mut module = ModuleCodegen::new_regular(thin_module.name(), module_llvm);
731// Given that the newly created module lacks a thinlto buffer for embedding, we need to re-add it here.
732if cgcx.module_config.embed_bitcode() {
733module.thin_lto_buffer = Some(thin_module.data().to_vec());
734 }
735 {
736let target = &*module.module_llvm.tm;
737let llmod = module.module_llvm.llmod();
738save_temp_bitcode(cgcx, &module, "thin-lto-input");
739740// Up next comes the per-module local analyses that we do for Thin LTO.
741 // Each of these functions is basically copied from the LLVM
742 // implementation and then tailored to suit this implementation. Ideally
743 // each of these would be supported by upstream LLVM but that's perhaps
744 // a patch for another day!
745 //
746 // You can find some more comments about these functions in the LLVM
747 // bindings we've got (currently `PassWrapper.cpp`)
748{
749let _timer = prof.generic_activity_with_arg("LLVM_thin_lto_rename", thin_module.name());
750unsafe {
751 llvm::LLVMRustPrepareThinLTORename(thin_module.shared.data.0, llmod, target.raw())
752 };
753save_temp_bitcode(cgcx, &module, "thin-lto-after-rename");
754 }
755756 {
757let _timer =
758prof.generic_activity_with_arg("LLVM_thin_lto_resolve_weak", thin_module.name());
759if unsafe { !llvm::LLVMRustPrepareThinLTOResolveWeak(thin_module.shared.data.0, llmod) }
760 {
761 write::llvm_err(dcx, LlvmError::PrepareThinLtoModule);
762 }
763save_temp_bitcode(cgcx, &module, "thin-lto-after-resolve");
764 }
765766 {
767let _timer =
768prof.generic_activity_with_arg("LLVM_thin_lto_internalize", thin_module.name());
769if unsafe { !llvm::LLVMRustPrepareThinLTOInternalize(thin_module.shared.data.0, llmod) }
770 {
771 write::llvm_err(dcx, LlvmError::PrepareThinLtoModule);
772 }
773save_temp_bitcode(cgcx, &module, "thin-lto-after-internalize");
774 }
775776 {
777let _timer = prof.generic_activity_with_arg("LLVM_thin_lto_import", thin_module.name());
778if unsafe {
779 !llvm::LLVMRustPrepareThinLTOImport(thin_module.shared.data.0, llmod, target.raw())
780 } {
781 write::llvm_err(dcx, LlvmError::PrepareThinLtoModule);
782 }
783save_temp_bitcode(cgcx, &module, "thin-lto-after-import");
784 }
785786// Alright now that we've done everything related to the ThinLTO
787 // analysis it's time to run some optimizations! Here we use the same
788 // `run_pass_manager` as the "fat" LTO above except that we tell it to
789 // populate a thin-specific pass manager, which presumably LLVM treats a
790 // little differently.
791{
792{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_codegen_llvm/src/back/lto.rs:792",
"rustc_codegen_llvm::back::lto", ::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_codegen_llvm/src/back/lto.rs"),
::tracing_core::__macro_support::Option::Some(792u32),
::tracing_core::__macro_support::Option::Some("rustc_codegen_llvm::back::lto"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::INFO <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("running thin lto passes over {0}",
module.name) as &dyn Value))])
});
} else { ; }
};info!("running thin lto passes over {}", module.name);
793run_pass_manager(cgcx, prof, dcx, &mut module, true);
794save_temp_bitcode(cgcx, &module, "thin-lto-after-pm");
795 }
796 }
797module798}
/// Maps LLVM module identifiers to their corresponding LLVM LTO cache keys
#[derive(Debug, Default)]
struct ThinLTOKeysMap {
    // key = llvm name of importing module, value = LLVM cache key
    keys: BTreeMap<String, String>,
}
806807impl ThinLTOKeysMap {
808fn save_to_file(&self, path: &Path) -> io::Result<()> {
809use std::io::Write;
810let mut writer = File::create_buffered(path)?;
811// The entries are loaded back into a hash map in `load_from_file()`, so
812 // the order in which we write them to file here does not matter.
813for (module, key) in &self.keys {
814writer.write_fmt(format_args!("{0} {1}\n", module, key))writeln!(writer, "{module} {key}")?;
815 }
816Ok(())
817 }
818819fn load_from_file(path: &Path) -> io::Result<Self> {
820use std::io::BufRead;
821let mut keys = BTreeMap::default();
822let file = File::open_buffered(path)?;
823for line in file.lines() {
824let line = line?;
825let mut split = line.split(' ');
826let module = split.next().unwrap();
827let key = split.next().unwrap();
828match (&split.next(), &None) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val, &*right_val,
::core::option::Option::Some(format_args!("Expected two space-separated values, found {0:?}",
line)));
}
}
};assert_eq!(split.next(), None, "Expected two space-separated values, found {line:?}");
829 keys.insert(module.to_string(), key.to_string());
830 }
831Ok(Self { keys })
832 }
833834fn from_thin_lto_modules(
835 data: &ThinData,
836 modules: &[llvm::ThinLTOModule],
837 names: &[CString],
838 ) -> Self {
839let keys = iter::zip(modules, names)
840 .map(|(module, name)| {
841let key = build_string(|rust_str| unsafe {
842 llvm::LLVMRustComputeLTOCacheKey(rust_str, module.identifier, data.0);
843 })
844 .expect("Invalid ThinLTO module key");
845 (module_name_to_str(name).to_string(), key)
846 })
847 .collect();
848Self { keys }
849 }
850}
851852fn module_name_to_str(c_str: &CStr) -> &str {
853c_str.to_str().unwrap_or_else(|e| {
854::rustc_middle::util::bug::bug_fmt(format_args!("Encountered non-utf8 LLVM module name `{0}`: {1}",
c_str.to_string_lossy(), e))bug!("Encountered non-utf8 LLVM module name `{}`: {}", c_str.to_string_lossy(), e)855 })
856}
857858pub(crate) fn parse_module<'a>(
859 cx: &'a llvm::Context,
860 name: &CStr,
861 data: &[u8],
862 dcx: DiagCtxtHandle<'_>,
863) -> &'a llvm::Module {
864unsafe {
865 llvm::LLVMRustParseBitcodeForLTO(cx, data.as_ptr(), data.len(), name.as_ptr())
866 .unwrap_or_else(|| write::llvm_err(dcx, LlvmError::ParseBitcode))
867 }
868}