use std::iter;

use rustc_index::IndexVec;
use rustc_index::bit_set::DenseBitSet;
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::mir::{Body, Local, UnwindTerminateReason, traversal};
use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt, HasTypingEnv, TyAndLayout};
use rustc_middle::ty::{self, Instance, Ty, TyCtxt, TypeFoldable, TypeVisitableExt};
use rustc_middle::{bug, mir, span_bug};
use rustc_target::callconv::{FnAbi, PassMode};
use tracing::{debug, instrument};

use crate::base;
use crate::traits::*;

mod analyze;
mod block;
mod constant;
mod coverageinfo;
pub mod debuginfo;
mod intrinsic;
mod locals;
pub mod naked_asm;
pub mod operand;
pub mod place;
mod rvalue;
mod statement;

pub use self::block::store_cast;
use self::debuginfo::{FunctionDebugContext, PerLocalVarDebugInfo};
use self::operand::{OperandRef, OperandValue};
use self::place::PlaceRef;

// Used for tracking the state of generated basic blocks.
enum CachedLlbb<T> {
    /// Nothing created yet.
    None,

    /// Has been created.
    Some(T),

    /// Nothing created yet, and nothing should be.
    Skip,
}

type PerLocalVarDebugInfoIndexVec<'tcx, V> =
    IndexVec<mir::Local, Vec<PerLocalVarDebugInfo<'tcx, V>>>;

/// Master context for codegenning from MIR.
pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
    instance: Instance<'tcx>,

    mir: &'tcx mir::Body<'tcx>,

    debug_context: Option<FunctionDebugContext<'tcx, Bx::DIScope, Bx::DILocation>>,

    llfn: Bx::Function,

    cx: &'a Bx::CodegenCx,

    fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>,

    /// When unwinding is initiated, we have to store this personality
    /// value somewhere so that we can load it and re-use it in the
    /// resume instruction. The personality is (afaik) some kind of
    /// value used for C++ unwinding, which must filter by type: we
    /// don't really care about it very much. Anyway, this value
    /// contains an alloca into which the personality is stored and
    /// then later loaded when generating the DIVERGE_BLOCK.
    personality_slot: Option<PlaceRef<'tcx, Bx::Value>>,

    /// A backend `BasicBlock` for each MIR `BasicBlock`, created lazily
    /// as-needed (e.g. RPO reaching it or another block branching to it).
    // FIXME(eddyb) rename `llbbs` and other `ll`-prefixed things to use a
    // more backend-agnostic prefix such as `cg` (i.e. this would be `cgbbs`).
    cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>>,

    /// The funclet status of each basic block.
    cleanup_kinds: Option<IndexVec<mir::BasicBlock, analyze::CleanupKind>>,

    /// When targeting MSVC, this stores the cleanup info for each funclet BB.
    /// This is initialized at the same time as the `landing_pads` entry for the
    /// funclets' head block, i.e. when needed by an unwind / `cleanup_ret` edge.
    funclets: IndexVec<mir::BasicBlock, Option<Bx::Funclet>>,

    /// This stores the cached landing/cleanup pad block for a given BB.
    // FIXME(eddyb) rename this to `eh_pads`.
    landing_pads: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,

    /// Cached unreachable block.
    unreachable_block: Option<Bx::BasicBlock>,

    /// Cached terminate-upon-unwinding block and its reason.
    terminate_block: Option<(Bx::BasicBlock, UnwindTerminateReason)>,

    /// A bool flag for each basic block indicating whether it is a cold block.
    /// A cold block is a block that is unlikely to be executed at runtime.
    cold_blocks: IndexVec<mir::BasicBlock, bool>,

    /// The location where each MIR arg/var/tmp/ret is stored. This is
    /// usually a `PlaceRef` representing an alloca, but not always:
    /// sometimes we can skip the alloca and just store the value
    /// directly using an `OperandRef`, which makes for tighter LLVM
    /// IR. The conditions for using an `OperandRef` are as follows:
    ///
    /// - the type of the local must be judged "immediate" by `is_llvm_immediate`
    /// - the operand must never be referenced indirectly
    ///     - we should not take its address using the `&` operator
    ///     - nor should it appear in a place path like `tmp.a`
    /// - the operand must be defined by an rvalue that can generate immediate
    ///   values
    ///
    /// Avoiding allocs can also be important for certain intrinsics,
    /// notably `expect`.
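    ///
    /// A hedged illustration (hypothetical function, not from this module):
    /// in `fn f(x: u32) -> u32 { let y = x + 1; y }`, `y` can be kept as an
    /// `OperandRef` immediate, whereas a local whose address is taken with
    /// `&` must instead get an alloca-backed `PlaceRef`.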
    locals: locals::Locals<'tcx, Bx::Value>,

    /// All `VarDebugInfo` from the MIR body, partitioned by `Local`.
    /// This is `None` if no variable debuginfo/names are needed.
    per_local_var_debug_info: Option<PerLocalVarDebugInfoIndexVec<'tcx, Bx::DIVariable>>,

    /// Caller location propagated if this function has `#[track_caller]`.
    caller_location: Option<OperandRef<'tcx, Bx::Value>>,
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
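    /// Instantiates `value` with this function's generic arguments and
    /// normalizes it, erasing regions. A hedged, illustrative sketch (the
    /// function below is an example, not taken from this module):
    ///
    /// ```text
    /// // While codegenning `fn id<T>(x: T) -> T` for the instance `id::<u32>`:
    /// // monomorphize(T)      == u32
    /// // monomorphize(Vec<T>) == Vec<u32>
    /// ```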
    pub fn monomorphize<T>(&self, value: T) -> T
    where
        T: Copy + TypeFoldable<TyCtxt<'tcx>>,
    {
        debug!("monomorphize: self.instance={:?}", self.instance);
        self.instance.instantiate_mir_and_normalize_erasing_regions(
            self.cx.tcx(),
            self.cx.typing_env(),
            ty::EarlyBinder::bind(value),
        )
    }
}

enum LocalRef<'tcx, V> {
    Place(PlaceRef<'tcx, V>),
    /// `UnsizedPlace(p)`: `p` itself is a thin pointer (indirect place).
    /// `*p` is the wide pointer that references the actual unsized place.
    ///
    /// MIR only supports unsized args, not dynamically-sized locals, so
    /// new unsized temps don't exist and we must reuse the referred-to place.
    ///
    /// FIXME: Since the removal of unsized locals in <https://github.com/rust-lang/rust/pull/142911>,
    /// can we maybe use `Place` here? Or refactor it in another way? There are quite a few
    /// `UnsizedPlace => bug` branches now.
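    ///
    /// A hedged picture for an unsized `[u8]` argument (layout illustrative):
    /// the wide pointer `(data_ptr, len)` lives behind `p`, i.e. at `*p`,
    /// while this variant stores only the thin pointer `p`.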
    UnsizedPlace(PlaceRef<'tcx, V>),
    /// The backend [`OperandValue`] has already been generated.
    Operand(OperandRef<'tcx, V>),
    /// Will be a `Self::Operand` once we get to its definition.
    PendingOperand,
}

impl<'tcx, V: CodegenObject> LocalRef<'tcx, V> {
    fn new_operand(layout: TyAndLayout<'tcx>) -> LocalRef<'tcx, V> {
        if layout.is_zst() {
            // Zero-size temporaries aren't always initialized, which
            // doesn't matter because they don't contain data, but
            // we need something sufficiently aligned in the operand.
            LocalRef::Operand(OperandRef::zero_sized(layout))
        } else {
            LocalRef::PendingOperand
        }
    }
}

///////////////////////////////////////////////////////////////////////////

#[instrument(level = "debug", skip(cx))]
pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    cx: &'a Bx::CodegenCx,
    instance: Instance<'tcx>,
) {
    assert!(!instance.args.has_infer());

    let tcx = cx.tcx();
    let llfn = cx.get_fn(instance);

    let mut mir = tcx.instance_mir(instance.def);
    // Note that the ABI logic has deduced facts about the function's parameters based on the MIR
    // we got here (`deduce_param_attrs`). That means we can *not* apply arbitrary further MIR
    // transforms as that may invalidate those deduced facts!

    let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty());
    debug!("fn_abi: {:?}", fn_abi);

    if tcx.features().ergonomic_clones() {
        let monomorphized_mir = instance.instantiate_mir_and_normalize_erasing_regions(
            tcx,
            ty::TypingEnv::fully_monomorphized(),
            ty::EarlyBinder::bind(mir.clone()),
        );
        mir = tcx.arena.alloc(optimize_use_clone::<Bx>(cx, monomorphized_mir));
    }

    let debug_context = cx.create_function_debug_context(instance, fn_abi, llfn, &mir);

    let start_llbb = Bx::append_block(cx, llfn, "start");
    let mut start_bx = Bx::build(cx, start_llbb);

    if mir.basic_blocks.iter().any(|bb| {
        bb.is_cleanup || matches!(bb.terminator().unwind(), Some(mir::UnwindAction::Terminate(_)))
    }) {
        start_bx.set_personality_fn(cx.eh_personality());
    }

    let cleanup_kinds =
        base::wants_new_eh_instructions(tcx.sess).then(|| analyze::cleanup_kinds(&mir));

    let cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>> =
        mir.basic_blocks
            .indices()
            .map(|bb| {
                if bb == mir::START_BLOCK { CachedLlbb::Some(start_llbb) } else { CachedLlbb::None }
            })
            .collect();

    let mut fx = FunctionCx {
        instance,
        mir,
        llfn,
        fn_abi,
        cx,
        personality_slot: None,
        cached_llbbs,
        unreachable_block: None,
        terminate_block: None,
        cleanup_kinds,
        landing_pads: IndexVec::from_elem(None, &mir.basic_blocks),
        funclets: IndexVec::from_fn_n(|_| None, mir.basic_blocks.len()),
        cold_blocks: find_cold_blocks(tcx, mir),
        locals: locals::Locals::empty(),
        debug_context,
        per_local_var_debug_info: None,
        caller_location: None,
    };

    // It may seem like we should iterate over `required_consts` to ensure they all successfully
    // evaluate; however, the `MirUsedCollector` already did that during the collection phase of
    // monomorphization, and if there is an error during collection then codegen never starts -- so
    // we don't have to do it again.

    let (per_local_var_debug_info, consts_debug_info) =
        fx.compute_per_local_var_debug_info(&mut start_bx).unzip();
    fx.per_local_var_debug_info = per_local_var_debug_info;

    let traversal_order = traversal::mono_reachable_reverse_postorder(mir, tcx, instance);
    let memory_locals = analyze::non_ssa_locals(&fx, &traversal_order);
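    // A hedged note: `memory_locals` marks the locals that need a real backing
    // alloca; everything else can be handled as an SSA-style `OperandRef` by
    // `allocate_local` below.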

    // Allocate variable and temp allocas
    let local_values = {
        let args = arg_local_refs(&mut start_bx, &mut fx, &memory_locals);

        let mut allocate_local = |local: Local| {
            let decl = &mir.local_decls[local];
            let layout = start_bx.layout_of(fx.monomorphize(decl.ty));
            assert!(!layout.ty.has_erasable_regions());

            if local == mir::RETURN_PLACE {
                match fx.fn_abi.ret.mode {
                    PassMode::Indirect { .. } => {
                        debug!("alloc: {:?} (return place) -> place", local);
                        let llretptr = start_bx.get_param(0);
                        return LocalRef::Place(PlaceRef::new_sized(llretptr, layout));
                    }
                    PassMode::Cast { ref cast, .. } => {
                        debug!("alloc: {:?} (return place) -> place", local);
                        let size = cast.size(&start_bx).max(layout.size);
                        return LocalRef::Place(PlaceRef::alloca_size(&mut start_bx, size, layout));
                    }
                    _ => {}
                };
            }

            if memory_locals.contains(local) {
                debug!("alloc: {:?} -> place", local);
                if layout.is_unsized() {
                    LocalRef::UnsizedPlace(PlaceRef::alloca_unsized_indirect(&mut start_bx, layout))
                } else {
                    LocalRef::Place(PlaceRef::alloca(&mut start_bx, layout))
                }
            } else {
                debug!("alloc: {:?} -> operand", local);
                LocalRef::new_operand(layout)
            }
        };

        let retptr = allocate_local(mir::RETURN_PLACE);
        iter::once(retptr)
            .chain(args.into_iter())
            .chain(mir.vars_and_temps_iter().map(allocate_local))
            .collect()
    };
    fx.initialize_locals(local_values);

    // Apply debuginfo to the newly allocated locals.
    fx.debug_introduce_locals(&mut start_bx, consts_debug_info.unwrap_or_default());

    // The builders will be created separately for each basic block at `codegen_block`.
    // So drop the builder of `start_llbb` to avoid having two at the same time.
    drop(start_bx);

    let mut unreached_blocks = DenseBitSet::new_filled(mir.basic_blocks.len());
    // Codegen the body of each reachable block using our reverse postorder list.
    for bb in traversal_order {
        fx.codegen_block(bb);
        unreached_blocks.remove(bb);
    }

    // FIXME: These empty unreachable blocks are *mostly* a waste. They are occasionally
    // targets for a SwitchInt terminator, but the reimplementation of the mono-reachable
    // simplification in SwitchInt lowering sometimes misses cases that
    // mono_reachable_reverse_postorder manages to figure out.
    // The solution is to do something like post-mono GVN. But for now we have this hack.
    for bb in unreached_blocks.iter() {
        fx.codegen_block_as_unreachable(bb);
    }
}

/// Replace `clone` calls that come from `use` statements with direct copies if possible.
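///
/// A hedged sketch of the rewrite in MIR-like pseudo-syntax (the local and
/// block names are illustrative, not output of this pass):
///
/// ```text
/// // before: _2 = <T as Clone>::clone(copy _3) -> [return: bb1, ...]
/// // after:  _2 = copy (*_3); goto -> bb1
/// ```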
// FIXME: Move this function to mir::transform when post-mono MIR passes land.
fn optimize_use_clone<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    cx: &'a Bx::CodegenCx,
    mut mir: Body<'tcx>,
) -> Body<'tcx> {
    let tcx = cx.tcx();

    if tcx.features().ergonomic_clones() {
        for bb in mir.basic_blocks.as_mut() {
            let mir::TerminatorKind::Call {
                args,
                destination,
                target,
                call_source: mir::CallSource::Use,
                ..
            } = &bb.terminator().kind
            else {
                continue;
            };

            // CallSource::Use calls always use 1 argument.
            assert_eq!(args.len(), 1);
            let arg = &args[0];

            // These types are easily available from locals, so check that before
            // doing DefId lookups to figure out what we're actually calling.
            let arg_ty = arg.node.ty(&mir.local_decls, tcx);

            let ty::Ref(_region, inner_ty, mir::Mutability::Not) = *arg_ty.kind() else { continue };

            if !tcx.type_is_copy_modulo_regions(cx.typing_env(), inner_ty) {
                continue;
            }

            let Some(arg_place) = arg.node.place() else { continue };

            let destination_block = target.unwrap();

            bb.statements.push(mir::Statement::new(
                bb.terminator().source_info,
                mir::StatementKind::Assign(Box::new((
                    *destination,
                    mir::Rvalue::Use(mir::Operand::Copy(
                        arg_place.project_deeper(&[mir::ProjectionElem::Deref], tcx),
                    )),
                ))),
            ));

            bb.terminator_mut().kind = mir::TerminatorKind::Goto { target: destination_block };
        }
    }

    mir
}

/// Produces, for each argument, a `Value` pointing at the
/// argument's value. As arguments are places, these are always
/// indirect.
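///
/// A hedged example of the indexing (signature illustrative): for
/// `fn f(a: String) -> String` returning through an indirect return slot,
/// backend parameter 0 is the return pointer, so the first MIR argument
/// starts at `llarg_idx == 1`.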
fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    fx: &mut FunctionCx<'a, 'tcx, Bx>,
    memory_locals: &DenseBitSet<mir::Local>,
) -> Vec<LocalRef<'tcx, Bx::Value>> {
    let mir = fx.mir;
    let mut idx = 0;
    let mut llarg_idx = fx.fn_abi.ret.is_indirect() as usize;

    let mut num_untupled = None;

    let codegen_fn_attrs = bx.tcx().codegen_instance_attrs(fx.instance.def);
    if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NAKED) {
        return vec![];
    }

    let args = mir
        .args_iter()
        .enumerate()
        .map(|(arg_index, local)| {
            let arg_decl = &mir.local_decls[local];
            let arg_ty = fx.monomorphize(arg_decl.ty);

            if Some(local) == mir.spread_arg {
                // This argument (e.g., the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual LLVM function arguments.
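                //
                // A hedged illustration (hypothetical signature, not from this
                // module): for `extern "rust-call" fn f(self, args: (i32, f64))`,
                // the ABI passes the tuple fields as two separate arguments, and
                // the loop below stores each one back into a single tuple alloca.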
                let ty::Tuple(tupled_arg_tys) = arg_ty.kind() else {
                    bug!("spread argument isn't a tuple?!");
                };

                let layout = bx.layout_of(arg_ty);

                // FIXME: support unsized params in "rust-call" ABI
                if layout.is_unsized() {
                    span_bug!(
                        arg_decl.source_info.span,
                        "\"rust-call\" ABI does not support unsized params",
                    );
                }

                let place = PlaceRef::alloca(bx, layout);
                for i in 0..tupled_arg_tys.len() {
                    let arg = &fx.fn_abi.args[idx];
                    idx += 1;
                    if let PassMode::Cast { pad_i32: true, .. } = arg.mode {
                        llarg_idx += 1;
                    }
                    let pr_field = place.project_field(bx, i);
                    bx.store_fn_arg(arg, &mut llarg_idx, pr_field);
                }
                assert_eq!(
                    None,
                    num_untupled.replace(tupled_arg_tys.len()),
                    "Replaced existing num_untupled"
                );

                return LocalRef::Place(place);
            }

            if fx.fn_abi.c_variadic && arg_index == fx.fn_abi.args.len() {
                let va_list = PlaceRef::alloca(bx, bx.layout_of(arg_ty));

                // Explicitly start the lifetime of the `va_list`, improves LLVM codegen.
                bx.lifetime_start(va_list.val.llval, va_list.layout.size);

                bx.va_start(va_list.val.llval);

                return LocalRef::Place(va_list);
            }

            let arg = &fx.fn_abi.args[idx];
            idx += 1;
            if let PassMode::Cast { pad_i32: true, .. } = arg.mode {
                llarg_idx += 1;
            }

            if !memory_locals.contains(local) {
                // We don't have to cast or keep the argument in the alloca.
                // FIXME(eddyb): We should figure out how to use llvm.dbg.value instead
                // of putting everything in allocas just so we can use llvm.dbg.declare.
                let local = |op| LocalRef::Operand(op);
                match arg.mode {
                    PassMode::Ignore => {
                        return local(OperandRef::zero_sized(arg.layout));
                    }
                    PassMode::Direct(_) => {
                        let llarg = bx.get_param(llarg_idx);
                        llarg_idx += 1;
                        return local(OperandRef::from_immediate_or_packed_pair(
                            bx, llarg, arg.layout,
                        ));
                    }
                    PassMode::Pair(..) => {
                        let (a, b) = (bx.get_param(llarg_idx), bx.get_param(llarg_idx + 1));
                        llarg_idx += 2;

                        return local(OperandRef {
                            val: OperandValue::Pair(a, b),
                            layout: arg.layout,
                            move_annotation: None,
                        });
                    }
                    _ => {}
                }
            }

            match arg.mode {
                // Sized indirect arguments
                PassMode::Indirect { attrs, meta_attrs: None, on_stack: _ } => {
                    // Don't copy an indirect argument to an alloca, the caller already put it
                    // in a temporary alloca and gave it up.
                    // FIXME: lifetimes
                    if let Some(pointee_align) = attrs.pointee_align
                        && pointee_align < arg.layout.align.abi
                    {
                        // ...unless the argument is underaligned, then we need to copy it to
                        // a higher-aligned alloca.
                        let tmp = PlaceRef::alloca(bx, arg.layout);
                        bx.store_fn_arg(arg, &mut llarg_idx, tmp);
                        LocalRef::Place(tmp)
                    } else {
                        let llarg = bx.get_param(llarg_idx);
                        llarg_idx += 1;
                        LocalRef::Place(PlaceRef::new_sized(llarg, arg.layout))
                    }
                }
                // Unsized indirect arguments
                PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
                    // As the storage for the indirect argument lives during
                    // the whole function call, we just copy the wide pointer.
                    let llarg = bx.get_param(llarg_idx);
                    llarg_idx += 1;
                    let llextra = bx.get_param(llarg_idx);
                    llarg_idx += 1;
                    let indirect_operand = OperandValue::Pair(llarg, llextra);

                    let tmp = PlaceRef::alloca_unsized_indirect(bx, arg.layout);
                    indirect_operand.store(bx, tmp);
                    LocalRef::UnsizedPlace(tmp)
                }
                _ => {
                    let tmp = PlaceRef::alloca(bx, arg.layout);
                    bx.store_fn_arg(arg, &mut llarg_idx, tmp);
                    LocalRef::Place(tmp)
                }
            }
        })
        .collect::<Vec<_>>();

    if fx.instance.def.requires_caller_location(bx.tcx()) {
        let mir_args = if let Some(num_untupled) = num_untupled {
            // Subtract off the tupled argument that gets 'expanded'
            args.len() - 1 + num_untupled
        } else {
            args.len()
        };
        assert_eq!(
            fx.fn_abi.args.len(),
            mir_args + 1,
            "#[track_caller] instance {:?} must have 1 more argument in their ABI than in their MIR",
            fx.instance
        );

        let arg = fx.fn_abi.args.last().unwrap();
        match arg.mode {
            PassMode::Direct(_) => (),
            _ => bug!("caller location must be PassMode::Direct, found {:?}", arg.mode),
        }

        fx.caller_location = Some(OperandRef {
            val: OperandValue::Immediate(bx.get_param(llarg_idx)),
            layout: arg.layout,
            move_annotation: None,
        });
    }

    args
}

fn find_cold_blocks<'tcx>(
    tcx: TyCtxt<'tcx>,
    mir: &mir::Body<'tcx>,
) -> IndexVec<mir::BasicBlock, bool> {
    let local_decls = &mir.local_decls;

    let mut cold_blocks: IndexVec<mir::BasicBlock, bool> =
        IndexVec::from_elem(false, &mir.basic_blocks);

    // Traverse all basic blocks from end of the function to the start.
    for (bb, bb_data) in traversal::postorder(mir) {
        let terminator = bb_data.terminator();

        match terminator.kind {
            // If a BB ends with a call to a cold function, mark it as cold.
            mir::TerminatorKind::Call { ref func, .. }
            | mir::TerminatorKind::TailCall { ref func, .. }
                if let ty::FnDef(def_id, ..) = *func.ty(local_decls, tcx).kind()
                    && let attrs = tcx.codegen_fn_attrs(def_id)
                    && attrs.flags.contains(CodegenFnAttrFlags::COLD) =>
            {
                cold_blocks[bb] = true;
                continue;
            }

            // If a BB ends with an `unreachable`, also mark it as cold.
            mir::TerminatorKind::Unreachable => {
                cold_blocks[bb] = true;
                continue;
            }

            _ => {}
        }

        // If all successors of a BB are cold and there's at least one of them, mark this BB as cold.
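        // A hedged illustrative case: if `bb2` only branches to blocks already
        // marked cold by this postorder sweep (e.g. both arms end in cold calls
        // or `unreachable`), `bb2` itself becomes cold, so coldness propagates
        // backwards through the CFG.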
        let mut succ = terminator.successors();
        if let Some(first) = succ.next()
            && cold_blocks[first]
            && succ.all(|s| cold_blocks[s])
        {
            cold_blocks[bb] = true;
        }
    }

    cold_blocks
}