use std::iter;

use rustc_index::IndexVec;
use rustc_index::bit_set::DenseBitSet;
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::mir::{UnwindTerminateReason, traversal};
use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt, HasTypingEnv, TyAndLayout};
use rustc_middle::ty::{self, Instance, Ty, TyCtxt, TypeFoldable, TypeVisitableExt};
use rustc_middle::{bug, mir, span_bug};
use rustc_target::callconv::{FnAbi, PassMode};
use tracing::{debug, instrument};

use crate::base;
use crate::traits::*;

mod analyze;
mod block;
mod constant;
mod coverageinfo;
pub mod debuginfo;
mod intrinsic;
mod locals;
mod naked_asm;
pub mod operand;
pub mod place;
mod rvalue;
mod statement;

use self::debuginfo::{FunctionDebugContext, PerLocalVarDebugInfo};
use self::operand::{OperandRef, OperandValue};
use self::place::PlaceRef;

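/// The lazily-created backend block for a MIR block, as tracked in
/// `FunctionCx::cached_llbbs`.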
enum CachedLlbb<T> {
    /// Nothing created yet.
    None,

    /// Has been created.
    Some(T),

    /// Nothing created yet, and nothing should be.
    Skip,
}

type PerLocalVarDebugInfoIndexVec<'tcx, V> =
    IndexVec<mir::Local, Vec<PerLocalVarDebugInfo<'tcx, V>>>;

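/// Master context for codegenning from MIR.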
pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
    instance: Instance<'tcx>,

    mir: &'tcx mir::Body<'tcx>,

    debug_context: Option<FunctionDebugContext<'tcx, Bx::DIScope, Bx::DILocation>>,

    llfn: Bx::Function,

    cx: &'a Bx::CodegenCx,

    fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>,

    /// When unwinding is initiated, we have to store this personality
    /// value somewhere so that we can load it and re-use it in the
    /// resume instruction. This slot holds the alloca the personality
    /// value is stored into and later loaded from.
    personality_slot: Option<PlaceRef<'tcx, Bx::Value>>,

    /// A backend `BasicBlock` for each MIR `BasicBlock`, created lazily
    /// as-needed (e.g. when the RPO traversal reaches it, or another
    /// block branches to it).
    cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>>,

    /// The funclet status of each basic block.
    cleanup_kinds: Option<IndexVec<mir::BasicBlock, analyze::CleanupKind>>,

    /// When targeting MSVC, this stores the cleanup info for each funclet BB.
    /// This is initialized at the same time as the `landing_pads` entry for the
    /// funclet's head block, i.e. when needed by an unwind / `cleanup_ret`.
    funclets: IndexVec<mir::BasicBlock, Option<Bx::Funclet>>,

    /// This stores the cached landing/cleanup pad block for a given BB.
    landing_pads: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,

    /// Cached unreachable block.
    unreachable_block: Option<Bx::BasicBlock>,

    /// Cached terminate-upon-unwinding block and its reason.
    terminate_block: Option<(Bx::BasicBlock, UnwindTerminateReason)>,

    /// A bool flag for each basic block indicating whether it is a cold block.
    /// A cold block is a block that is unlikely to be executed at runtime.
    cold_blocks: IndexVec<mir::BasicBlock, bool>,

    /// The location where each MIR arg/var/tmp/ret is stored. This is
    /// usually a `PlaceRef` representing an alloca, but not always:
    /// sometimes we can skip the alloca and just store the value
    /// directly using an `OperandRef`, which makes for tighter IR.
    locals: locals::Locals<'tcx, Bx::Value>,

    /// All `VarDebugInfo` from the MIR body, partitioned by `Local`.
    /// This is `None` if no variable debuginfo/names are needed.
    per_local_var_debug_info: Option<PerLocalVarDebugInfoIndexVec<'tcx, Bx::DIVariable>>,

    /// Caller location propagated if this function has `#[track_caller]`.
    caller_location: Option<OperandRef<'tcx, Bx::Value>>,
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
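    /// Monomorphizes `value` for this function's `Instance`: generic parameters
    /// are substituted, regions are erased, and projections are normalized away.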
    pub fn monomorphize<T>(&self, value: T) -> T
    where
        T: Copy + TypeFoldable<TyCtxt<'tcx>>,
    {
        debug!("monomorphize: self.instance={:?}", self.instance);
        self.instance.instantiate_mir_and_normalize_erasing_regions(
            self.cx.tcx(),
            self.cx.typing_env(),
            ty::EarlyBinder::bind(value),
        )
    }
}

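/// Where a MIR local lives during codegen: as a memory place (possibly indirect,
/// for unsized locals) or as an SSA operand (possibly not yet defined).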
enum LocalRef<'tcx, V> {
    Place(PlaceRef<'tcx, V>),
    /// `UnsizedPlace(p)`: `p` itself is a thin pointer (indirect place).
    /// `*p` is the wide pointer that references the actual unsized place.
    /// Every time it is initialized, we have to reallocate the place
    /// and update the wide pointer. That's the reason why it is indirect.
    UnsizedPlace(PlaceRef<'tcx, V>),
    /// The backend [`OperandValue`] has already been generated.
    Operand(OperandRef<'tcx, V>),
    /// Will be a `Self::Operand` once we get to its definition.
    PendingOperand,
}

impl<'tcx, V: CodegenObject> LocalRef<'tcx, V> {
    fn new_operand(layout: TyAndLayout<'tcx>) -> LocalRef<'tcx, V> {
        if layout.is_zst() {
            // Zero-size temporaries aren't always initialized, which
            // doesn't matter because they don't contain data, but
            // we need something sufficiently aligned in the operand.
            LocalRef::Operand(OperandRef::zero_sized(layout))
        } else {
            LocalRef::PendingOperand
        }
    }
}

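/// Codegen the body of the given fully-monomorphized `instance`, emitting it into
/// the backend function that `cx.get_fn(instance)` returns.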
#[instrument(level = "debug", skip(cx))]
pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    cx: &'a Bx::CodegenCx,
    instance: Instance<'tcx>,
) {
    assert!(!instance.args.has_infer());

    let llfn = cx.get_fn(instance);

    let mir = cx.tcx().instance_mir(instance.def);

    let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty());
    debug!("fn_abi: {:?}", fn_abi);

    if cx.tcx().codegen_fn_attrs(instance.def_id()).flags.contains(CodegenFnAttrFlags::NAKED) {
        crate::mir::naked_asm::codegen_naked_asm::<Bx>(cx, &mir, instance);
        return;
    }

    let debug_context = cx.create_function_debug_context(instance, fn_abi, llfn, mir);

    let start_llbb = Bx::append_block(cx, llfn, "start");
    let mut start_bx = Bx::build(cx, start_llbb);

    if mir.basic_blocks.iter().any(|bb| {
        bb.is_cleanup || matches!(bb.terminator().unwind(), Some(mir::UnwindAction::Terminate(_)))
    }) {
        start_bx.set_personality_fn(cx.eh_personality());
    }

    let cleanup_kinds =
        base::wants_new_eh_instructions(cx.tcx().sess).then(|| analyze::cleanup_kinds(mir));

    let cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>> =
        mir.basic_blocks
            .indices()
            .map(|bb| {
                if bb == mir::START_BLOCK { CachedLlbb::Some(start_llbb) } else { CachedLlbb::None }
            })
            .collect();

    let mut fx = FunctionCx {
        instance,
        mir,
        llfn,
        fn_abi,
        cx,
        personality_slot: None,
        cached_llbbs,
        unreachable_block: None,
        terminate_block: None,
        cleanup_kinds,
        landing_pads: IndexVec::from_elem(None, &mir.basic_blocks),
        funclets: IndexVec::from_fn_n(|_| None, mir.basic_blocks.len()),
        cold_blocks: find_cold_blocks(cx.tcx(), mir),
        locals: locals::Locals::empty(),
        debug_context,
        per_local_var_debug_info: None,
        caller_location: None,
    };

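    // Compute the per-local variable debuginfo up front; debuginfo for constants is
    // introduced separately, once the locals have been allocated.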
    let (per_local_var_debug_info, consts_debug_info) =
        fx.compute_per_local_var_debug_info(&mut start_bx).unzip();
    fx.per_local_var_debug_info = per_local_var_debug_info;

    let traversal_order = traversal::mono_reachable_reverse_postorder(mir, cx.tcx(), instance);
    let memory_locals = analyze::non_ssa_locals(&fx, &traversal_order);

    let local_values = {
        let args = arg_local_refs(&mut start_bx, &mut fx, &memory_locals);

        let mut allocate_local = |local| {
            let decl = &mir.local_decls[local];
            let layout = start_bx.layout_of(fx.monomorphize(decl.ty));
            assert!(!layout.ty.has_erasable_regions());

            if local == mir::RETURN_PLACE {
                match fx.fn_abi.ret.mode {
                    PassMode::Indirect { .. } => {
                        debug!("alloc: {:?} (return place) -> place", local);
                        let llretptr = start_bx.get_param(0);
                        return LocalRef::Place(PlaceRef::new_sized(llretptr, layout));
                    }
                    PassMode::Cast { ref cast, .. } => {
                        debug!("alloc: {:?} (return place) -> place", local);
                        let size = cast.size(&start_bx);
                        return LocalRef::Place(PlaceRef::alloca_size(&mut start_bx, size, layout));
                    }
                    _ => {}
                };
            }

            if memory_locals.contains(local) {
                debug!("alloc: {:?} -> place", local);
                if layout.is_unsized() {
                    LocalRef::UnsizedPlace(PlaceRef::alloca_unsized_indirect(&mut start_bx, layout))
                } else {
                    LocalRef::Place(PlaceRef::alloca(&mut start_bx, layout))
                }
            } else {
                debug!("alloc: {:?} -> operand", local);
                LocalRef::new_operand(layout)
            }
        };

        let retptr = allocate_local(mir::RETURN_PLACE);
        iter::once(retptr)
            .chain(args.into_iter())
            .chain(mir.vars_and_temps_iter().map(allocate_local))
            .collect()
    };

    fx.initialize_locals(local_values);

    // Apply debuginfo to the newly allocated locals.
    fx.debug_introduce_locals(&mut start_bx, consts_debug_info.unwrap_or_default());

    // Perform any start-of-function coverage instrumentation setup.
    start_bx.init_coverage(instance);

    // Builders are created per basic block in `codegen_block`; drop the start-block
    // builder so there are never two live at once.
    drop(start_bx);

    let mut unreached_blocks = DenseBitSet::new_filled(mir.basic_blocks.len());
    // Codegen the body of each reachable block using our reverse postorder list.
    for bb in traversal_order {
        fx.codegen_block(bb);
        unreached_blocks.remove(bb);
    }

    // Blocks the mono-reachability traversal never visited may still be named as
    // targets elsewhere; emit them as a bare `unreachable` so those references
    // stay valid.
    for bb in unreached_blocks.iter() {
        fx.codegen_block_as_unreachable(bb);
    }
}

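/// Produces a `LocalRef` for each MIR argument: a direct operand for SSA-eligible
/// arguments, or a place (allocated, or indirect for unsized arguments) for
/// arguments that live in memory.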
fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    fx: &mut FunctionCx<'a, 'tcx, Bx>,
    memory_locals: &DenseBitSet<mir::Local>,
) -> Vec<LocalRef<'tcx, Bx::Value>> {
    let mir = fx.mir;
    let mut idx = 0;
    let mut llarg_idx = fx.fn_abi.ret.is_indirect() as usize;

    let mut num_untupled = None;

    let codegen_fn_attrs = bx.tcx().codegen_fn_attrs(fx.instance.def_id());
    let naked = codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NAKED);
    if naked {
        return vec![];
    }

    let args = mir
        .args_iter()
        .enumerate()
        .map(|(arg_index, local)| {
            let arg_decl = &mir.local_decls[local];
            let arg_ty = fx.monomorphize(arg_decl.ty);

            if Some(local) == mir.spread_arg {
                // This argument (e.g. the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now has to be
                // reconstructed into a tuple local variable from multiple
                // individual function arguments.
                let ty::Tuple(tupled_arg_tys) = arg_ty.kind() else {
                    bug!("spread argument isn't a tuple?!");
                };

                let layout = bx.layout_of(arg_ty);

                if layout.is_unsized() {
                    span_bug!(
                        arg_decl.source_info.span,
                        "\"rust-call\" ABI does not support unsized params",
                    );
                }

                let place = PlaceRef::alloca(bx, layout);
                for i in 0..tupled_arg_tys.len() {
                    let arg = &fx.fn_abi.args[idx];
                    idx += 1;
                    if let PassMode::Cast { pad_i32: true, .. } = arg.mode {
                        llarg_idx += 1;
                    }
                    let pr_field = place.project_field(bx, i);
                    bx.store_fn_arg(arg, &mut llarg_idx, pr_field);
                }
                assert_eq!(
                    None,
                    num_untupled.replace(tupled_arg_tys.len()),
                    "Replaced existing num_untupled"
                );

                return LocalRef::Place(place);
            }

            if fx.fn_abi.c_variadic && arg_index == fx.fn_abi.args.len() {
                // The last MIR argument of a C-variadic function is its `VaList`,
                // which has no counterpart in `fn_abi.args`: allocate it and
                // `va_start` it.
                let va_list = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
                bx.va_start(va_list.val.llval);

                return LocalRef::Place(va_list);
            }

            let arg = &fx.fn_abi.args[idx];
            idx += 1;
            if let PassMode::Cast { pad_i32: true, .. } = arg.mode {
                llarg_idx += 1;
            }

            if !memory_locals.contains(local) {
                // We don't have to cast or keep the argument in an alloca;
                // SSA-eligible arguments can be used directly as operands.
                let local = |op| LocalRef::Operand(op);
                match arg.mode {
                    PassMode::Ignore => {
                        return local(OperandRef::zero_sized(arg.layout));
                    }
                    PassMode::Direct(_) => {
                        let llarg = bx.get_param(llarg_idx);
                        llarg_idx += 1;
                        return local(OperandRef::from_immediate_or_packed_pair(
                            bx, llarg, arg.layout,
                        ));
                    }
                    PassMode::Pair(..) => {
                        let (a, b) = (bx.get_param(llarg_idx), bx.get_param(llarg_idx + 1));
                        llarg_idx += 2;

                        return local(OperandRef {
                            val: OperandValue::Pair(a, b),
                            layout: arg.layout,
                        });
                    }
                    _ => {}
                }
            }

            match arg.mode {
                PassMode::Indirect { attrs, meta_attrs: None, on_stack: _ } => {
                    // Don't copy an indirect argument to an alloca: the caller
                    // already put it in a temporary alloca and gave it up.
                    if let Some(pointee_align) = attrs.pointee_align
                        && pointee_align < arg.layout.align.abi
                    {
                        // ...unless the argument is underaligned, in which case we
                        // need to copy it to a higher-aligned alloca.
                        let tmp = PlaceRef::alloca(bx, arg.layout);
                        bx.store_fn_arg(arg, &mut llarg_idx, tmp);
                        LocalRef::Place(tmp)
                    } else {
                        let llarg = bx.get_param(llarg_idx);
                        llarg_idx += 1;
                        LocalRef::Place(PlaceRef::new_sized(llarg, arg.layout))
                    }
                }
                PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
                    // Unsized indirect arguments arrive as a (data, metadata) pair
                    // of parameters; store them through an indirect place.
                    let llarg = bx.get_param(llarg_idx);
                    llarg_idx += 1;
                    let llextra = bx.get_param(llarg_idx);
                    llarg_idx += 1;
                    let indirect_operand = OperandValue::Pair(llarg, llextra);

                    let tmp = PlaceRef::alloca_unsized_indirect(bx, arg.layout);
                    indirect_operand.store(bx, tmp);
                    LocalRef::UnsizedPlace(tmp)
                }
                _ => {
                    let tmp = PlaceRef::alloca(bx, arg.layout);
                    bx.store_fn_arg(arg, &mut llarg_idx, tmp);
                    LocalRef::Place(tmp)
                }
            }
        })
        .collect::<Vec<_>>();

    if fx.instance.def.requires_caller_location(bx.tcx()) {
        let mir_args = if let Some(num_untupled) = num_untupled {
            // Account for the tupled "rust-call" argument that was expanded above.
            args.len() - 1 + num_untupled
        } else {
            args.len()
        };
        assert_eq!(
            fx.fn_abi.args.len(),
            mir_args + 1,
            "#[track_caller] instance {:?} must have 1 more argument in its ABI than in its MIR",
            fx.instance
        );

        let arg = fx.fn_abi.args.last().unwrap();
        match arg.mode {
            PassMode::Direct(_) => (),
            _ => bug!("caller location must be PassMode::Direct, found {:?}", arg.mode),
        }

        fx.caller_location = Some(OperandRef {
            val: OperandValue::Immediate(bx.get_param(llarg_idx)),
            layout: arg.layout,
        });
    }

    args
}

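/// Marks a block cold if it calls a `#[cold]` function, or if all of its successors
/// are cold; blocks are walked in postorder so successor information is available.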
fn find_cold_blocks<'tcx>(
    tcx: TyCtxt<'tcx>,
    mir: &mir::Body<'tcx>,
) -> IndexVec<mir::BasicBlock, bool> {
    let local_decls = &mir.local_decls;

    let mut cold_blocks: IndexVec<mir::BasicBlock, bool> =
        IndexVec::from_elem(false, &mir.basic_blocks);

    // Traverse in postorder so that a block's successors have already been
    // visited by the time the block itself is.
    for (bb, bb_data) in traversal::postorder(mir) {
        let terminator = bb_data.terminator();

        // If a block ends with a call to a `#[cold]` function, mark it cold.
        if let mir::TerminatorKind::Call { ref func, .. } = terminator.kind
            && let ty::FnDef(def_id, ..) = *func.ty(local_decls, tcx).kind()
            && let attrs = tcx.codegen_fn_attrs(def_id)
            && attrs.flags.contains(CodegenFnAttrFlags::COLD)
        {
            cold_blocks[bb] = true;
            continue;
        }

        // If a block has at least one successor and all of its successors are
        // cold, mark it cold as well.
        let mut succ = terminator.successors();
        if let Some(first) = succ.next()
            && cold_blocks[first]
            && succ.all(|s| cold_blocks[s])
        {
            cold_blocks[bb] = true;
        }
    }

    cold_blocks
}