use std::iter;

use rustc_index::IndexVec;
use rustc_index::bit_set::DenseBitSet;
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::mir::{Body, Local, UnwindTerminateReason, traversal};
use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt, HasTypingEnv, TyAndLayout};
use rustc_middle::ty::{self, Instance, Ty, TyCtxt, TypeFoldable, TypeVisitableExt};
use rustc_middle::{bug, mir, span_bug};
use rustc_target::callconv::{FnAbi, PassMode};
use tracing::{debug, instrument};

use crate::base;
use crate::traits::*;

mod analyze;
mod block;
mod constant;
mod coverageinfo;
pub mod debuginfo;
mod intrinsic;
mod locals;
pub mod naked_asm;
pub mod operand;
pub mod place;
mod rvalue;
mod statement;

pub use self::block::store_cast;
use self::debuginfo::{FunctionDebugContext, PerLocalVarDebugInfo};
use self::operand::{OperandRef, OperandValue};
use self::place::PlaceRef;

/// Used for tracking the state of generated basic blocks.
enum CachedLlbb<T> {
    /// Nothing created yet.
    None,

    /// Has been created.
    Some(T),

    /// Nothing created yet, and nothing should be.
    Skip,
}

type PerLocalVarDebugInfoIndexVec<'tcx, V> =
    IndexVec<mir::Local, Vec<PerLocalVarDebugInfo<'tcx, V>>>;

/// Master context for codegenning from MIR.
pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
    instance: Instance<'tcx>,

    mir: &'tcx mir::Body<'tcx>,

    debug_context: Option<FunctionDebugContext<'tcx, Bx::DIScope, Bx::DILocation>>,

    llfn: Bx::Function,

    cx: &'a Bx::CodegenCx,

    fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>,

    /// A slot where the exception-handling personality value can be stored
    /// and later reloaded by resume instructions; allocated lazily the first
    /// time a landing pad needs it.
    personality_slot: Option<PlaceRef<'tcx, Bx::Value>>,

    /// A backend `BasicBlock` for each MIR `BasicBlock`, created lazily
    /// as-needed (e.g. when another block branches to it).
    cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>>,

    /// The cleanup kind of each basic block, if this target requires the
    /// newer (funclet-based) exception-handling instructions.
    cleanup_kinds: Option<IndexVec<mir::BasicBlock, analyze::CleanupKind>>,

    /// The funclet created for each basic block that needs one
    /// (MSVC-style exception handling only).
    funclets: IndexVec<mir::BasicBlock, Option<Bx::Funclet>>,

    /// Cached landing/cleanup pad block for each basic block.
    landing_pads: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,

    /// A cached block containing nothing but an `unreachable` terminator.
    unreachable_block: Option<Bx::BasicBlock>,

    /// A cached block that terminates the program when an unwind reaches it,
    /// along with the reason for terminating.
    terminate_block: Option<(Bx::BasicBlock, UnwindTerminateReason)>,

    /// `true` for each block that is unlikely to be executed at runtime, so
    /// the backend can place it in a cold section.
    cold_blocks: IndexVec<mir::BasicBlock, bool>,

    /// The location where each MIR local is stored: usually a `PlaceRef`
    /// backed by an alloca, but locals that are never used indirectly can be
    /// kept directly in an `OperandRef` for tighter generated code.
    locals: locals::Locals<'tcx, Bx::Value>,

    /// All `VarDebugInfo` from the MIR body, partitioned by `Local`;
    /// `None` if variable debuginfo is not needed.
    per_local_var_debug_info: Option<PerLocalVarDebugInfoIndexVec<'tcx, Bx::DIVariable>>,

    /// The caller's location operand, if this instance is `#[track_caller]`.
    caller_location: Option<OperandRef<'tcx, Bx::Value>>,
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    /// Monomorphizes `value` for this function's instance, normalizing away
    /// anything that depends on generic parameters.
    pub fn monomorphize<T>(&self, value: T) -> T
    where
        T: Copy + TypeFoldable<TyCtxt<'tcx>>,
    {
        debug!("monomorphize: self.instance={:?}", self.instance);
        self.instance.instantiate_mir_and_normalize_erasing_regions(
            self.cx.tcx(),
            self.cx.typing_env(),
            ty::EarlyBinder::bind(value),
        )
    }
}

enum LocalRef<'tcx, V> {
    Place(PlaceRef<'tcx, V>),
    /// `UnsizedPlace(p)`: `p` itself is a thin pointer (indirect place).
    /// `*p` is the wide pointer that references the actual unsized place.
    /// Every time it is initialized, we have to reallocate the place
    /// and update the wide pointer. That's the reason why it is indirect.
    UnsizedPlace(PlaceRef<'tcx, V>),
    /// The backend [`OperandValue`] has already been generated.
    Operand(OperandRef<'tcx, V>),
    /// Will be a `Self::Operand` once we get to its definition.
    PendingOperand,
}

impl<'tcx, V: CodegenObject> LocalRef<'tcx, V> {
    fn new_operand(layout: TyAndLayout<'tcx>) -> LocalRef<'tcx, V> {
        if layout.is_zst() {
            // Zero-size temporaries aren't always initialized, which
            // doesn't matter because they don't contain data, but
            // we need something sufficiently aligned in the operand.
            LocalRef::Operand(OperandRef::zero_sized(layout))
        } else {
            LocalRef::PendingOperand
        }
    }
}

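/// Codegen the body of the given function instance into its backend function,
/// creating and filling one backend basic block per reachable MIR basic block.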
#[instrument(level = "debug", skip(cx))]
pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    cx: &'a Bx::CodegenCx,
    instance: Instance<'tcx>,
) {
    assert!(!instance.args.has_infer());

    let tcx = cx.tcx();
    let llfn = cx.get_fn(instance);

    let mut mir = tcx.instance_mir(instance.def);
    let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty());
    debug!("fn_abi: {:?}", fn_abi);

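    // Under `feature(ergonomic_clones)`, monomorphize the body up front so
    // that `.use` calls on `Copy` types can be rewritten into plain copies.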
    if tcx.features().ergonomic_clones() {
        let monomorphized_mir = instance.instantiate_mir_and_normalize_erasing_regions(
            tcx,
            ty::TypingEnv::fully_monomorphized(),
            ty::EarlyBinder::bind(mir.clone()),
        );
        mir = tcx.arena.alloc(optimize_use_clone::<Bx>(cx, monomorphized_mir));
    }

    let debug_context = cx.create_function_debug_context(instance, fn_abi, llfn, &mir);

    let start_llbb = Bx::append_block(cx, llfn, "start");
    let mut start_bx = Bx::build(cx, start_llbb);

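    // If any block can unwind, or must terminate instead of unwinding, the
    // function needs an exception-handling personality routine.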
    if mir.basic_blocks.iter().any(|bb| {
        bb.is_cleanup || matches!(bb.terminator().unwind(), Some(mir::UnwindAction::Terminate(_)))
    }) {
        start_bx.set_personality_fn(cx.eh_personality());
    }

    let cleanup_kinds =
        base::wants_new_eh_instructions(tcx.sess).then(|| analyze::cleanup_kinds(&mir));

    let cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>> =
        mir.basic_blocks
            .indices()
            .map(|bb| {
                if bb == mir::START_BLOCK { CachedLlbb::Some(start_llbb) } else { CachedLlbb::None }
            })
            .collect();

    let mut fx = FunctionCx {
        instance,
        mir,
        llfn,
        fn_abi,
        cx,
        personality_slot: None,
        cached_llbbs,
        unreachable_block: None,
        terminate_block: None,
        cleanup_kinds,
        landing_pads: IndexVec::from_elem(None, &mir.basic_blocks),
        funclets: IndexVec::from_fn_n(|_| None, mir.basic_blocks.len()),
        cold_blocks: find_cold_blocks(tcx, mir),
        locals: locals::Locals::empty(),
        debug_context,
        per_local_var_debug_info: None,
        caller_location: None,
    };

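    // Compute debuginfo for all user variables and constants up front, before
    // any blocks are codegenned.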
    let (per_local_var_debug_info, consts_debug_info) =
        fx.compute_per_local_var_debug_info(&mut start_bx).unzip();
    fx.per_local_var_debug_info = per_local_var_debug_info;

    let traversal_order = traversal::mono_reachable_reverse_postorder(mir, tcx, instance);
    let memory_locals = analyze::non_ssa_locals(&fx, &traversal_order);

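    // Decide where each MIR local will live: an alloca-backed place for
    // locals that need memory, or a (possibly pending) operand otherwise.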
    let local_values = {
        let args = arg_local_refs(&mut start_bx, &mut fx, &memory_locals);

        let mut allocate_local = |local: Local| {
            let decl = &mir.local_decls[local];
            let layout = start_bx.layout_of(fx.monomorphize(decl.ty));
            assert!(!layout.ty.has_erasable_regions());

            if local == mir::RETURN_PLACE {
                match fx.fn_abi.ret.mode {
                    PassMode::Indirect { .. } => {
                        debug!("alloc: {:?} (return place) -> place", local);
                        let llretptr = start_bx.get_param(0);
                        return LocalRef::Place(PlaceRef::new_sized(llretptr, layout));
                    }
                    PassMode::Cast { ref cast, .. } => {
                        debug!("alloc: {:?} (return place) -> place", local);
                        let size = cast.size(&start_bx).max(layout.size);
                        return LocalRef::Place(PlaceRef::alloca_size(&mut start_bx, size, layout));
                    }
                    _ => {}
                };
            }

            if memory_locals.contains(local) {
                debug!("alloc: {:?} -> place", local);
                if layout.is_unsized() {
                    LocalRef::UnsizedPlace(PlaceRef::alloca_unsized_indirect(&mut start_bx, layout))
                } else {
                    LocalRef::Place(PlaceRef::alloca(&mut start_bx, layout))
                }
            } else {
                debug!("alloc: {:?} -> operand", local);
                LocalRef::new_operand(layout)
            }
        };

        // The return place comes first, then the arguments, then all other
        // variables and temporaries, matching the numbering of MIR locals.
        let retptr = allocate_local(mir::RETURN_PLACE);
        iter::once(retptr)
            .chain(args.into_iter())
            .chain(mir.vars_and_temps_iter().map(allocate_local))
            .collect()
    };
    fx.initialize_locals(local_values);

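    // Introduce debuginfo (names and locations) for the function's locals.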
    fx.debug_introduce_locals(&mut start_bx, consts_debug_info.unwrap_or_default());

    // The builders will be created separately for each basic block at `codegen_block`.
    // So drop the builder of `start_llbb` to avoid having two at the same time.
    drop(start_bx);

    // Codegen each reachable block in mono-reachable reverse postorder,
    // tracking which blocks the traversal never visits.
    let mut unreached_blocks = DenseBitSet::new_filled(mir.basic_blocks.len());
    for bb in traversal_order {
        fx.codegen_block(bb);
        unreached_blocks.remove(bb);
    }

    // Blocks that are unreachable post-monomorphization can still be the
    // target of some terminator (e.g. a `SwitchInt` arm), so they must still
    // exist; emit them as stubs containing only an `unreachable` terminator.
    for bb in unreached_blocks.iter() {
        fx.codegen_block_as_unreachable(bb);
    }
}

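/// Rewrites `.use` (ergonomic clone) calls whose argument is a shared
/// reference to a `Copy` type into plain dereferencing copies, removing the
/// call terminator entirely.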
fn optimize_use_clone<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    cx: &'a Bx::CodegenCx,
    mut mir: Body<'tcx>,
) -> Body<'tcx> {
    let tcx = cx.tcx();

    if tcx.features().ergonomic_clones() {
        for bb in mir.basic_blocks.as_mut() {
            let mir::TerminatorKind::Call {
                args,
                destination,
                target,
                call_source: mir::CallSource::Use,
                ..
            } = &bb.terminator().kind
            else {
                continue;
            };

            // A `.use` desugaring always calls `clone` with exactly one argument.
            assert_eq!(args.len(), 1);
            let arg = &args[0];

            // These types are easily available from locals, so check that before
            // doing DefId lookups to figure out what we're actually calling.
            let arg_ty = arg.node.ty(&mir.local_decls, tcx);

            let ty::Ref(_region, inner_ty, mir::Mutability::Not) = *arg_ty.kind() else { continue };

            if !tcx.type_is_copy_modulo_regions(cx.typing_env(), inner_ty) {
                continue;
            }

            let Some(arg_place) = arg.node.place() else { continue };

            let destination_block = target.unwrap();

            // Replace the call with a copy out of the dereferenced argument...
            bb.statements.push(mir::Statement::new(
                bb.terminator().source_info,
                mir::StatementKind::Assign(Box::new((
                    *destination,
                    mir::Rvalue::Use(mir::Operand::Copy(
                        arg_place.project_deeper(&[mir::ProjectionElem::Deref], tcx),
                    )),
                ))),
            ));

            // ...and fall through to what used to be the call's return block.
            bb.terminator_mut().kind = mir::TerminatorKind::Goto { target: destination_block };
        }
    }

    mir
}

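/// Produces a `LocalRef` for each of the function's arguments, reading them
/// from the backend's function parameters as dictated by the `FnAbi`.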
fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    fx: &mut FunctionCx<'a, 'tcx, Bx>,
    memory_locals: &DenseBitSet<mir::Local>,
) -> Vec<LocalRef<'tcx, Bx::Value>> {
    let mir = fx.mir;
    let mut idx = 0;
    let mut llarg_idx = fx.fn_abi.ret.is_indirect() as usize;

    let mut num_untupled = None;

    // Naked functions have no physical arguments to bind.
    let codegen_fn_attrs = bx.tcx().codegen_instance_attrs(fx.instance.def);
    if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NAKED) {
        return vec![];
    }

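    // Build a `LocalRef` for each MIR argument, advancing `llarg_idx` past
    // the backend parameters each argument consumes.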
    let args = mir
        .args_iter()
        .enumerate()
        .map(|(arg_index, local)| {
            let arg_decl = &mir.local_decls[local];
            let arg_ty = fx.monomorphize(arg_decl.ty);

            if Some(local) == mir.spread_arg {
                // This argument (e.g. the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual backend function arguments.
                let ty::Tuple(tupled_arg_tys) = arg_ty.kind() else {
                    bug!("spread argument isn't a tuple?!");
                };

                let layout = bx.layout_of(arg_ty);

                if layout.is_unsized() {
                    span_bug!(
                        arg_decl.source_info.span,
                        "\"rust-call\" ABI does not support unsized params",
                    );
                }

                let place = PlaceRef::alloca(bx, layout);
                for i in 0..tupled_arg_tys.len() {
                    let arg = &fx.fn_abi.args[idx];
                    idx += 1;
                    if let PassMode::Cast { pad_i32: true, .. } = arg.mode {
                        llarg_idx += 1;
                    }
                    let pr_field = place.project_field(bx, i);
                    bx.store_fn_arg(arg, &mut llarg_idx, pr_field);
                }
                assert_eq!(
                    None,
                    num_untupled.replace(tupled_arg_tys.len()),
                    "Replaced existing num_untupled"
                );

                return LocalRef::Place(place);
            }

            if fx.fn_abi.c_variadic && arg_index == fx.fn_abi.args.len() {
                let va_list = PlaceRef::alloca(bx, bx.layout_of(arg_ty));

                // Begin the lifetime of the `va_list` alloca and initialize it.
                bx.lifetime_start(va_list.val.llval, va_list.layout.size);
                bx.va_start(va_list.val.llval);

                return LocalRef::Place(va_list);
            }

            let arg = &fx.fn_abi.args[idx];
            idx += 1;
            if let PassMode::Cast { pad_i32: true, .. } = arg.mode {
                llarg_idx += 1;
            }

            if !memory_locals.contains(local) {
                // We don't have to cast or keep the argument in the alloca.
                // FIXME(eddyb): We should figure out how to use `llvm.dbg.value`
                // instead of relying on allocas for `llvm.dbg.declare`.
                let local = |op| LocalRef::Operand(op);
                match arg.mode {
                    PassMode::Ignore => {
                        return local(OperandRef::zero_sized(arg.layout));
                    }
                    PassMode::Direct(_) => {
                        let llarg = bx.get_param(llarg_idx);
                        llarg_idx += 1;
                        return local(OperandRef::from_immediate_or_packed_pair(
                            bx, llarg, arg.layout,
                        ));
                    }
                    PassMode::Pair(..) => {
                        let (a, b) = (bx.get_param(llarg_idx), bx.get_param(llarg_idx + 1));
                        llarg_idx += 2;

                        return local(OperandRef {
                            val: OperandValue::Pair(a, b),
                            layout: arg.layout,
                        });
                    }
                    _ => {}
                }
            }

            match arg.mode {
                PassMode::Indirect { attrs, meta_attrs: None, on_stack: _ } => {
                    if let Some(pointee_align) = attrs.pointee_align
                        && pointee_align < arg.layout.align.abi
                    {
                        // The argument is underaligned for its type: copy it
                        // into a properly aligned temporary.
                        let tmp = PlaceRef::alloca(bx, arg.layout);
                        bx.store_fn_arg(arg, &mut llarg_idx, tmp);
                        LocalRef::Place(tmp)
                    } else {
                        // Sufficiently aligned indirect argument: use the
                        // caller's pointer directly instead of copying.
                        let llarg = bx.get_param(llarg_idx);
                        llarg_idx += 1;
                        LocalRef::Place(PlaceRef::new_sized(llarg, arg.layout))
                    }
                }
                PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
                    // Unsized indirect argument: passed as a (data, metadata)
                    // pair of parameters that we store into an indirect place.
                    let llarg = bx.get_param(llarg_idx);
                    llarg_idx += 1;
                    let llextra = bx.get_param(llarg_idx);
                    llarg_idx += 1;
                    let indirect_operand = OperandValue::Pair(llarg, llextra);

                    let tmp = PlaceRef::alloca_unsized_indirect(bx, arg.layout);
                    indirect_operand.store(bx, tmp);
                    LocalRef::UnsizedPlace(tmp)
                }
                _ => {
                    // Everything else goes through an alloca, giving the
                    // argument a stable address: it is a memory local.
                    let tmp = PlaceRef::alloca(bx, arg.layout);
                    bx.store_fn_arg(arg, &mut llarg_idx, tmp);
                    LocalRef::Place(tmp)
                }
            }
        })
        .collect::<Vec<_>>();

    if fx.instance.def.requires_caller_location(bx.tcx()) {
        let mir_args = if let Some(num_untupled) = num_untupled {
            // Subtract off the tupled argument that got expanded above.
            args.len() - 1 + num_untupled
        } else {
            args.len()
        };
        assert_eq!(
            fx.fn_abi.args.len(),
            mir_args + 1,
            "#[track_caller] instance {:?} must have 1 more argument in its ABI than in its MIR",
            fx.instance
        );

        let arg = fx.fn_abi.args.last().unwrap();
        match arg.mode {
            PassMode::Direct(_) => (),
            _ => bug!("caller location must be PassMode::Direct, found {:?}", arg.mode),
        }

        fx.caller_location = Some(OperandRef {
            val: OperandValue::Immediate(bx.get_param(llarg_idx)),
            layout: arg.layout,
        });
    }

    args
}

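/// Computes, for each basic block, whether it is cold: it ends in a call to a
/// `#[cold]` function or in `unreachable`, or all of its successors are cold.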
fn find_cold_blocks<'tcx>(
    tcx: TyCtxt<'tcx>,
    mir: &mir::Body<'tcx>,
) -> IndexVec<mir::BasicBlock, bool> {
    let local_decls = &mir.local_decls;

    let mut cold_blocks: IndexVec<mir::BasicBlock, bool> =
        IndexVec::from_elem(false, &mir.basic_blocks);

    // Traverse in postorder so that a block's successors are visited first,
    // letting coldness propagate backwards from them.
    for (bb, bb_data) in traversal::postorder(mir) {
        let terminator = bb_data.terminator();

        match terminator.kind {
            // If a block ends in a call to a `#[cold]` function, mark it cold.
            mir::TerminatorKind::Call { ref func, .. }
            | mir::TerminatorKind::TailCall { ref func, .. }
                if let ty::FnDef(def_id, ..) = *func.ty(local_decls, tcx).kind()
                    && let attrs = tcx.codegen_fn_attrs(def_id)
                    && attrs.flags.contains(CodegenFnAttrFlags::COLD) =>
            {
                cold_blocks[bb] = true;
                continue;
            }

            // If a block ends in `unreachable`, mark it cold.
            mir::TerminatorKind::Unreachable => {
                cold_blocks[bb] = true;
                continue;
            }

            _ => {}
        }

        // If there is at least one successor and all successors are cold,
        // this block is cold as well.
        let mut succ = terminator.successors();
        if let Some(first) = succ.next()
            && cold_blocks[first]
            && succ.all(|s| cold_blocks[s])
        {
            cold_blocks[bb] = true;
        }
    }

    cold_blocks
}