use std::iter;

use rustc_index::IndexVec;
use rustc_index::bit_set::DenseBitSet;
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::mir::{Body, Local, UnwindTerminateReason, traversal};
use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt, HasTypingEnv, TyAndLayout};
use rustc_middle::ty::{self, Instance, Ty, TyCtxt, TypeFoldable, TypeVisitableExt};
use rustc_middle::{bug, mir, span_bug};
use rustc_target::callconv::{FnAbi, PassMode};
use tracing::{debug, instrument};

use crate::base;
use crate::traits::*;

mod analyze;
mod block;
mod constant;
mod coverageinfo;
pub mod debuginfo;
mod intrinsic;
mod locals;
pub mod naked_asm;
pub mod operand;
pub mod place;
mod rvalue;
mod statement;

pub use self::block::store_cast;
use self::debuginfo::{FunctionDebugContext, PerLocalVarDebugInfo};
use self::operand::{OperandRef, OperandValue};
use self::place::PlaceRef;

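/// A lazily-created backend basic block for a MIR basic block: `None` means
/// nothing has been created yet, `Some` holds the created block, and `Skip`
/// means nothing has been created and nothing should be.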
enum CachedLlbb<T> {
    None,

    Some(T),

    Skip,
}

type PerLocalVarDebugInfoIndexVec<'tcx, V> =
    IndexVec<mir::Local, Vec<PerLocalVarDebugInfo<'tcx, V>>>;

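/// Master context for codegenning from MIR.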
pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
    instance: Instance<'tcx>,

    mir: &'tcx mir::Body<'tcx>,

    debug_context: Option<FunctionDebugContext<'tcx, Bx::DIScope, Bx::DILocation>>,

    llfn: Bx::Function,

    cx: &'a Bx::CodegenCx,

    fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>,

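    /// When unwinding is initiated, we store the personality value here so it
    /// can be loaded again and reused by the resume instruction. This is an
    /// alloca-backed slot, created on demand.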
    personality_slot: Option<PlaceRef<'tcx, Bx::Value>>,

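    /// A backend `BasicBlock` for each MIR `BasicBlock`, created lazily
    /// as-needed (e.g. when the reverse postorder, or another block branching
    /// to it, first reaches it).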
    cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>>,

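    /// The funclet status of each basic block. Only computed when the target
    /// uses the newer exception-handling instructions (e.g. MSVC-style EH).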
    cleanup_kinds: Option<IndexVec<mir::BasicBlock, analyze::CleanupKind>>,

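    /// The cleanup funclet for each funclet head block, when using MSVC-style
    /// exception handling; initialized together with the corresponding entry
    /// in `landing_pads`.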
    funclets: IndexVec<mir::BasicBlock, Option<Bx::Funclet>>,

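    /// The cached landing/cleanup pad block for a given basic block, if any.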
    landing_pads: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,

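    /// A cached block containing nothing but an `unreachable` terminator.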
    unreachable_block: Option<Bx::BasicBlock>,

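    /// A cached block that aborts when unwinding must terminate, together
    /// with the reason for termination.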
    terminate_block: Option<(Bx::BasicBlock, UnwindTerminateReason)>,

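    /// A flag for each basic block indicating whether it is cold, i.e.
    /// unlikely to be executed at runtime (see `find_cold_blocks`).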
    cold_blocks: IndexVec<mir::BasicBlock, bool>,

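    /// The location where each MIR arg/var/tmp/ret is stored. This is usually
    /// a `PlaceRef` representing an alloca, but when a local never has its
    /// address taken it can instead be stored directly as an `OperandRef`,
    /// which makes for tighter IR (see `analyze::non_ssa_locals`).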
    locals: locals::Locals<'tcx, Bx::Value>,

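    /// All `VarDebugInfo` from the MIR body, partitioned by `Local`.
    /// This is `None` if no variable debuginfo is needed.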
    per_local_var_debug_info: Option<PerLocalVarDebugInfoIndexVec<'tcx, Bx::DIVariable>>,

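    /// Caller location propagated if this function has `#[track_caller]`.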
    caller_location: Option<OperandRef<'tcx, Bx::Value>>,
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
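    /// Instantiates `value` with this function's generic arguments and
    /// normalizes away any regions, yielding a fully monomorphic value.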
    pub fn monomorphize<T>(&self, value: T) -> T
    where
        T: Copy + TypeFoldable<TyCtxt<'tcx>>,
    {
        debug!("monomorphize: self.instance={:?}", self.instance);
        self.instance.instantiate_mir_and_normalize_erasing_regions(
            self.cx.tcx(),
            self.cx.typing_env(),
            ty::EarlyBinder::bind(value),
        )
    }
}

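/// The backend storage for a single MIR local.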
enum LocalRef<'tcx, V> {
    Place(PlaceRef<'tcx, V>),
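    /// `UnsizedPlace(p)`: `p` itself is a thin pointer (indirect place);
    /// `*p` is the wide pointer that references the actual unsized place.
    /// Every time it is initialized, we have to reallocate the place and
    /// update the wide pointer, which is why it is indirect.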
    UnsizedPlace(PlaceRef<'tcx, V>),
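    /// The backend [`OperandValue`] has already been generated.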
    Operand(OperandRef<'tcx, V>),
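    /// Will be a `Self::Operand` once we get to its definition.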
    PendingOperand,
}

impl<'tcx, V: CodegenObject> LocalRef<'tcx, V> {
    fn new_operand(layout: TyAndLayout<'tcx>) -> LocalRef<'tcx, V> {
        if layout.is_zst() {
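            // Zero-size temporaries aren't always initialized, which doesn't
            // matter because they don't contain data, but we need something
            // sufficiently aligned in the operand.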
            LocalRef::Operand(OperandRef::zero_sized(layout))
        } else {
            LocalRef::PendingOperand
        }
    }
}

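/// Codegens the body of `instance`, emitting backend IR for every reachable
/// basic block into the backend function obtained from `cx.get_fn(instance)`.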
#[instrument(level = "debug", skip(cx))]
pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    cx: &'a Bx::CodegenCx,
    instance: Instance<'tcx>,
) {
    assert!(!instance.args.has_infer());

    let tcx = cx.tcx();
    let llfn = cx.get_fn(instance);

    let mut mir = tcx.instance_mir(instance.def);

    let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty());
    debug!("fn_abi: {:?}", fn_abi);

    if tcx.features().ergonomic_clones() {
        let monomorphized_mir = instance.instantiate_mir_and_normalize_erasing_regions(
            tcx,
            ty::TypingEnv::fully_monomorphized(),
            ty::EarlyBinder::bind(mir.clone()),
        );
        mir = tcx.arena.alloc(optimize_use_clone::<Bx>(cx, monomorphized_mir));
    }

    let debug_context = cx.create_function_debug_context(instance, fn_abi, llfn, &mir);

    let start_llbb = Bx::append_block(cx, llfn, "start");
    let mut start_bx = Bx::build(cx, start_llbb);

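    // If the function needs cleanup blocks or can terminate on unwind, it
    // requires an exception-handling personality function.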
    if mir.basic_blocks.iter().any(|bb| {
        bb.is_cleanup || matches!(bb.terminator().unwind(), Some(mir::UnwindAction::Terminate(_)))
    }) {
        start_bx.set_personality_fn(cx.eh_personality());
    }

    let cleanup_kinds =
        base::wants_new_eh_instructions(tcx.sess).then(|| analyze::cleanup_kinds(&mir));

    let cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>> =
        mir.basic_blocks
            .indices()
            .map(|bb| {
                if bb == mir::START_BLOCK { CachedLlbb::Some(start_llbb) } else { CachedLlbb::None }
            })
            .collect();

    let mut fx = FunctionCx {
        instance,
        mir,
        llfn,
        fn_abi,
        cx,
        personality_slot: None,
        cached_llbbs,
        unreachable_block: None,
        terminate_block: None,
        cleanup_kinds,
        landing_pads: IndexVec::from_elem(None, &mir.basic_blocks),
        funclets: IndexVec::from_fn_n(|_| None, mir.basic_blocks.len()),
        cold_blocks: find_cold_blocks(tcx, mir),
        locals: locals::Locals::empty(),
        debug_context,
        per_local_var_debug_info: None,
        caller_location: None,
    };

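    // Compute debuginfo for every user variable (and for constants referenced
    // by debuginfo) up front, before locals are allocated.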
    let (per_local_var_debug_info, consts_debug_info) =
        fx.compute_per_local_var_debug_info(&mut start_bx).unzip();
    fx.per_local_var_debug_info = per_local_var_debug_info;

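    // Compute the set of locals that need a memory location (alloca) rather
    // than being kept as pure SSA operands, based on the blocks that will
    // actually be codegenned.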
    let traversal_order = traversal::mono_reachable_reverse_postorder(mir, tcx, instance);
    let memory_locals = analyze::non_ssa_locals(&fx, &traversal_order);

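    // Allocate a `LocalRef` for every local: the return place first, then the
    // arguments, then all other vars and temps.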
    let local_values = {
        let args = arg_local_refs(&mut start_bx, &mut fx, &memory_locals);

        let mut allocate_local = |local: Local| {
            let decl = &mir.local_decls[local];
            let layout = start_bx.layout_of(fx.monomorphize(decl.ty));
            assert!(!layout.ty.has_erasable_regions());

            if local == mir::RETURN_PLACE {
                match fx.fn_abi.ret.mode {
                    PassMode::Indirect { .. } => {
                        debug!("alloc: {:?} (return place) -> place", local);
                        let llretptr = start_bx.get_param(0);
                        return LocalRef::Place(PlaceRef::new_sized(llretptr, layout));
                    }
                    PassMode::Cast { ref cast, .. } => {
                        debug!("alloc: {:?} (return place) -> place", local);
                        let size = cast.size(&start_bx).max(layout.size);
                        return LocalRef::Place(PlaceRef::alloca_size(&mut start_bx, size, layout));
                    }
                    _ => {}
                };
            }

            if memory_locals.contains(local) {
                debug!("alloc: {:?} -> place", local);
                if layout.is_unsized() {
                    LocalRef::UnsizedPlace(PlaceRef::alloca_unsized_indirect(&mut start_bx, layout))
                } else {
                    LocalRef::Place(PlaceRef::alloca(&mut start_bx, layout))
                }
            } else {
                debug!("alloc: {:?} -> operand", local);
                LocalRef::new_operand(layout)
            }
        };

        let retptr = allocate_local(mir::RETURN_PLACE);
        iter::once(retptr)
            .chain(args.into_iter())
            .chain(mir.vars_and_temps_iter().map(allocate_local))
            .collect()
    };
    fx.initialize_locals(local_values);

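    // Apply debuginfo to the newly allocated locals.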
    fx.debug_introduce_locals(&mut start_bx, consts_debug_info.unwrap_or_default());

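    // If the backend supports coverage and it is enabled for this function,
    // do any necessary start-of-function codegen (e.g. locals for MC/DC
    // condition bitmaps).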
    start_bx.init_coverage(instance);

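    // Basic blocks get their own builders in `codegen_block`, so drop the
    // start builder to avoid having two live builders at once.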
    drop(start_bx);

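    // Codegen the body of each reachable block using our reverse postorder
    // list, keeping track of which blocks were actually emitted.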
    let mut unreached_blocks = DenseBitSet::new_filled(mir.basic_blocks.len());
    for bb in traversal_order {
        fx.codegen_block(bb);
        unreached_blocks.remove(bb);
    }

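    // Blocks skipped above can still be branch targets in the emitted IR
    // (e.g. from a `SwitchInt` arm that is dead after monomorphization), so
    // emit them as explicitly unreachable blocks.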
    for bb in unreached_blocks.iter() {
        fx.codegen_block_as_unreachable(bb);
    }
}

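/// Rewrites `.use`-desugared calls to `Clone::clone` on `&T` arguments where
/// `T: Copy` into direct copies of the dereferenced argument, replacing the
/// call terminator with a plain `Goto`.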
fn optimize_use_clone<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    cx: &'a Bx::CodegenCx,
    mut mir: Body<'tcx>,
) -> Body<'tcx> {
    let tcx = cx.tcx();

    if tcx.features().ergonomic_clones() {
        for bb in mir.basic_blocks.as_mut() {
            let mir::TerminatorKind::Call {
                args,
                destination,
                target,
                call_source: mir::CallSource::Use,
                ..
            } = &bb.terminator().kind
            else {
                continue;
            };

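            // `Clone::clone` takes exactly one argument: `&self`.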
            assert_eq!(args.len(), 1);
            let arg = &args[0];

            let arg_ty = arg.node.ty(&mir.local_decls, tcx);

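            // Only a `&T` argument where `T: Copy` can be turned into a copy.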
            let ty::Ref(_region, inner_ty, mir::Mutability::Not) = *arg_ty.kind() else { continue };

            if !tcx.type_is_copy_modulo_regions(cx.typing_env(), inner_ty) {
                continue;
            }

            let Some(arg_place) = arg.node.place() else { continue };

            let destination_block = target.unwrap();

            bb.statements.push(mir::Statement::new(
                bb.terminator().source_info,
                mir::StatementKind::Assign(Box::new((
                    *destination,
                    mir::Rvalue::Use(mir::Operand::Copy(
                        arg_place.project_deeper(&[mir::ProjectionElem::Deref], tcx),
                    )),
                ))),
            ));

            bb.terminator_mut().kind = mir::TerminatorKind::Goto { target: destination_block };
        }
    }

    mir
}

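/// Produces a `LocalRef` for each of the function's formal arguments, reading
/// them from the backend's ABI-level parameters. As a side effect, records the
/// caller location parameter for `#[track_caller]` instances.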
fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    fx: &mut FunctionCx<'a, 'tcx, Bx>,
    memory_locals: &DenseBitSet<mir::Local>,
) -> Vec<LocalRef<'tcx, Bx::Value>> {
    let mir = fx.mir;
    let mut idx = 0;
    let mut llarg_idx = fx.fn_abi.ret.is_indirect() as usize;

    let mut num_untupled = None;

    let codegen_fn_attrs = bx.tcx().codegen_fn_attrs(fx.instance.def_id());
    let naked = codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NAKED);
    if naked {
        return vec![];
    }

    let args = mir
        .args_iter()
        .enumerate()
        .map(|(arg_index, local)| {
            let arg_decl = &mir.local_decls[local];
            let arg_ty = fx.monomorphize(arg_decl.ty);

            if Some(local) == mir.spread_arg {
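                // This argument (e.g., the last argument in the "rust-call"
                // ABI) is a tuple that was spread at the ABI level and now
                // has to be reconstructed into a tuple local variable from
                // multiple individual backend function arguments.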
                let ty::Tuple(tupled_arg_tys) = arg_ty.kind() else {
                    bug!("spread argument isn't a tuple?!");
                };

                let layout = bx.layout_of(arg_ty);

                if layout.is_unsized() {
                    span_bug!(
                        arg_decl.source_info.span,
                        "\"rust-call\" ABI does not support unsized params",
                    );
                }

                let place = PlaceRef::alloca(bx, layout);
                for i in 0..tupled_arg_tys.len() {
                    let arg = &fx.fn_abi.args[idx];
                    idx += 1;
                    if let PassMode::Cast { pad_i32: true, .. } = arg.mode {
                        llarg_idx += 1;
                    }
                    let pr_field = place.project_field(bx, i);
                    bx.store_fn_arg(arg, &mut llarg_idx, pr_field);
                }
                assert_eq!(
                    None,
                    num_untupled.replace(tupled_arg_tys.len()),
                    "Replaced existing num_untupled"
                );

                return LocalRef::Place(place);
            }

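            // The last "argument" of a C-variadic function is the va_list
            // itself, which has no entry in `fn_abi.args`; allocate it and
            // initialize it with `va_start`.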
            if fx.fn_abi.c_variadic && arg_index == fx.fn_abi.args.len() {
                let va_list = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
                bx.va_start(va_list.val.llval);

                return LocalRef::Place(va_list);
            }

            let arg = &fx.fn_abi.args[idx];
            idx += 1;
            if let PassMode::Cast { pad_i32: true, .. } = arg.mode {
                llarg_idx += 1;
            }

            if !memory_locals.contains(local) {
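                // Non-memory locals don't need an alloca: simple pass modes
                // can be read directly into an operand.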
                let local = |op| LocalRef::Operand(op);
                match arg.mode {
                    PassMode::Ignore => {
                        return local(OperandRef::zero_sized(arg.layout));
                    }
                    PassMode::Direct(_) => {
                        let llarg = bx.get_param(llarg_idx);
                        llarg_idx += 1;
                        return local(OperandRef::from_immediate_or_packed_pair(
                            bx, llarg, arg.layout,
                        ));
                    }
                    PassMode::Pair(..) => {
                        let (a, b) = (bx.get_param(llarg_idx), bx.get_param(llarg_idx + 1));
                        llarg_idx += 2;

                        return local(OperandRef {
                            val: OperandValue::Pair(a, b),
                            layout: arg.layout,
                        });
                    }
                    _ => {}
                }
            }

            match arg.mode {
                PassMode::Indirect { attrs, meta_attrs: None, on_stack: _ } => {
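                    // Don't copy an indirect argument to an alloca; the caller
                    // already put it in a temporary alloca and gave it up.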
                    if let Some(pointee_align) = attrs.pointee_align
                        && pointee_align < arg.layout.align.abi
                    {
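                        // ...unless the argument is underaligned: then we must
                        // copy it into a sufficiently aligned alloca.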
                        let tmp = PlaceRef::alloca(bx, arg.layout);
                        bx.store_fn_arg(arg, &mut llarg_idx, tmp);
                        LocalRef::Place(tmp)
                    } else {
                        let llarg = bx.get_param(llarg_idx);
                        llarg_idx += 1;
                        LocalRef::Place(PlaceRef::new_sized(llarg, arg.layout))
                    }
                }
                PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
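                    // An unsized indirect argument is passed as a pair of
                    // parameters: a pointer to the data plus its metadata.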
                    let llarg = bx.get_param(llarg_idx);
                    llarg_idx += 1;
                    let llextra = bx.get_param(llarg_idx);
                    llarg_idx += 1;
                    let indirect_operand = OperandValue::Pair(llarg, llextra);

                    let tmp = PlaceRef::alloca_unsized_indirect(bx, arg.layout);
                    indirect_operand.store(bx, tmp);
                    LocalRef::UnsizedPlace(tmp)
                }
                _ => {
                    let tmp = PlaceRef::alloca(bx, arg.layout);
                    bx.store_fn_arg(arg, &mut llarg_idx, tmp);
                    LocalRef::Place(tmp)
                }
            }
        })
        .collect::<Vec<_>>();

    if fx.instance.def.requires_caller_location(bx.tcx()) {
        let mir_args = if let Some(num_untupled) = num_untupled {
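            // Subtract off the tupled argument that gets 'expanded'.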
            args.len() - 1 + num_untupled
        } else {
            args.len()
        };
        assert_eq!(
            fx.fn_abi.args.len(),
            mir_args + 1,
            "#[track_caller] instance {:?} must have 1 more argument in their ABI than in their MIR",
            fx.instance
        );

        let arg = fx.fn_abi.args.last().unwrap();
        match arg.mode {
            PassMode::Direct(_) => (),
            _ => bug!("caller location must be PassMode::Direct, found {:?}", arg.mode),
        }

        fx.caller_location = Some(OperandRef {
            val: OperandValue::Immediate(bx.get_param(llarg_idx)),
            layout: arg.layout,
        });
    }

    args
}

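/// Computes which basic blocks are cold: blocks that call a `#[cold]`
/// function, end in `unreachable`, or branch only to other cold blocks.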
fn find_cold_blocks<'tcx>(
    tcx: TyCtxt<'tcx>,
    mir: &mir::Body<'tcx>,
) -> IndexVec<mir::BasicBlock, bool> {
    let local_decls = &mir.local_decls;

    let mut cold_blocks: IndexVec<mir::BasicBlock, bool> =
        IndexVec::from_elem(false, &mir.basic_blocks);

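    // Visit the CFG in postorder so that (back edges aside) a block's
    // successors are classified before the block itself.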
    for (bb, bb_data) in traversal::postorder(mir) {
        let terminator = bb_data.terminator();

        match terminator.kind {
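            // If a BB ends with a call to a cold function, mark it as cold.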
            mir::TerminatorKind::Call { ref func, .. }
            | mir::TerminatorKind::TailCall { ref func, .. }
                if let ty::FnDef(def_id, ..) = *func.ty(local_decls, tcx).kind()
                    && let attrs = tcx.codegen_fn_attrs(def_id)
                    && attrs.flags.contains(CodegenFnAttrFlags::COLD) =>
            {
                cold_blocks[bb] = true;
                continue;
            }

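            // If a BB ends with an `unreachable`, also mark it as cold.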
            mir::TerminatorKind::Unreachable => {
                cold_blocks[bb] = true;
                continue;
            }

            _ => {}
        }

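        // If there is at least one successor and all successors are cold,
        // mark this block as cold too.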
        let mut succ = terminator.successors();
        if let Some(first) = succ.next()
            && cold_blocks[first]
            && succ.all(|s| cold_blocks[s])
        {
            cold_blocks[bb] = true;
        }
    }

    cold_blocks
}