use std::cmp;

use rustc_abi::{Align, BackendRepr, ExternAbi, HasDataLayout, Reg, Size, WrappingRange};
use rustc_ast as ast;
use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece};
use rustc_data_structures::packed::Pu128;
use rustc_hir::lang_items::LangItem;
use rustc_lint_defs::builtin::TAIL_CALL_TRACK_CALLER;
use rustc_middle::mir::{self, AssertKind, InlineAsmMacro, SwitchTargets, UnwindTerminateReason};
use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf, ValidityRequirement};
use rustc_middle::ty::print::{with_no_trimmed_paths, with_no_visible_paths};
use rustc_middle::ty::{self, Instance, Ty, TypeVisitableExt};
use rustc_middle::{bug, span_bug};
use rustc_session::config::OptLevel;
use rustc_span::Span;
use rustc_span::source_map::Spanned;
use rustc_target::callconv::{ArgAbi, ArgAttributes, CastTarget, FnAbi, PassMode};
use tracing::{debug, info};

use super::operand::OperandRef;
use super::operand::OperandValue::{Immediate, Pair, Ref, ZeroSized};
use super::place::{PlaceRef, PlaceValue};
use super::{CachedLlbb, FunctionCx, LocalRef};
use crate::base::{self, is_call_from_compiler_builtins_to_upstream_monomorphization};
use crate::common::{self, IntPredicate};
use crate::errors::CompilerBuiltinsCannotCall;
use crate::traits::*;
use crate::{MemFlags, meth};

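/// Whether a terminator's codegen was able to "merge" its (sole) successor
/// into the current backend block, so that no branch instruction was emitted
/// and codegen can simply continue with the successor's MIR.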
#[derive(Debug, PartialEq)]
enum MergingSucc {
    False,
    True,
}

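/// Whether a call terminator is an ordinary call or a guaranteed tail call.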
#[derive(Debug, PartialEq)]
enum CallKind {
    Normal,
    Tail,
}

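/// The basic block and terminator currently being lowered, bundled with
/// helpers for the funclet/landing-pad bookkeeping that several terminator
/// kinds share.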
struct TerminatorCodegenHelper<'tcx> {
    bb: mir::BasicBlock,
    terminator: &'tcx mir::Terminator<'tcx>,
}

impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
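    /// Returns the funclet that `self.bb` belongs to, if any, lazily building
    /// the corresponding landing pad (which also fills in the `funclets`
    /// entry) on first use.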
    fn funclet<'b, Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        fx: &'b mut FunctionCx<'a, 'tcx, Bx>,
    ) -> Option<&'b Bx::Funclet> {
        let cleanup_kinds = fx.cleanup_kinds.as_ref()?;
        let funclet_bb = cleanup_kinds[self.bb].funclet_bb(self.bb)?;
        if fx.funclets[funclet_bb].is_none() {
            fx.landing_pad_for(funclet_bb);
        }
        Some(
            fx.funclets[funclet_bb]
                .as_ref()
                .expect("landing_pad_for didn't also create funclets entry"),
        )
    }

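    /// Returns the backend block for `target`, routing through a landing pad
    /// or a fresh `cleanupret` trampoline block when the edge from `self.bb`
    /// crosses a cleanup boundary.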
    fn llbb_with_cleanup<Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        target: mir::BasicBlock,
    ) -> Bx::BasicBlock {
        let (needs_landing_pad, is_cleanupret) = self.llbb_characteristics(fx, target);
        let mut lltarget = fx.llbb(target);
        if needs_landing_pad {
            lltarget = fx.landing_pad_for(target);
        }
        if is_cleanupret {
            assert!(base::wants_new_eh_instructions(fx.cx.tcx().sess));
            debug!("llbb_with_cleanup: creating cleanup trampoline for {:?}", target);
            let name = &format!("{:?}_cleanup_trampoline_{:?}", self.bb, target);
            let trampoline_llbb = Bx::append_block(fx.cx, fx.llfn, name);
            let mut trampoline_bx = Bx::build(fx.cx, trampoline_llbb);
            trampoline_bx.cleanup_ret(self.funclet(fx).unwrap(), Some(lltarget));
            trampoline_llbb
        } else {
            lltarget
        }
    }

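    /// For the edge from `self.bb` to `target`, computes whether the jump
    /// needs to go through a landing pad and whether it must be emitted as a
    /// `cleanupret` (i.e. it exits the current funclet).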
    fn llbb_characteristics<Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        target: mir::BasicBlock,
    ) -> (bool, bool) {
        if let Some(ref cleanup_kinds) = fx.cleanup_kinds {
            let funclet_bb = cleanup_kinds[self.bb].funclet_bb(self.bb);
            let target_funclet = cleanup_kinds[target].funclet_bb(target);
            let (needs_landing_pad, is_cleanupret) = match (funclet_bb, target_funclet) {
                (None, None) => (false, false),
                (None, Some(_)) => (true, false),
                (Some(f), Some(t_f)) => (f != t_f, f != t_f),
                (Some(_), None) => {
                    let span = self.terminator.source_info.span;
                    span_bug!(span, "{:?} - jump out of cleanup?", self.terminator);
                }
            };
            (needs_landing_pad, is_cleanupret)
        } else {
            let needs_landing_pad = !fx.mir[self.bb].is_cleanup && fx.mir[target].is_cleanup;
            let is_cleanupret = false;
            (needs_landing_pad, is_cleanupret)
        }
    }

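    /// Branches to `target`, or returns `MergingSucc::True` to signal that no
    /// branch is needed because the successor will be merged into the current
    /// block.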
    fn funclet_br<Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        bx: &mut Bx,
        target: mir::BasicBlock,
        mergeable_succ: bool,
    ) -> MergingSucc {
        let (needs_landing_pad, is_cleanupret) = self.llbb_characteristics(fx, target);
        if mergeable_succ && !needs_landing_pad && !is_cleanupret {
            MergingSucc::True
        } else {
            let mut lltarget = fx.llbb(target);
            if needs_landing_pad {
                lltarget = fx.landing_pad_for(target);
            }
            if is_cleanupret {
                bx.cleanup_ret(self.funclet(fx).unwrap(), Some(lltarget));
            } else {
                bx.br(lltarget);
            }
            MergingSucc::False
        }
    }

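    /// Codegens a call (or tail call): emits `invoke` when an unwind edge is
    /// needed and `call` otherwise, stores the return value, ends argument
    /// temporaries' lifetimes, and branches to the destination if there is
    /// one.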
    fn do_call<Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        bx: &mut Bx,
        fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>,
        fn_ptr: Bx::Value,
        llargs: &[Bx::Value],
        destination: Option<(ReturnDest<'tcx, Bx::Value>, mir::BasicBlock)>,
        mut unwind: mir::UnwindAction,
        lifetime_ends_after_call: &[(Bx::Value, Size)],
        instance: Option<Instance<'tcx>>,
        kind: CallKind,
        mergeable_succ: bool,
    ) -> MergingSucc {
        let tcx = bx.tcx();
        if let Some(instance) = instance
            && is_call_from_compiler_builtins_to_upstream_monomorphization(tcx, instance)
        {
            if destination.is_some() {
                let caller_def = fx.instance.def_id();
                let e = CompilerBuiltinsCannotCall {
                    span: tcx.def_span(caller_def),
                    caller: with_no_trimmed_paths!(tcx.def_path_str(caller_def)),
                    callee: with_no_trimmed_paths!(tcx.def_path_str(instance.def_id())),
                };
                tcx.dcx().emit_err(e);
            } else {
                info!(
                    "compiler_builtins call to diverging function {:?} replaced with abort",
                    instance.def_id()
                );
                bx.abort();
                bx.unreachable();
                return MergingSucc::False;
            }
        }

        let fn_ty = bx.fn_decl_backend_type(fn_abi);

        let fn_attrs = if bx.tcx().def_kind(fx.instance.def_id()).has_codegen_attrs() {
            Some(bx.tcx().codegen_instance_attrs(fx.instance.def))
        } else {
            None
        };
        let fn_attrs = fn_attrs.as_deref();

        if !fn_abi.can_unwind {
            unwind = mir::UnwindAction::Unreachable;
        }

        let unwind_block = match unwind {
            mir::UnwindAction::Cleanup(cleanup) => Some(self.llbb_with_cleanup(fx, cleanup)),
            mir::UnwindAction::Continue => None,
            mir::UnwindAction::Unreachable => None,
            mir::UnwindAction::Terminate(reason) => {
                if fx.mir[self.bb].is_cleanup && base::wants_new_eh_instructions(fx.cx.tcx().sess) {
                    None
                } else {
                    Some(fx.terminate_block(reason))
                }
            }
        };

        if kind == CallKind::Tail {
            bx.tail_call(fn_ty, fn_attrs, fn_abi, fn_ptr, llargs, self.funclet(fx), instance);
            return MergingSucc::False;
        }

        if let Some(unwind_block) = unwind_block {
            let ret_llbb = if let Some((_, target)) = destination {
                fx.llbb(target)
            } else {
                fx.unreachable_block()
            };
            let invokeret = bx.invoke(
                fn_ty,
                fn_attrs,
                Some(fn_abi),
                fn_ptr,
                llargs,
                ret_llbb,
                unwind_block,
                self.funclet(fx),
                instance,
            );
            if fx.mir[self.bb].is_cleanup {
                bx.apply_attrs_to_cleanup_callsite(invokeret);
            }

            if let Some((ret_dest, target)) = destination {
                bx.switch_to_block(fx.llbb(target));
                fx.set_debug_loc(bx, self.terminator.source_info);
                for &(tmp, size) in lifetime_ends_after_call {
                    bx.lifetime_end(tmp, size);
                }
                fx.store_return(bx, ret_dest, &fn_abi.ret, invokeret);
            }
            MergingSucc::False
        } else {
            let llret =
                bx.call(fn_ty, fn_attrs, Some(fn_abi), fn_ptr, llargs, self.funclet(fx), instance);
            if fx.mir[self.bb].is_cleanup {
                bx.apply_attrs_to_cleanup_callsite(llret);
            }

            if let Some((ret_dest, target)) = destination {
                for &(tmp, size) in lifetime_ends_after_call {
                    bx.lifetime_end(tmp, size);
                }
                fx.store_return(bx, ret_dest, &fn_abi.ret, llret);
                self.funclet_br(fx, bx, target, mergeable_succ)
            } else {
                bx.unreachable();
                MergingSucc::False
            }
        }
    }

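    /// Codegens an inline-asm terminator, wiring up any `label` block targets
    /// and the unwind edge when the asm may unwind.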
    fn do_inlineasm<Bx: BuilderMethods<'a, 'tcx>>(
        &self,
        fx: &mut FunctionCx<'a, 'tcx, Bx>,
        bx: &mut Bx,
        template: &[InlineAsmTemplatePiece],
        operands: &[InlineAsmOperandRef<'tcx, Bx>],
        options: InlineAsmOptions,
        line_spans: &[Span],
        destination: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
        instance: Instance<'_>,
        mergeable_succ: bool,
    ) -> MergingSucc {
        let unwind_target = match unwind {
            mir::UnwindAction::Cleanup(cleanup) => Some(self.llbb_with_cleanup(fx, cleanup)),
            mir::UnwindAction::Terminate(reason) => Some(fx.terminate_block(reason)),
            mir::UnwindAction::Continue => None,
            mir::UnwindAction::Unreachable => None,
        };

        if operands.iter().any(|x| matches!(x, InlineAsmOperandRef::Label { .. })) {
            assert!(unwind_target.is_none());
            let ret_llbb = if let Some(target) = destination {
                fx.llbb(target)
            } else {
                fx.unreachable_block()
            };

            bx.codegen_inline_asm(
                template,
                operands,
                options,
                line_spans,
                instance,
                Some(ret_llbb),
                None,
            );
            MergingSucc::False
        } else if let Some(cleanup) = unwind_target {
            let ret_llbb = if let Some(target) = destination {
                fx.llbb(target)
            } else {
                fx.unreachable_block()
            };

            bx.codegen_inline_asm(
                template,
                operands,
                options,
                line_spans,
                instance,
                Some(ret_llbb),
                Some((cleanup, self.funclet(fx))),
            );
            MergingSucc::False
        } else {
            bx.codegen_inline_asm(template, operands, options, line_spans, instance, None, None);

            if let Some(target) = destination {
                self.funclet_br(fx, bx, target, mergeable_succ)
            } else {
                bx.unreachable();
                MergingSucc::False
            }
        }
    }
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
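    /// Codegens `UnwindResume`: a `cleanupret` under funclet-based EH,
    /// otherwise reloads the exception pair from the personality slot and
    /// resumes unwinding.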
    fn codegen_resume_terminator(&mut self, helper: TerminatorCodegenHelper<'tcx>, bx: &mut Bx) {
        if let Some(funclet) = helper.funclet(self) {
            bx.cleanup_ret(funclet, None);
        } else {
            let slot = self.get_personality_slot(bx);
            let exn0 = slot.project_field(bx, 0);
            let exn0 = bx.load_operand(exn0).immediate();
            let exn1 = slot.project_field(bx, 1);
            let exn1 = bx.load_operand(exn1).immediate();
            slot.storage_dead(bx);

            bx.resume(exn0, exn1);
        }
    }

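    /// Codegens `SwitchInt`: constant discriminants become a direct branch;
    /// two-way switches become conditional branches (with hot/cold expectation
    /// metadata where known); everything else becomes a `switch`, weighted
    /// when only some targets are cold.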
    fn codegen_switchint_terminator(
        &mut self,
        helper: TerminatorCodegenHelper<'tcx>,
        bx: &mut Bx,
        discr: &mir::Operand<'tcx>,
        targets: &SwitchTargets,
    ) {
        let discr = self.codegen_operand(bx, discr);
        let discr_value = discr.immediate();
        let switch_ty = discr.layout.ty;
        if let Some(const_discr) = bx.const_to_opt_u128(discr_value, false) {
            let target = targets.target_for_value(const_discr);
            bx.br(helper.llbb_with_cleanup(self, target));
            return;
        }

        let mut target_iter = targets.iter();
        if target_iter.len() == 1 {
            let (test_value, target) = target_iter.next().unwrap();
            let otherwise = targets.otherwise();
            let lltarget = helper.llbb_with_cleanup(self, target);
            let llotherwise = helper.llbb_with_cleanup(self, otherwise);
            let target_cold = self.cold_blocks[target];
            let otherwise_cold = self.cold_blocks[otherwise];
            let expect = if target_cold == otherwise_cold { None } else { Some(otherwise_cold) };
            if switch_ty == bx.tcx().types.bool {
                match test_value {
                    0 => {
                        let expect = expect.map(|e| !e);
                        bx.cond_br_with_expect(discr_value, llotherwise, lltarget, expect);
                    }
                    1 => {
                        bx.cond_br_with_expect(discr_value, lltarget, llotherwise, expect);
                    }
                    _ => bug!(),
                }
            } else {
                let switch_llty = bx.immediate_backend_type(bx.layout_of(switch_ty));
                let llval = bx.const_uint_big(switch_llty, test_value);
                let cmp = bx.icmp(IntPredicate::IntEQ, discr_value, llval);
                bx.cond_br_with_expect(cmp, lltarget, llotherwise, expect);
            }
        } else if target_iter.len() == 2
            && self.mir[targets.otherwise()].is_empty_unreachable()
            && targets.all_values().contains(&Pu128(0))
            && targets.all_values().contains(&Pu128(1))
        {
            let true_bb = targets.target_for_value(1);
            let false_bb = targets.target_for_value(0);
            let true_ll = helper.llbb_with_cleanup(self, true_bb);
            let false_ll = helper.llbb_with_cleanup(self, false_bb);

            let expected_cond_value = if self.cx.sess().opts.optimize == OptLevel::No {
                None
            } else {
                match (self.cold_blocks[true_bb], self.cold_blocks[false_bb]) {
                    (true, true) | (false, false) => None,
                    (true, false) => Some(false),
                    (false, true) => Some(true),
                }
            };

            let bool_ty = bx.tcx().types.bool;
            let cond = if switch_ty == bool_ty {
                discr_value
            } else {
                let bool_llty = bx.immediate_backend_type(bx.layout_of(bool_ty));
                bx.unchecked_utrunc(discr_value, bool_llty)
            };
            bx.cond_br_with_expect(cond, true_ll, false_ll, expected_cond_value);
        } else if self.cx.sess().opts.optimize == OptLevel::No
            && target_iter.len() == 2
            && self.mir[targets.otherwise()].is_empty_unreachable()
        {
            let (test_value1, target1) = target_iter.next().unwrap();
            let (_test_value2, target2) = target_iter.next().unwrap();
            let ll1 = helper.llbb_with_cleanup(self, target1);
            let ll2 = helper.llbb_with_cleanup(self, target2);
            let switch_llty = bx.immediate_backend_type(bx.layout_of(switch_ty));
            let llval = bx.const_uint_big(switch_llty, test_value1);
            let cmp = bx.icmp(IntPredicate::IntEQ, discr_value, llval);
            bx.cond_br(cmp, ll1, ll2);
        } else {
            let otherwise = targets.otherwise();
            let otherwise_cold = self.cold_blocks[otherwise];
            let otherwise_unreachable = self.mir[otherwise].is_empty_unreachable();
            let cold_count = targets.iter().filter(|(_, target)| self.cold_blocks[*target]).count();
            let none_cold = cold_count == 0;
            let all_cold = cold_count == targets.iter().len();
            if (none_cold && (!otherwise_cold || otherwise_unreachable))
                || (all_cold && (otherwise_cold || otherwise_unreachable))
            {
                bx.switch(
                    discr_value,
                    helper.llbb_with_cleanup(self, targets.otherwise()),
                    target_iter
                        .map(|(value, target)| (value, helper.llbb_with_cleanup(self, target))),
                );
            } else {
                bx.switch_with_weights(
                    discr_value,
                    helper.llbb_with_cleanup(self, targets.otherwise()),
                    otherwise_cold,
                    target_iter.map(|(value, target)| {
                        (value, helper.llbb_with_cleanup(self, target), self.cold_blocks[target])
                    }),
                );
            }
        }
    }

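    /// Codegens `Return`: ends a C-variadic `VaList` if present, then returns
    /// the value loaded, packed, or cast as dictated by the return `PassMode`.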
    fn codegen_return_terminator(&mut self, bx: &mut Bx) {
        if self.fn_abi.c_variadic {
            let va_list_arg_idx = self.fn_abi.args.len();
            match self.locals[mir::Local::from_usize(1 + va_list_arg_idx)] {
                LocalRef::Place(va_list) => {
                    bx.va_end(va_list.val.llval);

                    bx.lifetime_end(va_list.val.llval, va_list.layout.size);
                }
                _ => bug!("C-variadic function must have a `VaList` place"),
            }
        }
        if self.fn_abi.ret.layout.is_uninhabited() {
            bx.abort();
            bx.unreachable();
            return;
        }
        let llval = match &self.fn_abi.ret.mode {
            PassMode::Ignore | PassMode::Indirect { .. } => {
                bx.ret_void();
                return;
            }

            PassMode::Direct(_) | PassMode::Pair(..) => {
                let op = self.codegen_consume(bx, mir::Place::return_place().as_ref());
                if let Ref(place_val) = op.val {
                    bx.load_from_place(bx.backend_type(op.layout), place_val)
                } else {
                    op.immediate_or_packed_pair(bx)
                }
            }

            PassMode::Cast { cast: cast_ty, pad_i32: _ } => {
                let op = match self.locals[mir::RETURN_PLACE] {
                    LocalRef::Operand(op) => op,
                    LocalRef::PendingOperand => bug!("use of return before def"),
                    LocalRef::Place(cg_place) => OperandRef {
                        val: Ref(cg_place.val),
                        layout: cg_place.layout,
                        move_annotation: None,
                    },
                    LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
                };
                let llslot = match op.val {
                    Immediate(_) | Pair(..) => {
                        let scratch = PlaceRef::alloca(bx, self.fn_abi.ret.layout);
                        op.val.store(bx, scratch);
                        scratch.val.llval
                    }
                    Ref(place_val) => {
                        assert_eq!(
                            place_val.align, op.layout.align.abi,
                            "return place is unaligned!"
                        );
                        place_val.llval
                    }
                    ZeroSized => bug!("ZST return value shouldn't be in PassMode::Cast"),
                };
                load_cast(bx, cast_ty, llslot, self.fn_abi.ret.layout.align.abi)
            }
        };
        bx.ret(llval);
    }

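    /// Codegens a `Drop` terminator by calling the `drop_in_place` glue; for
    /// trait objects the drop function is loaded from the vtable and may be
    /// null, in which case the call is skipped at runtime.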
    #[tracing::instrument(level = "trace", skip(self, helper, bx))]
    fn codegen_drop_terminator(
        &mut self,
        helper: TerminatorCodegenHelper<'tcx>,
        bx: &mut Bx,
        source_info: &mir::SourceInfo,
        location: mir::Place<'tcx>,
        target: mir::BasicBlock,
        unwind: mir::UnwindAction,
        mergeable_succ: bool,
    ) -> MergingSucc {
        let ty = location.ty(self.mir, bx.tcx()).ty;
        let ty = self.monomorphize(ty);
        let drop_fn = Instance::resolve_drop_in_place(bx.tcx(), ty);

        if let ty::InstanceKind::DropGlue(_, None) = drop_fn.def {
            return helper.funclet_br(self, bx, target, mergeable_succ);
        }

        let place = self.codegen_place(bx, location.as_ref());
        let (args1, args2);
        let mut args = if let Some(llextra) = place.val.llextra {
            args2 = [place.val.llval, llextra];
            &args2[..]
        } else {
            args1 = [place.val.llval];
            &args1[..]
        };
        let (maybe_null, drop_fn, fn_abi, drop_instance) = match ty.kind() {
            ty::Dynamic(_, _) => {
                let virtual_drop = Instance {
                    def: ty::InstanceKind::Virtual(drop_fn.def_id(), 0),
                    args: drop_fn.args,
                };
                debug!("ty = {:?}", ty);
                debug!("drop_fn = {:?}", drop_fn);
                debug!("args = {:?}", args);
                let fn_abi = bx.fn_abi_of_instance(virtual_drop, ty::List::empty());
                let vtable = args[1];
                args = &args[..1];
                (
                    true,
                    meth::VirtualIndex::from_index(ty::COMMON_VTABLE_ENTRIES_DROPINPLACE)
                        .get_optional_fn(bx, vtable, ty, fn_abi),
                    fn_abi,
                    virtual_drop,
                )
            }
            _ => (
                false,
                bx.get_fn_addr(drop_fn),
                bx.fn_abi_of_instance(drop_fn, ty::List::empty()),
                drop_fn,
            ),
        };

        if maybe_null {
            let is_not_null = bx.append_sibling_block("is_not_null");
            let llty = bx.fn_ptr_backend_type(fn_abi);
            let null = bx.const_null(llty);
            let non_null =
                bx.icmp(base::bin_op_to_icmp_predicate(mir::BinOp::Ne, false), drop_fn, null);
            bx.cond_br(non_null, is_not_null, helper.llbb_with_cleanup(self, target));
            bx.switch_to_block(is_not_null);
            self.set_debug_loc(bx, *source_info);
        }

        helper.do_call(
            self,
            bx,
            fn_abi,
            drop_fn,
            args,
            Some((ReturnDest::Nothing, target)),
            unwind,
            &[],
            Some(drop_instance),
            CallKind::Normal,
            !maybe_null && mergeable_succ,
        )
    }

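    /// Codegens an `Assert` terminator: unless the condition is statically
    /// known to hold, branches to a cold panic block that calls the panic
    /// lang item matching the assert message.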
    fn codegen_assert_terminator(
        &mut self,
        helper: TerminatorCodegenHelper<'tcx>,
        bx: &mut Bx,
        terminator: &mir::Terminator<'tcx>,
        cond: &mir::Operand<'tcx>,
        expected: bool,
        msg: &mir::AssertMessage<'tcx>,
        target: mir::BasicBlock,
        unwind: mir::UnwindAction,
        mergeable_succ: bool,
    ) -> MergingSucc {
        let span = terminator.source_info.span;
        let cond = self.codegen_operand(bx, cond).immediate();
        let mut const_cond = bx.const_to_opt_u128(cond, false).map(|c| c == 1);

        if !bx.sess().overflow_checks() && msg.is_optional_overflow_check() {
            const_cond = Some(expected);
        }

        if const_cond == Some(expected) {
            return helper.funclet_br(self, bx, target, mergeable_succ);
        }

        let lltarget = helper.llbb_with_cleanup(self, target);
        let panic_block = bx.append_sibling_block("panic");
        if expected {
            bx.cond_br(cond, lltarget, panic_block);
        } else {
            bx.cond_br(cond, panic_block, lltarget);
        }

        bx.switch_to_block(panic_block);
        self.set_debug_loc(bx, terminator.source_info);

        let location = self.get_caller_location(bx, terminator.source_info).immediate();

        let (lang_item, args) = match msg {
            AssertKind::BoundsCheck { len, index } => {
                let len = self.codegen_operand(bx, len).immediate();
                let index = self.codegen_operand(bx, index).immediate();
                (LangItem::PanicBoundsCheck, vec![index, len, location])
            }
            AssertKind::MisalignedPointerDereference { required, found } => {
                let required = self.codegen_operand(bx, required).immediate();
                let found = self.codegen_operand(bx, found).immediate();
                (LangItem::PanicMisalignedPointerDereference, vec![required, found, location])
            }
            AssertKind::NullPointerDereference => {
                (LangItem::PanicNullPointerDereference, vec![location])
            }
            AssertKind::InvalidEnumConstruction(source) => {
                let source = self.codegen_operand(bx, source).immediate();
                (LangItem::PanicInvalidEnumConstruction, vec![source, location])
            }
            _ => (msg.panic_function(), vec![location]),
        };

        let (fn_abi, llfn, instance) = common::build_langcall(bx, span, lang_item);

        let merging_succ = helper.do_call(
            self,
            bx,
            fn_abi,
            llfn,
            &args,
            None,
            unwind,
            &[],
            Some(instance),
            CallKind::Normal,
            false,
        );
        assert_eq!(merging_succ, MergingSucc::False);
        MergingSucc::False
    }

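    /// Codegens `UnwindTerminate` by calling the terminating lang item for
    /// `reason`; the call itself can never unwind.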
    fn codegen_terminate_terminator(
        &mut self,
        helper: TerminatorCodegenHelper<'tcx>,
        bx: &mut Bx,
        terminator: &mir::Terminator<'tcx>,
        reason: UnwindTerminateReason,
    ) {
        let span = terminator.source_info.span;
        self.set_debug_loc(bx, terminator.source_info);

        let (fn_abi, llfn, instance) = common::build_langcall(bx, span, reason.lang_item());

        let merging_succ = helper.do_call(
            self,
            bx,
            fn_abi,
            llfn,
            &[],
            None,
            mir::UnwindAction::Unreachable,
            &[],
            Some(instance),
            CallKind::Normal,
            false,
        );
        assert_eq!(merging_succ, MergingSucc::False);
    }

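    /// Handles the validity-checking `assert_*` intrinsics: returns `None` if
    /// `intrinsic` is not one of them, branches straight to `target` when the
    /// requirement is met, and otherwise calls the non-unwinding panic
    /// handler with a message describing the invalid instantiation.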
    fn codegen_panic_intrinsic(
        &mut self,
        helper: &TerminatorCodegenHelper<'tcx>,
        bx: &mut Bx,
        intrinsic: ty::IntrinsicDef,
        instance: Instance<'tcx>,
        source_info: mir::SourceInfo,
        target: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
        mergeable_succ: bool,
    ) -> Option<MergingSucc> {
        let Some(requirement) = ValidityRequirement::from_intrinsic(intrinsic.name) else {
            return None;
        };

        let ty = instance.args.type_at(0);

        let is_valid = bx
            .tcx()
            .check_validity_requirement((requirement, bx.typing_env().as_query_input(ty)))
            .expect("expect to have layout during codegen");

        if is_valid {
            let target = target.unwrap();
            return Some(helper.funclet_br(self, bx, target, mergeable_succ));
        }

        let layout = bx.layout_of(ty);

        let msg_str = with_no_visible_paths!({
            with_no_trimmed_paths!({
                if layout.is_uninhabited() {
                    format!("attempted to instantiate uninhabited type `{ty}`")
                } else if requirement == ValidityRequirement::Zero {
                    format!("attempted to zero-initialize type `{ty}`, which is invalid")
                } else {
                    format!("attempted to leave type `{ty}` uninitialized, which is invalid")
                }
            })
        });
        let msg = bx.const_str(&msg_str);

        let (fn_abi, llfn, instance) =
            common::build_langcall(bx, source_info.span, LangItem::PanicNounwind);

        Some(helper.do_call(
            self,
            bx,
            fn_abi,
            llfn,
            &[msg.0, msg.1],
            target.as_ref().map(|bb| (ReturnDest::Nothing, *bb)),
            unwind,
            &[],
            Some(instance),
            CallKind::Normal,
            mergeable_succ,
        ))
    }

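    /// Codegens `Call` and `TailCall` terminators: resolves the callee;
    /// special-cases no-op drop glue, intrinsics, virtual calls, and direct
    /// LLVM intrinsics; lowers the arguments (untupling for
    /// `extern "rust-call"` and appending the implicit `#[track_caller]`
    /// location); and finally emits the call itself.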
    fn codegen_call_terminator(
        &mut self,
        helper: TerminatorCodegenHelper<'tcx>,
        bx: &mut Bx,
        terminator: &mir::Terminator<'tcx>,
        func: &mir::Operand<'tcx>,
        args: &[Spanned<mir::Operand<'tcx>>],
        destination: mir::Place<'tcx>,
        target: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
        fn_span: Span,
        kind: CallKind,
        mergeable_succ: bool,
    ) -> MergingSucc {
        let source_info = mir::SourceInfo { span: fn_span, ..terminator.source_info };

        let callee = self.codegen_operand(bx, func);

        let (instance, mut llfn) = match *callee.layout.ty.kind() {
            ty::FnDef(def_id, generic_args) => {
                let instance = ty::Instance::expect_resolve(
                    bx.tcx(),
                    bx.typing_env(),
                    def_id,
                    generic_args,
                    fn_span,
                );

                match instance.def {
                    ty::InstanceKind::DropGlue(_, None) => {
                        let target = target.unwrap();
                        return helper.funclet_br(self, bx, target, mergeable_succ);
                    }
                    ty::InstanceKind::Intrinsic(def_id) => {
                        let intrinsic = bx.tcx().intrinsic(def_id).unwrap();
                        if let Some(merging_succ) = self.codegen_panic_intrinsic(
                            &helper,
                            bx,
                            intrinsic,
                            instance,
                            source_info,
                            target,
                            unwind,
                            mergeable_succ,
                        ) {
                            return merging_succ;
                        }

                        let result_layout =
                            self.cx.layout_of(self.monomorphized_place_ty(destination.as_ref()));

                        let (result, store_in_local) = if result_layout.is_zst() {
                            (
                                PlaceRef::new_sized(bx.const_undef(bx.type_ptr()), result_layout),
                                None,
                            )
                        } else if let Some(local) = destination.as_local() {
                            match self.locals[local] {
                                LocalRef::Place(dest) => (dest, None),
                                LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
                                LocalRef::PendingOperand => {
                                    let tmp = PlaceRef::alloca(bx, result_layout);
                                    tmp.storage_live(bx);
                                    (tmp, Some(local))
                                }
                                LocalRef::Operand(_) => {
                                    bug!("place local already assigned to");
                                }
                            }
                        } else {
                            (self.codegen_place(bx, destination.as_ref()), None)
                        };

                        if result.val.align < result.layout.align.abi {
                            span_bug!(self.mir.span, "can't directly store to unaligned value");
                        }

                        let args: Vec<_> =
                            args.iter().map(|arg| self.codegen_operand(bx, &arg.node)).collect();

                        match self.codegen_intrinsic_call(bx, instance, &args, result, source_info)
                        {
                            Ok(()) => {
                                if let Some(local) = store_in_local {
                                    let op = bx.load_operand(result);
                                    result.storage_dead(bx);
                                    self.overwrite_local(local, LocalRef::Operand(op));
                                    self.debug_introduce_local(bx, local);
                                }

                                return if let Some(target) = target {
                                    helper.funclet_br(self, bx, target, mergeable_succ)
                                } else {
                                    bx.unreachable();
                                    MergingSucc::False
                                };
                            }
                            Err(instance) => {
                                if intrinsic.must_be_overridden {
                                    span_bug!(
                                        fn_span,
                                        "intrinsic {} must be overridden by codegen backend, but isn't",
                                        intrinsic.name,
                                    );
                                }
                                (Some(instance), None)
                            }
                        }
                    }

                    _ if kind == CallKind::Tail
                        && instance.def.requires_caller_location(bx.tcx()) =>
                    {
                        if let Some(hir_id) =
                            terminator.source_info.scope.lint_root(&self.mir.source_scopes)
                        {
                            let msg = "tail calling a function marked with `#[track_caller]` has no special effect";
                            bx.tcx().node_lint(TAIL_CALL_TRACK_CALLER, hir_id, |d| {
                                _ = d.primary_message(msg).span(fn_span)
                            });
                        }

                        let instance = ty::Instance::resolve_for_fn_ptr(
                            bx.tcx(),
                            bx.typing_env(),
                            def_id,
                            generic_args,
                        )
                        .unwrap();

                        (None, Some(bx.get_fn_addr(instance)))
                    }
                    _ => (Some(instance), None),
                }
            }
            ty::FnPtr(..) => (None, Some(callee.immediate())),
            _ => bug!("{} is not callable", callee.layout.ty),
        };

        if let Some(instance) = instance
            && let Some(name) = bx.tcx().codegen_fn_attrs(instance.def_id()).symbol_name
            && name.as_str().starts_with("llvm.")
            && name.as_str() != "llvm.wasm.throw"
        {
            assert!(!instance.args.has_infer());
            assert!(!instance.args.has_escaping_bound_vars());

            let result_layout =
                self.cx.layout_of(self.monomorphized_place_ty(destination.as_ref()));

            let return_dest = if result_layout.is_zst() {
                ReturnDest::Nothing
            } else if let Some(index) = destination.as_local() {
                match self.locals[index] {
                    LocalRef::Place(dest) => ReturnDest::Store(dest),
                    LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
                    LocalRef::PendingOperand => ReturnDest::DirectOperand(index),
                    LocalRef::Operand(_) => bug!("place local already assigned to"),
                }
            } else {
                ReturnDest::Store(self.codegen_place(bx, destination.as_ref()))
            };

            let args =
                args.into_iter().map(|arg| self.codegen_operand(bx, &arg.node)).collect::<Vec<_>>();

            self.set_debug_loc(bx, source_info);

            let llret =
                bx.codegen_llvm_intrinsic_call(instance, &args, self.mir[helper.bb].is_cleanup);

            if let Some(target) = target {
                self.store_return(
                    bx,
                    return_dest,
                    &ArgAbi { layout: result_layout, mode: PassMode::Direct(ArgAttributes::new()) },
                    llret,
                );
                return helper.funclet_br(self, bx, target, mergeable_succ);
            } else {
                bx.unreachable();
                return MergingSucc::False;
            }
        }

        let sig = callee.layout.ty.fn_sig(bx.tcx());

        let extra_args = &args[sig.inputs().skip_binder().len()..];
        let extra_args = bx.tcx().mk_type_list_from_iter(extra_args.iter().map(|op_arg| {
            let op_ty = op_arg.node.ty(self.mir, bx.tcx());
            self.monomorphize(op_ty)
        }));

        let fn_abi = match instance {
            Some(instance) => bx.fn_abi_of_instance(instance, extra_args),
            None => bx.fn_abi_of_fn_ptr(sig, extra_args),
        };

        let arg_count = fn_abi.args.len() + fn_abi.ret.is_indirect() as usize;

        let mut llargs = Vec::with_capacity(arg_count);

        let destination = match kind {
            CallKind::Normal => {
                let return_dest = self.make_return_dest(bx, destination, &fn_abi.ret, &mut llargs);
                target.map(|target| (return_dest, target))
            }
            CallKind::Tail => {
                if fn_abi.ret.is_indirect() {
                    match self.make_return_dest(bx, destination, &fn_abi.ret, &mut llargs) {
                        ReturnDest::Nothing => {}
                        _ => bug!(
                            "tail calls to functions with indirect returns cannot store into a destination"
                        ),
                    }
                }
                None
            }
        };

        let (first_args, untuple) = if sig.abi() == ExternAbi::RustCall
            && let Some((tup, args)) = args.split_last()
        {
            (args, Some(tup))
        } else {
            (args, None)
        };

        let mut lifetime_ends_after_call: Vec<(Bx::Value, Size)> = Vec::new();
        'make_args: for (i, arg) in first_args.iter().enumerate() {
            if kind == CallKind::Tail && matches!(fn_abi.args[i].mode, PassMode::Indirect { .. }) {
                span_bug!(
                    fn_span,
                    "arguments using PassMode::Indirect are currently not supported for tail calls"
                );
            }

            let mut op = self.codegen_operand(bx, &arg.node);

            if let (0, Some(ty::InstanceKind::Virtual(_, idx))) = (i, instance.map(|i| i.def)) {
                match op.val {
                    Pair(data_ptr, meta) => {
                        while !op.layout.ty.is_raw_ptr() && !op.layout.ty.is_ref() {
                            let (idx, _) = op.layout.non_1zst_field(bx).expect(
                                "not exactly one non-1-ZST field in a `DispatchFromDyn` type",
                            );
                            op = op.extract_field(self, bx, idx.as_usize());
                        }

                        llfn = Some(meth::VirtualIndex::from_index(idx).get_fn(
                            bx,
                            meta,
                            op.layout.ty,
                            fn_abi,
                        ));
                        llargs.push(data_ptr);
                        continue 'make_args;
                    }
                    Ref(PlaceValue { llval: data_ptr, llextra: Some(meta), .. }) => {
                        llfn = Some(meth::VirtualIndex::from_index(idx).get_fn(
                            bx,
                            meta,
                            op.layout.ty,
                            fn_abi,
                        ));
                        llargs.push(data_ptr);
                        continue;
                    }
                    _ => {
                        span_bug!(fn_span, "can't codegen a virtual call on {:#?}", op);
                    }
                }
            }

            match (&arg.node, op.val) {
                (&mir::Operand::Copy(_), Ref(PlaceValue { llextra: None, .. }))
                | (&mir::Operand::Constant(_), Ref(PlaceValue { llextra: None, .. })) => {
                    let tmp = PlaceRef::alloca(bx, op.layout);
                    bx.lifetime_start(tmp.val.llval, tmp.layout.size);
                    op.store_with_annotation(bx, tmp);
                    op.val = Ref(tmp.val);
                    lifetime_ends_after_call.push((tmp.val.llval, tmp.layout.size));
                }
                _ => {}
            }

            self.codegen_argument(
                bx,
                op,
                &mut llargs,
                &fn_abi.args[i],
                &mut lifetime_ends_after_call,
            );
        }
        let num_untupled = untuple.map(|tup| {
            self.codegen_arguments_untupled(
                bx,
                &tup.node,
                &mut llargs,
                &fn_abi.args[first_args.len()..],
                &mut lifetime_ends_after_call,
            )
        });

        let needs_location =
            instance.is_some_and(|i| i.def.requires_caller_location(self.cx.tcx()));
        if needs_location {
            let mir_args = if let Some(num_untupled) = num_untupled {
                first_args.len() + num_untupled
            } else {
                args.len()
            };
            assert_eq!(
                fn_abi.args.len(),
                mir_args + 1,
                "#[track_caller] fn's must have 1 more argument in their ABI than in their MIR: {instance:?} {fn_span:?} {fn_abi:?}",
            );
            let location = self.get_caller_location(bx, source_info);
            debug!(
                "codegen_call_terminator({:?}): location={:?} (fn_span {:?})",
                terminator, location, fn_span
            );

            let last_arg = fn_abi.args.last().unwrap();
            self.codegen_argument(
                bx,
                location,
                &mut llargs,
                last_arg,
                &mut lifetime_ends_after_call,
            );
        }

        let fn_ptr = match (instance, llfn) {
            (Some(instance), None) => bx.get_fn_addr(instance),
            (_, Some(llfn)) => llfn,
            _ => span_bug!(fn_span, "no instance or llfn for call"),
        };
        self.set_debug_loc(bx, source_info);
        helper.do_call(
            self,
            bx,
            fn_abi,
            fn_ptr,
            &llargs,
            destination,
            unwind,
            &lifetime_ends_after_call,
            instance,
            kind,
            mergeable_succ,
        )
    }

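    /// Codegens an `InlineAsm` terminator by lowering each MIR operand to an
    /// `InlineAsmOperandRef` and delegating to `do_inlineasm`.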
    fn codegen_asm_terminator(
        &mut self,
        helper: TerminatorCodegenHelper<'tcx>,
        bx: &mut Bx,
        asm_macro: InlineAsmMacro,
        terminator: &mir::Terminator<'tcx>,
        template: &[ast::InlineAsmTemplatePiece],
        operands: &[mir::InlineAsmOperand<'tcx>],
        options: ast::InlineAsmOptions,
        line_spans: &[Span],
        targets: &[mir::BasicBlock],
        unwind: mir::UnwindAction,
        instance: Instance<'_>,
        mergeable_succ: bool,
    ) -> MergingSucc {
        let span = terminator.source_info.span;

        let operands: Vec<_> = operands
            .iter()
            .map(|op| match *op {
                mir::InlineAsmOperand::In { reg, ref value } => {
                    let value = self.codegen_operand(bx, value);
                    InlineAsmOperandRef::In { reg, value }
                }
                mir::InlineAsmOperand::Out { reg, late, ref place } => {
                    let place = place.map(|place| self.codegen_place(bx, place.as_ref()));
                    InlineAsmOperandRef::Out { reg, late, place }
                }
                mir::InlineAsmOperand::InOut { reg, late, ref in_value, ref out_place } => {
                    let in_value = self.codegen_operand(bx, in_value);
                    let out_place =
                        out_place.map(|out_place| self.codegen_place(bx, out_place.as_ref()));
                    InlineAsmOperandRef::InOut { reg, late, in_value, out_place }
                }
                mir::InlineAsmOperand::Const { ref value } => {
                    let const_value = self.eval_mir_constant(value);
                    let string = common::asm_const_to_str(
                        bx.tcx(),
                        span,
                        const_value,
                        bx.layout_of(value.ty()),
                    );
                    InlineAsmOperandRef::Const { string }
                }
                mir::InlineAsmOperand::SymFn { ref value } => {
                    let const_ = self.monomorphize(value.const_);
                    if let ty::FnDef(def_id, args) = *const_.ty().kind() {
                        let instance = ty::Instance::resolve_for_fn_ptr(
                            bx.tcx(),
                            bx.typing_env(),
                            def_id,
                            args,
                        )
                        .unwrap();
                        InlineAsmOperandRef::SymFn { instance }
                    } else {
                        span_bug!(span, "invalid type for asm sym (fn)");
                    }
                }
                mir::InlineAsmOperand::SymStatic { def_id } => {
                    InlineAsmOperandRef::SymStatic { def_id }
                }
                mir::InlineAsmOperand::Label { target_index } => {
                    InlineAsmOperandRef::Label { label: self.llbb(targets[target_index]) }
                }
            })
            .collect();

        helper.do_inlineasm(
            self,
            bx,
            template,
            &operands,
            options,
            line_spans,
            if asm_macro.diverges(options) { None } else { targets.get(0).copied() },
            unwind,
            instance,
            mergeable_succ,
        )
    }

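    /// Codegens all the statements and the terminator of `bb`, then keeps
    /// folding merged successors (blocks with a single predecessor reached by
    /// a single-successor terminator) into the same backend block.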
    pub(crate) fn codegen_block(&mut self, mut bb: mir::BasicBlock) {
        let llbb = match self.try_llbb(bb) {
            Some(llbb) => llbb,
            None => return,
        };
        let bx = &mut Bx::build(self.cx, llbb);
        let mir = self.mir;

        loop {
            let data = &mir[bb];

            debug!("codegen_block({:?}={:?})", bb, data);

            for statement in &data.statements {
                self.codegen_statement(bx, statement);
            }
            self.codegen_stmt_debuginfos(bx, &data.after_last_stmt_debuginfos);

            let merging_succ = self.codegen_terminator(bx, bb, data.terminator());
            if let MergingSucc::False = merging_succ {
                break;
            }

            let mut successors = data.terminator().successors();
            let succ = successors.next().unwrap();
            assert!(matches!(self.cached_llbbs[succ], CachedLlbb::None));
            self.cached_llbbs[succ] = CachedLlbb::Skip;
            bb = succ;
        }
    }

    pub(crate) fn codegen_block_as_unreachable(&mut self, bb: mir::BasicBlock) {
        let llbb = match self.try_llbb(bb) {
            Some(llbb) => llbb,
            None => return,
        };
        let bx = &mut Bx::build(self.cx, llbb);
        debug!("codegen_block_as_unreachable({:?})", bb);
        bx.unreachable();
    }

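    /// Dispatches on the terminator kind; `mergeable_succ` is computed lazily
    /// and is true only when the terminator has exactly one successor and
    /// this block is that successor's only predecessor.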
    fn codegen_terminator(
        &mut self,
        bx: &mut Bx,
        bb: mir::BasicBlock,
        terminator: &'tcx mir::Terminator<'tcx>,
    ) -> MergingSucc {
        debug!("codegen_terminator: {:?}", terminator);

        let helper = TerminatorCodegenHelper { bb, terminator };

        let mergeable_succ = || {
            let mut successors = terminator.successors();
            if let Some(succ) = successors.next()
                && successors.next().is_none()
                && let &[succ_pred] = self.mir.basic_blocks.predecessors()[succ].as_slice()
            {
                assert_eq!(succ_pred, bb);
                true
            } else {
                false
            }
        };

        self.set_debug_loc(bx, terminator.source_info);
        match terminator.kind {
            mir::TerminatorKind::UnwindResume => {
                self.codegen_resume_terminator(helper, bx);
                MergingSucc::False
            }

            mir::TerminatorKind::UnwindTerminate(reason) => {
                self.codegen_terminate_terminator(helper, bx, terminator, reason);
                MergingSucc::False
            }

            mir::TerminatorKind::Goto { target } => {
                helper.funclet_br(self, bx, target, mergeable_succ())
            }

            mir::TerminatorKind::SwitchInt { ref discr, ref targets } => {
                self.codegen_switchint_terminator(helper, bx, discr, targets);
                MergingSucc::False
            }

            mir::TerminatorKind::Return => {
                self.codegen_return_terminator(bx);
                MergingSucc::False
            }

            mir::TerminatorKind::Unreachable => {
                bx.unreachable();
                MergingSucc::False
            }

            mir::TerminatorKind::Drop { place, target, unwind, replace: _, drop, async_fut } => {
                assert!(
                    async_fut.is_none() && drop.is_none(),
                    "Async Drop must be expanded or reset to sync before codegen"
                );
                self.codegen_drop_terminator(
                    helper,
                    bx,
                    &terminator.source_info,
                    place,
                    target,
                    unwind,
                    mergeable_succ(),
                )
            }

            mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, unwind } => self
                .codegen_assert_terminator(
                    helper,
                    bx,
                    terminator,
                    cond,
                    expected,
                    msg,
                    target,
                    unwind,
                    mergeable_succ(),
                ),

            mir::TerminatorKind::Call {
                ref func,
                ref args,
                destination,
                target,
                unwind,
                call_source: _,
                fn_span,
            } => self.codegen_call_terminator(
                helper,
                bx,
                terminator,
                func,
                args,
                destination,
                target,
                unwind,
                fn_span,
                CallKind::Normal,
                mergeable_succ(),
            ),
            mir::TerminatorKind::TailCall { ref func, ref args, fn_span } => self
                .codegen_call_terminator(
                    helper,
                    bx,
                    terminator,
                    func,
                    args,
                    mir::Place::from(mir::RETURN_PLACE),
                    None,
                    mir::UnwindAction::Unreachable,
                    fn_span,
                    CallKind::Tail,
                    mergeable_succ(),
                ),
            mir::TerminatorKind::CoroutineDrop | mir::TerminatorKind::Yield { .. } => {
                bug!("coroutine ops in codegen")
            }
            mir::TerminatorKind::FalseEdge { .. } | mir::TerminatorKind::FalseUnwind { .. } => {
                bug!("borrowck false edges in codegen")
            }

            mir::TerminatorKind::InlineAsm {
                asm_macro,
                template,
                ref operands,
                options,
                line_spans,
                ref targets,
                unwind,
            } => self.codegen_asm_terminator(
                helper,
                bx,
                asm_macro,
                terminator,
                template,
                operands,
                options,
                line_spans,
                targets,
                unwind,
                self.instance,
                mergeable_succ(),
            ),
        }
    }

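    /// Lowers one call argument into `llargs` according to its `PassMode`,
    /// spilling to suitably aligned temporaries where the ABI demands it and
    /// recording any temporaries whose lifetime must end after the call.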
    fn codegen_argument(
        &mut self,
        bx: &mut Bx,
        op: OperandRef<'tcx, Bx::Value>,
        llargs: &mut Vec<Bx::Value>,
        arg: &ArgAbi<'tcx, Ty<'tcx>>,
        lifetime_ends_after_call: &mut Vec<(Bx::Value, Size)>,
    ) {
        match arg.mode {
            PassMode::Ignore => return,
            PassMode::Cast { pad_i32: true, .. } => {
                llargs.push(bx.const_undef(bx.reg_backend_type(&Reg::i32())));
            }
            PassMode::Pair(..) => match op.val {
                Pair(a, b) => {
                    llargs.push(a);
                    llargs.push(b);
                    return;
                }
                _ => bug!("codegen_argument: {:?} invalid for pair argument", op),
            },
            PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => match op.val {
                Ref(PlaceValue { llval: a, llextra: Some(b), .. }) => {
                    llargs.push(a);
                    llargs.push(b);
                    return;
                }
                _ => bug!("codegen_argument: {:?} invalid for unsized indirect argument", op),
            },
            _ => {}
        }

        let (mut llval, align, by_ref) = match op.val {
            Immediate(_) | Pair(..) => match arg.mode {
                PassMode::Indirect { attrs, .. } => {
                    let required_align = match attrs.pointee_align {
                        Some(pointee_align) => cmp::max(pointee_align, arg.layout.align.abi),
                        None => arg.layout.align.abi,
                    };
                    let scratch = PlaceValue::alloca(bx, arg.layout.size, required_align);
                    bx.lifetime_start(scratch.llval, arg.layout.size);
                    op.store_with_annotation(bx, scratch.with_type(arg.layout));
                    lifetime_ends_after_call.push((scratch.llval, arg.layout.size));
                    (scratch.llval, scratch.align, true)
                }
                PassMode::Cast { .. } => {
                    let scratch = PlaceRef::alloca(bx, arg.layout);
                    op.store_with_annotation(bx, scratch);
                    (scratch.val.llval, scratch.val.align, true)
                }
                _ => (op.immediate_or_packed_pair(bx), arg.layout.align.abi, false),
            },
            Ref(op_place_val) => match arg.mode {
                PassMode::Indirect { attrs, .. } => {
                    let required_align = match attrs.pointee_align {
                        Some(pointee_align) => cmp::max(pointee_align, arg.layout.align.abi),
                        None => arg.layout.align.abi,
                    };
                    if op_place_val.align < required_align {
                        let scratch = PlaceValue::alloca(bx, arg.layout.size, required_align);
                        bx.lifetime_start(scratch.llval, arg.layout.size);
                        bx.typed_place_copy(scratch, op_place_val, op.layout);
                        lifetime_ends_after_call.push((scratch.llval, arg.layout.size));
                        (scratch.llval, scratch.align, true)
                    } else {
                        (op_place_val.llval, op_place_val.align, true)
                    }
                }
                _ => (op_place_val.llval, op_place_val.align, true),
            },
            ZeroSized => match arg.mode {
                PassMode::Indirect { on_stack, .. } => {
                    if on_stack {
                        bug!("ZST {op:?} passed on stack with abi {arg:?}");
                    }
                    let scratch = PlaceRef::alloca(bx, arg.layout);
                    (scratch.val.llval, scratch.val.align, true)
                }
                _ => bug!("ZST {op:?} wasn't ignored, but was passed with abi {arg:?}"),
            },
        };

        if by_ref && !arg.is_indirect() {
            if let PassMode::Cast { cast, pad_i32: _ } = &arg.mode {
                let scratch_size = cast.size(bx);
                let scratch_align = cast.align(bx);
                let copy_bytes = cmp::min(cast.unaligned_size(bx).bytes(), arg.layout.size.bytes());
                let llscratch = bx.alloca(scratch_size, scratch_align);
                bx.lifetime_start(llscratch, scratch_size);
                bx.memcpy(
                    llscratch,
                    scratch_align,
                    llval,
                    align,
                    bx.const_usize(copy_bytes),
                    MemFlags::empty(),
                    None,
                );
                llval = load_cast(bx, cast, llscratch, scratch_align);
                bx.lifetime_end(llscratch, scratch_size);
            } else {
                llval = bx.load(bx.backend_type(arg.layout), llval, align);
                if let BackendRepr::Scalar(scalar) = arg.layout.backend_repr {
                    if scalar.is_bool() {
                        bx.range_metadata(llval, WrappingRange { start: 0, end: 1 });
                    }
                    llval = bx.to_immediate_scalar(llval, scalar);
                }
            }
        }

        llargs.push(llval);
    }

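    /// Expands a tuple operand into its fields as individual call arguments
    /// (the `extern "rust-call"` calling convention); returns how many fields
    /// were passed.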
    fn codegen_arguments_untupled(
        &mut self,
        bx: &mut Bx,
        operand: &mir::Operand<'tcx>,
        llargs: &mut Vec<Bx::Value>,
        args: &[ArgAbi<'tcx, Ty<'tcx>>],
        lifetime_ends_after_call: &mut Vec<(Bx::Value, Size)>,
    ) -> usize {
        let tuple = self.codegen_operand(bx, operand);

        if let Ref(place_val) = tuple.val {
            if place_val.llextra.is_some() {
                bug!("closure arguments must be sized");
            }
            let tuple_ptr = place_val.with_type(tuple.layout);
            for i in 0..tuple.layout.fields.count() {
                let field_ptr = tuple_ptr.project_field(bx, i);
                let field = bx.load_operand(field_ptr);
                self.codegen_argument(bx, field, llargs, &args[i], lifetime_ends_after_call);
            }
        } else {
            for i in 0..tuple.layout.fields.count() {
                let op = tuple.extract_field(self, bx, i);
                self.codegen_argument(bx, op, llargs, &args[i], lifetime_ends_after_call);
            }
        }
        tuple.layout.fields.count()
    }

    pub(super) fn get_caller_location(
        &mut self,
        bx: &mut Bx,
        source_info: mir::SourceInfo,
    ) -> OperandRef<'tcx, Bx::Value> {
        self.mir.caller_location_span(source_info, self.caller_location, bx.tcx(), |span: Span| {
            let const_loc = bx.tcx().span_as_caller_location(span);
            OperandRef::from_const(bx, const_loc, bx.tcx().caller_location_ty())
        })
    }

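    /// Returns (allocating on first use) the slot where the landing pad
    /// stashes the exception pointer/selector pair until `UnwindResume`.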
    fn get_personality_slot(&mut self, bx: &mut Bx) -> PlaceRef<'tcx, Bx::Value> {
        let cx = bx.cx();
        if let Some(slot) = self.personality_slot {
            slot
        } else {
            let layout = cx.layout_of(Ty::new_tup(
                cx.tcx(),
                &[Ty::new_mut_ptr(cx.tcx(), cx.tcx().types.u8), cx.tcx().types.i32],
            ));
            let slot = PlaceRef::alloca(bx, layout);
            self.personality_slot = Some(slot);
            slot
        }
    }

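    /// Returns (caching per block) the landing pad wired up to `bb`: a
    /// `cleanuppad` funclet block under the new EH scheme, otherwise a
    /// classic landing pad that stores the exception pair into the
    /// personality slot.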
    fn landing_pad_for(&mut self, bb: mir::BasicBlock) -> Bx::BasicBlock {
        if let Some(landing_pad) = self.landing_pads[bb] {
            return landing_pad;
        }

        let landing_pad = self.landing_pad_for_uncached(bb);
        self.landing_pads[bb] = Some(landing_pad);
        landing_pad
    }

    fn landing_pad_for_uncached(&mut self, bb: mir::BasicBlock) -> Bx::BasicBlock {
        let llbb = self.llbb(bb);
        if base::wants_new_eh_instructions(self.cx.sess()) {
            let cleanup_bb = Bx::append_block(self.cx, self.llfn, &format!("funclet_{bb:?}"));
            let mut cleanup_bx = Bx::build(self.cx, cleanup_bb);
            let funclet = cleanup_bx.cleanup_pad(None, &[]);
            cleanup_bx.br(llbb);
            self.funclets[bb] = Some(funclet);
            cleanup_bb
        } else {
            let cleanup_llbb = Bx::append_block(self.cx, self.llfn, "cleanup");
            let mut cleanup_bx = Bx::build(self.cx, cleanup_llbb);

            let llpersonality = self.cx.eh_personality();
            let (exn0, exn1) = cleanup_bx.cleanup_landing_pad(llpersonality);

            let slot = self.get_personality_slot(&mut cleanup_bx);
            slot.storage_live(&mut cleanup_bx);
            Pair(exn0, exn1).store(&mut cleanup_bx, slot);

            cleanup_bx.br(llbb);
            cleanup_llbb
        }
    }

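    /// Returns (lazily creating) a shared block containing only an
    /// `unreachable` instruction.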
    fn unreachable_block(&mut self) -> Bx::BasicBlock {
        self.unreachable_block.unwrap_or_else(|| {
            let llbb = Bx::append_block(self.cx, self.llfn, "unreachable");
            let mut bx = Bx::build(self.cx, llbb);
            bx.unreachable();
            self.unreachable_block = Some(llbb);
            llbb
        })
    }

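    /// Returns a block that stops an escaping unwind: inside the appropriate
    /// EH pad it calls the lang item for `reason` and ends in `unreachable`.
    /// The block is cached and rebuilt only when `reason` changes.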
    fn terminate_block(&mut self, reason: UnwindTerminateReason) -> Bx::BasicBlock {
        if let Some((cached_bb, cached_reason)) = self.terminate_block
            && reason == cached_reason
        {
            return cached_bb;
        }

        let funclet;
        let llbb;
        let mut bx;
        if base::wants_new_eh_instructions(self.cx.sess()) {
            llbb = Bx::append_block(self.cx, self.llfn, "cs_terminate");
            let cp_llbb = Bx::append_block(self.cx, self.llfn, "cp_terminate");

            let mut cs_bx = Bx::build(self.cx, llbb);
            let cs = cs_bx.catch_switch(None, None, &[cp_llbb]);

            bx = Bx::build(self.cx, cp_llbb);
            let null =
                bx.const_null(bx.type_ptr_ext(bx.cx().data_layout().instruction_address_space));

            let args = if base::wants_msvc_seh(self.cx.sess()) {
                let adjectives = bx.const_i32(0x40);
                &[null, adjectives, null] as &[_]
            } else {
                &[null] as &[_]
            };

            funclet = Some(bx.catch_pad(cs, args));
        } else {
            llbb = Bx::append_block(self.cx, self.llfn, "terminate");
            bx = Bx::build(self.cx, llbb);

            let llpersonality = self.cx.eh_personality();
            bx.filter_landing_pad(llpersonality);

            funclet = None;
        }

        self.set_debug_loc(&mut bx, mir::SourceInfo::outermost(self.mir.span));

        let (fn_abi, fn_ptr, instance) =
            common::build_langcall(&bx, self.mir.span, reason.lang_item());
        if is_call_from_compiler_builtins_to_upstream_monomorphization(bx.tcx(), instance) {
            bx.abort();
        } else {
            let fn_ty = bx.fn_decl_backend_type(fn_abi);

            let llret = bx.call(fn_ty, None, Some(fn_abi), fn_ptr, &[], funclet.as_ref(), None);
            bx.apply_attrs_to_cleanup_callsite(llret);
        }

        bx.unreachable();

        self.terminate_block = Some((llbb, reason));
        llbb
    }

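    /// Gets the backend basic block for `bb`, panicking if the block was
    /// merged into another one (see `try_llbb`).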
    pub fn llbb(&mut self, bb: mir::BasicBlock) -> Bx::BasicBlock {
        self.try_llbb(bb).unwrap()
    }

    pub(crate) fn try_llbb(&mut self, bb: mir::BasicBlock) -> Option<Bx::BasicBlock> {
        match self.cached_llbbs[bb] {
            CachedLlbb::None => {
                let llbb = Bx::append_block(self.cx, self.llfn, &format!("{bb:?}"));
                self.cached_llbbs[bb] = CachedLlbb::Some(llbb);
                Some(llbb)
            }
            CachedLlbb::Some(llbb) => Some(llbb),
            CachedLlbb::Skip => None,
        }
    }

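    /// Prepares where a call's return value should go: pushes the out-pointer
    /// onto `llargs` for indirect returns and otherwise describes the slot
    /// that `store_return` should fill.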
    fn make_return_dest(
        &mut self,
        bx: &mut Bx,
        dest: mir::Place<'tcx>,
        fn_ret: &ArgAbi<'tcx, Ty<'tcx>>,
        llargs: &mut Vec<Bx::Value>,
    ) -> ReturnDest<'tcx, Bx::Value> {
        if fn_ret.is_ignore() {
            return ReturnDest::Nothing;
        }
        let dest = if let Some(index) = dest.as_local() {
            match self.locals[index] {
                LocalRef::Place(dest) => dest,
                LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
                LocalRef::PendingOperand => {
                    return if fn_ret.is_indirect() {
                        let tmp = PlaceRef::alloca(bx, fn_ret.layout);
                        tmp.storage_live(bx);
                        llargs.push(tmp.val.llval);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else {
                        ReturnDest::DirectOperand(index)
                    };
                }
                LocalRef::Operand(_) => {
                    bug!("place local already assigned to");
                }
            }
        } else {
            self.codegen_place(bx, dest.as_ref())
        };
        if fn_ret.is_indirect() {
            if dest.val.align < dest.layout.align.abi {
                span_bug!(self.mir.span, "can't directly store to unaligned value");
            }
            llargs.push(dest.val.llval);
            ReturnDest::Nothing
        } else {
            ReturnDest::Store(dest)
        }
    }

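    /// Stores the raw return value of a call into the destination computed by
    /// `make_return_dest`, going through a temporary for `PassMode::Cast`
    /// returns so the value can be reloaded with its Rust layout.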
    fn store_return(
        &mut self,
        bx: &mut Bx,
        dest: ReturnDest<'tcx, Bx::Value>,
        ret_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        llval: Bx::Value,
    ) {
        use self::ReturnDest::*;

        match dest {
            Nothing => (),
            Store(dst) => bx.store_arg(ret_abi, llval, dst),
            IndirectOperand(tmp, index) => {
                let op = bx.load_operand(tmp);
                tmp.storage_dead(bx);
                self.overwrite_local(index, LocalRef::Operand(op));
                self.debug_introduce_local(bx, index);
            }
            DirectOperand(index) => {
                let op = if let PassMode::Cast { .. } = ret_abi.mode {
                    let tmp = PlaceRef::alloca(bx, ret_abi.layout);
                    tmp.storage_live(bx);
                    bx.store_arg(ret_abi, llval, tmp);
                    let op = bx.load_operand(tmp);
                    tmp.storage_dead(bx);
                    op
                } else {
                    OperandRef::from_immediate_or_packed_pair(bx, llval, ret_abi.layout)
                };
                self.overwrite_local(index, LocalRef::Operand(op));
                self.debug_introduce_local(bx, index);
            }
        }
    }
}

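/// Where the return value of a call should be written.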
enum ReturnDest<'tcx, V> {
    Nothing,
    Store(PlaceRef<'tcx, V>),
    IndirectOperand(PlaceRef<'tcx, V>, mir::Local),
    DirectOperand(mir::Local),
}

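/// Loads a value of ABI cast type `cast` from `ptr`. When the "rest" part
/// sits at an offset past a single prefix register, the two pieces are loaded
/// separately and stitched into an aggregate; otherwise it is one plain load.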
fn load_cast<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    cast: &CastTarget,
    ptr: Bx::Value,
    align: Align,
) -> Bx::Value {
    let cast_ty = bx.cast_backend_type(cast);
    if let Some(offset_from_start) = cast.rest_offset {
        assert!(cast.prefix[1..].iter().all(|p| p.is_none()));
        assert_eq!(cast.rest.unit.size, cast.rest.total);
        let first_ty = bx.reg_backend_type(&cast.prefix[0].unwrap());
        let second_ty = bx.reg_backend_type(&cast.rest.unit);
        let first = bx.load(first_ty, ptr, align);
        let second_ptr = bx.inbounds_ptradd(ptr, bx.const_usize(offset_from_start.bytes()));
        let second = bx.load(second_ty, second_ptr, align.restrict_for_offset(offset_from_start));
        let res = bx.cx().const_poison(cast_ty);
        let res = bx.insert_value(res, first, 0);
        bx.insert_value(res, second, 1)
    } else {
        bx.load(cast_ty, ptr, align)
    }
}

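/// The store counterpart of `load_cast`: writes `value` (of the ABI cast
/// type) to `ptr`, splitting it into its prefix and rest pieces when the
/// rest part sits at a non-zero offset.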
pub fn store_cast<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    cast: &CastTarget,
    value: Bx::Value,
    ptr: Bx::Value,
    align: Align,
) {
    if let Some(offset_from_start) = cast.rest_offset {
        assert!(cast.prefix[1..].iter().all(|p| p.is_none()));
        assert_eq!(cast.rest.unit.size, cast.rest.total);
        assert!(cast.prefix[0].is_some());
        let first = bx.extract_value(value, 0);
        let second = bx.extract_value(value, 1);
        bx.store(first, ptr, align);
        let second_ptr = bx.inbounds_ptradd(ptr, bx.const_usize(offset_from_start.bytes()));
        bx.store(second, second_ptr, align.restrict_for_offset(offset_from_start));
    } else {
        bx.store(value, ptr, align);
    }
}