1use std::assert_matches::assert_matches;
2use std::ops::Deref;
3
4use rustc_abi::{Align, Scalar, Size, WrappingRange};
5use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs;
6use rustc_middle::mir;
7use rustc_middle::ty::layout::{FnAbiOf, LayoutOf, TyAndLayout};
8use rustc_middle::ty::{AtomicOrdering, Instance, Ty};
9use rustc_session::config::OptLevel;
10use rustc_span::Span;
11use rustc_target::callconv::FnAbi;
12
13use super::abi::AbiBuilderMethods;
14use super::asm::AsmBuilderMethods;
15use super::consts::ConstCodegenMethods;
16use super::coverageinfo::CoverageInfoBuilderMethods;
17use super::debuginfo::DebugInfoBuilderMethods;
18use super::intrinsic::IntrinsicCallBuilderMethods;
19use super::misc::MiscCodegenMethods;
20use super::type_::{ArgAbiBuilderMethods, BaseTypeCodegenMethods, LayoutTypeCodegenMethods};
21use super::{CodegenMethods, StaticBuilderMethods};
22use crate::MemFlags;
23use crate::common::{AtomicRmwBinOp, IntPredicate, RealPredicate, SynchronizationScope, TypeKind};
24use crate::mir::operand::{OperandRef, OperandValue};
25use crate::mir::place::{PlaceRef, PlaceValue};
26
/// The binary integer operations for which `BuilderMethods::checked_binop`
/// can produce an (result, overflow-flag) pair.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum OverflowOp {
    Add,
    Sub,
    Mul,
}
33
34pub trait BuilderMethods<'a, 'tcx>:
35 Sized
36 + LayoutOf<'tcx, LayoutOfResult = TyAndLayout<'tcx>>
37 + FnAbiOf<'tcx, FnAbiOfResult = &'tcx FnAbi<'tcx, Ty<'tcx>>>
38 + Deref<Target = Self::CodegenCx>
39 + CoverageInfoBuilderMethods<'tcx>
40 + DebugInfoBuilderMethods<'tcx>
41 + ArgAbiBuilderMethods<'tcx>
42 + AbiBuilderMethods
43 + IntrinsicCallBuilderMethods<'tcx>
44 + AsmBuilderMethods<'tcx>
45 + StaticBuilderMethods
46{
47 type CodegenCx: CodegenMethods<
51 'tcx,
52 Value = Self::Value,
53 Metadata = Self::Metadata,
54 Function = Self::Function,
55 BasicBlock = Self::BasicBlock,
56 Type = Self::Type,
57 Funclet = Self::Funclet,
58 DIScope = Self::DIScope,
59 DILocation = Self::DILocation,
60 DIVariable = Self::DIVariable,
61 >;
62
63 fn build(cx: &'a Self::CodegenCx, llbb: Self::BasicBlock) -> Self;
64
65 fn cx(&self) -> &Self::CodegenCx;
66 fn llbb(&self) -> Self::BasicBlock;
67
68 fn set_span(&mut self, span: Span);
69
70 fn append_block(cx: &'a Self::CodegenCx, llfn: Self::Function, name: &str) -> Self::BasicBlock;
72
73 fn append_sibling_block(&mut self, name: &str) -> Self::BasicBlock;
74
75 fn switch_to_block(&mut self, llbb: Self::BasicBlock);
76
77 fn ret_void(&mut self);
78 fn ret(&mut self, v: Self::Value);
79 fn br(&mut self, dest: Self::BasicBlock);
80 fn cond_br(
81 &mut self,
82 cond: Self::Value,
83 then_llbb: Self::BasicBlock,
84 else_llbb: Self::BasicBlock,
85 );
86
87 fn cond_br_with_expect(
94 &mut self,
95 mut cond: Self::Value,
96 then_llbb: Self::BasicBlock,
97 else_llbb: Self::BasicBlock,
98 expect: Option<bool>,
99 ) {
100 if let Some(expect) = expect {
101 cond = self.expect(cond, expect);
102 }
103 self.cond_br(cond, then_llbb, else_llbb)
104 }
105
106 fn switch(
107 &mut self,
108 v: Self::Value,
109 else_llbb: Self::BasicBlock,
110 cases: impl ExactSizeIterator<Item = (u128, Self::BasicBlock)>,
111 );
112
113 fn switch_with_weights(
117 &mut self,
118 v: Self::Value,
119 else_llbb: Self::BasicBlock,
120 _else_is_cold: bool,
121 cases: impl ExactSizeIterator<Item = (u128, Self::BasicBlock, bool)>,
122 ) {
123 self.switch(v, else_llbb, cases.map(|(val, bb, _)| (val, bb)))
124 }
125
126 fn invoke(
127 &mut self,
128 llty: Self::Type,
129 fn_attrs: Option<&CodegenFnAttrs>,
130 fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
131 llfn: Self::Value,
132 args: &[Self::Value],
133 then: Self::BasicBlock,
134 catch: Self::BasicBlock,
135 funclet: Option<&Self::Funclet>,
136 instance: Option<Instance<'tcx>>,
137 ) -> Self::Value;
138 fn unreachable(&mut self);
139
140 fn unreachable_nonterminator(&mut self) {
142 let const_true = self.cx().const_bool(true);
146 let poison_ptr = self.const_poison(self.cx().type_ptr());
147 self.store(const_true, poison_ptr, Align::ONE);
148 }
149
150 fn add(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
151 fn fadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
152 fn fadd_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
153 fn fadd_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
154 fn sub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
155 fn fsub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
156 fn fsub_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
157 fn fsub_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
158 fn mul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
159 fn fmul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
160 fn fmul_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
161 fn fmul_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
162 fn udiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
163 fn exactudiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
164 fn sdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
165 fn exactsdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
166 fn fdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
167 fn fdiv_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
168 fn fdiv_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
169 fn urem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
170 fn srem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
171 fn frem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
172 fn frem_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
173 fn frem_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
174 fn shl(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
177 fn lshr(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
181 fn ashr(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
185 fn unchecked_sadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
186 self.add(lhs, rhs)
187 }
188 fn unchecked_uadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
189 self.add(lhs, rhs)
190 }
191 fn unchecked_suadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
192 self.unchecked_sadd(lhs, rhs)
193 }
194 fn unchecked_ssub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
195 self.sub(lhs, rhs)
196 }
197 fn unchecked_usub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
198 self.sub(lhs, rhs)
199 }
200 fn unchecked_susub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
201 self.unchecked_ssub(lhs, rhs)
202 }
203 fn unchecked_smul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
204 self.mul(lhs, rhs)
205 }
206 fn unchecked_umul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
207 self.mul(lhs, rhs)
208 }
209 fn unchecked_sumul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
210 self.unchecked_smul(lhs, rhs)
213 }
214 fn and(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
215 fn or(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
216 fn or_disjoint(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
219 self.or(lhs, rhs)
220 }
221 fn xor(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
222 fn neg(&mut self, v: Self::Value) -> Self::Value;
223 fn fneg(&mut self, v: Self::Value) -> Self::Value;
224 fn not(&mut self, v: Self::Value) -> Self::Value;
225
226 fn checked_binop(
227 &mut self,
228 oop: OverflowOp,
229 ty: Ty<'tcx>,
230 lhs: Self::Value,
231 rhs: Self::Value,
232 ) -> (Self::Value, Self::Value);
233
234 fn from_immediate(&mut self, val: Self::Value) -> Self::Value;
235 fn to_immediate_scalar(&mut self, val: Self::Value, scalar: Scalar) -> Self::Value;
236
237 fn alloca(&mut self, size: Size, align: Align) -> Self::Value;
238 fn scalable_alloca(&mut self, elt: u64, align: Align, element_ty: Ty<'_>) -> Self::Value;
239
240 fn load(&mut self, ty: Self::Type, ptr: Self::Value, align: Align) -> Self::Value;
241 fn volatile_load(&mut self, ty: Self::Type, ptr: Self::Value) -> Self::Value;
242 fn atomic_load(
243 &mut self,
244 ty: Self::Type,
245 ptr: Self::Value,
246 order: AtomicOrdering,
247 size: Size,
248 ) -> Self::Value;
249 fn load_from_place(&mut self, ty: Self::Type, place: PlaceValue<Self::Value>) -> Self::Value {
250 assert_eq!(place.llextra, None);
251 self.load(ty, place.llval, place.align)
252 }
253 fn load_operand(&mut self, place: PlaceRef<'tcx, Self::Value>)
254 -> OperandRef<'tcx, Self::Value>;
255
256 fn write_operand_repeatedly(
258 &mut self,
259 elem: OperandRef<'tcx, Self::Value>,
260 count: u64,
261 dest: PlaceRef<'tcx, Self::Value>,
262 );
263
264 fn assume_integer_range(&mut self, imm: Self::Value, ty: Self::Type, range: WrappingRange) {
269 let WrappingRange { start, end } = range;
270
271 let shifted = if start == 0 {
275 imm
276 } else {
277 let low = self.const_uint_big(ty, start);
278 self.sub(imm, low)
279 };
280 let width = self.const_uint_big(ty, u128::wrapping_sub(end, start));
281 let cmp = self.icmp(IntPredicate::IntULE, shifted, width);
282 self.assume(cmp);
283 }
284
285 fn assume_nonnull(&mut self, val: Self::Value) {
289 let null = self.const_null(self.type_ptr());
294 let is_null = self.icmp(IntPredicate::IntNE, val, null);
295 self.assume(is_null);
296 }
297
298 fn range_metadata(&mut self, load: Self::Value, range: WrappingRange);
299 fn nonnull_metadata(&mut self, load: Self::Value);
300
301 fn store(&mut self, val: Self::Value, ptr: Self::Value, align: Align) -> Self::Value;
302 fn store_to_place(&mut self, val: Self::Value, place: PlaceValue<Self::Value>) -> Self::Value {
303 assert_eq!(place.llextra, None);
304 self.store(val, place.llval, place.align)
305 }
306 fn store_with_flags(
307 &mut self,
308 val: Self::Value,
309 ptr: Self::Value,
310 align: Align,
311 flags: MemFlags,
312 ) -> Self::Value;
313 fn store_to_place_with_flags(
314 &mut self,
315 val: Self::Value,
316 place: PlaceValue<Self::Value>,
317 flags: MemFlags,
318 ) -> Self::Value {
319 assert_eq!(place.llextra, None);
320 self.store_with_flags(val, place.llval, place.align, flags)
321 }
322 fn atomic_store(
323 &mut self,
324 val: Self::Value,
325 ptr: Self::Value,
326 order: AtomicOrdering,
327 size: Size,
328 );
329
330 fn gep(&mut self, ty: Self::Type, ptr: Self::Value, indices: &[Self::Value]) -> Self::Value;
331 fn inbounds_gep(
332 &mut self,
333 ty: Self::Type,
334 ptr: Self::Value,
335 indices: &[Self::Value],
336 ) -> Self::Value;
337 fn inbounds_nuw_gep(
338 &mut self,
339 ty: Self::Type,
340 ptr: Self::Value,
341 indices: &[Self::Value],
342 ) -> Self::Value {
343 self.inbounds_gep(ty, ptr, indices)
344 }
345 fn ptradd(&mut self, ptr: Self::Value, offset: Self::Value) -> Self::Value {
346 self.gep(self.cx().type_i8(), ptr, &[offset])
347 }
348 fn inbounds_ptradd(&mut self, ptr: Self::Value, offset: Self::Value) -> Self::Value {
349 self.inbounds_gep(self.cx().type_i8(), ptr, &[offset])
350 }
351
352 fn trunc(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
353 fn unchecked_utrunc(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value {
356 self.trunc(val, dest_ty)
357 }
358 fn unchecked_strunc(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value {
361 self.trunc(val, dest_ty)
362 }
363
364 fn sext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
365 fn fptoui_sat(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
366 fn fptosi_sat(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
367 fn fptoui(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
368 fn fptosi(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
369 fn uitofp(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
370 fn sitofp(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
371 fn fptrunc(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
372 fn fpext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
373 fn ptrtoint(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
374 fn inttoptr(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
375 fn bitcast(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
376 fn intcast(&mut self, val: Self::Value, dest_ty: Self::Type, is_signed: bool) -> Self::Value;
377 fn pointercast(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
378
379 fn cast_float_to_int(
380 &mut self,
381 signed: bool,
382 x: Self::Value,
383 dest_ty: Self::Type,
384 ) -> Self::Value {
385 let in_ty = self.cx().val_ty(x);
386 let (float_ty, int_ty) = if self.cx().type_kind(dest_ty) == TypeKind::Vector
387 && self.cx().type_kind(in_ty) == TypeKind::Vector
388 {
389 (self.cx().element_type(in_ty), self.cx().element_type(dest_ty))
390 } else {
391 (in_ty, dest_ty)
392 };
393 assert_matches!(
394 self.cx().type_kind(float_ty),
395 TypeKind::Half | TypeKind::Float | TypeKind::Double | TypeKind::FP128
396 );
397 assert_eq!(self.cx().type_kind(int_ty), TypeKind::Integer);
398
399 if let Some(false) = self.cx().sess().opts.unstable_opts.saturating_float_casts {
400 return if signed { self.fptosi(x, dest_ty) } else { self.fptoui(x, dest_ty) };
401 }
402
403 if signed { self.fptosi_sat(x, dest_ty) } else { self.fptoui_sat(x, dest_ty) }
404 }
405
406 fn icmp(&mut self, op: IntPredicate, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
407 fn fcmp(&mut self, op: RealPredicate, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
408
409 fn three_way_compare(
411 &mut self,
412 ty: Ty<'tcx>,
413 lhs: Self::Value,
414 rhs: Self::Value,
415 ) -> Self::Value {
416 use std::cmp::Ordering;
421 let pred = |op| crate::base::bin_op_to_icmp_predicate(op, ty.is_signed());
422 if self.cx().sess().opts.optimize == OptLevel::No {
423 let is_gt = self.icmp(pred(mir::BinOp::Gt), lhs, rhs);
429 let gtext = self.zext(is_gt, self.type_i8());
430 let is_lt = self.icmp(pred(mir::BinOp::Lt), lhs, rhs);
431 let ltext = self.zext(is_lt, self.type_i8());
432 self.unchecked_ssub(gtext, ltext)
433 } else {
434 let is_lt = self.icmp(pred(mir::BinOp::Lt), lhs, rhs);
437 let is_ne = self.icmp(pred(mir::BinOp::Ne), lhs, rhs);
438 let ge = self.select(
439 is_ne,
440 self.cx().const_i8(Ordering::Greater as i8),
441 self.cx().const_i8(Ordering::Equal as i8),
442 );
443 self.select(is_lt, self.cx().const_i8(Ordering::Less as i8), ge)
444 }
445 }
446
447 fn memcpy(
448 &mut self,
449 dst: Self::Value,
450 dst_align: Align,
451 src: Self::Value,
452 src_align: Align,
453 size: Self::Value,
454 flags: MemFlags,
455 tt: Option<rustc_ast::expand::typetree::FncTree>,
456 );
457 fn memmove(
458 &mut self,
459 dst: Self::Value,
460 dst_align: Align,
461 src: Self::Value,
462 src_align: Align,
463 size: Self::Value,
464 flags: MemFlags,
465 );
466 fn memset(
467 &mut self,
468 ptr: Self::Value,
469 fill_byte: Self::Value,
470 size: Self::Value,
471 align: Align,
472 flags: MemFlags,
473 );
474
475 fn typed_place_copy(
482 &mut self,
483 dst: PlaceValue<Self::Value>,
484 src: PlaceValue<Self::Value>,
485 layout: TyAndLayout<'tcx>,
486 ) {
487 self.typed_place_copy_with_flags(dst, src, layout, MemFlags::empty());
488 }
489
490 fn typed_place_copy_with_flags(
491 &mut self,
492 dst: PlaceValue<Self::Value>,
493 src: PlaceValue<Self::Value>,
494 layout: TyAndLayout<'tcx>,
495 flags: MemFlags,
496 ) {
497 assert!(layout.is_sized(), "cannot typed-copy an unsigned type");
498 assert!(src.llextra.is_none(), "cannot directly copy from unsized values");
499 assert!(dst.llextra.is_none(), "cannot directly copy into unsized values");
500 if flags.contains(MemFlags::NONTEMPORAL) {
501 let ty = self.backend_type(layout);
503 let val = self.load_from_place(ty, src);
504 self.store_to_place_with_flags(val, dst, flags);
505 } else if self.sess().opts.optimize == OptLevel::No && self.is_backend_immediate(layout) {
506 let temp = self.load_operand(src.with_type(layout));
509 temp.val.store_with_flags(self, dst.with_type(layout), flags);
510 } else if !layout.is_zst() {
511 let bytes = self.const_usize(layout.size.bytes());
512 self.memcpy(dst.llval, dst.align, src.llval, src.align, bytes, flags, None);
513 }
514 }
515
516 fn typed_place_swap(
524 &mut self,
525 left: PlaceValue<Self::Value>,
526 right: PlaceValue<Self::Value>,
527 layout: TyAndLayout<'tcx>,
528 ) {
529 let mut temp = self.load_operand(left.with_type(layout));
530 if let OperandValue::Ref(..) = temp.val {
531 let alloca = PlaceRef::alloca(self, layout);
533 self.typed_place_copy(alloca.val, left, layout);
534 temp = self.load_operand(alloca);
535 }
536 self.typed_place_copy(left, right, layout);
537 temp.val.store(self, right.with_type(layout));
538 }
539
540 fn select(
541 &mut self,
542 cond: Self::Value,
543 then_val: Self::Value,
544 else_val: Self::Value,
545 ) -> Self::Value;
546
547 fn va_arg(&mut self, list: Self::Value, ty: Self::Type) -> Self::Value;
548 fn extract_element(&mut self, vec: Self::Value, idx: Self::Value) -> Self::Value;
549 fn vector_splat(&mut self, num_elts: usize, elt: Self::Value) -> Self::Value;
550 fn extract_value(&mut self, agg_val: Self::Value, idx: u64) -> Self::Value;
551 fn insert_value(&mut self, agg_val: Self::Value, elt: Self::Value, idx: u64) -> Self::Value;
552
553 fn set_personality_fn(&mut self, personality: Self::Function);
554
555 fn cleanup_landing_pad(&mut self, pers_fn: Self::Function) -> (Self::Value, Self::Value);
557 fn filter_landing_pad(&mut self, pers_fn: Self::Function);
558 fn resume(&mut self, exn0: Self::Value, exn1: Self::Value);
559
560 fn cleanup_pad(&mut self, parent: Option<Self::Value>, args: &[Self::Value]) -> Self::Funclet;
562 fn cleanup_ret(&mut self, funclet: &Self::Funclet, unwind: Option<Self::BasicBlock>);
563 fn catch_pad(&mut self, parent: Self::Value, args: &[Self::Value]) -> Self::Funclet;
564 fn catch_switch(
565 &mut self,
566 parent: Option<Self::Value>,
567 unwind: Option<Self::BasicBlock>,
568 handlers: &[Self::BasicBlock],
569 ) -> Self::Value;
570
571 fn atomic_cmpxchg(
572 &mut self,
573 dst: Self::Value,
574 cmp: Self::Value,
575 src: Self::Value,
576 order: AtomicOrdering,
577 failure_order: AtomicOrdering,
578 weak: bool,
579 ) -> (Self::Value, Self::Value);
580 fn atomic_rmw(
583 &mut self,
584 op: AtomicRmwBinOp,
585 dst: Self::Value,
586 src: Self::Value,
587 order: AtomicOrdering,
588 ret_ptr: bool,
589 ) -> Self::Value;
590 fn atomic_fence(&mut self, order: AtomicOrdering, scope: SynchronizationScope);
591 fn set_invariant_load(&mut self, load: Self::Value);
592
593 fn lifetime_start(&mut self, ptr: Self::Value, size: Size);
595
596 fn lifetime_end(&mut self, ptr: Self::Value, size: Size);
598
599 fn call(
621 &mut self,
622 llty: Self::Type,
623 fn_attrs: Option<&CodegenFnAttrs>,
624 fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
625 fn_val: Self::Value,
626 args: &[Self::Value],
627 funclet: Option<&Self::Funclet>,
628 instance: Option<Instance<'tcx>>,
629 ) -> Self::Value;
630
631 fn tail_call(
632 &mut self,
633 llty: Self::Type,
634 fn_attrs: Option<&CodegenFnAttrs>,
635 fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
636 llfn: Self::Value,
637 args: &[Self::Value],
638 funclet: Option<&Self::Funclet>,
639 instance: Option<Instance<'tcx>>,
640 );
641
642 fn zext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
643
644 fn apply_attrs_to_cleanup_callsite(&mut self, llret: Self::Value);
645}