1use std::assert_matches::assert_matches;
2use std::ops::Deref;
3
4use rustc_abi::{Align, Scalar, Size, WrappingRange};
5use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs;
6use rustc_middle::mir;
7use rustc_middle::ty::layout::{FnAbiOf, LayoutOf, TyAndLayout};
8use rustc_middle::ty::{AtomicOrdering, Instance, Ty};
9use rustc_session::config::OptLevel;
10use rustc_span::Span;
11use rustc_target::callconv::FnAbi;
12
13use super::abi::AbiBuilderMethods;
14use super::asm::AsmBuilderMethods;
15use super::consts::ConstCodegenMethods;
16use super::coverageinfo::CoverageInfoBuilderMethods;
17use super::debuginfo::DebugInfoBuilderMethods;
18use super::intrinsic::IntrinsicCallBuilderMethods;
19use super::misc::MiscCodegenMethods;
20use super::type_::{ArgAbiBuilderMethods, BaseTypeCodegenMethods, LayoutTypeCodegenMethods};
21use super::{CodegenMethods, StaticBuilderMethods};
22use crate::MemFlags;
23use crate::common::{AtomicRmwBinOp, IntPredicate, RealPredicate, SynchronizationScope, TypeKind};
24use crate::mir::operand::{OperandRef, OperandValue};
25use crate::mir::place::{PlaceRef, PlaceValue};
26
/// The kind of checked integer arithmetic requested from
/// `BuilderMethods::checked_binop` (which returns the result value
/// together with an overflow flag).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum OverflowOp {
    Add,
    Sub,
    Mul,
}
33
34pub trait BuilderMethods<'a, 'tcx>:
35 Sized
36 + LayoutOf<'tcx, LayoutOfResult = TyAndLayout<'tcx>>
37 + FnAbiOf<'tcx, FnAbiOfResult = &'tcx FnAbi<'tcx, Ty<'tcx>>>
38 + Deref<Target = Self::CodegenCx>
39 + CoverageInfoBuilderMethods<'tcx>
40 + DebugInfoBuilderMethods
41 + ArgAbiBuilderMethods<'tcx>
42 + AbiBuilderMethods
43 + IntrinsicCallBuilderMethods<'tcx>
44 + AsmBuilderMethods<'tcx>
45 + StaticBuilderMethods
46{
47 type CodegenCx: CodegenMethods<
51 'tcx,
52 Value = Self::Value,
53 Metadata = Self::Metadata,
54 Function = Self::Function,
55 BasicBlock = Self::BasicBlock,
56 Type = Self::Type,
57 Funclet = Self::Funclet,
58 DIScope = Self::DIScope,
59 DILocation = Self::DILocation,
60 DIVariable = Self::DIVariable,
61 >;
62
63 fn build(cx: &'a Self::CodegenCx, llbb: Self::BasicBlock) -> Self;
64
65 fn cx(&self) -> &Self::CodegenCx;
66 fn llbb(&self) -> Self::BasicBlock;
67
68 fn set_span(&mut self, span: Span);
69
70 fn append_block(cx: &'a Self::CodegenCx, llfn: Self::Function, name: &str) -> Self::BasicBlock;
72
73 fn append_sibling_block(&mut self, name: &str) -> Self::BasicBlock;
74
75 fn switch_to_block(&mut self, llbb: Self::BasicBlock);
76
77 fn ret_void(&mut self);
78 fn ret(&mut self, v: Self::Value);
79 fn br(&mut self, dest: Self::BasicBlock);
80 fn cond_br(
81 &mut self,
82 cond: Self::Value,
83 then_llbb: Self::BasicBlock,
84 else_llbb: Self::BasicBlock,
85 );
86
87 fn cond_br_with_expect(
94 &mut self,
95 mut cond: Self::Value,
96 then_llbb: Self::BasicBlock,
97 else_llbb: Self::BasicBlock,
98 expect: Option<bool>,
99 ) {
100 if let Some(expect) = expect {
101 cond = self.expect(cond, expect);
102 }
103 self.cond_br(cond, then_llbb, else_llbb)
104 }
105
106 fn switch(
107 &mut self,
108 v: Self::Value,
109 else_llbb: Self::BasicBlock,
110 cases: impl ExactSizeIterator<Item = (u128, Self::BasicBlock)>,
111 );
112
113 fn switch_with_weights(
117 &mut self,
118 v: Self::Value,
119 else_llbb: Self::BasicBlock,
120 _else_is_cold: bool,
121 cases: impl ExactSizeIterator<Item = (u128, Self::BasicBlock, bool)>,
122 ) {
123 self.switch(v, else_llbb, cases.map(|(val, bb, _)| (val, bb)))
124 }
125
126 fn invoke(
127 &mut self,
128 llty: Self::Type,
129 fn_attrs: Option<&CodegenFnAttrs>,
130 fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
131 llfn: Self::Value,
132 args: &[Self::Value],
133 then: Self::BasicBlock,
134 catch: Self::BasicBlock,
135 funclet: Option<&Self::Funclet>,
136 instance: Option<Instance<'tcx>>,
137 ) -> Self::Value;
138 fn unreachable(&mut self);
139
140 fn unreachable_nonterminator(&mut self) {
142 let const_true = self.cx().const_bool(true);
146 let poison_ptr = self.const_poison(self.cx().type_ptr());
147 self.store(const_true, poison_ptr, Align::ONE);
148 }
149
150 fn add(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
151 fn fadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
152 fn fadd_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
153 fn fadd_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
154 fn sub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
155 fn fsub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
156 fn fsub_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
157 fn fsub_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
158 fn mul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
159 fn fmul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
160 fn fmul_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
161 fn fmul_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
162 fn udiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
163 fn exactudiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
164 fn sdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
165 fn exactsdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
166 fn fdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
167 fn fdiv_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
168 fn fdiv_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
169 fn urem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
170 fn srem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
171 fn frem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
172 fn frem_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
173 fn frem_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
174 fn shl(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
177 fn lshr(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
181 fn ashr(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
185 fn unchecked_sadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
186 self.add(lhs, rhs)
187 }
188 fn unchecked_uadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
189 self.add(lhs, rhs)
190 }
191 fn unchecked_suadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
192 self.unchecked_sadd(lhs, rhs)
193 }
194 fn unchecked_ssub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
195 self.sub(lhs, rhs)
196 }
197 fn unchecked_usub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
198 self.sub(lhs, rhs)
199 }
200 fn unchecked_susub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
201 self.unchecked_ssub(lhs, rhs)
202 }
203 fn unchecked_smul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
204 self.mul(lhs, rhs)
205 }
206 fn unchecked_umul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
207 self.mul(lhs, rhs)
208 }
209 fn unchecked_sumul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
210 self.unchecked_smul(lhs, rhs)
213 }
214 fn and(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
215 fn or(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
216 fn or_disjoint(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
219 self.or(lhs, rhs)
220 }
221 fn xor(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
222 fn neg(&mut self, v: Self::Value) -> Self::Value;
223 fn fneg(&mut self, v: Self::Value) -> Self::Value;
224 fn not(&mut self, v: Self::Value) -> Self::Value;
225
226 fn checked_binop(
227 &mut self,
228 oop: OverflowOp,
229 ty: Ty<'tcx>,
230 lhs: Self::Value,
231 rhs: Self::Value,
232 ) -> (Self::Value, Self::Value);
233
234 fn from_immediate(&mut self, val: Self::Value) -> Self::Value;
235 fn to_immediate_scalar(&mut self, val: Self::Value, scalar: Scalar) -> Self::Value;
236
237 fn alloca(&mut self, size: Size, align: Align) -> Self::Value;
238
239 fn load(&mut self, ty: Self::Type, ptr: Self::Value, align: Align) -> Self::Value;
240 fn volatile_load(&mut self, ty: Self::Type, ptr: Self::Value) -> Self::Value;
241 fn atomic_load(
242 &mut self,
243 ty: Self::Type,
244 ptr: Self::Value,
245 order: AtomicOrdering,
246 size: Size,
247 ) -> Self::Value;
248 fn load_from_place(&mut self, ty: Self::Type, place: PlaceValue<Self::Value>) -> Self::Value {
249 assert_eq!(place.llextra, None);
250 self.load(ty, place.llval, place.align)
251 }
252 fn load_operand(&mut self, place: PlaceRef<'tcx, Self::Value>)
253 -> OperandRef<'tcx, Self::Value>;
254
255 fn write_operand_repeatedly(
257 &mut self,
258 elem: OperandRef<'tcx, Self::Value>,
259 count: u64,
260 dest: PlaceRef<'tcx, Self::Value>,
261 );
262
263 fn assume_integer_range(&mut self, imm: Self::Value, ty: Self::Type, range: WrappingRange) {
268 let WrappingRange { start, end } = range;
269
270 let shifted = if start == 0 {
274 imm
275 } else {
276 let low = self.const_uint_big(ty, start);
277 self.sub(imm, low)
278 };
279 let width = self.const_uint_big(ty, u128::wrapping_sub(end, start));
280 let cmp = self.icmp(IntPredicate::IntULE, shifted, width);
281 self.assume(cmp);
282 }
283
284 fn assume_nonnull(&mut self, val: Self::Value) {
288 let null = self.const_null(self.type_ptr());
293 let is_null = self.icmp(IntPredicate::IntNE, val, null);
294 self.assume(is_null);
295 }
296
297 fn range_metadata(&mut self, load: Self::Value, range: WrappingRange);
298 fn nonnull_metadata(&mut self, load: Self::Value);
299
300 fn store(&mut self, val: Self::Value, ptr: Self::Value, align: Align) -> Self::Value;
301 fn store_to_place(&mut self, val: Self::Value, place: PlaceValue<Self::Value>) -> Self::Value {
302 assert_eq!(place.llextra, None);
303 self.store(val, place.llval, place.align)
304 }
305 fn store_with_flags(
306 &mut self,
307 val: Self::Value,
308 ptr: Self::Value,
309 align: Align,
310 flags: MemFlags,
311 ) -> Self::Value;
312 fn store_to_place_with_flags(
313 &mut self,
314 val: Self::Value,
315 place: PlaceValue<Self::Value>,
316 flags: MemFlags,
317 ) -> Self::Value {
318 assert_eq!(place.llextra, None);
319 self.store_with_flags(val, place.llval, place.align, flags)
320 }
321 fn atomic_store(
322 &mut self,
323 val: Self::Value,
324 ptr: Self::Value,
325 order: AtomicOrdering,
326 size: Size,
327 );
328
329 fn gep(&mut self, ty: Self::Type, ptr: Self::Value, indices: &[Self::Value]) -> Self::Value;
330 fn inbounds_gep(
331 &mut self,
332 ty: Self::Type,
333 ptr: Self::Value,
334 indices: &[Self::Value],
335 ) -> Self::Value;
336 fn inbounds_nuw_gep(
337 &mut self,
338 ty: Self::Type,
339 ptr: Self::Value,
340 indices: &[Self::Value],
341 ) -> Self::Value {
342 self.inbounds_gep(ty, ptr, indices)
343 }
344 fn ptradd(&mut self, ptr: Self::Value, offset: Self::Value) -> Self::Value {
345 self.gep(self.cx().type_i8(), ptr, &[offset])
346 }
347 fn inbounds_ptradd(&mut self, ptr: Self::Value, offset: Self::Value) -> Self::Value {
348 self.inbounds_gep(self.cx().type_i8(), ptr, &[offset])
349 }
350
351 fn trunc(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
352 fn unchecked_utrunc(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value {
355 self.trunc(val, dest_ty)
356 }
357 fn unchecked_strunc(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value {
360 self.trunc(val, dest_ty)
361 }
362
363 fn sext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
364 fn fptoui_sat(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
365 fn fptosi_sat(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
366 fn fptoui(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
367 fn fptosi(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
368 fn uitofp(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
369 fn sitofp(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
370 fn fptrunc(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
371 fn fpext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
372 fn ptrtoint(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
373 fn inttoptr(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
374 fn bitcast(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
375 fn intcast(&mut self, val: Self::Value, dest_ty: Self::Type, is_signed: bool) -> Self::Value;
376 fn pointercast(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
377
378 fn cast_float_to_int(
379 &mut self,
380 signed: bool,
381 x: Self::Value,
382 dest_ty: Self::Type,
383 ) -> Self::Value {
384 let in_ty = self.cx().val_ty(x);
385 let (float_ty, int_ty) = if self.cx().type_kind(dest_ty) == TypeKind::Vector
386 && self.cx().type_kind(in_ty) == TypeKind::Vector
387 {
388 (self.cx().element_type(in_ty), self.cx().element_type(dest_ty))
389 } else {
390 (in_ty, dest_ty)
391 };
392 assert_matches!(
393 self.cx().type_kind(float_ty),
394 TypeKind::Half | TypeKind::Float | TypeKind::Double | TypeKind::FP128
395 );
396 assert_eq!(self.cx().type_kind(int_ty), TypeKind::Integer);
397
398 if let Some(false) = self.cx().sess().opts.unstable_opts.saturating_float_casts {
399 return if signed { self.fptosi(x, dest_ty) } else { self.fptoui(x, dest_ty) };
400 }
401
402 if signed { self.fptosi_sat(x, dest_ty) } else { self.fptoui_sat(x, dest_ty) }
403 }
404
405 fn icmp(&mut self, op: IntPredicate, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
406 fn fcmp(&mut self, op: RealPredicate, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
407
408 fn three_way_compare(
410 &mut self,
411 ty: Ty<'tcx>,
412 lhs: Self::Value,
413 rhs: Self::Value,
414 ) -> Self::Value {
415 use std::cmp::Ordering;
420 let pred = |op| crate::base::bin_op_to_icmp_predicate(op, ty.is_signed());
421 if self.cx().sess().opts.optimize == OptLevel::No {
422 let is_gt = self.icmp(pred(mir::BinOp::Gt), lhs, rhs);
428 let gtext = self.zext(is_gt, self.type_i8());
429 let is_lt = self.icmp(pred(mir::BinOp::Lt), lhs, rhs);
430 let ltext = self.zext(is_lt, self.type_i8());
431 self.unchecked_ssub(gtext, ltext)
432 } else {
433 let is_lt = self.icmp(pred(mir::BinOp::Lt), lhs, rhs);
436 let is_ne = self.icmp(pred(mir::BinOp::Ne), lhs, rhs);
437 let ge = self.select(
438 is_ne,
439 self.cx().const_i8(Ordering::Greater as i8),
440 self.cx().const_i8(Ordering::Equal as i8),
441 );
442 self.select(is_lt, self.cx().const_i8(Ordering::Less as i8), ge)
443 }
444 }
445
446 fn memcpy(
447 &mut self,
448 dst: Self::Value,
449 dst_align: Align,
450 src: Self::Value,
451 src_align: Align,
452 size: Self::Value,
453 flags: MemFlags,
454 tt: Option<rustc_ast::expand::typetree::FncTree>,
455 );
456 fn memmove(
457 &mut self,
458 dst: Self::Value,
459 dst_align: Align,
460 src: Self::Value,
461 src_align: Align,
462 size: Self::Value,
463 flags: MemFlags,
464 );
465 fn memset(
466 &mut self,
467 ptr: Self::Value,
468 fill_byte: Self::Value,
469 size: Self::Value,
470 align: Align,
471 flags: MemFlags,
472 );
473
474 fn typed_place_copy(
481 &mut self,
482 dst: PlaceValue<Self::Value>,
483 src: PlaceValue<Self::Value>,
484 layout: TyAndLayout<'tcx>,
485 ) {
486 self.typed_place_copy_with_flags(dst, src, layout, MemFlags::empty());
487 }
488
489 fn typed_place_copy_with_flags(
490 &mut self,
491 dst: PlaceValue<Self::Value>,
492 src: PlaceValue<Self::Value>,
493 layout: TyAndLayout<'tcx>,
494 flags: MemFlags,
495 ) {
496 assert!(layout.is_sized(), "cannot typed-copy an unsigned type");
497 assert!(src.llextra.is_none(), "cannot directly copy from unsized values");
498 assert!(dst.llextra.is_none(), "cannot directly copy into unsized values");
499 if flags.contains(MemFlags::NONTEMPORAL) {
500 let ty = self.backend_type(layout);
502 let val = self.load_from_place(ty, src);
503 self.store_to_place_with_flags(val, dst, flags);
504 } else if self.sess().opts.optimize == OptLevel::No && self.is_backend_immediate(layout) {
505 let temp = self.load_operand(src.with_type(layout));
508 temp.val.store_with_flags(self, dst.with_type(layout), flags);
509 } else if !layout.is_zst() {
510 let bytes = self.const_usize(layout.size.bytes());
511 self.memcpy(dst.llval, dst.align, src.llval, src.align, bytes, flags, None);
512 }
513 }
514
515 fn typed_place_swap(
523 &mut self,
524 left: PlaceValue<Self::Value>,
525 right: PlaceValue<Self::Value>,
526 layout: TyAndLayout<'tcx>,
527 ) {
528 let mut temp = self.load_operand(left.with_type(layout));
529 if let OperandValue::Ref(..) = temp.val {
530 let alloca = PlaceRef::alloca(self, layout);
532 self.typed_place_copy(alloca.val, left, layout);
533 temp = self.load_operand(alloca);
534 }
535 self.typed_place_copy(left, right, layout);
536 temp.val.store(self, right.with_type(layout));
537 }
538
539 fn select(
540 &mut self,
541 cond: Self::Value,
542 then_val: Self::Value,
543 else_val: Self::Value,
544 ) -> Self::Value;
545
546 fn va_arg(&mut self, list: Self::Value, ty: Self::Type) -> Self::Value;
547 fn extract_element(&mut self, vec: Self::Value, idx: Self::Value) -> Self::Value;
548 fn vector_splat(&mut self, num_elts: usize, elt: Self::Value) -> Self::Value;
549 fn extract_value(&mut self, agg_val: Self::Value, idx: u64) -> Self::Value;
550 fn insert_value(&mut self, agg_val: Self::Value, elt: Self::Value, idx: u64) -> Self::Value;
551
552 fn set_personality_fn(&mut self, personality: Self::Function);
553
554 fn cleanup_landing_pad(&mut self, pers_fn: Self::Function) -> (Self::Value, Self::Value);
556 fn filter_landing_pad(&mut self, pers_fn: Self::Function);
557 fn resume(&mut self, exn0: Self::Value, exn1: Self::Value);
558
559 fn cleanup_pad(&mut self, parent: Option<Self::Value>, args: &[Self::Value]) -> Self::Funclet;
561 fn cleanup_ret(&mut self, funclet: &Self::Funclet, unwind: Option<Self::BasicBlock>);
562 fn catch_pad(&mut self, parent: Self::Value, args: &[Self::Value]) -> Self::Funclet;
563 fn catch_switch(
564 &mut self,
565 parent: Option<Self::Value>,
566 unwind: Option<Self::BasicBlock>,
567 handlers: &[Self::BasicBlock],
568 ) -> Self::Value;
569
570 fn atomic_cmpxchg(
571 &mut self,
572 dst: Self::Value,
573 cmp: Self::Value,
574 src: Self::Value,
575 order: AtomicOrdering,
576 failure_order: AtomicOrdering,
577 weak: bool,
578 ) -> (Self::Value, Self::Value);
579 fn atomic_rmw(
582 &mut self,
583 op: AtomicRmwBinOp,
584 dst: Self::Value,
585 src: Self::Value,
586 order: AtomicOrdering,
587 ret_ptr: bool,
588 ) -> Self::Value;
589 fn atomic_fence(&mut self, order: AtomicOrdering, scope: SynchronizationScope);
590 fn set_invariant_load(&mut self, load: Self::Value);
591
592 fn lifetime_start(&mut self, ptr: Self::Value, size: Size);
594
595 fn lifetime_end(&mut self, ptr: Self::Value, size: Size);
597
598 fn call(
620 &mut self,
621 llty: Self::Type,
622 fn_attrs: Option<&CodegenFnAttrs>,
623 fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
624 fn_val: Self::Value,
625 args: &[Self::Value],
626 funclet: Option<&Self::Funclet>,
627 instance: Option<Instance<'tcx>>,
628 ) -> Self::Value;
629
630 fn tail_call(
631 &mut self,
632 llty: Self::Type,
633 fn_attrs: Option<&CodegenFnAttrs>,
634 fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
635 llfn: Self::Value,
636 args: &[Self::Value],
637 funclet: Option<&Self::Funclet>,
638 instance: Option<Instance<'tcx>>,
639 );
640
641 fn zext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
642
643 fn apply_attrs_to_cleanup_callsite(&mut self, llret: Self::Value);
644}