use std::assert_matches::assert_matches;
use std::ops::Deref;

use rustc_abi::{Align, BackendRepr, Scalar, Size, WrappingRange};
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs;
use rustc_middle::ty::layout::{FnAbiOf, LayoutOf, TyAndLayout};
use rustc_middle::ty::{Instance, Ty};
use rustc_session::config::OptLevel;
use rustc_span::Span;
use rustc_target::callconv::FnAbi;

use super::abi::AbiBuilderMethods;
use super::asm::AsmBuilderMethods;
use super::consts::ConstCodegenMethods;
use super::coverageinfo::CoverageInfoBuilderMethods;
use super::debuginfo::DebugInfoBuilderMethods;
use super::intrinsic::IntrinsicCallBuilderMethods;
use super::misc::MiscCodegenMethods;
use super::type_::{ArgAbiBuilderMethods, BaseTypeCodegenMethods, LayoutTypeCodegenMethods};
use super::{CodegenMethods, StaticBuilderMethods};
use crate::MemFlags;
use crate::common::{
    AtomicOrdering, AtomicRmwBinOp, IntPredicate, RealPredicate, SynchronizationScope, TypeKind,
};
use crate::mir::operand::{OperandRef, OperandValue};
use crate::mir::place::{PlaceRef, PlaceValue};

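/// The checked arithmetic operation to emit in [`BuilderMethods::checked_binop`].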
#[derive(Copy, Clone, Debug)]
pub enum OverflowOp {
    Add,
    Sub,
    Mul,
}

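/// The instruction-building methods through which this crate's SSA codegen
/// drives each backend. Every backend implements this trait for its IR
/// builder type (e.g. the LLVM builder in `rustc_codegen_llvm`).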
pub trait BuilderMethods<'a, 'tcx>:
    Sized
    + LayoutOf<'tcx, LayoutOfResult = TyAndLayout<'tcx>>
    + FnAbiOf<'tcx, FnAbiOfResult = &'tcx FnAbi<'tcx, Ty<'tcx>>>
    + Deref<Target = Self::CodegenCx>
    + CoverageInfoBuilderMethods<'tcx>
    + DebugInfoBuilderMethods
    + ArgAbiBuilderMethods<'tcx>
    + AbiBuilderMethods<'tcx>
    + IntrinsicCallBuilderMethods<'tcx>
    + AsmBuilderMethods<'tcx>
    + StaticBuilderMethods
{
    type CodegenCx: CodegenMethods<
        'tcx,
        Value = Self::Value,
        Metadata = Self::Metadata,
        Function = Self::Function,
        BasicBlock = Self::BasicBlock,
        Type = Self::Type,
        Funclet = Self::Funclet,
        DIScope = Self::DIScope,
        DILocation = Self::DILocation,
        DIVariable = Self::DIVariable,
    >;

    fn build(cx: &'a Self::CodegenCx, llbb: Self::BasicBlock) -> Self;

    fn cx(&self) -> &Self::CodegenCx;
    fn llbb(&self) -> Self::BasicBlock;

    fn set_span(&mut self, span: Span);

    fn append_block(cx: &'a Self::CodegenCx, llfn: Self::Function, name: &str) -> Self::BasicBlock;

    fn append_sibling_block(&mut self, name: &str) -> Self::BasicBlock;

    fn switch_to_block(&mut self, llbb: Self::BasicBlock);

    fn ret_void(&mut self);
    fn ret(&mut self, v: Self::Value);
    fn br(&mut self, dest: Self::BasicBlock);
    fn cond_br(
        &mut self,
        cond: Self::Value,
        then_llbb: Self::BasicBlock,
        else_llbb: Self::BasicBlock,
    );

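    /// Emits a conditional branch like [`Self::cond_br`], but when `expect`
    /// is `Some`, first wraps `cond` in an `expect` intrinsic call so the
    /// backend can weight the branch accordingly.
    ///
    /// A sketch of typical use from codegen (value and block names are
    /// illustrative):
    ///
    /// ```ignore (illustrative)
    /// let in_bounds = bx.icmp(IntPredicate::IntULT, idx, len);
    /// bx.cond_br_with_expect(in_bounds, ok_bb, panic_bb, Some(true));
    /// ```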
    fn cond_br_with_expect(
        &mut self,
        mut cond: Self::Value,
        then_llbb: Self::BasicBlock,
        else_llbb: Self::BasicBlock,
        expect: Option<bool>,
    ) {
        if let Some(expect) = expect {
            cond = self.expect(cond, expect);
        }
        self.cond_br(cond, then_llbb, else_llbb)
    }

    fn switch(
        &mut self,
        v: Self::Value,
        else_llbb: Self::BasicBlock,
        cases: impl ExactSizeIterator<Item = (u128, Self::BasicBlock)>,
    );
    fn invoke(
        &mut self,
        llty: Self::Type,
        fn_attrs: Option<&CodegenFnAttrs>,
        fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
        llfn: Self::Value,
        args: &[Self::Value],
        then: Self::BasicBlock,
        catch: Self::BasicBlock,
        funclet: Option<&Self::Funclet>,
        instance: Option<Instance<'tcx>>,
    ) -> Self::Value;
    fn unreachable(&mut self);

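    // Arithmetic. The `*_fast` variants permit the backend's full set of
    // value-changing fast-math assumptions, while the `*_algebraic` variants
    // are limited to algebraic rewrites such as reassociation; either is only
    // sound when the operation being lowered explicitly opted into it.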
    fn add(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fadd_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fadd_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn sub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fsub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fsub_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fsub_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn mul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fmul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fmul_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fmul_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn udiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn exactudiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn sdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn exactsdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fdiv_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fdiv_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn urem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn srem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn frem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn frem_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn frem_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
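    /// Generates a left shift. Both operands must have the same size. The
    /// right operand must be interpreted as unsigned and can be assumed to be
    /// less than the bit-width of the left operand.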
    fn shl(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
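    /// Generates a logical right shift, with the same operand requirements as
    /// [`Self::shl`].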
    fn lshr(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
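    /// Generates an arithmetic right shift, with the same operand
    /// requirements as [`Self::shl`].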
    fn ashr(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
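    // The `unchecked_*` operations promise the backend that the result cannot
    // overflow, making overflow immediate UB; the defaults conservatively fall
    // back to the plain (wrapping) operations. The `su` variants assert that
    // the result is in range under both the signed and the unsigned
    // interpretation, so a backend may emit either flavor of no-wrap fact.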
    fn unchecked_sadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.add(lhs, rhs)
    }
    fn unchecked_uadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.add(lhs, rhs)
    }
    fn unchecked_suadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.unchecked_sadd(lhs, rhs)
    }
    fn unchecked_ssub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.sub(lhs, rhs)
    }
    fn unchecked_usub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.sub(lhs, rhs)
    }
    fn unchecked_susub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.unchecked_ssub(lhs, rhs)
    }
    fn unchecked_smul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.mul(lhs, rhs)
    }
    fn unchecked_umul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.mul(lhs, rhs)
    }
    fn unchecked_sumul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.unchecked_smul(lhs, rhs)
    }
    fn and(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn or(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
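    /// Defaults to [`Self::or`], but guarantees `(lhs & rhs) == 0`, so some
    /// backends can emit something more helpful for that.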
    fn or_disjoint(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.or(lhs, rhs)
    }
    fn xor(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn neg(&mut self, v: Self::Value) -> Self::Value;
    fn fneg(&mut self, v: Self::Value) -> Self::Value;
    fn not(&mut self, v: Self::Value) -> Self::Value;

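    /// Emits `lhs <oop> rhs` with overflow detection, returning the wrapping
    /// result together with a boolean flag that is true iff the operation
    /// overflowed.
    ///
    /// A sketch of typical use (value and block names are illustrative):
    ///
    /// ```ignore (illustrative)
    /// let (val, overflowed) = bx.checked_binop(OverflowOp::Add, int_ty, a, b);
    /// bx.cond_br_with_expect(overflowed, panic_bb, ok_bb, Some(false));
    /// ```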
    fn checked_binop(
        &mut self,
        oop: OverflowOp,
        ty: Ty<'_>,
        lhs: Self::Value,
        rhs: Self::Value,
    ) -> (Self::Value, Self::Value);

    fn from_immediate(&mut self, val: Self::Value) -> Self::Value;
    fn to_immediate(&mut self, val: Self::Value, layout: TyAndLayout<'_>) -> Self::Value {
        if let BackendRepr::Scalar(scalar) = layout.backend_repr {
            self.to_immediate_scalar(val, scalar)
        } else {
            val
        }
    }
    fn to_immediate_scalar(&mut self, val: Self::Value, scalar: Scalar) -> Self::Value;

    fn alloca(&mut self, size: Size, align: Align) -> Self::Value;
    fn dynamic_alloca(&mut self, size: Self::Value, align: Align) -> Self::Value;

    fn load(&mut self, ty: Self::Type, ptr: Self::Value, align: Align) -> Self::Value;
    fn volatile_load(&mut self, ty: Self::Type, ptr: Self::Value) -> Self::Value;
    fn atomic_load(
        &mut self,
        ty: Self::Type,
        ptr: Self::Value,
        order: AtomicOrdering,
        size: Size,
    ) -> Self::Value;
    fn load_from_place(&mut self, ty: Self::Type, place: PlaceValue<Self::Value>) -> Self::Value {
        assert_eq!(place.llextra, None);
        self.load(ty, place.llval, place.align)
    }
    fn load_operand(&mut self, place: PlaceRef<'tcx, Self::Value>)
    -> OperandRef<'tcx, Self::Value>;

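    /// Called for `Rvalue::Repeat` when the element is neither a ZST nor
    /// optimizable using `memset`.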
    fn write_operand_repeatedly(
        &mut self,
        elem: OperandRef<'tcx, Self::Value>,
        count: u64,
        dest: PlaceRef<'tcx, Self::Value>,
    );

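    /// Emits an `assume` that the integer value `imm` of type `ty` is
    /// contained in `range`.
    ///
    /// This *always* emits the assumption, so you probably want to check the
    /// optimization level and `Scalar::is_always_valid` before calling it.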
    fn assume_integer_range(&mut self, imm: Self::Value, ty: Self::Type, range: WrappingRange) {
        let WrappingRange { start, end } = range;

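        // Shift the range so it starts at zero. Containment in a (possibly
        // wrapping) range then reduces to a single unsigned comparison
        // against the range's width.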
        let shifted = if start == 0 {
            imm
        } else {
            let low = self.const_uint_big(ty, start);
            self.sub(imm, low)
        };
        let width = self.const_uint_big(ty, u128::wrapping_sub(end, start));
        let cmp = self.icmp(IntPredicate::IntULE, shifted, width);
        self.assume(cmp);
    }

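    /// Emits an `assume` that the pointer `val` is non-null.
    ///
    /// As with [`Self::assume_integer_range`], this always emits the
    /// assumption, so you may want to check the optimization level first.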
    fn assume_nonnull(&mut self, val: Self::Value) {
        let null = self.const_null(self.type_ptr());
        let is_not_null = self.icmp(IntPredicate::IntNE, val, null);
        self.assume(is_not_null);
    }

    fn range_metadata(&mut self, load: Self::Value, range: WrappingRange);
    fn nonnull_metadata(&mut self, load: Self::Value);

    fn store(&mut self, val: Self::Value, ptr: Self::Value, align: Align) -> Self::Value;
    fn store_to_place(&mut self, val: Self::Value, place: PlaceValue<Self::Value>) -> Self::Value {
        assert_eq!(place.llextra, None);
        self.store(val, place.llval, place.align)
    }
    fn store_with_flags(
        &mut self,
        val: Self::Value,
        ptr: Self::Value,
        align: Align,
        flags: MemFlags,
    ) -> Self::Value;
    fn store_to_place_with_flags(
        &mut self,
        val: Self::Value,
        place: PlaceValue<Self::Value>,
        flags: MemFlags,
    ) -> Self::Value {
        assert_eq!(place.llextra, None);
        self.store_with_flags(val, place.llval, place.align, flags)
    }
    fn atomic_store(
        &mut self,
        val: Self::Value,
        ptr: Self::Value,
        order: AtomicOrdering,
        size: Size,
    );

    fn gep(&mut self, ty: Self::Type, ptr: Self::Value, indices: &[Self::Value]) -> Self::Value;
    fn inbounds_gep(
        &mut self,
        ty: Self::Type,
        ptr: Self::Value,
        indices: &[Self::Value],
    ) -> Self::Value;
    fn ptradd(&mut self, ptr: Self::Value, offset: Self::Value) -> Self::Value {
        self.gep(self.cx().type_i8(), ptr, &[offset])
    }
    fn inbounds_ptradd(&mut self, ptr: Self::Value, offset: Self::Value) -> Self::Value {
        self.inbounds_gep(self.cx().type_i8(), ptr, &[offset])
    }

    fn trunc(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn sext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn fptoui_sat(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn fptosi_sat(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn fptoui(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn fptosi(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn uitofp(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn sitofp(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn fptrunc(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn fpext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn ptrtoint(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn inttoptr(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn bitcast(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn intcast(&mut self, val: Self::Value, dest_ty: Self::Type, is_signed: bool) -> Self::Value;
    fn pointercast(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;

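    /// Converts a float to an int, saturating at the int's minimum and
    /// maximum for out-of-range inputs (and mapping NaN to zero), as `as`
    /// casts require. The unstable `-Zsaturating-float-casts=false` flag opts
    /// back into the plain, UB-on-out-of-range conversions.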
    fn cast_float_to_int(
        &mut self,
        signed: bool,
        x: Self::Value,
        dest_ty: Self::Type,
    ) -> Self::Value {
        let in_ty = self.cx().val_ty(x);
        let (float_ty, int_ty) = if self.cx().type_kind(dest_ty) == TypeKind::Vector
            && self.cx().type_kind(in_ty) == TypeKind::Vector
        {
            (self.cx().element_type(in_ty), self.cx().element_type(dest_ty))
        } else {
            (in_ty, dest_ty)
        };
        assert_matches!(
            self.cx().type_kind(float_ty),
            TypeKind::Half | TypeKind::Float | TypeKind::Double | TypeKind::FP128
        );
        assert_eq!(self.cx().type_kind(int_ty), TypeKind::Integer);

        if let Some(false) = self.cx().sess().opts.unstable_opts.saturating_float_casts {
            return if signed { self.fptosi(x, dest_ty) } else { self.fptoui(x, dest_ty) };
        }

        if signed { self.fptosi_sat(x, dest_ty) } else { self.fptoui_sat(x, dest_ty) }
    }

    fn icmp(&mut self, op: IntPredicate, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fcmp(&mut self, op: RealPredicate, lhs: Self::Value, rhs: Self::Value) -> Self::Value;

    fn memcpy(
        &mut self,
        dst: Self::Value,
        dst_align: Align,
        src: Self::Value,
        src_align: Align,
        size: Self::Value,
        flags: MemFlags,
    );
    fn memmove(
        &mut self,
        dst: Self::Value,
        dst_align: Align,
        src: Self::Value,
        src_align: Align,
        size: Self::Value,
        flags: MemFlags,
    );
    fn memset(
        &mut self,
        ptr: Self::Value,
        fill_byte: Self::Value,
        size: Self::Value,
        align: Align,
        flags: MemFlags,
    );

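    /// *Typed* copy for non-overlapping places.
    ///
    /// Has a default implementation in terms of `memcpy`, but specific
    /// backends may be able to use better instructions for types with a
    /// particular layout, such as a single scalar.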
    fn typed_place_copy(
        &mut self,
        dst: PlaceValue<Self::Value>,
        src: PlaceValue<Self::Value>,
        layout: TyAndLayout<'tcx>,
    ) {
        self.typed_place_copy_with_flags(dst, src, layout, MemFlags::empty());
    }

    fn typed_place_copy_with_flags(
        &mut self,
        dst: PlaceValue<Self::Value>,
        src: PlaceValue<Self::Value>,
        layout: TyAndLayout<'tcx>,
        flags: MemFlags,
    ) {
        assert!(layout.is_sized(), "cannot typed-copy an unsized type");
        assert!(src.llextra.is_none(), "cannot directly copy from unsized values");
        assert!(dst.llextra.is_none(), "cannot directly copy into unsized values");
        if flags.contains(MemFlags::NONTEMPORAL) {
            // There is no nontemporal `memcpy`, so lower to a load and a
            // nontemporal store instead.
            let ty = self.backend_type(layout);
            let val = self.load_from_place(ty, src);
            self.store_to_place_with_flags(val, dst, flags);
        } else if self.sess().opts.optimize == OptLevel::No && self.is_backend_immediate(layout) {
            // If we're not optimizing, the aliasing information from `memcpy`
            // isn't useful, so just load-store the value for smaller code.
            let temp = self.load_operand(src.with_type(layout));
            temp.val.store_with_flags(self, dst.with_type(layout), flags);
        } else if !layout.is_zst() {
            let bytes = self.const_usize(layout.size.bytes());
            self.memcpy(dst.llval, dst.align, src.llval, src.align, bytes, flags);
        }
    }

    fn typed_place_swap(
        &mut self,
        left: PlaceValue<Self::Value>,
        right: PlaceValue<Self::Value>,
        layout: TyAndLayout<'tcx>,
    ) {
        let mut temp = self.load_operand(left.with_type(layout));
        if let OperandValue::Ref(..) = temp.val {
            // The SSA value isn't stand-alone, so copy it out to a fresh
            // alloca before `left` is overwritten below.
            let alloca = PlaceRef::alloca(self, layout);
            self.typed_place_copy(alloca.val, left, layout);
            temp = self.load_operand(alloca);
        }
        self.typed_place_copy(left, right, layout);
        temp.val.store(self, right.with_type(layout));
    }

    fn select(
        &mut self,
        cond: Self::Value,
        then_val: Self::Value,
        else_val: Self::Value,
    ) -> Self::Value;

    fn va_arg(&mut self, list: Self::Value, ty: Self::Type) -> Self::Value;
    fn extract_element(&mut self, vec: Self::Value, idx: Self::Value) -> Self::Value;
    fn vector_splat(&mut self, num_elts: usize, elt: Self::Value) -> Self::Value;
    fn extract_value(&mut self, agg_val: Self::Value, idx: u64) -> Self::Value;
    fn insert_value(&mut self, agg_val: Self::Value, elt: Self::Value, idx: u64) -> Self::Value;

    fn set_personality_fn(&mut self, personality: Self::Value);

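    // These are used by everyone except msvc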
    fn cleanup_landing_pad(&mut self, pers_fn: Self::Value) -> (Self::Value, Self::Value);
    fn filter_landing_pad(&mut self, pers_fn: Self::Value) -> (Self::Value, Self::Value);
    fn resume(&mut self, exn0: Self::Value, exn1: Self::Value);

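    // These are used only by msvc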
    fn cleanup_pad(&mut self, parent: Option<Self::Value>, args: &[Self::Value]) -> Self::Funclet;
    fn cleanup_ret(&mut self, funclet: &Self::Funclet, unwind: Option<Self::BasicBlock>);
    fn catch_pad(&mut self, parent: Self::Value, args: &[Self::Value]) -> Self::Funclet;
    fn catch_switch(
        &mut self,
        parent: Option<Self::Value>,
        unwind: Option<Self::BasicBlock>,
        handlers: &[Self::BasicBlock],
    ) -> Self::Value;

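    /// Emits a compare-and-swap on `dst`: if the current value equals `cmp`,
    /// it is replaced by `src`. Returns the previously-loaded value together
    /// with a boolean indicating whether the swap succeeded; a `weak`
    /// exchange may fail spuriously and is meant to be retried in a loop.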
    fn atomic_cmpxchg(
        &mut self,
        dst: Self::Value,
        cmp: Self::Value,
        src: Self::Value,
        order: AtomicOrdering,
        failure_order: AtomicOrdering,
        weak: bool,
    ) -> (Self::Value, Self::Value);
    fn atomic_rmw(
        &mut self,
        op: AtomicRmwBinOp,
        dst: Self::Value,
        src: Self::Value,
        order: AtomicOrdering,
    ) -> Self::Value;
    fn atomic_fence(&mut self, order: AtomicOrdering, scope: SynchronizationScope);
    fn set_invariant_load(&mut self, load: Self::Value);

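    /// Called for `StorageLive`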
    fn lifetime_start(&mut self, ptr: Self::Value, size: Size);

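    /// Called for `StorageDead`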
    fn lifetime_end(&mut self, ptr: Self::Value, size: Size);

    fn call(
        &mut self,
        llty: Self::Type,
        fn_attrs: Option<&CodegenFnAttrs>,
        fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
        llfn: Self::Value,
        args: &[Self::Value],
        funclet: Option<&Self::Funclet>,
        instance: Option<Instance<'tcx>>,
    ) -> Self::Value;
    fn zext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;

    fn apply_attrs_to_cleanup_callsite(&mut self, llret: Self::Value);
}