1use std::borrow::{Borrow, Cow};
2use std::ops::Deref;
3use std::{iter, ptr};
4
5pub(crate) mod autodiff;
6
7use libc::{c_char, c_uint};
8use rustc_abi as abi;
9use rustc_abi::{Align, Size, WrappingRange};
10use rustc_codegen_ssa::MemFlags;
11use rustc_codegen_ssa::common::{IntPredicate, RealPredicate, SynchronizationScope, TypeKind};
12use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
13use rustc_codegen_ssa::mir::place::PlaceRef;
14use rustc_codegen_ssa::traits::*;
15use rustc_data_structures::small_c_str::SmallCStr;
16use rustc_hir::def_id::DefId;
17use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs;
18use rustc_middle::ty::layout::{
19 FnAbiError, FnAbiOfHelpers, FnAbiRequest, HasTypingEnv, LayoutError, LayoutOfHelpers,
20 TyAndLayout,
21};
22use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
23use rustc_sanitizers::{cfi, kcfi};
24use rustc_session::config::OptLevel;
25use rustc_span::Span;
26use rustc_target::callconv::FnAbi;
27use rustc_target::spec::{HasTargetSpec, SanitizerSet, Target};
28use smallvec::SmallVec;
29use tracing::{debug, instrument};
30
31use crate::abi::FnAbiLlvmExt;
32use crate::attributes;
33use crate::common::Funclet;
34use crate::context::{CodegenCx, SimpleCx};
35use crate::llvm::{self, AtomicOrdering, AtomicRmwBinOp, BasicBlock, False, True};
36use crate::type_::Type;
37use crate::type_of::LayoutLlvmExt;
38use crate::value::Value;
39
40#[must_use]
41pub(crate) struct GenericBuilder<'a, 'll, CX: Borrow<SimpleCx<'ll>>> {
42 pub llbuilder: &'ll mut llvm::Builder<'ll>,
43 pub cx: &'a CX,
44}
45
46pub(crate) type SBuilder<'a, 'll> = GenericBuilder<'a, 'll, SimpleCx<'ll>>;
47pub(crate) type Builder<'a, 'll, 'tcx> = GenericBuilder<'a, 'll, CodegenCx<'ll, 'tcx>>;
48
49impl<'a, 'll, CX: Borrow<SimpleCx<'ll>>> Drop for GenericBuilder<'a, 'll, CX> {
50 fn drop(&mut self) {
51 unsafe {
52 llvm::LLVMDisposeBuilder(&mut *(self.llbuilder as *mut _));
53 }
54 }
55}
56
57impl<'a, 'll> SBuilder<'a, 'll> {
58 fn call(
59 &mut self,
60 llty: &'ll Type,
61 llfn: &'ll Value,
62 args: &[&'ll Value],
63 funclet: Option<&Funclet<'ll>>,
64 ) -> &'ll Value {
65 debug!("call {:?} with args ({:?})", llfn, args);
66
67 let args = self.check_call("call", llty, llfn, args);
68 let funclet_bundle = funclet.map(|funclet| funclet.bundle());
69 let mut bundles: SmallVec<[_; 2]> = SmallVec::new();
70 if let Some(funclet_bundle) = funclet_bundle {
71 bundles.push(funclet_bundle);
72 }
73
74 let call = unsafe {
75 llvm::LLVMBuildCallWithOperandBundles(
76 self.llbuilder,
77 llty,
78 llfn,
79 args.as_ptr() as *const &llvm::Value,
80 args.len() as c_uint,
81 bundles.as_ptr(),
82 bundles.len() as c_uint,
83 c"".as_ptr(),
84 )
85 };
86 call
87 }
88
89 fn with_scx(scx: &'a SimpleCx<'ll>) -> Self {
90 let llbuilder = unsafe { llvm::LLVMCreateBuilderInContext(scx.llcx) };
92 SBuilder { llbuilder, cx: scx }
93 }
94}
95impl<'a, 'll, CX: Borrow<SimpleCx<'ll>>> GenericBuilder<'a, 'll, CX> {
96 pub(crate) fn bitcast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
97 unsafe { llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty, UNNAMED) }
98 }
99
100 fn ret_void(&mut self) {
101 unsafe {
102 llvm::LLVMBuildRetVoid(self.llbuilder);
103 }
104 }
105
106 fn ret(&mut self, v: &'ll Value) {
107 unsafe {
108 llvm::LLVMBuildRet(self.llbuilder, v);
109 }
110 }
111}
112impl<'a, 'll> SBuilder<'a, 'll> {
113 fn build(cx: &'a SimpleCx<'ll>, llbb: &'ll BasicBlock) -> SBuilder<'a, 'll> {
114 let bx = SBuilder::with_scx(cx);
115 unsafe {
116 llvm::LLVMPositionBuilderAtEnd(bx.llbuilder, llbb);
117 }
118 bx
119 }
120
121 fn check_call<'b>(
122 &mut self,
123 typ: &str,
124 fn_ty: &'ll Type,
125 llfn: &'ll Value,
126 args: &'b [&'ll Value],
127 ) -> Cow<'b, [&'ll Value]> {
128 assert!(
129 self.cx.type_kind(fn_ty) == TypeKind::Function,
130 "builder::{typ} not passed a function, but {fn_ty:?}"
131 );
132
133 let param_tys = self.cx.func_params_types(fn_ty);
134
135 let all_args_match = iter::zip(&param_tys, args.iter().map(|&v| self.cx.val_ty(v)))
136 .all(|(expected_ty, actual_ty)| *expected_ty == actual_ty);
137
138 if all_args_match {
139 return Cow::Borrowed(args);
140 }
141
142 let casted_args: Vec<_> = iter::zip(param_tys, args)
143 .enumerate()
144 .map(|(i, (expected_ty, &actual_val))| {
145 let actual_ty = self.cx.val_ty(actual_val);
146 if expected_ty != actual_ty {
147 debug!(
148 "type mismatch in function call of {:?}. \
149 Expected {:?} for param {}, got {:?}; injecting bitcast",
150 llfn, expected_ty, i, actual_ty
151 );
152 self.bitcast(actual_val, expected_ty)
153 } else {
154 actual_val
155 }
156 })
157 .collect();
158
159 Cow::Owned(casted_args)
160 }
161}
162
163const UNNAMED: *const c_char = c"".as_ptr();
167
168impl<'ll, 'tcx> BackendTypes for Builder<'_, 'll, 'tcx> {
169 type Value = <CodegenCx<'ll, 'tcx> as BackendTypes>::Value;
170 type Metadata = <CodegenCx<'ll, 'tcx> as BackendTypes>::Metadata;
171 type Function = <CodegenCx<'ll, 'tcx> as BackendTypes>::Function;
172 type BasicBlock = <CodegenCx<'ll, 'tcx> as BackendTypes>::BasicBlock;
173 type Type = <CodegenCx<'ll, 'tcx> as BackendTypes>::Type;
174 type Funclet = <CodegenCx<'ll, 'tcx> as BackendTypes>::Funclet;
175
176 type DIScope = <CodegenCx<'ll, 'tcx> as BackendTypes>::DIScope;
177 type DILocation = <CodegenCx<'ll, 'tcx> as BackendTypes>::DILocation;
178 type DIVariable = <CodegenCx<'ll, 'tcx> as BackendTypes>::DIVariable;
179}
180
181impl abi::HasDataLayout for Builder<'_, '_, '_> {
182 fn data_layout(&self) -> &abi::TargetDataLayout {
183 self.cx.data_layout()
184 }
185}
186
187impl<'tcx> ty::layout::HasTyCtxt<'tcx> for Builder<'_, '_, 'tcx> {
188 #[inline]
189 fn tcx(&self) -> TyCtxt<'tcx> {
190 self.cx.tcx
191 }
192}
193
194impl<'tcx> ty::layout::HasTypingEnv<'tcx> for Builder<'_, '_, 'tcx> {
195 fn typing_env(&self) -> ty::TypingEnv<'tcx> {
196 self.cx.typing_env()
197 }
198}
199
200impl HasTargetSpec for Builder<'_, '_, '_> {
201 #[inline]
202 fn target_spec(&self) -> &Target {
203 self.cx.target_spec()
204 }
205}
206
207impl<'tcx> LayoutOfHelpers<'tcx> for Builder<'_, '_, 'tcx> {
208 #[inline]
209 fn handle_layout_err(&self, err: LayoutError<'tcx>, span: Span, ty: Ty<'tcx>) -> ! {
210 self.cx.handle_layout_err(err, span, ty)
211 }
212}
213
214impl<'tcx> FnAbiOfHelpers<'tcx> for Builder<'_, '_, 'tcx> {
215 #[inline]
216 fn handle_fn_abi_err(
217 &self,
218 err: FnAbiError<'tcx>,
219 span: Span,
220 fn_abi_request: FnAbiRequest<'tcx>,
221 ) -> ! {
222 self.cx.handle_fn_abi_err(err, span, fn_abi_request)
223 }
224}
225
226impl<'ll, 'tcx> Deref for Builder<'_, 'll, 'tcx> {
227 type Target = CodegenCx<'ll, 'tcx>;
228
229 #[inline]
230 fn deref(&self) -> &Self::Target {
231 self.cx
232 }
233}
234
235macro_rules! math_builder_methods {
236 ($($name:ident($($arg:ident),*) => $llvm_capi:ident),+ $(,)?) => {
237 $(fn $name(&mut self, $($arg: &'ll Value),*) -> &'ll Value {
238 unsafe {
239 llvm::$llvm_capi(self.llbuilder, $($arg,)* UNNAMED)
240 }
241 })+
242 }
243}
244
245macro_rules! set_math_builder_methods {
246 ($($name:ident($($arg:ident),*) => ($llvm_capi:ident, $llvm_set_math:ident)),+ $(,)?) => {
247 $(fn $name(&mut self, $($arg: &'ll Value),*) -> &'ll Value {
248 unsafe {
249 let instr = llvm::$llvm_capi(self.llbuilder, $($arg,)* UNNAMED);
250 llvm::$llvm_set_math(instr);
251 instr
252 }
253 })+
254 }
255}
256
257impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
258 type CodegenCx = CodegenCx<'ll, 'tcx>;
259
260 fn build(cx: &'a CodegenCx<'ll, 'tcx>, llbb: &'ll BasicBlock) -> Self {
261 let bx = Builder::with_cx(cx);
262 unsafe {
263 llvm::LLVMPositionBuilderAtEnd(bx.llbuilder, llbb);
264 }
265 bx
266 }
267
268 fn cx(&self) -> &CodegenCx<'ll, 'tcx> {
269 self.cx
270 }
271
272 fn llbb(&self) -> &'ll BasicBlock {
273 unsafe { llvm::LLVMGetInsertBlock(self.llbuilder) }
274 }
275
276 fn set_span(&mut self, _span: Span) {}
277
278 fn append_block(cx: &'a CodegenCx<'ll, 'tcx>, llfn: &'ll Value, name: &str) -> &'ll BasicBlock {
279 unsafe {
280 let name = SmallCStr::new(name);
281 llvm::LLVMAppendBasicBlockInContext(cx.llcx, llfn, name.as_ptr())
282 }
283 }
284
285 fn append_sibling_block(&mut self, name: &str) -> &'ll BasicBlock {
286 Self::append_block(self.cx, self.llfn(), name)
287 }
288
289 fn switch_to_block(&mut self, llbb: Self::BasicBlock) {
290 *self = Self::build(self.cx, llbb)
291 }
292
293 fn ret_void(&mut self) {
294 unsafe {
295 llvm::LLVMBuildRetVoid(self.llbuilder);
296 }
297 }
298
299 fn ret(&mut self, v: &'ll Value) {
300 unsafe {
301 llvm::LLVMBuildRet(self.llbuilder, v);
302 }
303 }
304
305 fn br(&mut self, dest: &'ll BasicBlock) {
306 unsafe {
307 llvm::LLVMBuildBr(self.llbuilder, dest);
308 }
309 }
310
311 fn cond_br(
312 &mut self,
313 cond: &'ll Value,
314 then_llbb: &'ll BasicBlock,
315 else_llbb: &'ll BasicBlock,
316 ) {
317 unsafe {
318 llvm::LLVMBuildCondBr(self.llbuilder, cond, then_llbb, else_llbb);
319 }
320 }
321
322 fn switch(
323 &mut self,
324 v: &'ll Value,
325 else_llbb: &'ll BasicBlock,
326 cases: impl ExactSizeIterator<Item = (u128, &'ll BasicBlock)>,
327 ) {
328 let switch =
329 unsafe { llvm::LLVMBuildSwitch(self.llbuilder, v, else_llbb, cases.len() as c_uint) };
330 for (on_val, dest) in cases {
331 let on_val = self.const_uint_big(self.val_ty(v), on_val);
332 unsafe { llvm::LLVMAddCase(switch, on_val, dest) }
333 }
334 }
335
336 fn invoke(
337 &mut self,
338 llty: &'ll Type,
339 fn_attrs: Option<&CodegenFnAttrs>,
340 fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
341 llfn: &'ll Value,
342 args: &[&'ll Value],
343 then: &'ll BasicBlock,
344 catch: &'ll BasicBlock,
345 funclet: Option<&Funclet<'ll>>,
346 instance: Option<Instance<'tcx>>,
347 ) -> &'ll Value {
348 debug!("invoke {:?} with args ({:?})", llfn, args);
349
350 let args = self.check_call("invoke", llty, llfn, args);
351 let funclet_bundle = funclet.map(|funclet| funclet.bundle());
352 let mut bundles: SmallVec<[_; 2]> = SmallVec::new();
353 if let Some(funclet_bundle) = funclet_bundle {
354 bundles.push(funclet_bundle);
355 }
356
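// Emit CFI pointer type membership test for indirect calls.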
357 self.cfi_type_test(fn_attrs, fn_abi, instance, llfn);
359
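// Emit KCFI operand bundle for indirect calls.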
360 let kcfi_bundle = self.kcfi_operand_bundle(fn_attrs, fn_abi, instance, llfn);
362 if let Some(kcfi_bundle) = kcfi_bundle.as_deref() {
363 bundles.push(kcfi_bundle);
364 }
365
366 let invoke = unsafe {
367 llvm::LLVMBuildInvokeWithOperandBundles(
368 self.llbuilder,
369 llty,
370 llfn,
371 args.as_ptr(),
372 args.len() as c_uint,
373 then,
374 catch,
375 bundles.as_ptr(),
376 bundles.len() as c_uint,
377 UNNAMED,
378 )
379 };
380 if let Some(fn_abi) = fn_abi {
381 fn_abi.apply_attrs_callsite(self, invoke);
382 }
383 invoke
384 }
385
386 fn unreachable(&mut self) {
387 unsafe {
388 llvm::LLVMBuildUnreachable(self.llbuilder);
389 }
390 }
391
392 math_builder_methods! {
393 add(a, b) => LLVMBuildAdd,
394 fadd(a, b) => LLVMBuildFAdd,
395 sub(a, b) => LLVMBuildSub,
396 fsub(a, b) => LLVMBuildFSub,
397 mul(a, b) => LLVMBuildMul,
398 fmul(a, b) => LLVMBuildFMul,
399 udiv(a, b) => LLVMBuildUDiv,
400 exactudiv(a, b) => LLVMBuildExactUDiv,
401 sdiv(a, b) => LLVMBuildSDiv,
402 exactsdiv(a, b) => LLVMBuildExactSDiv,
403 fdiv(a, b) => LLVMBuildFDiv,
404 urem(a, b) => LLVMBuildURem,
405 srem(a, b) => LLVMBuildSRem,
406 frem(a, b) => LLVMBuildFRem,
407 shl(a, b) => LLVMBuildShl,
408 lshr(a, b) => LLVMBuildLShr,
409 ashr(a, b) => LLVMBuildAShr,
410 and(a, b) => LLVMBuildAnd,
411 or(a, b) => LLVMBuildOr,
412 xor(a, b) => LLVMBuildXor,
413 neg(x) => LLVMBuildNeg,
414 fneg(x) => LLVMBuildFNeg,
415 not(x) => LLVMBuildNot,
416 unchecked_sadd(x, y) => LLVMBuildNSWAdd,
417 unchecked_uadd(x, y) => LLVMBuildNUWAdd,
418 unchecked_ssub(x, y) => LLVMBuildNSWSub,
419 unchecked_usub(x, y) => LLVMBuildNUWSub,
420 unchecked_smul(x, y) => LLVMBuildNSWMul,
421 unchecked_umul(x, y) => LLVMBuildNUWMul,
422 }
423
424 fn unchecked_suadd(&mut self, a: &'ll Value, b: &'ll Value) -> &'ll Value {
425 unsafe {
426 let add = llvm::LLVMBuildAdd(self.llbuilder, a, b, UNNAMED);
427 if llvm::LLVMIsAInstruction(add).is_some() {
428 llvm::LLVMSetNUW(add, True);
429 llvm::LLVMSetNSW(add, True);
430 }
431 add
432 }
433 }
434 fn unchecked_susub(&mut self, a: &'ll Value, b: &'ll Value) -> &'ll Value {
435 unsafe {
436 let sub = llvm::LLVMBuildSub(self.llbuilder, a, b, UNNAMED);
437 if llvm::LLVMIsAInstruction(sub).is_some() {
438 llvm::LLVMSetNUW(sub, True);
439 llvm::LLVMSetNSW(sub, True);
440 }
441 sub
442 }
443 }
444 fn unchecked_sumul(&mut self, a: &'ll Value, b: &'ll Value) -> &'ll Value {
445 unsafe {
446 let mul = llvm::LLVMBuildMul(self.llbuilder, a, b, UNNAMED);
447 if llvm::LLVMIsAInstruction(mul).is_some() {
448 llvm::LLVMSetNUW(mul, True);
449 llvm::LLVMSetNSW(mul, True);
450 }
451 mul
452 }
453 }
454
455 fn or_disjoint(&mut self, a: &'ll Value, b: &'ll Value) -> &'ll Value {
456 unsafe {
457 let or = llvm::LLVMBuildOr(self.llbuilder, a, b, UNNAMED);
458
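// The `or` may have been constant-folded to a plain constant value rather than
// an instruction; the disjoint flag can only be set on instructions, hence the check.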
459 if llvm::LLVMIsAInstruction(or).is_some() {
463 llvm::LLVMSetIsDisjoint(or, True);
464 }
465 or
466 }
467 }
468
469 set_math_builder_methods! {
470 fadd_fast(x, y) => (LLVMBuildFAdd, LLVMRustSetFastMath),
471 fsub_fast(x, y) => (LLVMBuildFSub, LLVMRustSetFastMath),
472 fmul_fast(x, y) => (LLVMBuildFMul, LLVMRustSetFastMath),
473 fdiv_fast(x, y) => (LLVMBuildFDiv, LLVMRustSetFastMath),
474 frem_fast(x, y) => (LLVMBuildFRem, LLVMRustSetFastMath),
475 fadd_algebraic(x, y) => (LLVMBuildFAdd, LLVMRustSetAlgebraicMath),
476 fsub_algebraic(x, y) => (LLVMBuildFSub, LLVMRustSetAlgebraicMath),
477 fmul_algebraic(x, y) => (LLVMBuildFMul, LLVMRustSetAlgebraicMath),
478 fdiv_algebraic(x, y) => (LLVMBuildFDiv, LLVMRustSetAlgebraicMath),
479 frem_algebraic(x, y) => (LLVMBuildFRem, LLVMRustSetAlgebraicMath),
480 }
481
482 fn checked_binop(
483 &mut self,
484 oop: OverflowOp,
485 ty: Ty<'_>,
486 lhs: Self::Value,
487 rhs: Self::Value,
488 ) -> (Self::Value, Self::Value) {
489 use rustc_middle::ty::IntTy::*;
490 use rustc_middle::ty::UintTy::*;
491 use rustc_middle::ty::{Int, Uint};
492
493 let new_kind = match ty.kind() {
494 Int(t @ Isize) => Int(t.normalize(self.tcx.sess.target.pointer_width)),
495 Uint(t @ Usize) => Uint(t.normalize(self.tcx.sess.target.pointer_width)),
496 t @ (Uint(_) | Int(_)) => *t,
497 _ => panic!("tried to get overflow intrinsic for op applied to non-int type"),
498 };
499
500 let name = match oop {
501 OverflowOp::Add => match new_kind {
502 Int(I8) => "llvm.sadd.with.overflow.i8",
503 Int(I16) => "llvm.sadd.with.overflow.i16",
504 Int(I32) => "llvm.sadd.with.overflow.i32",
505 Int(I64) => "llvm.sadd.with.overflow.i64",
506 Int(I128) => "llvm.sadd.with.overflow.i128",
507
508 Uint(U8) => "llvm.uadd.with.overflow.i8",
509 Uint(U16) => "llvm.uadd.with.overflow.i16",
510 Uint(U32) => "llvm.uadd.with.overflow.i32",
511 Uint(U64) => "llvm.uadd.with.overflow.i64",
512 Uint(U128) => "llvm.uadd.with.overflow.i128",
513
514 _ => unreachable!(),
515 },
516 OverflowOp::Sub => match new_kind {
517 Int(I8) => "llvm.ssub.with.overflow.i8",
518 Int(I16) => "llvm.ssub.with.overflow.i16",
519 Int(I32) => "llvm.ssub.with.overflow.i32",
520 Int(I64) => "llvm.ssub.with.overflow.i64",
521 Int(I128) => "llvm.ssub.with.overflow.i128",
522
523 Uint(_) => {
524 let sub = self.sub(lhs, rhs);
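// Unsigned subtraction overflows exactly when `lhs < rhs`, so emit sub + icmp
// rather than `llvm.usub.with.overflow`; LLVM treats this as the canonical form
// and can re-form the intrinsic later if that is profitable.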
528 let cmp = self.icmp(IntPredicate::IntULT, lhs, rhs);
529 return (sub, cmp);
530 }
531
532 _ => unreachable!(),
533 },
534 OverflowOp::Mul => match new_kind {
535 Int(I8) => "llvm.smul.with.overflow.i8",
536 Int(I16) => "llvm.smul.with.overflow.i16",
537 Int(I32) => "llvm.smul.with.overflow.i32",
538 Int(I64) => "llvm.smul.with.overflow.i64",
539 Int(I128) => "llvm.smul.with.overflow.i128",
540
541 Uint(U8) => "llvm.umul.with.overflow.i8",
542 Uint(U16) => "llvm.umul.with.overflow.i16",
543 Uint(U32) => "llvm.umul.with.overflow.i32",
544 Uint(U64) => "llvm.umul.with.overflow.i64",
545 Uint(U128) => "llvm.umul.with.overflow.i128",
546
547 _ => unreachable!(),
548 },
549 };
550
551 let res = self.call_intrinsic(name, &[lhs, rhs]);
552 (self.extract_value(res, 0), self.extract_value(res, 1))
553 }
554
555 fn from_immediate(&mut self, val: Self::Value) -> Self::Value {
556 if self.cx().val_ty(val) == self.cx().type_i1() {
557 self.zext(val, self.cx().type_i8())
558 } else {
559 val
560 }
561 }
562
563 fn to_immediate_scalar(&mut self, val: Self::Value, scalar: abi::Scalar) -> Self::Value {
564 if scalar.is_bool() {
565 return self.trunc(val, self.cx().type_i1());
566 }
567 val
568 }
569
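// Allocas are always emitted at the start of the function's entry block, via a
// temporary builder, so they stay grouped together where LLVM's promotion passes
// expect to find them.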
570 fn alloca(&mut self, size: Size, align: Align) -> &'ll Value {
571 let mut bx = Builder::with_cx(self.cx);
572 bx.position_at_start(unsafe { llvm::LLVMGetFirstBasicBlock(self.llfn()) });
573 let ty = self.cx().type_array(self.cx().type_i8(), size.bytes());
574 unsafe {
575 let alloca = llvm::LLVMBuildAlloca(bx.llbuilder, ty, UNNAMED);
576 llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
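// Cast to the default address space if allocas live in a different one on this target.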
577 llvm::LLVMBuildPointerCast(bx.llbuilder, alloca, self.cx().type_ptr(), UNNAMED)
579 }
580 }
581
582 fn dynamic_alloca(&mut self, size: &'ll Value, align: Align) -> &'ll Value {
583 unsafe {
584 let alloca =
585 llvm::LLVMBuildArrayAlloca(self.llbuilder, self.cx().type_i8(), size, UNNAMED);
586 llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
587 llvm::LLVMBuildPointerCast(self.llbuilder, alloca, self.cx().type_ptr(), UNNAMED)
589 }
590 }
591
592 fn load(&mut self, ty: &'ll Type, ptr: &'ll Value, align: Align) -> &'ll Value {
593 unsafe {
594 let load = llvm::LLVMBuildLoad2(self.llbuilder, ty, ptr, UNNAMED);
595 llvm::LLVMSetAlignment(load, align.bytes() as c_uint);
596 load
597 }
598 }
599
600 fn volatile_load(&mut self, ty: &'ll Type, ptr: &'ll Value) -> &'ll Value {
601 unsafe {
602 let load = llvm::LLVMBuildLoad2(self.llbuilder, ty, ptr, UNNAMED);
603 llvm::LLVMSetVolatile(load, llvm::True);
604 load
605 }
606 }
607
608 fn atomic_load(
609 &mut self,
610 ty: &'ll Type,
611 ptr: &'ll Value,
612 order: rustc_codegen_ssa::common::AtomicOrdering,
613 size: Size,
614 ) -> &'ll Value {
615 unsafe {
616 let load = llvm::LLVMRustBuildAtomicLoad(
617 self.llbuilder,
618 ty,
619 ptr,
620 UNNAMED,
621 AtomicOrdering::from_generic(order),
622 );
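// Atomic accesses must be at least naturally aligned, so use the access size as the alignment.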
623 llvm::LLVMSetAlignment(load, size.bytes() as c_uint);
625 load
626 }
627 }
628
629 #[instrument(level = "trace", skip(self))]
630 fn load_operand(&mut self, place: PlaceRef<'tcx, &'ll Value>) -> OperandRef<'tcx, &'ll Value> {
631 if place.layout.is_unsized() {
632 let tail = self.tcx.struct_tail_for_codegen(place.layout.ty, self.typing_env());
633 if matches!(tail.kind(), ty::Foreign(..)) {
634 panic!("unsized locals must not be `extern` types");
638 }
639 }
640 assert_eq!(place.val.llextra.is_some(), place.layout.is_unsized());
641
642 if place.layout.is_zst() {
643 return OperandRef::zero_sized(place.layout);
644 }
645
646 #[instrument(level = "trace", skip(bx))]
647 fn scalar_load_metadata<'a, 'll, 'tcx>(
648 bx: &mut Builder<'a, 'll, 'tcx>,
649 load: &'ll Value,
650 scalar: abi::Scalar,
651 layout: TyAndLayout<'tcx>,
652 offset: Size,
653 ) {
654 if bx.cx.sess().opts.optimize == OptLevel::No {
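// The metadata below only feeds the optimizer, so skip it entirely at -O0.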
655 return;
657 }
658
659 if !scalar.is_uninit_valid() {
660 bx.noundef_metadata(load);
661 }
662
663 match scalar.primitive() {
664 abi::Primitive::Int(..) => {
665 if !scalar.is_always_valid(bx) {
666 bx.range_metadata(load, scalar.valid_range(bx));
667 }
668 }
669 abi::Primitive::Pointer(_) => {
670 if !scalar.valid_range(bx).contains(0) {
671 bx.nonnull_metadata(load);
672 }
673
674 if let Some(pointee) = layout.pointee_info_at(bx, offset) {
675 if let Some(_) = pointee.safe {
676 bx.align_metadata(load, pointee.align);
677 }
678 }
679 }
680 abi::Primitive::Float(_) => {}
681 }
682 }
683
684 let val = if let Some(_) = place.val.llextra {
685 OperandValue::Ref(place.val)
687 } else if place.layout.is_llvm_immediate() {
688 let mut const_llval = None;
689 let llty = place.layout.llvm_type(self);
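// Fast path: loading from a constant global whose initializer already has the
// expected type can reuse the initializer directly instead of emitting a load.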
690 unsafe {
691 if let Some(global) = llvm::LLVMIsAGlobalVariable(place.val.llval) {
692 if llvm::LLVMIsGlobalConstant(global) == llvm::True {
693 if let Some(init) = llvm::LLVMGetInitializer(global) {
694 if self.val_ty(init) == llty {
695 const_llval = Some(init);
696 }
697 }
698 }
699 }
700 }
701 let llval = const_llval.unwrap_or_else(|| {
702 let load = self.load(llty, place.val.llval, place.val.align);
703 if let abi::BackendRepr::Scalar(scalar) = place.layout.backend_repr {
704 scalar_load_metadata(self, load, scalar, place.layout, Size::ZERO);
705 }
706 load
707 });
708 OperandValue::Immediate(self.to_immediate(llval, place.layout))
709 } else if let abi::BackendRepr::ScalarPair(a, b) = place.layout.backend_repr {
710 let b_offset = a.size(self).align_to(b.align(self).abi);
711
712 let mut load = |i, scalar: abi::Scalar, layout, align, offset| {
713 let llptr = if i == 0 {
714 place.val.llval
715 } else {
716 self.inbounds_ptradd(place.val.llval, self.const_usize(b_offset.bytes()))
717 };
718 let llty = place.layout.scalar_pair_element_llvm_type(self, i, false);
719 let load = self.load(llty, llptr, align);
720 scalar_load_metadata(self, load, scalar, layout, offset);
721 self.to_immediate_scalar(load, scalar)
722 };
723
724 OperandValue::Pair(
725 load(0, a, place.layout, place.val.align, Size::ZERO),
726 load(1, b, place.layout, place.val.align.restrict_for_offset(b_offset), b_offset),
727 )
728 } else {
729 OperandValue::Ref(place.val)
730 };
731
732 OperandRef { val, layout: place.layout }
733 }
734
735 fn write_operand_repeatedly(
736 &mut self,
737 cg_elem: OperandRef<'tcx, &'ll Value>,
738 count: u64,
739 dest: PlaceRef<'tcx, &'ll Value>,
740 ) {
741 let zero = self.const_usize(0);
742 let count = self.const_usize(count);
743
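// Build a small counted loop: the header tests `i < count`, the body stores one
// element and increments `i`, and codegen continues in `next_bb` afterwards.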
744 let header_bb = self.append_sibling_block("repeat_loop_header");
745 let body_bb = self.append_sibling_block("repeat_loop_body");
746 let next_bb = self.append_sibling_block("repeat_loop_next");
747
748 self.br(header_bb);
749
750 let mut header_bx = Self::build(self.cx, header_bb);
751 let i = header_bx.phi(self.val_ty(zero), &[zero], &[self.llbb()]);
752
753 let keep_going = header_bx.icmp(IntPredicate::IntULT, i, count);
754 header_bx.cond_br(keep_going, body_bb, next_bb);
755
756 let mut body_bx = Self::build(self.cx, body_bb);
757 let dest_elem = dest.project_index(&mut body_bx, i);
758 cg_elem.val.store(&mut body_bx, dest_elem);
759
760 let next = body_bx.unchecked_uadd(i, self.const_usize(1));
761 body_bx.br(header_bb);
762 header_bx.add_incoming_to_phi(i, next, body_bb);
763
764 *self = Self::build(self.cx, next_bb);
765 }
766
767 fn range_metadata(&mut self, load: &'ll Value, range: WrappingRange) {
768 if self.cx.sess().opts.optimize == OptLevel::No {
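// Range metadata is only used by optimizations, so don't spend time emitting it at -O0.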
769 return;
771 }
772
773 unsafe {
774 let llty = self.cx.val_ty(load);
775 let md = [
776 llvm::LLVMValueAsMetadata(self.cx.const_uint_big(llty, range.start)),
777 llvm::LLVMValueAsMetadata(self.cx.const_uint_big(llty, range.end.wrapping_add(1))),
778 ];
779 let md = llvm::LLVMMDNodeInContext2(self.cx.llcx, md.as_ptr(), md.len());
780 self.set_metadata(load, llvm::MD_range, md);
781 }
782 }
783
784 fn nonnull_metadata(&mut self, load: &'ll Value) {
785 unsafe {
786 let md = llvm::LLVMMDNodeInContext2(self.cx.llcx, ptr::null(), 0);
787 self.set_metadata(load, llvm::MD_nonnull, md);
788 }
789 }
790
791 fn store(&mut self, val: &'ll Value, ptr: &'ll Value, align: Align) -> &'ll Value {
792 self.store_with_flags(val, ptr, align, MemFlags::empty())
793 }
794
795 fn store_with_flags(
796 &mut self,
797 val: &'ll Value,
798 ptr: &'ll Value,
799 align: Align,
800 flags: MemFlags,
801 ) -> &'ll Value {
802 debug!("Store {:?} -> {:?} ({:?})", val, ptr, flags);
803 assert_eq!(self.cx.type_kind(self.cx.val_ty(ptr)), TypeKind::Pointer);
804 unsafe {
805 let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
806 let align =
807 if flags.contains(MemFlags::UNALIGNED) { 1 } else { align.bytes() as c_uint };
808 llvm::LLVMSetAlignment(store, align);
809 if flags.contains(MemFlags::VOLATILE) {
810 llvm::LLVMSetVolatile(store, llvm::True);
811 }
812 if flags.contains(MemFlags::NONTEMPORAL) {
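// `!nontemporal` is only attached on architectures where LLVM lowers it to an
// ordinary store plus a cache hint. On other targets (e.g. x86) it would lower to
// special non-coherent stores that need explicit fencing, which does not match the
// semantics expected here, so restrict it to this allowlist.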
813 const WELL_BEHAVED_NONTEMPORAL_ARCHS: &[&str] =
826 &["aarch64", "arm", "riscv32", "riscv64"];
827
828 let use_nontemporal =
829 WELL_BEHAVED_NONTEMPORAL_ARCHS.contains(&&*self.cx.tcx.sess.target.arch);
830 if use_nontemporal {
831 let one = llvm::LLVMValueAsMetadata(self.cx.const_i32(1));
836 let md = llvm::LLVMMDNodeInContext2(self.cx.llcx, &one, 1);
837 self.set_metadata(store, llvm::MD_nontemporal, md);
838 }
839 }
840 store
841 }
842 }
843
844 fn atomic_store(
845 &mut self,
846 val: &'ll Value,
847 ptr: &'ll Value,
848 order: rustc_codegen_ssa::common::AtomicOrdering,
849 size: Size,
850 ) {
851 debug!("Store {:?} -> {:?}", val, ptr);
852 assert_eq!(self.cx.type_kind(self.cx.val_ty(ptr)), TypeKind::Pointer);
853 unsafe {
854 let store = llvm::LLVMRustBuildAtomicStore(
855 self.llbuilder,
856 val,
857 ptr,
858 AtomicOrdering::from_generic(order),
859 );
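// Atomic accesses must be at least naturally aligned, so use the access size as the alignment.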
860 llvm::LLVMSetAlignment(store, size.bytes() as c_uint);
862 }
863 }
864
865 fn gep(&mut self, ty: &'ll Type, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
866 unsafe {
867 llvm::LLVMBuildGEP2(
868 self.llbuilder,
869 ty,
870 ptr,
871 indices.as_ptr(),
872 indices.len() as c_uint,
873 UNNAMED,
874 )
875 }
876 }
877
878 fn inbounds_gep(
879 &mut self,
880 ty: &'ll Type,
881 ptr: &'ll Value,
882 indices: &[&'ll Value],
883 ) -> &'ll Value {
884 unsafe {
885 llvm::LLVMBuildInBoundsGEP2(
886 self.llbuilder,
887 ty,
888 ptr,
889 indices.as_ptr(),
890 indices.len() as c_uint,
891 UNNAMED,
892 )
893 }
894 }
895
896 fn trunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
898 unsafe { llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty, UNNAMED) }
899 }
900
901 fn sext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
902 unsafe { llvm::LLVMBuildSExt(self.llbuilder, val, dest_ty, UNNAMED) }
903 }
904
905 fn fptoui_sat(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
906 self.fptoint_sat(false, val, dest_ty)
907 }
908
909 fn fptosi_sat(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
910 self.fptoint_sat(true, val, dest_ty)
911 }
912
913 fn fptoui(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
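// On WebAssembly, plain `fptoui`/`fptosi` get poor codegen: the wasm truncation
// instructions trap on out-of-range inputs while the LLVM instructions yield
// poison, so LLVM wraps the conversion in guarding control flow. Out-of-range
// inputs are already UB for callers of this method, so the trapping
// `llvm.wasm.trunc.*` intrinsics are acceptable and produce much better code for
// scalar conversions.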
914 if self.sess().target.is_like_wasm {
929 let src_ty = self.cx.val_ty(val);
930 if self.cx.type_kind(src_ty) != TypeKind::Vector {
931 let float_width = self.cx.float_width(src_ty);
932 let int_width = self.cx.int_width(dest_ty);
933 let name = match (int_width, float_width) {
934 (32, 32) => Some("llvm.wasm.trunc.unsigned.i32.f32"),
935 (32, 64) => Some("llvm.wasm.trunc.unsigned.i32.f64"),
936 (64, 32) => Some("llvm.wasm.trunc.unsigned.i64.f32"),
937 (64, 64) => Some("llvm.wasm.trunc.unsigned.i64.f64"),
938 _ => None,
939 };
940 if let Some(name) = name {
941 return self.call_intrinsic(name, &[val]);
942 }
943 }
944 }
945 unsafe { llvm::LLVMBuildFPToUI(self.llbuilder, val, dest_ty, UNNAMED) }
946 }
947
948 fn fptosi(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
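// See `fptoui` above for why wasm gets special treatment here.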
949 if self.sess().target.is_like_wasm {
951 let src_ty = self.cx.val_ty(val);
952 if self.cx.type_kind(src_ty) != TypeKind::Vector {
953 let float_width = self.cx.float_width(src_ty);
954 let int_width = self.cx.int_width(dest_ty);
955 let name = match (int_width, float_width) {
956 (32, 32) => Some("llvm.wasm.trunc.signed.i32.f32"),
957 (32, 64) => Some("llvm.wasm.trunc.signed.i32.f64"),
958 (64, 32) => Some("llvm.wasm.trunc.signed.i64.f32"),
959 (64, 64) => Some("llvm.wasm.trunc.signed.i64.f64"),
960 _ => None,
961 };
962 if let Some(name) = name {
963 return self.call_intrinsic(name, &[val]);
964 }
965 }
966 }
967 unsafe { llvm::LLVMBuildFPToSI(self.llbuilder, val, dest_ty, UNNAMED) }
968 }
969
970 fn uitofp(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
971 unsafe { llvm::LLVMBuildUIToFP(self.llbuilder, val, dest_ty, UNNAMED) }
972 }
973
974 fn sitofp(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
975 unsafe { llvm::LLVMBuildSIToFP(self.llbuilder, val, dest_ty, UNNAMED) }
976 }
977
978 fn fptrunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
979 unsafe { llvm::LLVMBuildFPTrunc(self.llbuilder, val, dest_ty, UNNAMED) }
980 }
981
982 fn fpext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
983 unsafe { llvm::LLVMBuildFPExt(self.llbuilder, val, dest_ty, UNNAMED) }
984 }
985
986 fn ptrtoint(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
987 unsafe { llvm::LLVMBuildPtrToInt(self.llbuilder, val, dest_ty, UNNAMED) }
988 }
989
990 fn inttoptr(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
991 unsafe { llvm::LLVMBuildIntToPtr(self.llbuilder, val, dest_ty, UNNAMED) }
992 }
993
994 fn bitcast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
995 unsafe { llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty, UNNAMED) }
996 }
997
998 fn intcast(&mut self, val: &'ll Value, dest_ty: &'ll Type, is_signed: bool) -> &'ll Value {
999 unsafe {
1000 llvm::LLVMBuildIntCast2(
1001 self.llbuilder,
1002 val,
1003 dest_ty,
1004 if is_signed { True } else { False },
1005 UNNAMED,
1006 )
1007 }
1008 }
1009
1010 fn pointercast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
1011 unsafe { llvm::LLVMBuildPointerCast(self.llbuilder, val, dest_ty, UNNAMED) }
1012 }
1013
1014 fn icmp(&mut self, op: IntPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
1016 let op = llvm::IntPredicate::from_generic(op);
1017 unsafe { llvm::LLVMBuildICmp(self.llbuilder, op as c_uint, lhs, rhs, UNNAMED) }
1018 }
1019
1020 fn fcmp(&mut self, op: RealPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
1021 let op = llvm::RealPredicate::from_generic(op);
1022 unsafe { llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, UNNAMED) }
1023 }
1024
1025 fn memcpy(
1027 &mut self,
1028 dst: &'ll Value,
1029 dst_align: Align,
1030 src: &'ll Value,
1031 src_align: Align,
1032 size: &'ll Value,
1033 flags: MemFlags,
1034 ) {
1035 assert!(!flags.contains(MemFlags::NONTEMPORAL), "non-temporal memcpy not supported");
1036 let size = self.intcast(size, self.type_isize(), false);
1037 let is_volatile = flags.contains(MemFlags::VOLATILE);
1038 unsafe {
1039 llvm::LLVMRustBuildMemCpy(
1040 self.llbuilder,
1041 dst,
1042 dst_align.bytes() as c_uint,
1043 src,
1044 src_align.bytes() as c_uint,
1045 size,
1046 is_volatile,
1047 );
1048 }
1049 }
1050
1051 fn memmove(
1052 &mut self,
1053 dst: &'ll Value,
1054 dst_align: Align,
1055 src: &'ll Value,
1056 src_align: Align,
1057 size: &'ll Value,
1058 flags: MemFlags,
1059 ) {
1060 assert!(!flags.contains(MemFlags::NONTEMPORAL), "non-temporal memmove not supported");
1061 let size = self.intcast(size, self.type_isize(), false);
1062 let is_volatile = flags.contains(MemFlags::VOLATILE);
1063 unsafe {
1064 llvm::LLVMRustBuildMemMove(
1065 self.llbuilder,
1066 dst,
1067 dst_align.bytes() as c_uint,
1068 src,
1069 src_align.bytes() as c_uint,
1070 size,
1071 is_volatile,
1072 );
1073 }
1074 }
1075
1076 fn memset(
1077 &mut self,
1078 ptr: &'ll Value,
1079 fill_byte: &'ll Value,
1080 size: &'ll Value,
1081 align: Align,
1082 flags: MemFlags,
1083 ) {
1084 assert!(!flags.contains(MemFlags::NONTEMPORAL), "non-temporal memset not supported");
1085 let is_volatile = flags.contains(MemFlags::VOLATILE);
1086 unsafe {
1087 llvm::LLVMRustBuildMemSet(
1088 self.llbuilder,
1089 ptr,
1090 align.bytes() as c_uint,
1091 fill_byte,
1092 size,
1093 is_volatile,
1094 );
1095 }
1096 }
1097
1098 fn select(
1099 &mut self,
1100 cond: &'ll Value,
1101 then_val: &'ll Value,
1102 else_val: &'ll Value,
1103 ) -> &'ll Value {
1104 unsafe { llvm::LLVMBuildSelect(self.llbuilder, cond, then_val, else_val, UNNAMED) }
1105 }
1106
1107 fn va_arg(&mut self, list: &'ll Value, ty: &'ll Type) -> &'ll Value {
1108 unsafe { llvm::LLVMBuildVAArg(self.llbuilder, list, ty, UNNAMED) }
1109 }
1110
1111 fn extract_element(&mut self, vec: &'ll Value, idx: &'ll Value) -> &'ll Value {
1112 unsafe { llvm::LLVMBuildExtractElement(self.llbuilder, vec, idx, UNNAMED) }
1113 }
1114
1115 fn vector_splat(&mut self, num_elts: usize, elt: &'ll Value) -> &'ll Value {
1116 unsafe {
1117 let elt_ty = self.cx.val_ty(elt);
1118 let undef = llvm::LLVMGetUndef(self.type_vector(elt_ty, num_elts as u64));
1119 let vec = self.insert_element(undef, elt, self.cx.const_i32(0));
1120 let vec_i32_ty = self.type_vector(self.type_i32(), num_elts as u64);
1121 self.shuffle_vector(vec, undef, self.const_null(vec_i32_ty))
1122 }
1123 }
1124
1125 fn extract_value(&mut self, agg_val: &'ll Value, idx: u64) -> &'ll Value {
1126 assert_eq!(idx as c_uint as u64, idx);
1127 unsafe { llvm::LLVMBuildExtractValue(self.llbuilder, agg_val, idx as c_uint, UNNAMED) }
1128 }
1129
1130 fn insert_value(&mut self, agg_val: &'ll Value, elt: &'ll Value, idx: u64) -> &'ll Value {
1131 assert_eq!(idx as c_uint as u64, idx);
1132 unsafe { llvm::LLVMBuildInsertValue(self.llbuilder, agg_val, elt, idx as c_uint, UNNAMED) }
1133 }
1134
1135 fn set_personality_fn(&mut self, personality: &'ll Value) {
1136 unsafe {
1137 llvm::LLVMSetPersonalityFn(self.llfn(), personality);
1138 }
1139 }
1140
1141 fn cleanup_landing_pad(&mut self, pers_fn: &'ll Value) -> (&'ll Value, &'ll Value) {
1142 let ty = self.type_struct(&[self.type_ptr(), self.type_i32()], false);
1143 let landing_pad = self.landing_pad(ty, pers_fn, 0);
1144 unsafe {
1145 llvm::LLVMSetCleanup(landing_pad, llvm::True);
1146 }
1147 (self.extract_value(landing_pad, 0), self.extract_value(landing_pad, 1))
1148 }
1149
1150 fn filter_landing_pad(&mut self, pers_fn: &'ll Value) -> (&'ll Value, &'ll Value) {
1151 let ty = self.type_struct(&[self.type_ptr(), self.type_i32()], false);
1152 let landing_pad = self.landing_pad(ty, pers_fn, 1);
1153 self.add_clause(landing_pad, self.const_array(self.type_ptr(), &[]));
1154 (self.extract_value(landing_pad, 0), self.extract_value(landing_pad, 1))
1155 }
1156
1157 fn resume(&mut self, exn0: &'ll Value, exn1: &'ll Value) {
1158 let ty = self.type_struct(&[self.type_ptr(), self.type_i32()], false);
1159 let mut exn = self.const_poison(ty);
1160 exn = self.insert_value(exn, exn0, 0);
1161 exn = self.insert_value(exn, exn1, 1);
1162 unsafe {
1163 llvm::LLVMBuildResume(self.llbuilder, exn);
1164 }
1165 }
1166
1167 fn cleanup_pad(&mut self, parent: Option<&'ll Value>, args: &[&'ll Value]) -> Funclet<'ll> {
1168 let ret = unsafe {
1169 llvm::LLVMBuildCleanupPad(
1170 self.llbuilder,
1171 parent,
1172 args.as_ptr(),
1173 args.len() as c_uint,
1174 c"cleanuppad".as_ptr(),
1175 )
1176 };
1177 Funclet::new(ret.expect("LLVM does not have support for cleanuppad"))
1178 }
1179
1180 fn cleanup_ret(&mut self, funclet: &Funclet<'ll>, unwind: Option<&'ll BasicBlock>) {
1181 unsafe {
1182 llvm::LLVMBuildCleanupRet(self.llbuilder, funclet.cleanuppad(), unwind)
1183 .expect("LLVM does not have support for cleanupret");
1184 }
1185 }
1186
1187 fn catch_pad(&mut self, parent: &'ll Value, args: &[&'ll Value]) -> Funclet<'ll> {
1188 let ret = unsafe {
1189 llvm::LLVMBuildCatchPad(
1190 self.llbuilder,
1191 parent,
1192 args.as_ptr(),
1193 args.len() as c_uint,
1194 c"catchpad".as_ptr(),
1195 )
1196 };
1197 Funclet::new(ret.expect("LLVM does not have support for catchpad"))
1198 }
1199
1200 fn catch_switch(
1201 &mut self,
1202 parent: Option<&'ll Value>,
1203 unwind: Option<&'ll BasicBlock>,
1204 handlers: &[&'ll BasicBlock],
1205 ) -> &'ll Value {
1206 let ret = unsafe {
1207 llvm::LLVMBuildCatchSwitch(
1208 self.llbuilder,
1209 parent,
1210 unwind,
1211 handlers.len() as c_uint,
1212 c"catchswitch".as_ptr(),
1213 )
1214 };
1215 let ret = ret.expect("LLVM does not have support for catchswitch");
1216 for handler in handlers {
1217 unsafe {
1218 llvm::LLVMAddHandler(ret, handler);
1219 }
1220 }
1221 ret
1222 }
1223
1224 fn atomic_cmpxchg(
1226 &mut self,
1227 dst: &'ll Value,
1228 cmp: &'ll Value,
1229 src: &'ll Value,
1230 order: rustc_codegen_ssa::common::AtomicOrdering,
1231 failure_order: rustc_codegen_ssa::common::AtomicOrdering,
1232 weak: bool,
1233 ) -> (&'ll Value, &'ll Value) {
1234 let weak = if weak { llvm::True } else { llvm::False };
1235 unsafe {
1236 let value = llvm::LLVMBuildAtomicCmpXchg(
1237 self.llbuilder,
1238 dst,
1239 cmp,
1240 src,
1241 AtomicOrdering::from_generic(order),
1242 AtomicOrdering::from_generic(failure_order),
1243 llvm::False, // SingleThreaded
1244 );
1245 llvm::LLVMSetWeak(value, weak);
1246 let val = self.extract_value(value, 0);
1247 let success = self.extract_value(value, 1);
1248 (val, success)
1249 }
1250 }
1251
1252 fn atomic_rmw(
1253 &mut self,
1254 op: rustc_codegen_ssa::common::AtomicRmwBinOp,
1255 dst: &'ll Value,
1256 mut src: &'ll Value,
1257 order: rustc_codegen_ssa::common::AtomicOrdering,
1258 ) -> &'ll Value {
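// LLVM's `atomicrmw` only accepts pointer operands for `xchg`, so other
// operations on pointer values round-trip through a pointer-sized integer.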
1259 let requires_cast_to_int = self.val_ty(src) == self.type_ptr()
1261 && op != rustc_codegen_ssa::common::AtomicRmwBinOp::AtomicXchg;
1262 if requires_cast_to_int {
1263 src = self.ptrtoint(src, self.type_isize());
1264 }
1265 let mut res = unsafe {
1266 llvm::LLVMBuildAtomicRMW(
1267 self.llbuilder,
1268 AtomicRmwBinOp::from_generic(op),
1269 dst,
1270 src,
1271 AtomicOrdering::from_generic(order),
1272 llvm::False, // SingleThreaded
1273 )
1274 };
1275 if requires_cast_to_int {
1276 res = self.inttoptr(res, self.type_ptr());
1277 }
1278 res
1279 }
1280
1281 fn atomic_fence(
1282 &mut self,
1283 order: rustc_codegen_ssa::common::AtomicOrdering,
1284 scope: SynchronizationScope,
1285 ) {
1286 let single_threaded = match scope {
1287 SynchronizationScope::SingleThread => llvm::True,
1288 SynchronizationScope::CrossThread => llvm::False,
1289 };
1290 unsafe {
1291 llvm::LLVMBuildFence(
1292 self.llbuilder,
1293 AtomicOrdering::from_generic(order),
1294 single_threaded,
1295 UNNAMED,
1296 );
1297 }
1298 }
1299
1300 fn set_invariant_load(&mut self, load: &'ll Value) {
1301 unsafe {
1302 let md = llvm::LLVMMDNodeInContext2(self.cx.llcx, ptr::null(), 0);
1303 self.set_metadata(load, llvm::MD_invariant_load, md);
1304 }
1305 }
1306
1307 fn lifetime_start(&mut self, ptr: &'ll Value, size: Size) {
1308 self.call_lifetime_intrinsic("llvm.lifetime.start.p0i8", ptr, size);
1309 }
1310
1311 fn lifetime_end(&mut self, ptr: &'ll Value, size: Size) {
1312 self.call_lifetime_intrinsic("llvm.lifetime.end.p0i8", ptr, size);
1313 }
1314
1315 fn call(
1316 &mut self,
1317 llty: &'ll Type,
1318 fn_attrs: Option<&CodegenFnAttrs>,
1319 fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
1320 llfn: &'ll Value,
1321 args: &[&'ll Value],
1322 funclet: Option<&Funclet<'ll>>,
1323 instance: Option<Instance<'tcx>>,
1324 ) -> &'ll Value {
1325 debug!("call {:?} with args ({:?})", llfn, args);
1326
1327 let args = self.check_call("call", llty, llfn, args);
1328 let funclet_bundle = funclet.map(|funclet| funclet.bundle());
1329 let mut bundles: SmallVec<[_; 2]> = SmallVec::new();
1330 if let Some(funclet_bundle) = funclet_bundle {
1331 bundles.push(funclet_bundle);
1332 }
1333
1334 self.cfi_type_test(fn_attrs, fn_abi, instance, llfn);
1336
1337 let kcfi_bundle = self.kcfi_operand_bundle(fn_attrs, fn_abi, instance, llfn);
1339 if let Some(kcfi_bundle) = kcfi_bundle.as_deref() {
1340 bundles.push(kcfi_bundle);
1341 }
1342
1343 let call = unsafe {
1344 llvm::LLVMBuildCallWithOperandBundles(
1345 self.llbuilder,
1346 llty,
1347 llfn,
1348 args.as_ptr() as *const &llvm::Value,
1349 args.len() as c_uint,
1350 bundles.as_ptr(),
1351 bundles.len() as c_uint,
1352 c"".as_ptr(),
1353 )
1354 };
1355 if let Some(fn_abi) = fn_abi {
1356 fn_abi.apply_attrs_callsite(self, call);
1357 }
1358 call
1359 }
1360
1361 fn zext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
1362 unsafe { llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty, UNNAMED) }
1363 }
1364
1365 fn apply_attrs_to_cleanup_callsite(&mut self, llret: &'ll Value) {
1366 let cold_inline = llvm::AttributeKind::Cold.create_attr(self.llcx);
1368 attributes::apply_to_callsite(llret, llvm::AttributePlace::Function, &[cold_inline]);
1369 }
1370}
1371
1372impl<'ll> StaticBuilderMethods for Builder<'_, 'll, '_> {
1373 fn get_static(&mut self, def_id: DefId) -> &'ll Value {
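// Fetch the global from the codegen context, then cast it to the default
// address-space pointer type that the rest of codegen expects.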
1374 let s = self.cx().get_static(def_id);
1376 self.cx().const_pointercast(s, self.type_ptr())
1378 }
1379}
1380
1381impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
1382 fn build(cx: &'a CodegenCx<'ll, 'tcx>, llbb: &'ll BasicBlock) -> Builder<'a, 'll, 'tcx> {
1383 let bx = Builder::with_cx(cx);
1384 unsafe {
1385 llvm::LLVMPositionBuilderAtEnd(bx.llbuilder, llbb);
1386 }
1387 bx
1388 }
1389
1390 fn with_cx(cx: &'a CodegenCx<'ll, 'tcx>) -> Self {
1391 let llbuilder = unsafe { llvm::LLVMCreateBuilderInContext(cx.llcx) };
1393 Builder { llbuilder, cx }
1394 }
1395
1396 pub(crate) fn llfn(&self) -> &'ll Value {
1397 unsafe { llvm::LLVMGetBasicBlockParent(self.llbb()) }
1398 }
1399}
1400
1401impl<'a, 'll, CX: Borrow<SimpleCx<'ll>>> GenericBuilder<'a, 'll, CX> {
1402 fn position_at_start(&mut self, llbb: &'ll BasicBlock) {
1403 unsafe {
1404 llvm::LLVMRustPositionBuilderAtStart(self.llbuilder, llbb);
1405 }
1406 }
1407}
1408impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
1409 fn align_metadata(&mut self, load: &'ll Value, align: Align) {
1410 unsafe {
1411 let md = [llvm::LLVMValueAsMetadata(self.cx.const_u64(align.bytes()))];
1412 let md = llvm::LLVMMDNodeInContext2(self.cx.llcx, md.as_ptr(), md.len());
1413 self.set_metadata(load, llvm::MD_align, md);
1414 }
1415 }
1416
1417 fn noundef_metadata(&mut self, load: &'ll Value) {
1418 unsafe {
1419 let md = llvm::LLVMMDNodeInContext2(self.cx.llcx, ptr::null(), 0);
1420 self.set_metadata(load, llvm::MD_noundef, md);
1421 }
1422 }
1423
1424 pub(crate) fn set_unpredictable(&mut self, inst: &'ll Value) {
1425 unsafe {
1426 let md = llvm::LLVMMDNodeInContext2(self.cx.llcx, ptr::null(), 0);
1427 self.set_metadata(inst, llvm::MD_unpredictable, md);
1428 }
1429 }
1430}
1431impl<'a, 'll, CX: Borrow<SimpleCx<'ll>>> GenericBuilder<'a, 'll, CX> {
1432 pub(crate) fn minnum(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
1433 unsafe { llvm::LLVMRustBuildMinNum(self.llbuilder, lhs, rhs) }
1434 }
1435
1436 pub(crate) fn maxnum(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
1437 unsafe { llvm::LLVMRustBuildMaxNum(self.llbuilder, lhs, rhs) }
1438 }
1439
1440 pub(crate) fn insert_element(
1441 &mut self,
1442 vec: &'ll Value,
1443 elt: &'ll Value,
1444 idx: &'ll Value,
1445 ) -> &'ll Value {
1446 unsafe { llvm::LLVMBuildInsertElement(self.llbuilder, vec, elt, idx, UNNAMED) }
1447 }
1448
1449 pub(crate) fn shuffle_vector(
1450 &mut self,
1451 v1: &'ll Value,
1452 v2: &'ll Value,
1453 mask: &'ll Value,
1454 ) -> &'ll Value {
1455 unsafe { llvm::LLVMBuildShuffleVector(self.llbuilder, v1, v2, mask, UNNAMED) }
1456 }
1457
1458 pub(crate) fn vector_reduce_fadd(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
1459 unsafe { llvm::LLVMRustBuildVectorReduceFAdd(self.llbuilder, acc, src) }
1460 }
1461 pub(crate) fn vector_reduce_fmul(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
1462 unsafe { llvm::LLVMRustBuildVectorReduceFMul(self.llbuilder, acc, src) }
1463 }
1464 pub(crate) fn vector_reduce_fadd_reassoc(
1465 &mut self,
1466 acc: &'ll Value,
1467 src: &'ll Value,
1468 ) -> &'ll Value {
1469 unsafe {
1470 let instr = llvm::LLVMRustBuildVectorReduceFAdd(self.llbuilder, acc, src);
1471 llvm::LLVMRustSetAllowReassoc(instr);
1472 instr
1473 }
1474 }
1475 pub(crate) fn vector_reduce_fmul_reassoc(
1476 &mut self,
1477 acc: &'ll Value,
1478 src: &'ll Value,
1479 ) -> &'ll Value {
1480 unsafe {
1481 let instr = llvm::LLVMRustBuildVectorReduceFMul(self.llbuilder, acc, src);
1482 llvm::LLVMRustSetAllowReassoc(instr);
1483 instr
1484 }
1485 }
1486 pub(crate) fn vector_reduce_add(&mut self, src: &'ll Value) -> &'ll Value {
1487 unsafe { llvm::LLVMRustBuildVectorReduceAdd(self.llbuilder, src) }
1488 }
1489 pub(crate) fn vector_reduce_mul(&mut self, src: &'ll Value) -> &'ll Value {
1490 unsafe { llvm::LLVMRustBuildVectorReduceMul(self.llbuilder, src) }
1491 }
1492 pub(crate) fn vector_reduce_and(&mut self, src: &'ll Value) -> &'ll Value {
1493 unsafe { llvm::LLVMRustBuildVectorReduceAnd(self.llbuilder, src) }
1494 }
1495 pub(crate) fn vector_reduce_or(&mut self, src: &'ll Value) -> &'ll Value {
1496 unsafe { llvm::LLVMRustBuildVectorReduceOr(self.llbuilder, src) }
1497 }
1498 pub(crate) fn vector_reduce_xor(&mut self, src: &'ll Value) -> &'ll Value {
1499 unsafe { llvm::LLVMRustBuildVectorReduceXor(self.llbuilder, src) }
1500 }
1501 pub(crate) fn vector_reduce_fmin(&mut self, src: &'ll Value) -> &'ll Value {
1502 unsafe {
1503 llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, false)
1504 }
1505 }
1506 pub(crate) fn vector_reduce_fmax(&mut self, src: &'ll Value) -> &'ll Value {
1507 unsafe {
1508 llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, false)
1509 }
1510 }
1511 pub(crate) fn vector_reduce_min(&mut self, src: &'ll Value, is_signed: bool) -> &'ll Value {
1512 unsafe { llvm::LLVMRustBuildVectorReduceMin(self.llbuilder, src, is_signed) }
1513 }
1514 pub(crate) fn vector_reduce_max(&mut self, src: &'ll Value, is_signed: bool) -> &'ll Value {
1515 unsafe { llvm::LLVMRustBuildVectorReduceMax(self.llbuilder, src, is_signed) }
1516 }
1517
1518 pub(crate) fn add_clause(&mut self, landing_pad: &'ll Value, clause: &'ll Value) {
1519 unsafe {
1520 llvm::LLVMAddClause(landing_pad, clause);
1521 }
1522 }
1523
1524 pub(crate) fn catch_ret(
1525 &mut self,
1526 funclet: &Funclet<'ll>,
1527 unwind: &'ll BasicBlock,
1528 ) -> &'ll Value {
1529 let ret = unsafe { llvm::LLVMBuildCatchRet(self.llbuilder, funclet.cleanuppad(), unwind) };
1530 ret.expect("LLVM does not have support for catchret")
1531 }
1532}
1533
1534impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
1535 fn check_call<'b>(
1536 &mut self,
1537 typ: &str,
1538 fn_ty: &'ll Type,
1539 llfn: &'ll Value,
1540 args: &'b [&'ll Value],
1541 ) -> Cow<'b, [&'ll Value]> {
1542 assert!(
1543 self.cx.type_kind(fn_ty) == TypeKind::Function,
1544 "builder::{typ} not passed a function, but {fn_ty:?}"
1545 );
1546
1547 let param_tys = self.cx.func_params_types(fn_ty);
1548
1549 let all_args_match = iter::zip(&param_tys, args.iter().map(|&v| self.val_ty(v)))
1550 .all(|(expected_ty, actual_ty)| *expected_ty == actual_ty);
1551
1552 if all_args_match {
1553 return Cow::Borrowed(args);
1554 }
1555
1556 let casted_args: Vec<_> = iter::zip(param_tys, args)
1557 .enumerate()
1558 .map(|(i, (expected_ty, &actual_val))| {
1559 let actual_ty = self.val_ty(actual_val);
1560 if expected_ty != actual_ty {
1561 debug!(
1562 "type mismatch in function call of {:?}. \
1563 Expected {:?} for param {}, got {:?}; injecting bitcast",
1564 llfn, expected_ty, i, actual_ty
1565 );
1566 self.bitcast(actual_val, expected_ty)
1567 } else {
1568 actual_val
1569 }
1570 })
1571 .collect();
1572
1573 Cow::Owned(casted_args)
1574 }
1575}
1576impl<'a, 'll, CX: Borrow<SimpleCx<'ll>>> GenericBuilder<'a, 'll, CX> {
1577 pub(crate) fn va_arg(&mut self, list: &'ll Value, ty: &'ll Type) -> &'ll Value {
1578 unsafe { llvm::LLVMBuildVAArg(self.llbuilder, list, ty, UNNAMED) }
1579 }
1580}
1581impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
1582 pub(crate) fn call_intrinsic(&mut self, intrinsic: &str, args: &[&'ll Value]) -> &'ll Value {
1583 let (ty, f) = self.cx.get_intrinsic(intrinsic);
1584 self.call(ty, None, None, f, args, None, None)
1585 }
1586
1587 fn call_lifetime_intrinsic(&mut self, intrinsic: &str, ptr: &'ll Value, size: Size) {
1588 let size = size.bytes();
1589 if size == 0 {
1590 return;
1591 }
1592
1593 if !self.cx().sess().emit_lifetime_markers() {
1594 return;
1595 }
1596
1597 self.call_intrinsic(intrinsic, &[self.cx.const_u64(size), ptr]);
1598 }
1599}
1600impl<'a, 'll, CX: Borrow<SimpleCx<'ll>>> GenericBuilder<'a, 'll, CX> {
1601 pub(crate) fn phi(
1602 &mut self,
1603 ty: &'ll Type,
1604 vals: &[&'ll Value],
1605 bbs: &[&'ll BasicBlock],
1606 ) -> &'ll Value {
1607 assert_eq!(vals.len(), bbs.len());
1608 let phi = unsafe { llvm::LLVMBuildPhi(self.llbuilder, ty, UNNAMED) };
1609 unsafe {
1610 llvm::LLVMAddIncoming(phi, vals.as_ptr(), bbs.as_ptr(), vals.len() as c_uint);
1611 phi
1612 }
1613 }
1614
1615 fn add_incoming_to_phi(&mut self, phi: &'ll Value, val: &'ll Value, bb: &'ll BasicBlock) {
1616 unsafe {
1617 llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
1618 }
1619 }
1620}
1621impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
1622 fn fptoint_sat(&mut self, signed: bool, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
1623 let src_ty = self.cx.val_ty(val);
1624 let (float_ty, int_ty, vector_length) = if self.cx.type_kind(src_ty) == TypeKind::Vector {
1625 assert_eq!(self.cx.vector_length(src_ty), self.cx.vector_length(dest_ty));
1626 (
1627 self.cx.element_type(src_ty),
1628 self.cx.element_type(dest_ty),
1629 Some(self.cx.vector_length(src_ty)),
1630 )
1631 } else {
1632 (src_ty, dest_ty, None)
1633 };
1634 let float_width = self.cx.float_width(float_ty);
1635 let int_width = self.cx.int_width(int_ty);
1636
1637 let instr = if signed { "fptosi" } else { "fptoui" };
1638 let name = if let Some(vector_length) = vector_length {
1639 format!("llvm.{instr}.sat.v{vector_length}i{int_width}.v{vector_length}f{float_width}")
1640 } else {
1641 format!("llvm.{instr}.sat.i{int_width}.f{float_width}")
1642 };
1643 let f = self.declare_cfn(&name, llvm::UnnamedAddr::No, self.type_func(&[src_ty], dest_ty));
1644 self.call(self.type_func(&[src_ty], dest_ty), None, None, f, &[val], None, None)
1645 }
1646
1647 pub(crate) fn landing_pad(
1648 &mut self,
1649 ty: &'ll Type,
1650 pers_fn: &'ll Value,
1651 num_clauses: usize,
1652 ) -> &'ll Value {
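// The personality is a property of the enclosing function, so set it there and
// pass `None` as the personality argument to `LLVMBuildLandingPad`.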
1653 self.set_personality_fn(pers_fn);
1657 unsafe {
1658 llvm::LLVMBuildLandingPad(self.llbuilder, ty, None, num_clauses as c_uint, UNNAMED)
1659 }
1660 }
1661
1662 pub(crate) fn callbr(
1663 &mut self,
1664 llty: &'ll Type,
1665 fn_attrs: Option<&CodegenFnAttrs>,
1666 fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
1667 llfn: &'ll Value,
1668 args: &[&'ll Value],
1669 default_dest: &'ll BasicBlock,
1670 indirect_dest: &[&'ll BasicBlock],
1671 funclet: Option<&Funclet<'ll>>,
1672 instance: Option<Instance<'tcx>>,
1673 ) -> &'ll Value {
1674 debug!("invoke {:?} with args ({:?})", llfn, args);
1675
1676 let args = self.check_call("callbr", llty, llfn, args);
1677 let funclet_bundle = funclet.map(|funclet| funclet.bundle());
1678 let mut bundles: SmallVec<[_; 2]> = SmallVec::new();
1679 if let Some(funclet_bundle) = funclet_bundle {
1680 bundles.push(funclet_bundle);
1681 }
1682
1683 self.cfi_type_test(fn_attrs, fn_abi, instance, llfn);
1685
1686 let kcfi_bundle = self.kcfi_operand_bundle(fn_attrs, fn_abi, instance, llfn);
1688 if let Some(kcfi_bundle) = kcfi_bundle.as_deref() {
1689 bundles.push(kcfi_bundle);
1690 }
1691
1692 let callbr = unsafe {
1693 llvm::LLVMBuildCallBr(
1694 self.llbuilder,
1695 llty,
1696 llfn,
1697 default_dest,
1698 indirect_dest.as_ptr(),
1699 indirect_dest.len() as c_uint,
1700 args.as_ptr(),
1701 args.len() as c_uint,
1702 bundles.as_ptr(),
1703 bundles.len() as c_uint,
1704 UNNAMED,
1705 )
1706 };
1707 if let Some(fn_abi) = fn_abi {
1708 fn_abi.apply_attrs_callsite(self, callbr);
1709 }
1710 callbr
1711 }
1712
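// Emit a CFI (forward-edge control-flow integrity) check for indirect calls:
// test the callee against the expected type id and abort if the test fails.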
1713 fn cfi_type_test(
1715 &mut self,
1716 fn_attrs: Option<&CodegenFnAttrs>,
1717 fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
1718 instance: Option<Instance<'tcx>>,
1719 llfn: &'ll Value,
1720 ) {
1721 let is_indirect_call = unsafe { llvm::LLVMRustIsNonGVFunctionPointerTy(llfn) };
1722 if self.tcx.sess.is_sanitizer_cfi_enabled()
1723 && let Some(fn_abi) = fn_abi
1724 && is_indirect_call
1725 {
1726 if let Some(fn_attrs) = fn_attrs
1727 && fn_attrs.no_sanitize.contains(SanitizerSet::CFI)
1728 {
1729 return;
1730 }
1731
1732 let mut options = cfi::TypeIdOptions::empty();
1733 if self.tcx.sess.is_sanitizer_cfi_generalize_pointers_enabled() {
1734 options.insert(cfi::TypeIdOptions::GENERALIZE_POINTERS);
1735 }
1736 if self.tcx.sess.is_sanitizer_cfi_normalize_integers_enabled() {
1737 options.insert(cfi::TypeIdOptions::NORMALIZE_INTEGERS);
1738 }
1739
1740 let typeid = if let Some(instance) = instance {
1741 cfi::typeid_for_instance(self.tcx, instance, options)
1742 } else {
1743 cfi::typeid_for_fnabi(self.tcx, fn_abi, options)
1744 };
1745 let typeid_metadata = self.cx.typeid_metadata(typeid).unwrap();
1746 let dbg_loc = self.get_dbg_loc();
1747
1748 let cond = self.type_test(llfn, typeid_metadata);
1750 let bb_pass = self.append_sibling_block("type_test.pass");
1751 let bb_fail = self.append_sibling_block("type_test.fail");
1752 self.cond_br(cond, bb_pass, bb_fail);
1753
1754 self.switch_to_block(bb_fail);
1755 if let Some(dbg_loc) = dbg_loc {
1756 self.set_dbg_loc(dbg_loc);
1757 }
1758 self.abort();
1759 self.unreachable();
1760
1761 self.switch_to_block(bb_pass);
1762 if let Some(dbg_loc) = dbg_loc {
1763 self.set_dbg_loc(dbg_loc);
1764 }
1765 }
1766 }
1767
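// For indirect calls under KCFI, attach a "kcfi" operand bundle carrying the
// callee's expected type id; the actual check is emitted by the backend.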
1768 fn kcfi_operand_bundle(
1770 &mut self,
1771 fn_attrs: Option<&CodegenFnAttrs>,
1772 fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
1773 instance: Option<Instance<'tcx>>,
1774 llfn: &'ll Value,
1775 ) -> Option<llvm::OperandBundleOwned<'ll>> {
1776 let is_indirect_call = unsafe { llvm::LLVMRustIsNonGVFunctionPointerTy(llfn) };
1777 let kcfi_bundle = if self.tcx.sess.is_sanitizer_kcfi_enabled()
1778 && let Some(fn_abi) = fn_abi
1779 && is_indirect_call
1780 {
1781 if let Some(fn_attrs) = fn_attrs
1782 && fn_attrs.no_sanitize.contains(SanitizerSet::KCFI)
1783 {
1784 return None;
1785 }
1786
1787 let mut options = kcfi::TypeIdOptions::empty();
1788 if self.tcx.sess.is_sanitizer_cfi_generalize_pointers_enabled() {
1789 options.insert(kcfi::TypeIdOptions::GENERALIZE_POINTERS);
1790 }
1791 if self.tcx.sess.is_sanitizer_cfi_normalize_integers_enabled() {
1792 options.insert(kcfi::TypeIdOptions::NORMALIZE_INTEGERS);
1793 }
1794
1795 let kcfi_typeid = if let Some(instance) = instance {
1796 kcfi::typeid_for_instance(self.tcx, instance, options)
1797 } else {
1798 kcfi::typeid_for_fnabi(self.tcx, fn_abi, options)
1799 };
1800
1801 Some(llvm::OperandBundleOwned::new("kcfi", &[self.const_u32(kcfi_typeid)]))
1802 } else {
1803 None
1804 };
1805 kcfi_bundle
1806 }
1807
1808 #[instrument(level = "debug", skip(self))]
1810 pub(crate) fn instrprof_increment(
1811 &mut self,
1812 fn_name: &'ll Value,
1813 hash: &'ll Value,
1814 num_counters: &'ll Value,
1815 index: &'ll Value,
1816 ) {
1817 self.call_intrinsic("llvm.instrprof.increment", &[fn_name, hash, num_counters, index]);
1818 }
1819
1820 #[instrument(level = "debug", skip(self))]
1830 pub(crate) fn mcdc_parameters(
1831 &mut self,
1832 fn_name: &'ll Value,
1833 hash: &'ll Value,
1834 bitmap_bits: &'ll Value,
1835 ) {
1836 assert!(
1837 crate::llvm_util::get_version() >= (19, 0, 0),
1838 "MCDC intrinsics require LLVM 19 or later"
1839 );
1840 self.call_intrinsic("llvm.instrprof.mcdc.parameters", &[fn_name, hash, bitmap_bits]);
1841 }
1842
1843 #[instrument(level = "debug", skip(self))]
1844 pub(crate) fn mcdc_tvbitmap_update(
1845 &mut self,
1846 fn_name: &'ll Value,
1847 hash: &'ll Value,
1848 bitmap_index: &'ll Value,
1849 mcdc_temp: &'ll Value,
1850 ) {
1851 assert!(
1852 crate::llvm_util::get_version() >= (19, 0, 0),
1853 "MCDC intrinsics require LLVM 19 or later"
1854 );
1855 let args = &[fn_name, hash, bitmap_index, mcdc_temp];
1856 self.call_intrinsic("llvm.instrprof.mcdc.tvbitmap.update", args);
1857 }
1858
1859 #[instrument(level = "debug", skip(self))]
1860 pub(crate) fn mcdc_condbitmap_reset(&mut self, mcdc_temp: &'ll Value) {
1861 self.store(self.const_i32(0), mcdc_temp, self.tcx.data_layout.i32_align.abi);
1862 }
1863
1864 #[instrument(level = "debug", skip(self))]
1865 pub(crate) fn mcdc_condbitmap_update(&mut self, cond_index: &'ll Value, mcdc_temp: &'ll Value) {
1866 assert!(
1867 crate::llvm_util::get_version() >= (19, 0, 0),
1868 "MCDC intrinsics require LLVM 19 or later"
1869 );
1870 let align = self.tcx.data_layout.i32_align.abi;
1871 let current_tv_index = self.load(self.cx.type_i32(), mcdc_temp, align);
1872 let new_tv_index = self.add(current_tv_index, cond_index);
1873 self.store(new_tv_index, mcdc_temp, align);
1874 }
1875}