use std::borrow::{Borrow, Cow};
use std::iter;
use std::ops::Deref;

use rustc_ast::expand::typetree::FncTree;
pub(crate) mod autodiff;
pub(crate) mod gpu_offload;

use libc::{c_char, c_uint};
use rustc_abi::{self as abi, Align, Size, WrappingRange};
use rustc_codegen_ssa::MemFlags;
use rustc_codegen_ssa::common::{IntPredicate, RealPredicate, SynchronizationScope, TypeKind};
use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
use rustc_codegen_ssa::mir::place::PlaceRef;
use rustc_codegen_ssa::traits::*;
use rustc_data_structures::small_c_str::SmallCStr;
use rustc_hir::def_id::DefId;
use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrs, TargetFeature, TargetFeatureKind};
use rustc_middle::ty::layout::{
    FnAbiError, FnAbiOfHelpers, FnAbiRequest, HasTypingEnv, LayoutError, LayoutOfHelpers,
    TyAndLayout,
};
use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
use rustc_sanitizers::{cfi, kcfi};
use rustc_session::config::OptLevel;
use rustc_span::Span;
use rustc_target::callconv::{FnAbi, PassMode};
use rustc_target::spec::{Arch, HasTargetSpec, SanitizerSet, Target};
use smallvec::SmallVec;
use tracing::{debug, instrument};

use crate::abi::FnAbiLlvmExt;
use crate::attributes;
use crate::common::Funclet;
use crate::context::{CodegenCx, FullCx, GenericCx, SCx};
use crate::llvm::{
    self, AtomicOrdering, AtomicRmwBinOp, BasicBlock, FromGeneric, GEPNoWrapFlags, Metadata, TRUE,
    ToLlvmBool, Type, Value,
};
use crate::type_of::LayoutLlvmExt;

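/// Wrapper around an LLVM `IRBuilder`, generic over the codegen context `CX`
/// so that the same builder methods work with both the full `CodegenCx` and
/// the simpler LLVM-only context (`SCx`).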
#[must_use]
pub(crate) struct GenericBuilder<'a, 'll, CX: Borrow<SCx<'ll>>> {
    pub llbuilder: &'ll mut llvm::Builder<'ll>,
    pub cx: &'a GenericCx<'ll, CX>,
}

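/// `SBuilder` pairs the builder with the bare LLVM context, while `Builder`
/// carries the full `tcx`-aware codegen context used during MIR codegen.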
pub(crate) type SBuilder<'a, 'll> = GenericBuilder<'a, 'll, SCx<'ll>>;
pub(crate) type Builder<'a, 'll, 'tcx> = GenericBuilder<'a, 'll, FullCx<'ll, 'tcx>>;

impl<'a, 'll, CX: Borrow<SCx<'ll>>> Drop for GenericBuilder<'a, 'll, CX> {
    fn drop(&mut self) {
        unsafe {
            llvm::LLVMDisposeBuilder(&mut *(self.llbuilder as *mut _));
        }
    }
}

impl<'a, 'll> SBuilder<'a, 'll> {
    pub(crate) fn call(
        &mut self,
        llty: &'ll Type,
        llfn: &'ll Value,
        args: &[&'ll Value],
        funclet: Option<&Funclet<'ll>>,
    ) -> &'ll Value {
        debug!("call {:?} with args ({:?})", llfn, args);

        let args = self.check_call("call", llty, llfn, args);
        let funclet_bundle = funclet.map(|funclet| funclet.bundle());
        let mut bundles: SmallVec<[_; 2]> = SmallVec::new();
        if let Some(funclet_bundle) = funclet_bundle {
            bundles.push(funclet_bundle);
        }

        let call = unsafe {
            llvm::LLVMBuildCallWithOperandBundles(
                self.llbuilder,
                llty,
                llfn,
                args.as_ptr() as *const &llvm::Value,
                args.len() as c_uint,
                bundles.as_ptr(),
                bundles.len() as c_uint,
                c"".as_ptr(),
            )
        };
        call
    }
}

impl<'a, 'll, CX: Borrow<SCx<'ll>>> GenericBuilder<'a, 'll, CX> {
    fn with_cx(scx: &'a GenericCx<'ll, CX>) -> Self {
        let llbuilder = unsafe { llvm::LLVMCreateBuilderInContext(scx.deref().borrow().llcx) };
        GenericBuilder { llbuilder, cx: scx }
    }

    pub(crate) fn append_block(
        cx: &'a GenericCx<'ll, CX>,
        llfn: &'ll Value,
        name: &str,
    ) -> &'ll BasicBlock {
        unsafe {
            let name = SmallCStr::new(name);
            llvm::LLVMAppendBasicBlockInContext(cx.llcx(), llfn, name.as_ptr())
        }
    }

    pub(crate) fn trunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    pub(crate) fn bitcast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    pub(crate) fn ret_void(&mut self) {
        llvm::LLVMBuildRetVoid(self.llbuilder);
    }

    pub(crate) fn ret(&mut self, v: &'ll Value) {
        unsafe {
            llvm::LLVMBuildRet(self.llbuilder, v);
        }
    }

    pub(crate) fn build(cx: &'a GenericCx<'ll, CX>, llbb: &'ll BasicBlock) -> Self {
        let bx = Self::with_cx(cx);
        unsafe {
            llvm::LLVMPositionBuilderAtEnd(bx.llbuilder, llbb);
        }
        bx
    }

    pub(crate) fn direct_alloca(&mut self, ty: &'ll Type, align: Align, name: &str) -> &'ll Value {
        let val = unsafe {
            let alloca = llvm::LLVMBuildAlloca(self.llbuilder, ty, UNNAMED);
            llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
            llvm::LLVMBuildPointerCast(self.llbuilder, alloca, self.cx.type_ptr(), UNNAMED)
        };
        if name != "" {
            let name = std::ffi::CString::new(name).unwrap();
            llvm::set_value_name(val, name.as_bytes());
        }
        val
    }

    pub(crate) fn inbounds_gep(
        &mut self,
        ty: &'ll Type,
        ptr: &'ll Value,
        indices: &[&'ll Value],
    ) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildGEPWithNoWrapFlags(
                self.llbuilder,
                ty,
                ptr,
                indices.as_ptr(),
                indices.len() as c_uint,
                UNNAMED,
                GEPNoWrapFlags::InBounds,
            )
        }
    }

    pub(crate) fn store(&mut self, val: &'ll Value, ptr: &'ll Value, align: Align) -> &'ll Value {
        debug!("Store {:?} -> {:?}", val, ptr);
        assert_eq!(self.cx.type_kind(self.cx.val_ty(ptr)), TypeKind::Pointer);
        unsafe {
            let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
            llvm::LLVMSetAlignment(store, align.bytes() as c_uint);
            store
        }
    }

    pub(crate) fn load(&mut self, ty: &'ll Type, ptr: &'ll Value, align: Align) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMBuildLoad2(self.llbuilder, ty, ptr, UNNAMED);
            llvm::LLVMSetAlignment(load, align.bytes() as c_uint);
            load
        }
    }
}

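// Empty string, to be used where LLVM expects an instruction name, indicating
// that the instruction is not named.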
pub(crate) const UNNAMED: *const c_char = c"".as_ptr();

impl<'ll, CX: Borrow<SCx<'ll>>> BackendTypes for GenericBuilder<'_, 'll, CX> {
    type Function = <GenericCx<'ll, CX> as BackendTypes>::Function;
    type BasicBlock = <GenericCx<'ll, CX> as BackendTypes>::BasicBlock;
    type Funclet = <GenericCx<'ll, CX> as BackendTypes>::Funclet;

    type Value = <GenericCx<'ll, CX> as BackendTypes>::Value;
    type Type = <GenericCx<'ll, CX> as BackendTypes>::Type;
    type FunctionSignature = <GenericCx<'ll, CX> as BackendTypes>::FunctionSignature;

    type DIScope = <GenericCx<'ll, CX> as BackendTypes>::DIScope;
    type DILocation = <GenericCx<'ll, CX> as BackendTypes>::DILocation;
    type DIVariable = <GenericCx<'ll, CX> as BackendTypes>::DIVariable;
}

impl abi::HasDataLayout for Builder<'_, '_, '_> {
    fn data_layout(&self) -> &abi::TargetDataLayout {
        self.cx.data_layout()
    }
}

impl<'tcx> ty::layout::HasTyCtxt<'tcx> for Builder<'_, '_, 'tcx> {
    #[inline]
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.cx.tcx
    }
}

impl<'tcx> ty::layout::HasTypingEnv<'tcx> for Builder<'_, '_, 'tcx> {
    fn typing_env(&self) -> ty::TypingEnv<'tcx> {
        self.cx.typing_env()
    }
}

impl HasTargetSpec for Builder<'_, '_, '_> {
    #[inline]
    fn target_spec(&self) -> &Target {
        self.cx.target_spec()
    }
}

impl<'tcx> LayoutOfHelpers<'tcx> for Builder<'_, '_, 'tcx> {
    #[inline]
    fn handle_layout_err(&self, err: LayoutError<'tcx>, span: Span, ty: Ty<'tcx>) -> ! {
        self.cx.handle_layout_err(err, span, ty)
    }
}

impl<'tcx> FnAbiOfHelpers<'tcx> for Builder<'_, '_, 'tcx> {
    #[inline]
    fn handle_fn_abi_err(
        &self,
        err: FnAbiError<'tcx>,
        span: Span,
        fn_abi_request: FnAbiRequest<'tcx>,
    ) -> ! {
        self.cx.handle_fn_abi_err(err, span, fn_abi_request)
    }
}

impl<'ll, 'tcx> Deref for Builder<'_, 'll, 'tcx> {
    type Target = CodegenCx<'ll, 'tcx>;

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.cx
    }
}

macro_rules! math_builder_methods {
    ($($name:ident($($arg:ident),*) => $llvm_capi:ident),+ $(,)?) => {
        $(fn $name(&mut self, $($arg: &'ll Value),*) -> &'ll Value {
            unsafe {
                llvm::$llvm_capi(self.llbuilder, $($arg,)* UNNAMED)
            }
        })+
    }
}

macro_rules! set_math_builder_methods {
    ($($name:ident($($arg:ident),*) => ($llvm_capi:ident, $llvm_set_math:ident)),+ $(,)?) => {
        $(fn $name(&mut self, $($arg: &'ll Value),*) -> &'ll Value {
            unsafe {
                let instr = llvm::$llvm_capi(self.llbuilder, $($arg,)* UNNAMED);
                llvm::$llvm_set_math(instr);
                instr
            }
        })+
    }
}

impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
    type CodegenCx = CodegenCx<'ll, 'tcx>;

    fn build(cx: &'a CodegenCx<'ll, 'tcx>, llbb: &'ll BasicBlock) -> Self {
        let bx = Builder::with_cx(cx);
        unsafe {
            llvm::LLVMPositionBuilderAtEnd(bx.llbuilder, llbb);
        }
        bx
    }

    fn cx(&self) -> &CodegenCx<'ll, 'tcx> {
        self.cx
    }

    fn llbb(&self) -> &'ll BasicBlock {
        unsafe { llvm::LLVMGetInsertBlock(self.llbuilder) }
    }

    fn set_span(&mut self, _span: Span) {}

    fn append_block(cx: &'a CodegenCx<'ll, 'tcx>, llfn: &'ll Value, name: &str) -> &'ll BasicBlock {
        unsafe {
            let name = SmallCStr::new(name);
            llvm::LLVMAppendBasicBlockInContext(cx.llcx, llfn, name.as_ptr())
        }
    }

    fn append_sibling_block(&mut self, name: &str) -> &'ll BasicBlock {
        Self::append_block(self.cx, self.llfn(), name)
    }

    fn switch_to_block(&mut self, llbb: Self::BasicBlock) {
        *self = Self::build(self.cx, llbb)
    }

    fn ret_void(&mut self) {
        llvm::LLVMBuildRetVoid(self.llbuilder);
    }

    fn ret(&mut self, v: &'ll Value) {
        unsafe {
            llvm::LLVMBuildRet(self.llbuilder, v);
        }
    }

    fn br(&mut self, dest: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMBuildBr(self.llbuilder, dest);
        }
    }

    fn cond_br(
        &mut self,
        cond: &'ll Value,
        then_llbb: &'ll BasicBlock,
        else_llbb: &'ll BasicBlock,
    ) {
        unsafe {
            llvm::LLVMBuildCondBr(self.llbuilder, cond, then_llbb, else_llbb);
        }
    }

    fn switch(
        &mut self,
        v: &'ll Value,
        else_llbb: &'ll BasicBlock,
        cases: impl ExactSizeIterator<Item = (u128, &'ll BasicBlock)>,
    ) {
        let switch =
            unsafe { llvm::LLVMBuildSwitch(self.llbuilder, v, else_llbb, cases.len() as c_uint) };
        for (on_val, dest) in cases {
            let on_val = self.const_uint_big(self.val_ty(v), on_val);
            unsafe { llvm::LLVMAddCase(switch, on_val, dest) }
        }
    }

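    // Like `switch`, but also attaches `!prof` branch-weight metadata so that
    // LLVM treats the branches marked cold as unlikely. The 1 (cold) vs. 2000
    // (hot) weights are heuristic constants, not measured profile data.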
    fn switch_with_weights(
        &mut self,
        v: Self::Value,
        else_llbb: Self::BasicBlock,
        else_is_cold: bool,
        cases: impl ExactSizeIterator<Item = (u128, Self::BasicBlock, bool)>,
    ) {
        if self.cx.sess().opts.optimize == rustc_session::config::OptLevel::No {
            self.switch(v, else_llbb, cases.map(|(val, dest, _)| (val, dest)));
            return;
        }

        let id = self.cx.create_metadata(b"branch_weights");

        let cold_weight = llvm::LLVMValueAsMetadata(self.cx.const_u32(1));
        let hot_weight = llvm::LLVMValueAsMetadata(self.cx.const_u32(2000));
        let weight =
            |is_cold: bool| -> &Metadata { if is_cold { cold_weight } else { hot_weight } };

        let mut md: SmallVec<[&Metadata; 16]> = SmallVec::with_capacity(cases.len() + 2);
        md.push(id);
        md.push(weight(else_is_cold));

        let switch =
            unsafe { llvm::LLVMBuildSwitch(self.llbuilder, v, else_llbb, cases.len() as c_uint) };
        for (on_val, dest, is_cold) in cases {
            let on_val = self.const_uint_big(self.val_ty(v), on_val);
            unsafe { llvm::LLVMAddCase(switch, on_val, dest) }
            md.push(weight(is_cold));
        }

        self.cx.set_metadata_node(switch, llvm::MD_prof, &md);
    }

    fn invoke(
        &mut self,
        llty: &'ll Type,
        fn_attrs: Option<&CodegenFnAttrs>,
        fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
        llfn: &'ll Value,
        args: &[&'ll Value],
        then: &'ll BasicBlock,
        catch: &'ll BasicBlock,
        funclet: Option<&Funclet<'ll>>,
        instance: Option<Instance<'tcx>>,
    ) -> &'ll Value {
        debug!("invoke {:?} with args ({:?})", llfn, args);

        let args = self.check_call("invoke", llty, llfn, args);
        let funclet_bundle = funclet.map(|funclet| funclet.bundle());
        let mut bundles: SmallVec<[_; 2]> = SmallVec::new();
        if let Some(funclet_bundle) = funclet_bundle {
            bundles.push(funclet_bundle);
        }

        self.cfi_type_test(fn_attrs, fn_abi, instance, llfn);

        let kcfi_bundle = self.kcfi_operand_bundle(fn_attrs, fn_abi, instance, llfn);
        if let Some(kcfi_bundle) = kcfi_bundle.as_ref().map(|b| b.as_ref()) {
            bundles.push(kcfi_bundle);
        }

        let invoke = unsafe {
            llvm::LLVMBuildInvokeWithOperandBundles(
                self.llbuilder,
                llty,
                llfn,
                args.as_ptr(),
                args.len() as c_uint,
                then,
                catch,
                bundles.as_ptr(),
                bundles.len() as c_uint,
                UNNAMED,
            )
        };
        if let Some(fn_abi) = fn_abi {
            fn_abi.apply_attrs_callsite(self, invoke);
        }
        invoke
    }

    fn unreachable(&mut self) {
        unsafe {
            llvm::LLVMBuildUnreachable(self.llbuilder);
        }
    }

    math_builder_methods! {
        add(a, b) => LLVMBuildAdd,
        fadd(a, b) => LLVMBuildFAdd,
        sub(a, b) => LLVMBuildSub,
        fsub(a, b) => LLVMBuildFSub,
        mul(a, b) => LLVMBuildMul,
        fmul(a, b) => LLVMBuildFMul,
        udiv(a, b) => LLVMBuildUDiv,
        exactudiv(a, b) => LLVMBuildExactUDiv,
        sdiv(a, b) => LLVMBuildSDiv,
        exactsdiv(a, b) => LLVMBuildExactSDiv,
        fdiv(a, b) => LLVMBuildFDiv,
        urem(a, b) => LLVMBuildURem,
        srem(a, b) => LLVMBuildSRem,
        frem(a, b) => LLVMBuildFRem,
        shl(a, b) => LLVMBuildShl,
        lshr(a, b) => LLVMBuildLShr,
        ashr(a, b) => LLVMBuildAShr,
        and(a, b) => LLVMBuildAnd,
        or(a, b) => LLVMBuildOr,
        xor(a, b) => LLVMBuildXor,
        neg(x) => LLVMBuildNeg,
        fneg(x) => LLVMBuildFNeg,
        not(x) => LLVMBuildNot,
        unchecked_sadd(x, y) => LLVMBuildNSWAdd,
        unchecked_uadd(x, y) => LLVMBuildNUWAdd,
        unchecked_ssub(x, y) => LLVMBuildNSWSub,
        unchecked_usub(x, y) => LLVMBuildNUWSub,
        unchecked_smul(x, y) => LLVMBuildNSWMul,
        unchecked_umul(x, y) => LLVMBuildNUWMul,
    }

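    // The `unchecked_su*` variants assert that the operation can wrap neither
    // signedly nor unsignedly, so both `nsw` and `nuw` are set. LLVM may
    // constant-fold the result, and wrap flags can only be set on
    // instructions, hence the `LLVMIsAInstruction` checks.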
    fn unchecked_suadd(&mut self, a: &'ll Value, b: &'ll Value) -> &'ll Value {
        unsafe {
            let add = llvm::LLVMBuildAdd(self.llbuilder, a, b, UNNAMED);
            if llvm::LLVMIsAInstruction(add).is_some() {
                llvm::LLVMSetNUW(add, TRUE);
                llvm::LLVMSetNSW(add, TRUE);
            }
            add
        }
    }
    fn unchecked_susub(&mut self, a: &'ll Value, b: &'ll Value) -> &'ll Value {
        unsafe {
            let sub = llvm::LLVMBuildSub(self.llbuilder, a, b, UNNAMED);
            if llvm::LLVMIsAInstruction(sub).is_some() {
                llvm::LLVMSetNUW(sub, TRUE);
                llvm::LLVMSetNSW(sub, TRUE);
            }
            sub
        }
    }
    fn unchecked_sumul(&mut self, a: &'ll Value, b: &'ll Value) -> &'ll Value {
        unsafe {
            let mul = llvm::LLVMBuildMul(self.llbuilder, a, b, UNNAMED);
            if llvm::LLVMIsAInstruction(mul).is_some() {
                llvm::LLVMSetNUW(mul, TRUE);
                llvm::LLVMSetNSW(mul, TRUE);
            }
            mul
        }
    }

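    // `disjoint` asserts that no bit is set in both operands, which makes the
    // `or` equivalent to an `add` and unlocks additional optimizations.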
    fn or_disjoint(&mut self, a: &'ll Value, b: &'ll Value) -> &'ll Value {
        unsafe {
            let or = llvm::LLVMBuildOr(self.llbuilder, a, b, UNNAMED);

            if llvm::LLVMIsAInstruction(or).is_some() {
                llvm::LLVMSetIsDisjoint(or, TRUE);
            }
            or
        }
    }

    set_math_builder_methods! {
        fadd_fast(x, y) => (LLVMBuildFAdd, LLVMRustSetFastMath),
        fsub_fast(x, y) => (LLVMBuildFSub, LLVMRustSetFastMath),
        fmul_fast(x, y) => (LLVMBuildFMul, LLVMRustSetFastMath),
        fdiv_fast(x, y) => (LLVMBuildFDiv, LLVMRustSetFastMath),
        frem_fast(x, y) => (LLVMBuildFRem, LLVMRustSetFastMath),
        fadd_algebraic(x, y) => (LLVMBuildFAdd, LLVMRustSetAlgebraicMath),
        fsub_algebraic(x, y) => (LLVMBuildFSub, LLVMRustSetAlgebraicMath),
        fmul_algebraic(x, y) => (LLVMBuildFMul, LLVMRustSetAlgebraicMath),
        fdiv_algebraic(x, y) => (LLVMBuildFDiv, LLVMRustSetAlgebraicMath),
        frem_algebraic(x, y) => (LLVMBuildFRem, LLVMRustSetAlgebraicMath),
    }

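    // Emits a checked integer operation. Unsigned add/sub overflow can be
    // recovered from a single comparison on the result; everything else goes
    // through the `llvm.{s,u}{add,sub,mul}.with.overflow` intrinsics.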
    fn checked_binop(
        &mut self,
        oop: OverflowOp,
        ty: Ty<'tcx>,
        lhs: Self::Value,
        rhs: Self::Value,
    ) -> (Self::Value, Self::Value) {
        let (size, signed) = ty.int_size_and_signed(self.tcx);
        let width = size.bits();

        if !signed {
            match oop {
                OverflowOp::Sub => {
                    let sub = self.sub(lhs, rhs);
                    let cmp = self.icmp(IntPredicate::IntULT, lhs, rhs);
                    return (sub, cmp);
                }
                OverflowOp::Add => {
                    let add = self.add(lhs, rhs);
                    let cmp = self.icmp(IntPredicate::IntULT, add, lhs);
                    return (add, cmp);
                }
                OverflowOp::Mul => {}
            }
        }

        let oop_str = match oop {
            OverflowOp::Add => "add",
            OverflowOp::Sub => "sub",
            OverflowOp::Mul => "mul",
        };

        let name = format!("llvm.{}{oop_str}.with.overflow", if signed { 's' } else { 'u' });

        let res = self.call_intrinsic(name, &[self.type_ix(width)], &[lhs, rhs]);
        (self.extract_value(res, 0), self.extract_value(res, 1))
    }

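    // Rust's codegen represents `bool` as `i8` in memory but as `i1` when it
    // is an immediate; these two helpers convert between the conventions.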
    fn from_immediate(&mut self, val: Self::Value) -> Self::Value {
        if self.cx().val_ty(val) == self.cx().type_i1() {
            self.zext(val, self.cx().type_i8())
        } else {
            val
        }
    }

    fn to_immediate_scalar(&mut self, val: Self::Value, scalar: abi::Scalar) -> Self::Value {
        if scalar.is_bool() {
            return self.unchecked_utrunc(val, self.cx().type_i1());
        }
        val
    }

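    // Allocas are emitted into the function's entry block: LLVM's mem2reg
    // pass only considers entry-block allocas for promotion to SSA registers.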
    fn alloca(&mut self, size: Size, align: Align) -> &'ll Value {
        let mut bx = Builder::with_cx(self.cx);
        bx.position_at_start(unsafe { llvm::LLVMGetFirstBasicBlock(self.llfn()) });
        let ty = self.cx().type_array(self.cx().type_i8(), size.bytes());
        unsafe {
            let alloca = llvm::LLVMBuildAlloca(bx.llbuilder, ty, UNNAMED);
            llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
            llvm::LLVMBuildPointerCast(bx.llbuilder, alloca, self.cx().type_ptr(), UNNAMED)
        }
    }

    fn alloca_with_ty(&mut self, layout: TyAndLayout<'tcx>) -> Self::Value {
        let mut bx = Builder::with_cx(self.cx);
        bx.position_at_start(unsafe { llvm::LLVMGetFirstBasicBlock(self.llfn()) });
        let scalable_vector_ty = layout.llvm_type(self.cx);

        unsafe {
            let alloca = llvm::LLVMBuildAlloca(bx.llbuilder, scalable_vector_ty, UNNAMED);
            llvm::LLVMSetAlignment(alloca, layout.align.abi.bytes() as c_uint);
            alloca
        }
    }

    fn load(&mut self, ty: &'ll Type, ptr: &'ll Value, align: Align) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMBuildLoad2(self.llbuilder, ty, ptr, UNNAMED);
            let align = align.min(self.cx().tcx.sess.target.max_reliable_alignment());
            llvm::LLVMSetAlignment(load, align.bytes() as c_uint);
            load
        }
    }

    fn volatile_load(&mut self, ty: &'ll Type, ptr: &'ll Value) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMBuildLoad2(self.llbuilder, ty, ptr, UNNAMED);
            llvm::LLVMSetVolatile(load, llvm::TRUE);
            load
        }
    }

    fn atomic_load(
        &mut self,
        ty: &'ll Type,
        ptr: &'ll Value,
        order: rustc_middle::ty::AtomicOrdering,
        size: Size,
    ) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMBuildLoad2(self.llbuilder, ty, ptr, UNNAMED);
            llvm::LLVMSetOrdering(load, AtomicOrdering::from_generic(order));
            llvm::LLVMSetAlignment(load, size.bytes() as c_uint);
            load
        }
    }

    #[instrument(level = "trace", skip(self))]
    fn load_operand(&mut self, place: PlaceRef<'tcx, &'ll Value>) -> OperandRef<'tcx, &'ll Value> {
        if place.layout.is_unsized() {
            let tail = self.tcx.struct_tail_for_codegen(place.layout.ty, self.typing_env());
            if matches!(tail.kind(), ty::Foreign(..)) {
                panic!("unsized locals must not be `extern` types");
            }
        }
        assert_eq!(place.val.llextra.is_some(), place.layout.is_unsized());

        if place.layout.is_zst() {
            return OperandRef::zero_sized(place.layout);
        }

        #[instrument(level = "trace", skip(bx))]
        fn scalar_load_metadata<'a, 'll, 'tcx>(
            bx: &mut Builder<'a, 'll, 'tcx>,
            load: &'ll Value,
            scalar: abi::Scalar,
            layout: TyAndLayout<'tcx>,
            offset: Size,
        ) {
            if bx.cx.sess().opts.optimize == OptLevel::No {
                return;
            }

            if !scalar.is_uninit_valid() {
                bx.noundef_metadata(load);
            }

            match scalar.primitive() {
                abi::Primitive::Int(..) => {
                    if !scalar.is_always_valid(bx) {
                        bx.range_metadata(load, scalar.valid_range(bx));
                    }
                }
                abi::Primitive::Pointer(_) => {
                    if !scalar.valid_range(bx).contains(0) {
                        bx.nonnull_metadata(load);
                    }

                    if let Some(pointee) = layout.pointee_info_at(bx, offset)
                        && pointee.align > Align::ONE
                    {
                        bx.align_metadata(load, pointee.align);
                    }
                }
                abi::Primitive::Float(_) => {}
            }
        }

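        // Three cases: unsized values stay behind a reference, immediates are
        // loaded directly (or taken from a constant global's initializer),
        // and scalar pairs are loaded component-wise at their offsets.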
        let val = if let Some(_) = place.val.llextra {
            OperandValue::Ref(place.val)
        } else if place.layout.is_llvm_immediate() {
            let mut const_llval = None;
            let llty = place.layout.llvm_type(self);
            if let Some(global) = llvm::LLVMIsAGlobalVariable(place.val.llval) {
                if llvm::LLVMIsGlobalConstant(global).is_true() {
                    if let Some(init) = llvm::LLVMGetInitializer(global) {
                        if self.val_ty(init) == llty {
                            const_llval = Some(init);
                        }
                    }
                }
            }

            let llval = const_llval.unwrap_or_else(|| {
                let load = self.load(llty, place.val.llval, place.val.align);
                if let abi::BackendRepr::Scalar(scalar) = place.layout.backend_repr {
                    scalar_load_metadata(self, load, scalar, place.layout, Size::ZERO);
                    self.to_immediate_scalar(load, scalar)
                } else {
                    load
                }
            });
            OperandValue::Immediate(llval)
        } else if let abi::BackendRepr::ScalarPair(a, b) = place.layout.backend_repr {
            let b_offset = a.size(self).align_to(b.align(self).abi);

            let mut load = |i, scalar: abi::Scalar, layout, align, offset| {
                let llptr = if i == 0 {
                    place.val.llval
                } else {
                    self.inbounds_ptradd(place.val.llval, self.const_usize(b_offset.bytes()))
                };
                let llty = place.layout.scalar_pair_element_llvm_type(self, i, false);
                let load = self.load(llty, llptr, align);
                scalar_load_metadata(self, load, scalar, layout, offset);
                self.to_immediate_scalar(load, scalar)
            };

            OperandValue::Pair(
                load(0, a, place.layout, place.val.align, Size::ZERO),
                load(1, b, place.layout, place.val.align.restrict_for_offset(b_offset), b_offset),
            )
        } else {
            OperandValue::Ref(place.val)
        };

        OperandRef { val, layout: place.layout, move_annotation: None }
    }

    fn write_operand_repeatedly(
        &mut self,
        cg_elem: OperandRef<'tcx, &'ll Value>,
        count: u64,
        dest: PlaceRef<'tcx, &'ll Value>,
    ) {
        if self.cx.sess().opts.optimize == OptLevel::No {
            self.write_operand_repeatedly_unoptimized(cg_elem, count, dest);
        } else {
            self.write_operand_repeatedly_optimized(cg_elem, count, dest);
        }
    }

    fn range_metadata(&mut self, load: &'ll Value, range: WrappingRange) {
        if self.cx.sess().opts.optimize == OptLevel::No {
            return;
        }

        let llty = self.cx.val_ty(load);
        let md = [
            llvm::LLVMValueAsMetadata(self.cx.const_uint_big(llty, range.start)),
            llvm::LLVMValueAsMetadata(self.cx.const_uint_big(llty, range.end.wrapping_add(1))),
        ];
        self.set_metadata_node(load, llvm::MD_range, &md);
    }

    fn nonnull_metadata(&mut self, load: &'ll Value) {
        self.set_metadata_node(load, llvm::MD_nonnull, &[]);
    }

    fn store(&mut self, val: &'ll Value, ptr: &'ll Value, align: Align) -> &'ll Value {
        self.store_with_flags(val, ptr, align, MemFlags::empty())
    }

    fn store_with_flags(
        &mut self,
        val: &'ll Value,
        ptr: &'ll Value,
        align: Align,
        flags: MemFlags,
    ) -> &'ll Value {
        debug!("Store {:?} -> {:?} ({:?})", val, ptr, flags);
        assert_eq!(self.cx.type_kind(self.cx.val_ty(ptr)), TypeKind::Pointer);
        unsafe {
            let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
            let align = align.min(self.cx().tcx.sess.target.max_reliable_alignment());
            let align =
                if flags.contains(MemFlags::UNALIGNED) { 1 } else { align.bytes() as c_uint };
            llvm::LLVMSetAlignment(store, align);
            if flags.contains(MemFlags::VOLATILE) {
                llvm::LLVMSetVolatile(store, llvm::TRUE);
            }
            if flags.contains(MemFlags::NONTEMPORAL) {
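                // Only emit the `!nontemporal` hint on architectures where
                // LLVM treats it as a pure hint. On other targets (notably
                // x86) it can lower to cache-bypassing stores whose ordering
                // is weaker than Rust's memory model permits.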
                let use_nontemporal = matches!(
                    self.cx.tcx.sess.target.arch,
                    Arch::AArch64 | Arch::Arm | Arch::RiscV32 | Arch::RiscV64
                );
                if use_nontemporal {
                    let one = llvm::LLVMValueAsMetadata(self.cx.const_i32(1));
                    self.set_metadata_node(store, llvm::MD_nontemporal, &[one]);
                }
            }
            store
        }
    }

    fn atomic_store(
        &mut self,
        val: &'ll Value,
        ptr: &'ll Value,
        order: rustc_middle::ty::AtomicOrdering,
        size: Size,
    ) {
        debug!("Store {:?} -> {:?}", val, ptr);
        assert_eq!(self.cx.type_kind(self.cx.val_ty(ptr)), TypeKind::Pointer);
        unsafe {
            let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
            llvm::LLVMSetOrdering(store, AtomicOrdering::from_generic(order));
            llvm::LLVMSetAlignment(store, size.bytes() as c_uint);
        }
    }

    fn gep(&mut self, ty: &'ll Type, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildGEPWithNoWrapFlags(
                self.llbuilder,
                ty,
                ptr,
                indices.as_ptr(),
                indices.len() as c_uint,
                UNNAMED,
                GEPNoWrapFlags::default(),
            )
        }
    }

    fn inbounds_gep(
        &mut self,
        ty: &'ll Type,
        ptr: &'ll Value,
        indices: &[&'ll Value],
    ) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildGEPWithNoWrapFlags(
                self.llbuilder,
                ty,
                ptr,
                indices.as_ptr(),
                indices.len() as c_uint,
                UNNAMED,
                GEPNoWrapFlags::InBounds,
            )
        }
    }

    fn inbounds_nuw_gep(
        &mut self,
        ty: &'ll Type,
        ptr: &'ll Value,
        indices: &[&'ll Value],
    ) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildGEPWithNoWrapFlags(
                self.llbuilder,
                ty,
                ptr,
                indices.as_ptr(),
                indices.len() as c_uint,
                UNNAMED,
                GEPNoWrapFlags::InBounds | GEPNoWrapFlags::NUW,
            )
        }
    }

    fn trunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn unchecked_utrunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        debug_assert_ne!(self.val_ty(val), dest_ty);

        let trunc = self.trunc(val, dest_ty);
        unsafe {
            if llvm::LLVMIsAInstruction(trunc).is_some() {
                llvm::LLVMSetNUW(trunc, TRUE);
            }
        }
        trunc
    }

    fn unchecked_strunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        debug_assert_ne!(self.val_ty(val), dest_ty);

        let trunc = self.trunc(val, dest_ty);
        unsafe {
            if llvm::LLVMIsAInstruction(trunc).is_some() {
                llvm::LLVMSetNSW(trunc, TRUE);
            }
        }
        trunc
    }

    fn sext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildSExt(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn fptoui_sat(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        self.call_intrinsic("llvm.fptoui.sat", &[dest_ty, self.val_ty(val)], &[val])
    }

    fn fptosi_sat(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        self.call_intrinsic("llvm.fptosi.sat", &[dest_ty, self.val_ty(val)], &[val])
    }

    fn fptoui(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
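        // On WebAssembly, prefer the dedicated `llvm.wasm.trunc.*` intrinsics
        // for scalar 32/64-bit conversions: the generic `fptoui`/`fptosi`
        // lowering there produces worse code, since LLVM has to guard against
        // the trapping semantics of the underlying wasm instruction.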
        if self.sess().target.is_like_wasm {
            let src_ty = self.cx.val_ty(val);
            if self.cx.type_kind(src_ty) != TypeKind::Vector {
                let float_width = self.cx.float_width(src_ty);
                let int_width = self.cx.int_width(dest_ty);
                if matches!((int_width, float_width), (32 | 64, 32 | 64)) {
                    return self.call_intrinsic(
                        "llvm.wasm.trunc.unsigned",
                        &[dest_ty, src_ty],
                        &[val],
                    );
                }
            }
        }
        unsafe { llvm::LLVMBuildFPToUI(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn fptosi(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        if self.sess().target.is_like_wasm {
            let src_ty = self.cx.val_ty(val);
            if self.cx.type_kind(src_ty) != TypeKind::Vector {
                let float_width = self.cx.float_width(src_ty);
                let int_width = self.cx.int_width(dest_ty);
                if matches!((int_width, float_width), (32 | 64, 32 | 64)) {
                    return self.call_intrinsic(
                        "llvm.wasm.trunc.signed",
                        &[dest_ty, src_ty],
                        &[val],
                    );
                }
            }
        }
        unsafe { llvm::LLVMBuildFPToSI(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn uitofp(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildUIToFP(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn sitofp(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildSIToFP(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn fptrunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildFPTrunc(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn fpext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildFPExt(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn ptrtoint(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildPtrToInt(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn inttoptr(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildIntToPtr(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn bitcast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn intcast(&mut self, val: &'ll Value, dest_ty: &'ll Type, is_signed: bool) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildIntCast2(self.llbuilder, val, dest_ty, is_signed.to_llvm_bool(), UNNAMED)
        }
    }

    fn pointercast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildPointerCast(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn icmp(&mut self, op: IntPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        let op = llvm::IntPredicate::from_generic(op);
        unsafe { llvm::LLVMBuildICmp(self.llbuilder, op as c_uint, lhs, rhs, UNNAMED) }
    }

    fn fcmp(&mut self, op: RealPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        let op = llvm::RealPredicate::from_generic(op);
        unsafe { llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, UNNAMED) }
    }

    fn three_way_compare(
        &mut self,
        ty: Ty<'tcx>,
        lhs: Self::Value,
        rhs: Self::Value,
    ) -> Self::Value {
        let size = ty.primitive_size(self.tcx);
        let name = if ty.is_signed() { "llvm.scmp" } else { "llvm.ucmp" };

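        // `llvm.scmp`/`llvm.ucmp` return -1, 0, or 1 in the `i8` result type.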
        self.call_intrinsic(name, &[self.type_i8(), self.type_ix(size.bits())], &[lhs, rhs])
    }

    fn memcpy(
        &mut self,
        dst: &'ll Value,
        dst_align: Align,
        src: &'ll Value,
        src_align: Align,
        size: &'ll Value,
        flags: MemFlags,
        tt: Option<FncTree>,
    ) {
        assert!(!flags.contains(MemFlags::NONTEMPORAL), "non-temporal memcpy not supported");
        let size = self.intcast(size, self.type_isize(), false);
        let is_volatile = flags.contains(MemFlags::VOLATILE);
        let memcpy = unsafe {
            llvm::LLVMRustBuildMemCpy(
                self.llbuilder,
                dst,
                dst_align.bytes() as c_uint,
                src,
                src_align.bytes() as c_uint,
                size,
                is_volatile,
            )
        };

        if let Some(tt) = tt {
            crate::typetree::add_tt(self.cx().llmod, self.cx().llcx, memcpy, tt);
        }
    }

    fn memmove(
        &mut self,
        dst: &'ll Value,
        dst_align: Align,
        src: &'ll Value,
        src_align: Align,
        size: &'ll Value,
        flags: MemFlags,
    ) {
        assert!(!flags.contains(MemFlags::NONTEMPORAL), "non-temporal memmove not supported");
        let size = self.intcast(size, self.type_isize(), false);
        let is_volatile = flags.contains(MemFlags::VOLATILE);
        unsafe {
            llvm::LLVMRustBuildMemMove(
                self.llbuilder,
                dst,
                dst_align.bytes() as c_uint,
                src,
                src_align.bytes() as c_uint,
                size,
                is_volatile,
            );
        }
    }

    fn memset(
        &mut self,
        ptr: &'ll Value,
        fill_byte: &'ll Value,
        size: &'ll Value,
        align: Align,
        flags: MemFlags,
    ) {
        assert!(!flags.contains(MemFlags::NONTEMPORAL), "non-temporal memset not supported");
        let is_volatile = flags.contains(MemFlags::VOLATILE);
        unsafe {
            llvm::LLVMRustBuildMemSet(
                self.llbuilder,
                ptr,
                align.bytes() as c_uint,
                fill_byte,
                size,
                is_volatile,
            );
        }
    }

    fn select(
        &mut self,
        cond: &'ll Value,
        then_val: &'ll Value,
        else_val: &'ll Value,
    ) -> &'ll Value {
        unsafe { llvm::LLVMBuildSelect(self.llbuilder, cond, then_val, else_val, UNNAMED) }
    }

    fn va_arg(&mut self, list: &'ll Value, ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildVAArg(self.llbuilder, list, ty, UNNAMED) }
    }

    fn extract_element(&mut self, vec: &'ll Value, idx: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMBuildExtractElement(self.llbuilder, vec, idx, UNNAMED) }
    }

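    // Broadcast `elt` to every lane: insert it into lane 0 of an undef
    // vector, then shuffle with an all-zero mask.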
    fn vector_splat(&mut self, num_elts: usize, elt: &'ll Value) -> &'ll Value {
        unsafe {
            let elt_ty = self.cx.val_ty(elt);
            let undef = llvm::LLVMGetUndef(self.type_vector(elt_ty, num_elts as u64));
            let vec = self.insert_element(undef, elt, self.cx.const_i32(0));
            let vec_i32_ty = self.type_vector(self.type_i32(), num_elts as u64);
            self.shuffle_vector(vec, undef, self.const_null(vec_i32_ty))
        }
    }

    fn extract_value(&mut self, agg_val: &'ll Value, idx: u64) -> &'ll Value {
        assert_eq!(idx as c_uint as u64, idx);
        unsafe { llvm::LLVMBuildExtractValue(self.llbuilder, agg_val, idx as c_uint, UNNAMED) }
    }

    fn insert_value(&mut self, agg_val: &'ll Value, elt: &'ll Value, idx: u64) -> &'ll Value {
        assert_eq!(idx as c_uint as u64, idx);
        unsafe { llvm::LLVMBuildInsertValue(self.llbuilder, agg_val, elt, idx as c_uint, UNNAMED) }
    }

    fn set_personality_fn(&mut self, personality: &'ll Value) {
        unsafe {
            llvm::LLVMSetPersonalityFn(self.llfn(), personality);
        }
    }

    fn cleanup_landing_pad(&mut self, pers_fn: &'ll Value) -> (&'ll Value, &'ll Value) {
        let ty = self.type_struct(&[self.type_ptr(), self.type_i32()], false);
        let landing_pad = self.landing_pad(ty, pers_fn, 0);
        unsafe {
            llvm::LLVMSetCleanup(landing_pad, llvm::TRUE);
        }
        (self.extract_value(landing_pad, 0), self.extract_value(landing_pad, 1))
    }

    fn filter_landing_pad(&mut self, pers_fn: &'ll Value) {
        let ty = self.type_struct(&[self.type_ptr(), self.type_i32()], false);
        let landing_pad = self.landing_pad(ty, pers_fn, 1);
        self.add_clause(landing_pad, self.const_array(self.type_ptr(), &[]));
    }

    fn resume(&mut self, exn0: &'ll Value, exn1: &'ll Value) {
        let ty = self.type_struct(&[self.type_ptr(), self.type_i32()], false);
        let mut exn = self.const_poison(ty);
        exn = self.insert_value(exn, exn0, 0);
        exn = self.insert_value(exn, exn1, 1);
        unsafe {
            llvm::LLVMBuildResume(self.llbuilder, exn);
        }
    }

    fn cleanup_pad(&mut self, parent: Option<&'ll Value>, args: &[&'ll Value]) -> Funclet<'ll> {
        let ret = unsafe {
            llvm::LLVMBuildCleanupPad(
                self.llbuilder,
                parent,
                args.as_ptr(),
                args.len() as c_uint,
                c"cleanuppad".as_ptr(),
            )
        };
        Funclet::new(ret.expect("LLVM does not have support for cleanuppad"))
    }

    fn cleanup_ret(&mut self, funclet: &Funclet<'ll>, unwind: Option<&'ll BasicBlock>) {
        unsafe {
            llvm::LLVMBuildCleanupRet(self.llbuilder, funclet.cleanuppad(), unwind)
                .expect("LLVM does not have support for cleanupret");
        }
    }

    fn catch_pad(&mut self, parent: &'ll Value, args: &[&'ll Value]) -> Funclet<'ll> {
        let ret = unsafe {
            llvm::LLVMBuildCatchPad(
                self.llbuilder,
                parent,
                args.as_ptr(),
                args.len() as c_uint,
                c"catchpad".as_ptr(),
            )
        };
        Funclet::new(ret.expect("LLVM does not have support for catchpad"))
    }

    fn catch_switch(
        &mut self,
        parent: Option<&'ll Value>,
        unwind: Option<&'ll BasicBlock>,
        handlers: &[&'ll BasicBlock],
    ) -> &'ll Value {
        let ret = unsafe {
            llvm::LLVMBuildCatchSwitch(
                self.llbuilder,
                parent,
                unwind,
                handlers.len() as c_uint,
                c"catchswitch".as_ptr(),
            )
        };
        let ret = ret.expect("LLVM does not have support for catchswitch");
        for handler in handlers {
            unsafe {
                llvm::LLVMAddHandler(ret, handler);
            }
        }
        ret
    }

    fn atomic_cmpxchg(
        &mut self,
        dst: &'ll Value,
        cmp: &'ll Value,
        src: &'ll Value,
        order: rustc_middle::ty::AtomicOrdering,
        failure_order: rustc_middle::ty::AtomicOrdering,
        weak: bool,
    ) -> (&'ll Value, &'ll Value) {
        unsafe {
            let value = llvm::LLVMBuildAtomicCmpXchg(
                self.llbuilder,
                dst,
                cmp,
                src,
                AtomicOrdering::from_generic(order),
                AtomicOrdering::from_generic(failure_order),
                llvm::FALSE, // SingleThreaded
            );
            llvm::LLVMSetWeak(value, weak.to_llvm_bool());
            let val = self.extract_value(value, 0);
            let success = self.extract_value(value, 1);
            (val, success)
        }
    }

    fn atomic_rmw(
        &mut self,
        op: rustc_codegen_ssa::common::AtomicRmwBinOp,
        dst: &'ll Value,
        src: &'ll Value,
        order: rustc_middle::ty::AtomicOrdering,
        ret_ptr: bool,
    ) -> &'ll Value {
        let mut res = unsafe {
            llvm::LLVMBuildAtomicRMW(
                self.llbuilder,
                AtomicRmwBinOp::from_generic(op),
                dst,
                src,
                AtomicOrdering::from_generic(order),
                llvm::FALSE, // SingleThreaded
            )
        };
        if ret_ptr && self.val_ty(res) != self.type_ptr() {
            res = self.inttoptr(res, self.type_ptr());
        }
        res
    }

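    /// Builds a `fence`, mapping `SynchronizationScope` onto LLVM's
    /// single-thread flag.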
    fn atomic_fence(
        &mut self,
        order: rustc_middle::ty::AtomicOrdering,
        scope: SynchronizationScope,
    ) {
        let single_threaded = match scope {
            SynchronizationScope::SingleThread => true,
            SynchronizationScope::CrossThread => false,
        };
        unsafe {
            llvm::LLVMBuildFence(
                self.llbuilder,
                AtomicOrdering::from_generic(order),
                single_threaded.to_llvm_bool(),
                UNNAMED,
            );
        }
    }

    fn set_invariant_load(&mut self, load: &'ll Value) {
        self.set_metadata_node(load, llvm::MD_invariant_load, &[]);
    }

    fn lifetime_start(&mut self, ptr: &'ll Value, size: Size) {
        self.call_lifetime_intrinsic("llvm.lifetime.start", ptr, size);
    }

    fn lifetime_end(&mut self, ptr: &'ll Value, size: Size) {
        self.call_lifetime_intrinsic("llvm.lifetime.end", ptr, size);
    }

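    /// Emits a call, attaching the funclet and KCFI operand bundles, emitting
    /// the CFI type test for indirect calls, and applying inlining hints and
    /// ABI attributes to the call site.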
    fn call(
        &mut self,
        llty: &'ll Type,
        caller_attrs: Option<&CodegenFnAttrs>,
        fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
        llfn: &'ll Value,
        args: &[&'ll Value],
        funclet: Option<&Funclet<'ll>>,
        callee_instance: Option<Instance<'tcx>>,
    ) -> &'ll Value {
        debug!("call {:?} with args ({:?})", llfn, args);

        let args = self.check_call("call", llty, llfn, args);
        let funclet_bundle = funclet.map(|funclet| funclet.bundle());
        let mut bundles: SmallVec<[_; 2]> = SmallVec::new();
        if let Some(funclet_bundle) = funclet_bundle {
            bundles.push(funclet_bundle);
        }

        self.cfi_type_test(caller_attrs, fn_abi, callee_instance, llfn);

        let kcfi_bundle = self.kcfi_operand_bundle(caller_attrs, fn_abi, callee_instance, llfn);
        if let Some(kcfi_bundle) = kcfi_bundle.as_ref().map(|b| b.as_ref()) {
            bundles.push(kcfi_bundle);
        }

        let call = unsafe {
            llvm::LLVMBuildCallWithOperandBundles(
                self.llbuilder,
                llty,
                llfn,
                args.as_ptr() as *const &llvm::Value,
                args.len() as c_uint,
                bundles.as_ptr(),
                bundles.len() as c_uint,
                c"".as_ptr(),
            )
        };

        if let Some(callee_instance) = callee_instance {
            let callee_attrs = self.cx.tcx.codegen_fn_attrs(callee_instance.def_id());
            if let Some(caller_attrs) = caller_attrs
                && let Some(inlining_rule) =
                    attributes::inline_attr(&self.cx, self.cx.tcx, callee_instance)
                && self.cx.tcx.is_target_feature_call_safe(
                    &callee_attrs.target_features,
                    &caller_attrs
                        .target_features
                        .iter()
                        .cloned()
                        .chain(self.cx.tcx.sess.target_features.iter().map(|feat| {
                            TargetFeature { name: *feat, kind: TargetFeatureKind::Implied }
                        }))
                        .collect::<Vec<_>>(),
                )
            {
                attributes::apply_to_callsite(
                    call,
                    llvm::AttributePlace::Function,
                    &[inlining_rule],
                );
            }
        }

        if let Some(fn_abi) = fn_abi {
            fn_abi.apply_attrs_callsite(self, call);
        }
        call
    }

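    /// Emits a `musttail` call. LLVM requires a `musttail` call to be followed
    /// immediately by a `ret` of its value (or `ret void`), so the matching
    /// return is emitted here based on the return value's `PassMode`.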
    fn tail_call(
        &mut self,
        llty: Self::Type,
        caller_attrs: Option<&CodegenFnAttrs>,
        fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
        llfn: Self::Value,
        args: &[Self::Value],
        funclet: Option<&Self::Funclet>,
        callee_instance: Option<Instance<'tcx>>,
    ) {
        let call =
            self.call(llty, caller_attrs, Some(fn_abi), llfn, args, funclet, callee_instance);
        llvm::LLVMSetTailCallKind(call, llvm::TailCallKind::MustTail);

        match &fn_abi.ret.mode {
            PassMode::Ignore | PassMode::Indirect { .. } => self.ret_void(),
            PassMode::Direct(_) | PassMode::Pair { .. } | PassMode::Cast { .. } => self.ret(call),
        }
    }

    fn zext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn apply_attrs_to_cleanup_callsite(&mut self, llret: &'ll Value) {
        // Cleanup is always the cold path.
        let cold_inline = llvm::AttributeKind::Cold.create_attr(self.llcx);
        attributes::apply_to_callsite(llret, llvm::AttributePlace::Function, &[cold_inline]);
    }
}

impl<'ll> StaticBuilderMethods for Builder<'_, 'll, '_> {
    fn get_static(&mut self, def_id: DefId) -> &'ll Value {
        let global = self.cx().get_static(def_id);
        if self.cx().tcx.is_thread_local_static(def_id) {
            // Thread-local statics are accessed through the
            // `llvm.threadlocal.address` intrinsic.
            let pointer =
                self.call_intrinsic("llvm.threadlocal.address", &[self.val_ty(global)], &[global]);
            self.pointercast(pointer, self.type_ptr())
        } else {
            self.cx().const_pointercast(global, self.type_ptr())
        }
    }
}

impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
    pub(crate) fn llfn(&self) -> &'ll Value {
        unsafe { llvm::LLVMGetBasicBlockParent(self.llbb()) }
    }
}

impl<'a, 'll, CX: Borrow<SCx<'ll>>> GenericBuilder<'a, 'll, CX> {
    fn position_at_start(&mut self, llbb: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMRustPositionBuilderAtStart(self.llbuilder, llbb);
        }
    }
}

impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
    fn align_metadata(&mut self, load: &'ll Value, align: Align) {
        let md = [llvm::LLVMValueAsMetadata(self.cx.const_u64(align.bytes()))];
        self.set_metadata_node(load, llvm::MD_align, &md);
    }

    fn noundef_metadata(&mut self, load: &'ll Value) {
        self.set_metadata_node(load, llvm::MD_noundef, &[]);
    }

    pub(crate) fn set_unpredictable(&mut self, inst: &'ll Value) {
        self.set_metadata_node(inst, llvm::MD_unpredictable, &[]);
    }

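    /// Stores `cg_elem` into each of the `count` elements of `dest` using a
    /// counted loop over an index phi (`header`/`body`/`next` blocks), leaving
    /// the builder positioned in the `next` block.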
    fn write_operand_repeatedly_optimized(
        &mut self,
        cg_elem: OperandRef<'tcx, &'ll Value>,
        count: u64,
        dest: PlaceRef<'tcx, &'ll Value>,
    ) {
        let zero = self.const_usize(0);
        let count = self.const_usize(count);

        let header_bb = self.append_sibling_block("repeat_loop_header");
        let body_bb = self.append_sibling_block("repeat_loop_body");
        let next_bb = self.append_sibling_block("repeat_loop_next");

        self.br(header_bb);

        let mut header_bx = Self::build(self.cx, header_bb);
        let i = header_bx.phi(self.val_ty(zero), &[zero], &[self.llbb()]);

        let keep_going = header_bx.icmp(IntPredicate::IntULT, i, count);
        header_bx.cond_br(keep_going, body_bb, next_bb);

        let mut body_bx = Self::build(self.cx, body_bb);
        let dest_elem = dest.project_index(&mut body_bx, i);
        cg_elem.val.store(&mut body_bx, dest_elem);

        let next = body_bx.unchecked_uadd(i, self.const_usize(1));
        body_bx.br(header_bb);
        header_bx.add_incoming_to_phi(i, next, body_bb);

        *self = Self::build(self.cx, next_bb);
    }

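    /// Pointer-bumping variant of `write_operand_repeatedly_optimized`:
    /// iterates a pointer phi from `start` to `end`, storing `cg_elem` at each
    /// element.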
    fn write_operand_repeatedly_unoptimized(
        &mut self,
        cg_elem: OperandRef<'tcx, &'ll Value>,
        count: u64,
        dest: PlaceRef<'tcx, &'ll Value>,
    ) {
        let zero = self.const_usize(0);
        let count = self.const_usize(count);
        let start = dest.project_index(self, zero).val.llval;
        let end = dest.project_index(self, count).val.llval;

        let header_bb = self.append_sibling_block("repeat_loop_header");
        let body_bb = self.append_sibling_block("repeat_loop_body");
        let next_bb = self.append_sibling_block("repeat_loop_next");

        self.br(header_bb);

        let mut header_bx = Self::build(self.cx, header_bb);
        let current = header_bx.phi(self.val_ty(start), &[start], &[self.llbb()]);

        let keep_going = header_bx.icmp(IntPredicate::IntNE, current, end);
        header_bx.cond_br(keep_going, body_bb, next_bb);

        let mut body_bx = Self::build(self.cx, body_bb);
        let align = dest.val.align.restrict_for_offset(dest.layout.field(self.cx(), 0).size);
        cg_elem
            .val
            .store(&mut body_bx, PlaceRef::new_sized_aligned(current, cg_elem.layout, align));

        let next = body_bx.inbounds_gep(
            self.backend_type(cg_elem.layout),
            current,
            &[self.const_usize(1)],
        );
        body_bx.br(header_bb);
        header_bx.add_incoming_to_phi(current, next, body_bb);

        *self = Self::build(self.cx, next_bb);
    }

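    /// `llvm.minimumnum` with the `nsz` (no-signed-zeros) fast-math flag set,
    /// so the sign of a zero result is not significant.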
    pub(crate) fn minimum_number_nsz(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        let call = self.call_intrinsic("llvm.minimumnum", &[self.val_ty(lhs)], &[lhs, rhs]);
        unsafe { llvm::LLVMRustSetNoSignedZeros(call) };
        call
    }

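    /// `llvm.maximumnum` with the `nsz` (no-signed-zeros) fast-math flag set.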
    pub(crate) fn maximum_number_nsz(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        let call = self.call_intrinsic("llvm.maximumnum", &[self.val_ty(lhs)], &[lhs, rhs]);
        unsafe { llvm::LLVMRustSetNoSignedZeros(call) };
        call
    }

    pub(crate) fn insert_element(
        &mut self,
        vec: &'ll Value,
        elt: &'ll Value,
        idx: &'ll Value,
    ) -> &'ll Value {
        unsafe { llvm::LLVMBuildInsertElement(self.llbuilder, vec, elt, idx, UNNAMED) }
    }

    pub(crate) fn shuffle_vector(
        &mut self,
        v1: &'ll Value,
        v2: &'ll Value,
        mask: &'ll Value,
    ) -> &'ll Value {
        unsafe { llvm::LLVMBuildShuffleVector(self.llbuilder, v1, v2, mask, UNNAMED) }
    }

    pub(crate) fn vector_reduce_fadd(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
        self.call_intrinsic("llvm.vector.reduce.fadd", &[self.val_ty(src)], &[acc, src])
    }
    pub(crate) fn vector_reduce_fmul(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
        self.call_intrinsic("llvm.vector.reduce.fmul", &[self.val_ty(src)], &[acc, src])
    }
    pub(crate) fn vector_reduce_fadd_reassoc(
        &mut self,
        acc: &'ll Value,
        src: &'ll Value,
    ) -> &'ll Value {
        unsafe {
            let instr =
                self.call_intrinsic("llvm.vector.reduce.fadd", &[self.val_ty(src)], &[acc, src]);
            llvm::LLVMRustSetAllowReassoc(instr);
            instr
        }
    }
    pub(crate) fn vector_reduce_fmul_reassoc(
        &mut self,
        acc: &'ll Value,
        src: &'ll Value,
    ) -> &'ll Value {
        unsafe {
            let instr =
                self.call_intrinsic("llvm.vector.reduce.fmul", &[self.val_ty(src)], &[acc, src]);
            llvm::LLVMRustSetAllowReassoc(instr);
            instr
        }
    }
    pub(crate) fn vector_reduce_add(&mut self, src: &'ll Value) -> &'ll Value {
        self.call_intrinsic("llvm.vector.reduce.add", &[self.val_ty(src)], &[src])
    }
    pub(crate) fn vector_reduce_mul(&mut self, src: &'ll Value) -> &'ll Value {
        self.call_intrinsic("llvm.vector.reduce.mul", &[self.val_ty(src)], &[src])
    }
    pub(crate) fn vector_reduce_and(&mut self, src: &'ll Value) -> &'ll Value {
        self.call_intrinsic("llvm.vector.reduce.and", &[self.val_ty(src)], &[src])
    }
    pub(crate) fn vector_reduce_or(&mut self, src: &'ll Value) -> &'ll Value {
        self.call_intrinsic("llvm.vector.reduce.or", &[self.val_ty(src)], &[src])
    }
    pub(crate) fn vector_reduce_xor(&mut self, src: &'ll Value) -> &'ll Value {
        self.call_intrinsic("llvm.vector.reduce.xor", &[self.val_ty(src)], &[src])
    }
    pub(crate) fn vector_reduce_fmin(&mut self, src: &'ll Value) -> &'ll Value {
        self.call_intrinsic("llvm.vector.reduce.fmin", &[self.val_ty(src)], &[src])
    }
    pub(crate) fn vector_reduce_fmax(&mut self, src: &'ll Value) -> &'ll Value {
        self.call_intrinsic("llvm.vector.reduce.fmax", &[self.val_ty(src)], &[src])
    }
    pub(crate) fn vector_reduce_min(&mut self, src: &'ll Value, is_signed: bool) -> &'ll Value {
        self.call_intrinsic(
            if is_signed { "llvm.vector.reduce.smin" } else { "llvm.vector.reduce.umin" },
            &[self.val_ty(src)],
            &[src],
        )
    }
    pub(crate) fn vector_reduce_max(&mut self, src: &'ll Value, is_signed: bool) -> &'ll Value {
        self.call_intrinsic(
            if is_signed { "llvm.vector.reduce.smax" } else { "llvm.vector.reduce.umax" },
            &[self.val_ty(src)],
            &[src],
        )
    }
}

impl<'a, 'll, CX: Borrow<SCx<'ll>>> GenericBuilder<'a, 'll, CX> {
    pub(crate) fn add_clause(&mut self, landing_pad: &'ll Value, clause: &'ll Value) {
        unsafe {
            llvm::LLVMAddClause(landing_pad, clause);
        }
    }

    pub(crate) fn catch_ret(
        &mut self,
        funclet: &Funclet<'ll>,
        unwind: &'ll BasicBlock,
    ) -> &'ll Value {
        let ret = unsafe { llvm::LLVMBuildCatchRet(self.llbuilder, funclet.cleanuppad(), unwind) };
        ret.expect("LLVM does not have support for catchret")
    }

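    /// Asserts that `fn_ty` is a function type and compares each argument's
    /// type against the corresponding parameter type; any mismatched argument
    /// is bitcast to the expected type.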
    pub(crate) fn check_call<'b>(
        &mut self,
        typ: &str,
        fn_ty: &'ll Type,
        llfn: &'ll Value,
        args: &'b [&'ll Value],
    ) -> Cow<'b, [&'ll Value]> {
        assert!(
            self.cx.type_kind(fn_ty) == TypeKind::Function,
            "builder::{typ} not passed a function, but {fn_ty:?}"
        );

        let param_tys = self.cx.func_params_types(fn_ty);

        let all_args_match = iter::zip(&param_tys, args.iter().map(|&v| self.cx.val_ty(v)))
            .all(|(expected_ty, actual_ty)| *expected_ty == actual_ty);

        if all_args_match {
            return Cow::Borrowed(args);
        }

        let casted_args: Vec<_> = iter::zip(param_tys, args)
            .enumerate()
            .map(|(i, (expected_ty, &actual_val))| {
                let actual_ty = self.cx.val_ty(actual_val);
                if expected_ty != actual_ty {
                    debug!(
                        "type mismatch in function call of {:?}. \
                         Expected {:?} for param {}, got {:?}; injecting bitcast",
                        llfn, expected_ty, i, actual_ty
                    );
                    self.bitcast(actual_val, expected_ty)
                } else {
                    actual_val
                }
            })
            .collect();

        Cow::Owned(casted_args)
    }

    pub(crate) fn va_arg(&mut self, list: &'ll Value, ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildVAArg(self.llbuilder, list, ty, UNNAMED) }
    }
}

impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
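    /// Resolves the (possibly overloaded) intrinsic `base_name` instantiated
    /// with `type_params` and emits a call to it with `args`.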
    pub(crate) fn call_intrinsic(
        &mut self,
        base_name: impl Into<Cow<'static, str>>,
        type_params: &[&'ll Type],
        args: &[&'ll Value],
    ) -> &'ll Value {
        let (ty, f) = self.cx.get_intrinsic(base_name.into(), type_params);
        self.call(ty, None, None, f, args, None, None)
    }

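    /// Emits `llvm.lifetime.start`/`llvm.lifetime.end`. Does nothing for
    /// zero-sized ranges or when the session doesn't emit lifetime markers.
    /// From LLVM 22 the intrinsics take only the (un-casted) alloca pointer;
    /// older versions also take the size in bytes.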
    fn call_lifetime_intrinsic(&mut self, intrinsic: &'static str, ptr: &'ll Value, size: Size) {
        let size = size.bytes();
        if size == 0 {
            return;
        }

        if !self.cx().sess().emit_lifetime_markers() {
            return;
        }

        if crate::llvm_util::get_version() >= (22, 0, 0) {
            let ptr = unsafe { llvm::LLVMRustStripPointerCasts(ptr) };
            self.call_intrinsic(intrinsic, &[self.val_ty(ptr)], &[ptr]);
        } else {
            self.call_intrinsic(intrinsic, &[self.val_ty(ptr)], &[self.cx.const_u64(size), ptr]);
        }
    }
}

impl<'a, 'll, CX: Borrow<SCx<'ll>>> GenericBuilder<'a, 'll, CX> {
    pub(crate) fn phi(
        &mut self,
        ty: &'ll Type,
        vals: &[&'ll Value],
        bbs: &[&'ll BasicBlock],
    ) -> &'ll Value {
        assert_eq!(vals.len(), bbs.len());
        let phi = unsafe { llvm::LLVMBuildPhi(self.llbuilder, ty, UNNAMED) };
        unsafe {
            llvm::LLVMAddIncoming(phi, vals.as_ptr(), bbs.as_ptr(), vals.len() as c_uint);
            phi
        }
    }

    fn add_incoming_to_phi(&mut self, phi: &'ll Value, val: &'ll Value, bb: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
        }
    }
}

impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
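    /// Builds a `landingpad` with room for `num_clauses` clauses, first
    /// installing `pers_fn` as the personality function of the enclosing
    /// LLVM function, which is required for a landing pad to be valid.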
    pub(crate) fn landing_pad(
        &mut self,
        ty: &'ll Type,
        pers_fn: &'ll Value,
        num_clauses: usize,
    ) -> &'ll Value {
        self.set_personality_fn(pers_fn);
        unsafe {
            llvm::LLVMBuildLandingPad(self.llbuilder, ty, None, num_clauses as c_uint, UNNAMED)
        }
    }

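    /// Emits a `callbr` (a call with extra branch targets, used for inline
    /// assembly with label operands): control resumes at `default_dest` on a
    /// normal return, or at one of the `indirect_dest` blocks. Bundle and
    /// CFI/KCFI handling mirrors `call`.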
    pub(crate) fn callbr(
        &mut self,
        llty: &'ll Type,
        fn_attrs: Option<&CodegenFnAttrs>,
        fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
        llfn: &'ll Value,
        args: &[&'ll Value],
        default_dest: &'ll BasicBlock,
        indirect_dest: &[&'ll BasicBlock],
        funclet: Option<&Funclet<'ll>>,
        instance: Option<Instance<'tcx>>,
    ) -> &'ll Value {
        debug!("invoke {:?} with args ({:?})", llfn, args);

        let args = self.check_call("callbr", llty, llfn, args);
        let funclet_bundle = funclet.map(|funclet| funclet.bundle());
        let mut bundles: SmallVec<[_; 2]> = SmallVec::new();
        if let Some(funclet_bundle) = funclet_bundle {
            bundles.push(funclet_bundle);
        }

        self.cfi_type_test(fn_attrs, fn_abi, instance, llfn);

        let kcfi_bundle = self.kcfi_operand_bundle(fn_attrs, fn_abi, instance, llfn);
        if let Some(kcfi_bundle) = kcfi_bundle.as_ref().map(|b| b.as_ref()) {
            bundles.push(kcfi_bundle);
        }

        let callbr = unsafe {
            llvm::LLVMBuildCallBr(
                self.llbuilder,
                llty,
                llfn,
                default_dest,
                indirect_dest.as_ptr(),
                indirect_dest.len() as c_uint,
                args.as_ptr(),
                args.len() as c_uint,
                bundles.as_ptr(),
                bundles.len() as c_uint,
                UNNAMED,
            )
        };
        if let Some(fn_abi) = fn_abi {
            fn_abi.apply_attrs_callsite(self, callbr);
        }
        callbr
    }

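    /// When the CFI sanitizer is enabled and this is an indirect call, emits
    /// an `llvm.type.test` of the callee against the expected type id and
    /// branches to a `type_test.fail` block that aborts if the test fails.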
    fn cfi_type_test(
        &mut self,
        fn_attrs: Option<&CodegenFnAttrs>,
        fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
        instance: Option<Instance<'tcx>>,
        llfn: &'ll Value,
    ) {
        let is_indirect_call = unsafe { llvm::LLVMRustIsNonGVFunctionPointerTy(llfn) };
        if self.tcx.sess.is_sanitizer_cfi_enabled()
            && let Some(fn_abi) = fn_abi
            && is_indirect_call
        {
            if let Some(fn_attrs) = fn_attrs
                && fn_attrs.sanitizers.disabled.contains(SanitizerSet::CFI)
            {
                return;
            }

            let mut options = cfi::TypeIdOptions::empty();
            if self.tcx.sess.is_sanitizer_cfi_generalize_pointers_enabled() {
                options.insert(cfi::TypeIdOptions::GENERALIZE_POINTERS);
            }
            if self.tcx.sess.is_sanitizer_cfi_normalize_integers_enabled() {
                options.insert(cfi::TypeIdOptions::NORMALIZE_INTEGERS);
            }

            let typeid = if let Some(instance) = instance {
                cfi::typeid_for_instance(self.tcx, instance, options)
            } else {
                cfi::typeid_for_fnabi(self.tcx, fn_abi, options)
            };
            let typeid_metadata = self.cx.create_metadata(typeid.as_bytes());
            let dbg_loc = self.get_dbg_loc();

            let typeid = self.get_metadata_value(typeid_metadata);
            let cond = self.call_intrinsic("llvm.type.test", &[], &[llfn, typeid]);
            let bb_pass = self.append_sibling_block("type_test.pass");
            let bb_fail = self.append_sibling_block("type_test.fail");
            self.cond_br(cond, bb_pass, bb_fail);

            self.switch_to_block(bb_fail);
            if let Some(dbg_loc) = dbg_loc {
                self.set_dbg_loc(dbg_loc);
            }
            self.abort();
            self.unreachable();

            self.switch_to_block(bb_pass);
            if let Some(dbg_loc) = dbg_loc {
                self.set_dbg_loc(dbg_loc);
            }
        }
    }

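    /// When the KCFI sanitizer is enabled and this is an indirect call,
    /// returns a `"kcfi"` operand bundle carrying the callee's expected type
    /// id; returns `None` otherwise, or when KCFI is disabled for the caller.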
    fn kcfi_operand_bundle(
        &mut self,
        fn_attrs: Option<&CodegenFnAttrs>,
        fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
        instance: Option<Instance<'tcx>>,
        llfn: &'ll Value,
    ) -> Option<llvm::OperandBundleBox<'ll>> {
        let is_indirect_call = unsafe { llvm::LLVMRustIsNonGVFunctionPointerTy(llfn) };
        let kcfi_bundle = if self.tcx.sess.is_sanitizer_kcfi_enabled()
            && let Some(fn_abi) = fn_abi
            && is_indirect_call
        {
            if let Some(fn_attrs) = fn_attrs
                && fn_attrs.sanitizers.disabled.contains(SanitizerSet::KCFI)
            {
                return None;
            }

            let mut options = kcfi::TypeIdOptions::empty();
            if self.tcx.sess.is_sanitizer_cfi_generalize_pointers_enabled() {
                options.insert(kcfi::TypeIdOptions::GENERALIZE_POINTERS);
            }
            if self.tcx.sess.is_sanitizer_cfi_normalize_integers_enabled() {
                options.insert(kcfi::TypeIdOptions::NORMALIZE_INTEGERS);
            }

            let kcfi_typeid = if let Some(instance) = instance {
                kcfi::typeid_for_instance(self.tcx, instance, options)
            } else {
                kcfi::typeid_for_fnabi(self.tcx, fn_abi, options)
            };

            Some(llvm::OperandBundleBox::new("kcfi", &[self.const_u32(kcfi_typeid)]))
        } else {
            None
        };
        kcfi_bundle
    }

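    /// Emits `llvm.instrprof.increment`, bumping counter `index` of the
    /// `num_counters`-entry counter array associated with `fn_name` and
    /// `hash` (used by instrumentation-based code coverage).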
    #[instrument(level = "debug", skip(self))]
    pub(crate) fn instrprof_increment(
        &mut self,
        fn_name: &'ll Value,
        hash: &'ll Value,
        num_counters: &'ll Value,
        index: &'ll Value,
    ) {
        self.call_intrinsic("llvm.instrprof.increment", &[], &[fn_name, hash, num_counters, index]);
    }
}