use std::borrow::{Borrow, Cow};
use std::iter;
use std::ops::Deref;

use rustc_ast::expand::typetree::FncTree;
pub(crate) mod autodiff;
pub(crate) mod gpu_offload;

use libc::{c_char, c_uint};
use rustc_abi as abi;
use rustc_abi::{Align, Size, WrappingRange};
use rustc_codegen_ssa::MemFlags;
use rustc_codegen_ssa::common::{IntPredicate, RealPredicate, SynchronizationScope, TypeKind};
use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
use rustc_codegen_ssa::mir::place::PlaceRef;
use rustc_codegen_ssa::traits::*;
use rustc_data_structures::small_c_str::SmallCStr;
use rustc_hir::def_id::DefId;
use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrs, TargetFeature, TargetFeatureKind};
use rustc_middle::ty::layout::{
    FnAbiError, FnAbiOfHelpers, FnAbiRequest, HasTypingEnv, LayoutError, LayoutOfHelpers,
    TyAndLayout,
};
use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
use rustc_sanitizers::{cfi, kcfi};
use rustc_session::config::OptLevel;
use rustc_span::Span;
use rustc_target::callconv::{FnAbi, PassMode};
use rustc_target::spec::{Arch, HasTargetSpec, SanitizerSet, Target};
use smallvec::SmallVec;
use tracing::{debug, instrument};

use crate::abi::FnAbiLlvmExt;
use crate::attributes;
use crate::common::Funclet;
use crate::context::{CodegenCx, FullCx, GenericCx, SCx};
use crate::llvm::{
    self, AtomicOrdering, AtomicRmwBinOp, BasicBlock, FromGeneric, GEPNoWrapFlags, Metadata, TRUE,
    ToLlvmBool, Type, Value,
};
use crate::type_of::LayoutLlvmExt;

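/// Wrapper around a raw LLVM IR builder handle together with the codegen
/// context it was created from; the `Drop` impl below disposes the underlying
/// builder so the handle is not leaked.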
#[must_use]
pub(crate) struct GenericBuilder<'a, 'll, CX: Borrow<SCx<'ll>>> {
    pub llbuilder: &'ll mut llvm::Builder<'ll>,
    pub cx: &'a GenericCx<'ll, CX>,
}

pub(crate) type SBuilder<'a, 'll> = GenericBuilder<'a, 'll, SCx<'ll>>;
pub(crate) type Builder<'a, 'll, 'tcx> = GenericBuilder<'a, 'll, FullCx<'ll, 'tcx>>;

impl<'a, 'll, CX: Borrow<SCx<'ll>>> Drop for GenericBuilder<'a, 'll, CX> {
    fn drop(&mut self) {
        unsafe {
            llvm::LLVMDisposeBuilder(&mut *(self.llbuilder as *mut _));
        }
    }
}

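// `SBuilder` only has the simplified context available, so this `call` helper
// skips the fn-abi call-site attributes and the CFI/KCFI bundles that the full
// `BuilderMethods::call` implementation below applies.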
impl<'a, 'll> SBuilder<'a, 'll> {
    pub(crate) fn call(
        &mut self,
        llty: &'ll Type,
        llfn: &'ll Value,
        args: &[&'ll Value],
        funclet: Option<&Funclet<'ll>>,
    ) -> &'ll Value {
        debug!("call {:?} with args ({:?})", llfn, args);

        let args = self.check_call("call", llty, llfn, args);
        let funclet_bundle = funclet.map(|funclet| funclet.bundle());
        let mut bundles: SmallVec<[_; 2]> = SmallVec::new();
        if let Some(funclet_bundle) = funclet_bundle {
            bundles.push(funclet_bundle);
        }

        let call = unsafe {
            llvm::LLVMBuildCallWithOperandBundles(
                self.llbuilder,
                llty,
                llfn,
                args.as_ptr() as *const &llvm::Value,
                args.len() as c_uint,
                bundles.as_ptr(),
                bundles.len() as c_uint,
                c"".as_ptr(),
            )
        };
        call
    }
}

impl<'a, 'll, CX: Borrow<SCx<'ll>>> GenericBuilder<'a, 'll, CX> {
    fn with_cx(scx: &'a GenericCx<'ll, CX>) -> Self {
        let llbuilder = unsafe { llvm::LLVMCreateBuilderInContext(scx.deref().borrow().llcx) };
        GenericBuilder { llbuilder, cx: scx }
    }

    pub(crate) fn append_block(
        cx: &'a GenericCx<'ll, CX>,
        llfn: &'ll Value,
        name: &str,
    ) -> &'ll BasicBlock {
        unsafe {
            let name = SmallCStr::new(name);
            llvm::LLVMAppendBasicBlockInContext(cx.llcx(), llfn, name.as_ptr())
        }
    }

    pub(crate) fn trunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    pub(crate) fn bitcast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    pub(crate) fn ret_void(&mut self) {
        llvm::LLVMBuildRetVoid(self.llbuilder);
    }

    pub(crate) fn ret(&mut self, v: &'ll Value) {
        unsafe {
            llvm::LLVMBuildRet(self.llbuilder, v);
        }
    }

    pub(crate) fn build(cx: &'a GenericCx<'ll, CX>, llbb: &'ll BasicBlock) -> Self {
        let bx = Self::with_cx(cx);
        unsafe {
            llvm::LLVMPositionBuilderAtEnd(bx.llbuilder, llbb);
        }
        bx
    }

    pub(crate) fn direct_alloca(&mut self, ty: &'ll Type, align: Align, name: &str) -> &'ll Value {
        let val = unsafe {
            let alloca = llvm::LLVMBuildAlloca(self.llbuilder, ty, UNNAMED);
            llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
            llvm::LLVMBuildPointerCast(self.llbuilder, alloca, self.cx.type_ptr(), UNNAMED)
        };
        if name != "" {
            let name = std::ffi::CString::new(name).unwrap();
            llvm::set_value_name(val, &name.as_bytes());
        }
        val
    }

    pub(crate) fn inbounds_gep(
        &mut self,
        ty: &'ll Type,
        ptr: &'ll Value,
        indices: &[&'ll Value],
    ) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildGEPWithNoWrapFlags(
                self.llbuilder,
                ty,
                ptr,
                indices.as_ptr(),
                indices.len() as c_uint,
                UNNAMED,
                GEPNoWrapFlags::InBounds,
            )
        }
    }

    pub(crate) fn store(&mut self, val: &'ll Value, ptr: &'ll Value, align: Align) -> &'ll Value {
        debug!("Store {:?} -> {:?}", val, ptr);
        assert_eq!(self.cx.type_kind(self.cx.val_ty(ptr)), TypeKind::Pointer);
        unsafe {
            let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
            llvm::LLVMSetAlignment(store, align.bytes() as c_uint);
            store
        }
    }

    pub(crate) fn load(&mut self, ty: &'ll Type, ptr: &'ll Value, align: Align) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMBuildLoad2(self.llbuilder, ty, ptr, UNNAMED);
            llvm::LLVMSetAlignment(load, align.bytes() as c_uint);
            load
        }
    }
}

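/// Empty C string passed as the instruction name to the LLVM build functions,
/// leaving the resulting values unnamed in the emitted IR.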
pub(crate) const UNNAMED: *const c_char = c"".as_ptr();

impl<'ll, CX: Borrow<SCx<'ll>>> BackendTypes for GenericBuilder<'_, 'll, CX> {
    type Value = <GenericCx<'ll, CX> as BackendTypes>::Value;
    type Metadata = <GenericCx<'ll, CX> as BackendTypes>::Metadata;
    type Function = <GenericCx<'ll, CX> as BackendTypes>::Function;
    type BasicBlock = <GenericCx<'ll, CX> as BackendTypes>::BasicBlock;
    type Type = <GenericCx<'ll, CX> as BackendTypes>::Type;
    type Funclet = <GenericCx<'ll, CX> as BackendTypes>::Funclet;

    type DIScope = <GenericCx<'ll, CX> as BackendTypes>::DIScope;
    type DILocation = <GenericCx<'ll, CX> as BackendTypes>::DILocation;
    type DIVariable = <GenericCx<'ll, CX> as BackendTypes>::DIVariable;
}

impl abi::HasDataLayout for Builder<'_, '_, '_> {
    fn data_layout(&self) -> &abi::TargetDataLayout {
        self.cx.data_layout()
    }
}

impl<'tcx> ty::layout::HasTyCtxt<'tcx> for Builder<'_, '_, 'tcx> {
    #[inline]
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.cx.tcx
    }
}

impl<'tcx> ty::layout::HasTypingEnv<'tcx> for Builder<'_, '_, 'tcx> {
    fn typing_env(&self) -> ty::TypingEnv<'tcx> {
        self.cx.typing_env()
    }
}

impl HasTargetSpec for Builder<'_, '_, '_> {
    #[inline]
    fn target_spec(&self) -> &Target {
        self.cx.target_spec()
    }
}

impl<'tcx> LayoutOfHelpers<'tcx> for Builder<'_, '_, 'tcx> {
    #[inline]
    fn handle_layout_err(&self, err: LayoutError<'tcx>, span: Span, ty: Ty<'tcx>) -> ! {
        self.cx.handle_layout_err(err, span, ty)
    }
}

impl<'tcx> FnAbiOfHelpers<'tcx> for Builder<'_, '_, 'tcx> {
    #[inline]
    fn handle_fn_abi_err(
        &self,
        err: FnAbiError<'tcx>,
        span: Span,
        fn_abi_request: FnAbiRequest<'tcx>,
    ) -> ! {
        self.cx.handle_fn_abi_err(err, span, fn_abi_request)
    }
}

impl<'ll, 'tcx> Deref for Builder<'_, 'll, 'tcx> {
    type Target = CodegenCx<'ll, 'tcx>;

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.cx
    }
}

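// These macros expand to thin `BuilderMethods` wrappers around the
// corresponding LLVM-C build functions; the `set_math` variant additionally
// applies fast-math or algebraic-math flags to the emitted instruction.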
macro_rules! math_builder_methods {
    ($($name:ident($($arg:ident),*) => $llvm_capi:ident),+ $(,)?) => {
        $(fn $name(&mut self, $($arg: &'ll Value),*) -> &'ll Value {
            unsafe {
                llvm::$llvm_capi(self.llbuilder, $($arg,)* UNNAMED)
            }
        })+
    }
}

macro_rules! set_math_builder_methods {
    ($($name:ident($($arg:ident),*) => ($llvm_capi:ident, $llvm_set_math:ident)),+ $(,)?) => {
        $(fn $name(&mut self, $($arg: &'ll Value),*) -> &'ll Value {
            unsafe {
                let instr = llvm::$llvm_capi(self.llbuilder, $($arg,)* UNNAMED);
                llvm::$llvm_set_math(instr);
                instr
            }
        })+
    }
}

impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
    type CodegenCx = CodegenCx<'ll, 'tcx>;

    fn build(cx: &'a CodegenCx<'ll, 'tcx>, llbb: &'ll BasicBlock) -> Self {
        let bx = Builder::with_cx(cx);
        unsafe {
            llvm::LLVMPositionBuilderAtEnd(bx.llbuilder, llbb);
        }
        bx
    }

    fn cx(&self) -> &CodegenCx<'ll, 'tcx> {
        self.cx
    }

    fn llbb(&self) -> &'ll BasicBlock {
        unsafe { llvm::LLVMGetInsertBlock(self.llbuilder) }
    }

    fn set_span(&mut self, _span: Span) {}

    fn append_block(cx: &'a CodegenCx<'ll, 'tcx>, llfn: &'ll Value, name: &str) -> &'ll BasicBlock {
        unsafe {
            let name = SmallCStr::new(name);
            llvm::LLVMAppendBasicBlockInContext(cx.llcx, llfn, name.as_ptr())
        }
    }

    fn append_sibling_block(&mut self, name: &str) -> &'ll BasicBlock {
        Self::append_block(self.cx, self.llfn(), name)
    }

    fn switch_to_block(&mut self, llbb: Self::BasicBlock) {
        *self = Self::build(self.cx, llbb)
    }

    fn ret_void(&mut self) {
        llvm::LLVMBuildRetVoid(self.llbuilder);
    }

    fn ret(&mut self, v: &'ll Value) {
        unsafe {
            llvm::LLVMBuildRet(self.llbuilder, v);
        }
    }

    fn br(&mut self, dest: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMBuildBr(self.llbuilder, dest);
        }
    }

    fn cond_br(
        &mut self,
        cond: &'ll Value,
        then_llbb: &'ll BasicBlock,
        else_llbb: &'ll BasicBlock,
    ) {
        unsafe {
            llvm::LLVMBuildCondBr(self.llbuilder, cond, then_llbb, else_llbb);
        }
    }

    fn switch(
        &mut self,
        v: &'ll Value,
        else_llbb: &'ll BasicBlock,
        cases: impl ExactSizeIterator<Item = (u128, &'ll BasicBlock)>,
    ) {
        let switch =
            unsafe { llvm::LLVMBuildSwitch(self.llbuilder, v, else_llbb, cases.len() as c_uint) };
        for (on_val, dest) in cases {
            let on_val = self.const_uint_big(self.val_ty(v), on_val);
            unsafe { llvm::LLVMAddCase(switch, on_val, dest) }
        }
    }

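    // Like `switch`, but also attaches `!prof` branch-weights metadata so that
    // arms flagged as cold get a low weight (1) relative to hot arms (2000).
    // At `-Copt-level=0` the metadata is skipped and a plain `switch` is used.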
    fn switch_with_weights(
        &mut self,
        v: Self::Value,
        else_llbb: Self::BasicBlock,
        else_is_cold: bool,
        cases: impl ExactSizeIterator<Item = (u128, Self::BasicBlock, bool)>,
    ) {
        if self.cx.sess().opts.optimize == rustc_session::config::OptLevel::No {
            self.switch(v, else_llbb, cases.map(|(val, dest, _)| (val, dest)));
            return;
        }

        let id = self.cx.create_metadata(b"branch_weights");

        let cold_weight = llvm::LLVMValueAsMetadata(self.cx.const_u32(1));
        let hot_weight = llvm::LLVMValueAsMetadata(self.cx.const_u32(2000));
        let weight =
            |is_cold: bool| -> &Metadata { if is_cold { cold_weight } else { hot_weight } };

        let mut md: SmallVec<[&Metadata; 16]> = SmallVec::with_capacity(cases.len() + 2);
        md.push(id);
        md.push(weight(else_is_cold));

        let switch =
            unsafe { llvm::LLVMBuildSwitch(self.llbuilder, v, else_llbb, cases.len() as c_uint) };
        for (on_val, dest, is_cold) in cases {
            let on_val = self.const_uint_big(self.val_ty(v), on_val);
            unsafe { llvm::LLVMAddCase(switch, on_val, dest) }
            md.push(weight(is_cold));
        }

        self.cx.set_metadata_node(switch, llvm::MD_prof, &md);
    }

    fn invoke(
        &mut self,
        llty: &'ll Type,
        fn_attrs: Option<&CodegenFnAttrs>,
        fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
        llfn: &'ll Value,
        args: &[&'ll Value],
        then: &'ll BasicBlock,
        catch: &'ll BasicBlock,
        funclet: Option<&Funclet<'ll>>,
        instance: Option<Instance<'tcx>>,
    ) -> &'ll Value {
        debug!("invoke {:?} with args ({:?})", llfn, args);

        let args = self.check_call("invoke", llty, llfn, args);
        let funclet_bundle = funclet.map(|funclet| funclet.bundle());
        let mut bundles: SmallVec<[_; 2]> = SmallVec::new();
        if let Some(funclet_bundle) = funclet_bundle {
            bundles.push(funclet_bundle);
        }

        self.cfi_type_test(fn_attrs, fn_abi, instance, llfn);

        let kcfi_bundle = self.kcfi_operand_bundle(fn_attrs, fn_abi, instance, llfn);
        if let Some(kcfi_bundle) = kcfi_bundle.as_ref().map(|b| b.as_ref()) {
            bundles.push(kcfi_bundle);
        }

        let invoke = unsafe {
            llvm::LLVMBuildInvokeWithOperandBundles(
                self.llbuilder,
                llty,
                llfn,
                args.as_ptr(),
                args.len() as c_uint,
                then,
                catch,
                bundles.as_ptr(),
                bundles.len() as c_uint,
                UNNAMED,
            )
        };
        if let Some(fn_abi) = fn_abi {
            fn_abi.apply_attrs_callsite(self, invoke);
        }
        invoke
    }

    fn unreachable(&mut self) {
        unsafe {
            llvm::LLVMBuildUnreachable(self.llbuilder);
        }
    }

    math_builder_methods! {
        add(a, b) => LLVMBuildAdd,
        fadd(a, b) => LLVMBuildFAdd,
        sub(a, b) => LLVMBuildSub,
        fsub(a, b) => LLVMBuildFSub,
        mul(a, b) => LLVMBuildMul,
        fmul(a, b) => LLVMBuildFMul,
        udiv(a, b) => LLVMBuildUDiv,
        exactudiv(a, b) => LLVMBuildExactUDiv,
        sdiv(a, b) => LLVMBuildSDiv,
        exactsdiv(a, b) => LLVMBuildExactSDiv,
        fdiv(a, b) => LLVMBuildFDiv,
        urem(a, b) => LLVMBuildURem,
        srem(a, b) => LLVMBuildSRem,
        frem(a, b) => LLVMBuildFRem,
        shl(a, b) => LLVMBuildShl,
        lshr(a, b) => LLVMBuildLShr,
        ashr(a, b) => LLVMBuildAShr,
        and(a, b) => LLVMBuildAnd,
        or(a, b) => LLVMBuildOr,
        xor(a, b) => LLVMBuildXor,
        neg(x) => LLVMBuildNeg,
        fneg(x) => LLVMBuildFNeg,
        not(x) => LLVMBuildNot,
        unchecked_sadd(x, y) => LLVMBuildNSWAdd,
        unchecked_uadd(x, y) => LLVMBuildNUWAdd,
        unchecked_ssub(x, y) => LLVMBuildNSWSub,
        unchecked_usub(x, y) => LLVMBuildNUWSub,
        unchecked_smul(x, y) => LLVMBuildNSWMul,
        unchecked_umul(x, y) => LLVMBuildNUWMul,
    }

    fn unchecked_suadd(&mut self, a: &'ll Value, b: &'ll Value) -> &'ll Value {
        unsafe {
            let add = llvm::LLVMBuildAdd(self.llbuilder, a, b, UNNAMED);
            if llvm::LLVMIsAInstruction(add).is_some() {
                llvm::LLVMSetNUW(add, TRUE);
                llvm::LLVMSetNSW(add, TRUE);
            }
            add
        }
    }
    fn unchecked_susub(&mut self, a: &'ll Value, b: &'ll Value) -> &'ll Value {
        unsafe {
            let sub = llvm::LLVMBuildSub(self.llbuilder, a, b, UNNAMED);
            if llvm::LLVMIsAInstruction(sub).is_some() {
                llvm::LLVMSetNUW(sub, TRUE);
                llvm::LLVMSetNSW(sub, TRUE);
            }
            sub
        }
    }
    fn unchecked_sumul(&mut self, a: &'ll Value, b: &'ll Value) -> &'ll Value {
        unsafe {
            let mul = llvm::LLVMBuildMul(self.llbuilder, a, b, UNNAMED);
            if llvm::LLVMIsAInstruction(mul).is_some() {
                llvm::LLVMSetNUW(mul, TRUE);
                llvm::LLVMSetNSW(mul, TRUE);
            }
            mul
        }
    }

    fn or_disjoint(&mut self, a: &'ll Value, b: &'ll Value) -> &'ll Value {
        unsafe {
            let or = llvm::LLVMBuildOr(self.llbuilder, a, b, UNNAMED);

            if llvm::LLVMIsAInstruction(or).is_some() {
                llvm::LLVMSetIsDisjoint(or, TRUE);
            }
            or
        }
    }

    set_math_builder_methods! {
        fadd_fast(x, y) => (LLVMBuildFAdd, LLVMRustSetFastMath),
        fsub_fast(x, y) => (LLVMBuildFSub, LLVMRustSetFastMath),
        fmul_fast(x, y) => (LLVMBuildFMul, LLVMRustSetFastMath),
        fdiv_fast(x, y) => (LLVMBuildFDiv, LLVMRustSetFastMath),
        frem_fast(x, y) => (LLVMBuildFRem, LLVMRustSetFastMath),
        fadd_algebraic(x, y) => (LLVMBuildFAdd, LLVMRustSetAlgebraicMath),
        fsub_algebraic(x, y) => (LLVMBuildFSub, LLVMRustSetAlgebraicMath),
        fmul_algebraic(x, y) => (LLVMBuildFMul, LLVMRustSetAlgebraicMath),
        fdiv_algebraic(x, y) => (LLVMBuildFDiv, LLVMRustSetAlgebraicMath),
        frem_algebraic(x, y) => (LLVMBuildFRem, LLVMRustSetAlgebraicMath),
    }

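    // For unsigned add/sub, overflow can be detected with a single compare
    // (`a + b` wrapped iff the sum is less than `a`; `a - b` wrapped iff
    // `a < b`), so those cases avoid the `llvm.*.with.overflow` intrinsics
    // used for everything else below.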
    fn checked_binop(
        &mut self,
        oop: OverflowOp,
        ty: Ty<'tcx>,
        lhs: Self::Value,
        rhs: Self::Value,
    ) -> (Self::Value, Self::Value) {
        let (size, signed) = ty.int_size_and_signed(self.tcx);
        let width = size.bits();

        if !signed {
            match oop {
                OverflowOp::Sub => {
                    let sub = self.sub(lhs, rhs);
                    let cmp = self.icmp(IntPredicate::IntULT, lhs, rhs);
                    return (sub, cmp);
                }
                OverflowOp::Add => {
                    let add = self.add(lhs, rhs);
                    let cmp = self.icmp(IntPredicate::IntULT, add, lhs);
                    return (add, cmp);
                }
                OverflowOp::Mul => {}
            }
        }

        let oop_str = match oop {
            OverflowOp::Add => "add",
            OverflowOp::Sub => "sub",
            OverflowOp::Mul => "mul",
        };

        let name = format!("llvm.{}{oop_str}.with.overflow", if signed { 's' } else { 'u' });

        let res = self.call_intrinsic(name, &[self.type_ix(width)], &[lhs, rhs]);
        (self.extract_value(res, 0), self.extract_value(res, 1))
    }

    fn from_immediate(&mut self, val: Self::Value) -> Self::Value {
        if self.cx().val_ty(val) == self.cx().type_i1() {
            self.zext(val, self.cx().type_i8())
        } else {
            val
        }
    }

    fn to_immediate_scalar(&mut self, val: Self::Value, scalar: abi::Scalar) -> Self::Value {
        if scalar.is_bool() {
            return self.unchecked_utrunc(val, self.cx().type_i1());
        }
        val
    }

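    // Allocas are emitted through a separate builder positioned at the start of
    // the function's first basic block, where LLVM expects static allocas to
    // live.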
    fn alloca(&mut self, size: Size, align: Align) -> &'ll Value {
        let mut bx = Builder::with_cx(self.cx);
        bx.position_at_start(unsafe { llvm::LLVMGetFirstBasicBlock(self.llfn()) });
        let ty = self.cx().type_array(self.cx().type_i8(), size.bytes());
        unsafe {
            let alloca = llvm::LLVMBuildAlloca(bx.llbuilder, ty, UNNAMED);
            llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
            llvm::LLVMBuildPointerCast(bx.llbuilder, alloca, self.cx().type_ptr(), UNNAMED)
        }
    }

    fn scalable_alloca(&mut self, elt: u64, align: Align, element_ty: Ty<'_>) -> Self::Value {
        let mut bx = Builder::with_cx(self.cx);
        bx.position_at_start(unsafe { llvm::LLVMGetFirstBasicBlock(self.llfn()) });
        let llvm_ty = match element_ty.kind() {
            ty::Bool => bx.type_i1(),
            ty::Int(int_ty) => self.cx.type_int_from_ty(*int_ty),
            ty::Uint(uint_ty) => self.cx.type_uint_from_ty(*uint_ty),
            ty::Float(float_ty) => self.cx.type_float_from_ty(*float_ty),
            _ => unreachable!("scalable vectors can only contain a bool, int, uint or float"),
        };

        unsafe {
            let ty = llvm::LLVMScalableVectorType(llvm_ty, elt.try_into().unwrap());
            let alloca = llvm::LLVMBuildAlloca(&bx.llbuilder, ty, UNNAMED);
            llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
            alloca
        }
    }

    fn load(&mut self, ty: &'ll Type, ptr: &'ll Value, align: Align) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMBuildLoad2(self.llbuilder, ty, ptr, UNNAMED);
            let align = align.min(self.cx().tcx.sess.target.max_reliable_alignment());
            llvm::LLVMSetAlignment(load, align.bytes() as c_uint);
            load
        }
    }

    fn volatile_load(&mut self, ty: &'ll Type, ptr: &'ll Value) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMBuildLoad2(self.llbuilder, ty, ptr, UNNAMED);
            llvm::LLVMSetVolatile(load, llvm::TRUE);
            load
        }
    }

    fn atomic_load(
        &mut self,
        ty: &'ll Type,
        ptr: &'ll Value,
        order: rustc_middle::ty::AtomicOrdering,
        size: Size,
    ) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMBuildLoad2(self.llbuilder, ty, ptr, UNNAMED);
            llvm::LLVMSetOrdering(load, AtomicOrdering::from_generic(order));
            llvm::LLVMSetAlignment(load, size.bytes() as c_uint);
            load
        }
    }

    #[instrument(level = "trace", skip(self))]
    fn load_operand(&mut self, place: PlaceRef<'tcx, &'ll Value>) -> OperandRef<'tcx, &'ll Value> {
        if place.layout.is_unsized() {
            let tail = self.tcx.struct_tail_for_codegen(place.layout.ty, self.typing_env());
            if matches!(tail.kind(), ty::Foreign(..)) {
                panic!("unsized locals must not be `extern` types");
            }
        }
        assert_eq!(place.val.llextra.is_some(), place.layout.is_unsized());

        if place.layout.is_zst() {
            return OperandRef::zero_sized(place.layout);
        }

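        // Attaches `!noundef`, `!range`, `!nonnull`, and alignment metadata to a
        // scalar load based on the scalar's valid range; skipped entirely at
        // `-Copt-level=0`.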
        #[instrument(level = "trace", skip(bx))]
        fn scalar_load_metadata<'a, 'll, 'tcx>(
            bx: &mut Builder<'a, 'll, 'tcx>,
            load: &'ll Value,
            scalar: abi::Scalar,
            layout: TyAndLayout<'tcx>,
            offset: Size,
        ) {
            if bx.cx.sess().opts.optimize == OptLevel::No {
                return;
            }

            if !scalar.is_uninit_valid() {
                bx.noundef_metadata(load);
            }

            match scalar.primitive() {
                abi::Primitive::Int(..) => {
                    if !scalar.is_always_valid(bx) {
                        bx.range_metadata(load, scalar.valid_range(bx));
                    }
                }
                abi::Primitive::Pointer(_) => {
                    if !scalar.valid_range(bx).contains(0) {
                        bx.nonnull_metadata(load);
                    }

                    if let Some(pointee) = layout.pointee_info_at(bx, offset)
                        && let Some(_) = pointee.safe
                    {
                        bx.align_metadata(load, pointee.align);
                    }
                }
                abi::Primitive::Float(_) => {}
            }
        }

        let val = if let Some(_) = place.val.llextra {
            OperandValue::Ref(place.val)
        } else if place.layout.is_llvm_immediate() {
            let mut const_llval = None;
            let llty = place.layout.llvm_type(self);
            if let Some(global) = llvm::LLVMIsAGlobalVariable(place.val.llval) {
                if llvm::LLVMIsGlobalConstant(global).is_true() {
                    if let Some(init) = llvm::LLVMGetInitializer(global) {
                        if self.val_ty(init) == llty {
                            const_llval = Some(init);
                        }
                    }
                }
            }

            let llval = const_llval.unwrap_or_else(|| {
                let load = self.load(llty, place.val.llval, place.val.align);
                if let abi::BackendRepr::Scalar(scalar) = place.layout.backend_repr {
                    scalar_load_metadata(self, load, scalar, place.layout, Size::ZERO);
                    self.to_immediate_scalar(load, scalar)
                } else {
                    load
                }
            });
            OperandValue::Immediate(llval)
        } else if let abi::BackendRepr::ScalarPair(a, b) = place.layout.backend_repr {
            let b_offset = a.size(self).align_to(b.align(self).abi);

            let mut load = |i, scalar: abi::Scalar, layout, align, offset| {
                let llptr = if i == 0 {
                    place.val.llval
                } else {
                    self.inbounds_ptradd(place.val.llval, self.const_usize(b_offset.bytes()))
                };
                let llty = place.layout.scalar_pair_element_llvm_type(self, i, false);
                let load = self.load(llty, llptr, align);
                scalar_load_metadata(self, load, scalar, layout, offset);
                self.to_immediate_scalar(load, scalar)
            };

            OperandValue::Pair(
                load(0, a, place.layout, place.val.align, Size::ZERO),
                load(1, b, place.layout, place.val.align.restrict_for_offset(b_offset), b_offset),
            )
        } else {
            OperandValue::Ref(place.val)
        };

        OperandRef { val, layout: place.layout, move_annotation: None }
    }

    fn write_operand_repeatedly(
        &mut self,
        cg_elem: OperandRef<'tcx, &'ll Value>,
        count: u64,
        dest: PlaceRef<'tcx, &'ll Value>,
    ) {
        if self.cx.sess().opts.optimize == OptLevel::No {
            self.write_operand_repeatedly_unoptimized(cg_elem, count, dest);
        } else {
            self.write_operand_repeatedly_optimized(cg_elem, count, dest);
        }
    }

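    // Range metadata only matters to the optimizer, so this bails out early at
    // `-Copt-level=0` rather than emitting it.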
    fn range_metadata(&mut self, load: &'ll Value, range: WrappingRange) {
        if self.cx.sess().opts.optimize == OptLevel::No {
            return;
        }

        let llty = self.cx.val_ty(load);
        let md = [
            llvm::LLVMValueAsMetadata(self.cx.const_uint_big(llty, range.start)),
            llvm::LLVMValueAsMetadata(self.cx.const_uint_big(llty, range.end.wrapping_add(1))),
        ];
        self.set_metadata_node(load, llvm::MD_range, &md);
    }

    fn nonnull_metadata(&mut self, load: &'ll Value) {
        self.set_metadata_node(load, llvm::MD_nonnull, &[]);
    }

    fn store(&mut self, val: &'ll Value, ptr: &'ll Value, align: Align) -> &'ll Value {
        self.store_with_flags(val, ptr, align, MemFlags::empty())
    }

    fn store_with_flags(
        &mut self,
        val: &'ll Value,
        ptr: &'ll Value,
        align: Align,
        flags: MemFlags,
    ) -> &'ll Value {
        debug!("Store {:?} -> {:?} ({:?})", val, ptr, flags);
        assert_eq!(self.cx.type_kind(self.cx.val_ty(ptr)), TypeKind::Pointer);
        unsafe {
            let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
            let align = align.min(self.cx().tcx.sess.target.max_reliable_alignment());
            let align =
                if flags.contains(MemFlags::UNALIGNED) { 1 } else { align.bytes() as c_uint };
            llvm::LLVMSetAlignment(store, align);
            if flags.contains(MemFlags::VOLATILE) {
                llvm::LLVMSetVolatile(store, llvm::TRUE);
            }
            if flags.contains(MemFlags::NONTEMPORAL) {
                let use_nontemporal = matches!(
                    self.cx.tcx.sess.target.arch,
                    Arch::AArch64 | Arch::Arm | Arch::RiscV32 | Arch::RiscV64
                );
                if use_nontemporal {
                    let one = llvm::LLVMValueAsMetadata(self.cx.const_i32(1));
                    self.set_metadata_node(store, llvm::MD_nontemporal, &[one]);
                }
            }
            store
        }
    }

    fn atomic_store(
        &mut self,
        val: &'ll Value,
        ptr: &'ll Value,
        order: rustc_middle::ty::AtomicOrdering,
        size: Size,
    ) {
        debug!("Store {:?} -> {:?}", val, ptr);
        assert_eq!(self.cx.type_kind(self.cx.val_ty(ptr)), TypeKind::Pointer);
        unsafe {
            let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
            llvm::LLVMSetOrdering(store, AtomicOrdering::from_generic(order));
            llvm::LLVMSetAlignment(store, size.bytes() as c_uint);
        }
    }

    fn gep(&mut self, ty: &'ll Type, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildGEPWithNoWrapFlags(
                self.llbuilder,
                ty,
                ptr,
                indices.as_ptr(),
                indices.len() as c_uint,
                UNNAMED,
                GEPNoWrapFlags::default(),
            )
        }
    }

    fn inbounds_gep(
        &mut self,
        ty: &'ll Type,
        ptr: &'ll Value,
        indices: &[&'ll Value],
    ) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildGEPWithNoWrapFlags(
                self.llbuilder,
                ty,
                ptr,
                indices.as_ptr(),
                indices.len() as c_uint,
                UNNAMED,
                GEPNoWrapFlags::InBounds,
            )
        }
    }

    fn inbounds_nuw_gep(
        &mut self,
        ty: &'ll Type,
        ptr: &'ll Value,
        indices: &[&'ll Value],
    ) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildGEPWithNoWrapFlags(
                self.llbuilder,
                ty,
                ptr,
                indices.as_ptr(),
                indices.len() as c_uint,
                UNNAMED,
                GEPNoWrapFlags::InBounds | GEPNoWrapFlags::NUW,
            )
        }
    }

    fn trunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn unchecked_utrunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        debug_assert_ne!(self.val_ty(val), dest_ty);

        let trunc = self.trunc(val, dest_ty);
        unsafe {
            if llvm::LLVMIsAInstruction(trunc).is_some() {
                llvm::LLVMSetNUW(trunc, TRUE);
            }
        }
        trunc
    }

    fn unchecked_strunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        debug_assert_ne!(self.val_ty(val), dest_ty);

        let trunc = self.trunc(val, dest_ty);
        unsafe {
            if llvm::LLVMIsAInstruction(trunc).is_some() {
                llvm::LLVMSetNSW(trunc, TRUE);
            }
        }
        trunc
    }

    fn sext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildSExt(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn fptoui_sat(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        self.call_intrinsic("llvm.fptoui.sat", &[dest_ty, self.val_ty(val)], &[val])
    }

    fn fptosi_sat(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        self.call_intrinsic("llvm.fptosi.sat", &[dest_ty, self.val_ty(val)], &[val])
    }

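    // On wasm-like targets, scalar 32/64-bit float-to-int casts are emitted via
    // the dedicated `llvm.wasm.trunc.{unsigned,signed}` intrinsics rather than
    // plain `fptoui`/`fptosi`; other widths and vector casts fall through to the
    // generic instructions.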
    fn fptoui(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        if self.sess().target.is_like_wasm {
            let src_ty = self.cx.val_ty(val);
            if self.cx.type_kind(src_ty) != TypeKind::Vector {
                let float_width = self.cx.float_width(src_ty);
                let int_width = self.cx.int_width(dest_ty);
                if matches!((int_width, float_width), (32 | 64, 32 | 64)) {
                    return self.call_intrinsic(
                        "llvm.wasm.trunc.unsigned",
                        &[dest_ty, src_ty],
                        &[val],
                    );
                }
            }
        }
        unsafe { llvm::LLVMBuildFPToUI(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn fptosi(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        if self.sess().target.is_like_wasm {
            let src_ty = self.cx.val_ty(val);
            if self.cx.type_kind(src_ty) != TypeKind::Vector {
                let float_width = self.cx.float_width(src_ty);
                let int_width = self.cx.int_width(dest_ty);
                if matches!((int_width, float_width), (32 | 64, 32 | 64)) {
                    return self.call_intrinsic(
                        "llvm.wasm.trunc.signed",
                        &[dest_ty, src_ty],
                        &[val],
                    );
                }
            }
        }
        unsafe { llvm::LLVMBuildFPToSI(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn uitofp(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildUIToFP(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn sitofp(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildSIToFP(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn fptrunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildFPTrunc(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn fpext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildFPExt(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn ptrtoint(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildPtrToInt(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn inttoptr(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildIntToPtr(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn bitcast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn intcast(&mut self, val: &'ll Value, dest_ty: &'ll Type, is_signed: bool) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildIntCast2(self.llbuilder, val, dest_ty, is_signed.to_llvm_bool(), UNNAMED)
        }
    }

    fn pointercast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildPointerCast(self.llbuilder, val, dest_ty, UNNAMED) }
    }

    fn icmp(&mut self, op: IntPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        let op = llvm::IntPredicate::from_generic(op);
        unsafe { llvm::LLVMBuildICmp(self.llbuilder, op as c_uint, lhs, rhs, UNNAMED) }
    }

    fn fcmp(&mut self, op: RealPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        let op = llvm::RealPredicate::from_generic(op);
        unsafe { llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, UNNAMED) }
    }

    fn three_way_compare(
        &mut self,
        ty: Ty<'tcx>,
        lhs: Self::Value,
        rhs: Self::Value,
    ) -> Self::Value {
        let size = ty.primitive_size(self.tcx);
        let name = if ty.is_signed() { "llvm.scmp" } else { "llvm.ucmp" };

        self.call_intrinsic(name, &[self.type_i8(), self.type_ix(size.bits())], &[lhs, rhs])
    }

    fn memcpy(
        &mut self,
        dst: &'ll Value,
        dst_align: Align,
        src: &'ll Value,
        src_align: Align,
        size: &'ll Value,
        flags: MemFlags,
        tt: Option<FncTree>,
    ) {
        assert!(!flags.contains(MemFlags::NONTEMPORAL), "non-temporal memcpy not supported");
        let size = self.intcast(size, self.type_isize(), false);
        let is_volatile = flags.contains(MemFlags::VOLATILE);
        let memcpy = unsafe {
            llvm::LLVMRustBuildMemCpy(
                self.llbuilder,
                dst,
                dst_align.bytes() as c_uint,
                src,
                src_align.bytes() as c_uint,
                size,
                is_volatile,
            )
        };

        if let Some(tt) = tt {
            crate::typetree::add_tt(self.cx().llmod, self.cx().llcx, memcpy, tt);
        }
    }

    fn memmove(
        &mut self,
        dst: &'ll Value,
        dst_align: Align,
        src: &'ll Value,
        src_align: Align,
        size: &'ll Value,
        flags: MemFlags,
    ) {
        assert!(!flags.contains(MemFlags::NONTEMPORAL), "non-temporal memmove not supported");
        let size = self.intcast(size, self.type_isize(), false);
        let is_volatile = flags.contains(MemFlags::VOLATILE);
        unsafe {
            llvm::LLVMRustBuildMemMove(
                self.llbuilder,
                dst,
                dst_align.bytes() as c_uint,
                src,
                src_align.bytes() as c_uint,
                size,
                is_volatile,
            );
        }
    }

    fn memset(
        &mut self,
        ptr: &'ll Value,
        fill_byte: &'ll Value,
        size: &'ll Value,
        align: Align,
        flags: MemFlags,
    ) {
        assert!(!flags.contains(MemFlags::NONTEMPORAL), "non-temporal memset not supported");
        let is_volatile = flags.contains(MemFlags::VOLATILE);
        unsafe {
            llvm::LLVMRustBuildMemSet(
                self.llbuilder,
                ptr,
                align.bytes() as c_uint,
                fill_byte,
                size,
                is_volatile,
            );
        }
    }

    fn select(
        &mut self,
        cond: &'ll Value,
        then_val: &'ll Value,
        else_val: &'ll Value,
    ) -> &'ll Value {
        unsafe { llvm::LLVMBuildSelect(self.llbuilder, cond, then_val, else_val, UNNAMED) }
    }

    fn va_arg(&mut self, list: &'ll Value, ty: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMBuildVAArg(self.llbuilder, list, ty, UNNAMED) }
    }

    fn extract_element(&mut self, vec: &'ll Value, idx: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMBuildExtractElement(self.llbuilder, vec, idx, UNNAMED) }
    }

    fn vector_splat(&mut self, num_elts: usize, elt: &'ll Value) -> &'ll Value {
        unsafe {
            let elt_ty = self.cx.val_ty(elt);
            let undef = llvm::LLVMGetUndef(self.type_vector(elt_ty, num_elts as u64));
            let vec = self.insert_element(undef, elt, self.cx.const_i32(0));
            let vec_i32_ty = self.type_vector(self.type_i32(), num_elts as u64);
            self.shuffle_vector(vec, undef, self.const_null(vec_i32_ty))
        }
    }

    fn extract_value(&mut self, agg_val: &'ll Value, idx: u64) -> &'ll Value {
        assert_eq!(idx as c_uint as u64, idx);
        unsafe { llvm::LLVMBuildExtractValue(self.llbuilder, agg_val, idx as c_uint, UNNAMED) }
    }

    fn insert_value(&mut self, agg_val: &'ll Value, elt: &'ll Value, idx: u64) -> &'ll Value {
        assert_eq!(idx as c_uint as u64, idx);
1208 unsafe { llvm::LLVMBuildInsertValue(self.llbuilder, agg_val, elt, idx as c_uint, UNNAMED) }
1209 }
1210
1211 fn set_personality_fn(&mut self, personality: &'ll Value) {
1212 unsafe {
1213 llvm::LLVMSetPersonalityFn(self.llfn(), personality);
1214 }
1215 }
1216
1217 fn cleanup_landing_pad(&mut self, pers_fn: &'ll Value) -> (&'ll Value, &'ll Value) {
1218 let ty = self.type_struct(&[self.type_ptr(), self.type_i32()], false);
1219 let landing_pad = self.landing_pad(ty, pers_fn, 0);
1220 unsafe {
1221 llvm::LLVMSetCleanup(landing_pad, llvm::TRUE);
1222 }
1223 (self.extract_value(landing_pad, 0), self.extract_value(landing_pad, 1))
1224 }
1225
1226 fn filter_landing_pad(&mut self, pers_fn: &'ll Value) {
1227 let ty = self.type_struct(&[self.type_ptr(), self.type_i32()], false);
1228 let landing_pad = self.landing_pad(ty, pers_fn, 1);
1229 self.add_clause(landing_pad, self.const_array(self.type_ptr(), &[]));
1230 }
1231
1232 fn resume(&mut self, exn0: &'ll Value, exn1: &'ll Value) {
1233 let ty = self.type_struct(&[self.type_ptr(), self.type_i32()], false);
1234 let mut exn = self.const_poison(ty);
1235 exn = self.insert_value(exn, exn0, 0);
1236 exn = self.insert_value(exn, exn1, 1);
1237 unsafe {
1238 llvm::LLVMBuildResume(self.llbuilder, exn);
1239 }
1240 }
1241
1242 fn cleanup_pad(&mut self, parent: Option<&'ll Value>, args: &[&'ll Value]) -> Funclet<'ll> {
1243 let ret = unsafe {
1244 llvm::LLVMBuildCleanupPad(
1245 self.llbuilder,
1246 parent,
1247 args.as_ptr(),
1248 args.len() as c_uint,
1249 c"cleanuppad".as_ptr(),
1250 )
1251 };
1252 Funclet::new(ret.expect("LLVM does not have support for cleanuppad"))
1253 }
1254
1255 fn cleanup_ret(&mut self, funclet: &Funclet<'ll>, unwind: Option<&'ll BasicBlock>) {
1256 unsafe {
1257 llvm::LLVMBuildCleanupRet(self.llbuilder, funclet.cleanuppad(), unwind)
1258 .expect("LLVM does not have support for cleanupret");
1259 }
1260 }
1261
1262 fn catch_pad(&mut self, parent: &'ll Value, args: &[&'ll Value]) -> Funclet<'ll> {
1263 let ret = unsafe {
1264 llvm::LLVMBuildCatchPad(
1265 self.llbuilder,
1266 parent,
1267 args.as_ptr(),
1268 args.len() as c_uint,
1269 c"catchpad".as_ptr(),
1270 )
1271 };
1272 Funclet::new(ret.expect("LLVM does not have support for catchpad"))
1273 }
1274
1275 fn catch_switch(
1276 &mut self,
1277 parent: Option<&'ll Value>,
1278 unwind: Option<&'ll BasicBlock>,
1279 handlers: &[&'ll BasicBlock],
1280 ) -> &'ll Value {
1281 let ret = unsafe {
1282 llvm::LLVMBuildCatchSwitch(
1283 self.llbuilder,
1284 parent,
1285 unwind,
1286 handlers.len() as c_uint,
1287 c"catchswitch".as_ptr(),
1288 )
1289 };
1290 let ret = ret.expect("LLVM does not have support for catchswitch");
1291 for handler in handlers {
1292 unsafe {
1293 llvm::LLVMAddHandler(ret, handler);
1294 }
1295 }
1296 ret
1297 }
1298
1299 fn atomic_cmpxchg(
1301 &mut self,
1302 dst: &'ll Value,
1303 cmp: &'ll Value,
1304 src: &'ll Value,
1305 order: rustc_middle::ty::AtomicOrdering,
1306 failure_order: rustc_middle::ty::AtomicOrdering,
1307 weak: bool,
1308 ) -> (&'ll Value, &'ll Value) {
1309 unsafe {
1310 let value = llvm::LLVMBuildAtomicCmpXchg(
1311 self.llbuilder,
1312 dst,
1313 cmp,
1314 src,
1315 AtomicOrdering::from_generic(order),
1316 AtomicOrdering::from_generic(failure_order),
1317 llvm::FALSE, );
1319 llvm::LLVMSetWeak(value, weak.to_llvm_bool());
1320 let val = self.extract_value(value, 0);
1321 let success = self.extract_value(value, 1);
1322 (val, success)
1323 }
1324 }
1325
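// Emits an `atomicrmw`. When `ret_ptr` is set but LLVM produced a non-pointer result,
// the value is converted back with `inttoptr` so callers always receive a pointer.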
1326 fn atomic_rmw(
1327 &mut self,
1328 op: rustc_codegen_ssa::common::AtomicRmwBinOp,
1329 dst: &'ll Value,
1330 src: &'ll Value,
1331 order: rustc_middle::ty::AtomicOrdering,
1332 ret_ptr: bool,
1333 ) -> &'ll Value {
1334 let mut res = unsafe {
1338 llvm::LLVMBuildAtomicRMW(
1339 self.llbuilder,
1340 AtomicRmwBinOp::from_generic(op),
1341 dst,
1342 src,
1343 AtomicOrdering::from_generic(order),
1344 llvm::FALSE, )
1346 };
1347 if ret_ptr && self.val_ty(res) != self.type_ptr() {
1348 res = self.inttoptr(res, self.type_ptr());
1349 }
1350 res
1351 }
1352
1353 fn atomic_fence(
1354 &mut self,
1355 order: rustc_middle::ty::AtomicOrdering,
1356 scope: SynchronizationScope,
1357 ) {
1358 let single_threaded = match scope {
1359 SynchronizationScope::SingleThread => true,
1360 SynchronizationScope::CrossThread => false,
1361 };
1362 unsafe {
1363 llvm::LLVMBuildFence(
1364 self.llbuilder,
1365 AtomicOrdering::from_generic(order),
1366 single_threaded.to_llvm_bool(),
1367 UNNAMED,
1368 );
1369 }
1370 }
1371
1372 fn set_invariant_load(&mut self, load: &'ll Value) {
1373 self.set_metadata_node(load, llvm::MD_invariant_load, &[]);
1374 }
1375
1376 fn lifetime_start(&mut self, ptr: &'ll Value, size: Size) {
1377 self.call_lifetime_intrinsic("llvm.lifetime.start", ptr, size);
1378 }
1379
1380 fn lifetime_end(&mut self, ptr: &'ll Value, size: Size) {
1381 self.call_lifetime_intrinsic("llvm.lifetime.end", ptr, size);
1382 }
1383
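// Emits a direct or indirect call: arguments are type-checked (and bitcast if needed)
// against `llty`, funclet and KCFI operand bundles are attached, a CFI type test is
// inserted for indirect calls, and inlining/ABI attributes are applied to the call site.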
1384 fn call(
1385 &mut self,
1386 llty: &'ll Type,
1387 caller_attrs: Option<&CodegenFnAttrs>,
1388 fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
1389 llfn: &'ll Value,
1390 args: &[&'ll Value],
1391 funclet: Option<&Funclet<'ll>>,
1392 callee_instance: Option<Instance<'tcx>>,
1393 ) -> &'ll Value {
1394 debug!("call {:?} with args ({:?})", llfn, args);
1395
1396 let args = self.check_call("call", llty, llfn, args);
1397 let funclet_bundle = funclet.map(|funclet| funclet.bundle());
1398 let mut bundles: SmallVec<[_; 2]> = SmallVec::new();
1399 if let Some(funclet_bundle) = funclet_bundle {
1400 bundles.push(funclet_bundle);
1401 }
1402
1403 self.cfi_type_test(caller_attrs, fn_abi, callee_instance, llfn);
1405
1406 let kcfi_bundle = self.kcfi_operand_bundle(caller_attrs, fn_abi, callee_instance, llfn);
1408 if let Some(kcfi_bundle) = kcfi_bundle.as_ref().map(|b| b.as_ref()) {
1409 bundles.push(kcfi_bundle);
1410 }
1411
1412 let call = unsafe {
1413 llvm::LLVMBuildCallWithOperandBundles(
1414 self.llbuilder,
1415 llty,
1416 llfn,
1417 args.as_ptr() as *const &llvm::Value,
1418 args.len() as c_uint,
1419 bundles.as_ptr(),
1420 bundles.len() as c_uint,
1421 c"".as_ptr(),
1422 )
1423 };
1424
1425 if let Some(callee_instance) = callee_instance {
1426 let callee_attrs = self.cx.tcx.codegen_fn_attrs(callee_instance.def_id());
1428 if let Some(caller_attrs) = caller_attrs
1429 && let Some(inlining_rule) = attributes::inline_attr(&self.cx, self.cx.tcx, callee_instance)
1433 && self.cx.tcx.is_target_feature_call_safe(
1434 &callee_attrs.target_features,
1435 &caller_attrs.target_features.iter().cloned().chain(
1436 self.cx.tcx.sess.target_features.iter().map(|feat| TargetFeature {
1437 name: *feat,
1438 kind: TargetFeatureKind::Implied,
1439 })
1440 ).collect::<Vec<_>>(),
1441 )
1442 {
1443 attributes::apply_to_callsite(
1444 call,
1445 llvm::AttributePlace::Function,
1446 &[inlining_rule],
1447 );
1448 }
1449 }
1450
1451 if let Some(fn_abi) = fn_abi {
1452 fn_abi.apply_attrs_callsite(self, call);
1453 }
1454 call
1455 }
1456
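// Lowers a guaranteed tail call: the call is marked `musttail` and is immediately
// followed by the return LLVM requires (`ret void` for ignored/indirect returns,
// `ret` of the call result otherwise).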
1457 fn tail_call(
1458 &mut self,
1459 llty: Self::Type,
1460 caller_attrs: Option<&CodegenFnAttrs>,
1461 fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
1462 llfn: Self::Value,
1463 args: &[Self::Value],
1464 funclet: Option<&Self::Funclet>,
1465 callee_instance: Option<Instance<'tcx>>,
1466 ) {
1467 let call =
1468 self.call(llty, caller_attrs, Some(fn_abi), llfn, args, funclet, callee_instance);
1469 llvm::LLVMSetTailCallKind(call, llvm::TailCallKind::MustTail);
1470
1471 match &fn_abi.ret.mode {
1472 PassMode::Ignore | PassMode::Indirect { .. } => self.ret_void(),
1473 PassMode::Direct(_) | PassMode::Pair { .. } | PassMode::Cast { .. } => self.ret(call),
1474 }
1475 }
1476
1477 fn zext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
1478 unsafe { llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty, UNNAMED) }
1479 }
1480
1481 fn apply_attrs_to_cleanup_callsite(&mut self, llret: &'ll Value) {
1482 let cold_inline = llvm::AttributeKind::Cold.create_attr(self.llcx);
1484 attributes::apply_to_callsite(llret, llvm::AttributePlace::Function, &[cold_inline]);
1485 }
1486}
1487
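// Resolves a static to a pointer value; thread-local statics go through the
// `llvm.threadlocal.address` intrinsic before being cast to an opaque pointer.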
1488impl<'ll> StaticBuilderMethods for Builder<'_, 'll, '_> {
1489 fn get_static(&mut self, def_id: DefId) -> &'ll Value {
1490 let global = self.cx().get_static(def_id);
1492 if self.cx().tcx.is_thread_local_static(def_id) {
1493 let pointer =
1494 self.call_intrinsic("llvm.threadlocal.address", &[self.val_ty(global)], &[global]);
1495 self.pointercast(pointer, self.type_ptr())
1497 } else {
1498 self.cx().const_pointercast(global, self.type_ptr())
1500 }
1501 }
1502}
1503
1504impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
1505 pub(crate) fn llfn(&self) -> &'ll Value {
1506 unsafe { llvm::LLVMGetBasicBlockParent(self.llbb()) }
1507 }
1508}
1509
1510impl<'a, 'll, CX: Borrow<SCx<'ll>>> GenericBuilder<'a, 'll, CX> {
1511 fn position_at_start(&mut self, llbb: &'ll BasicBlock) {
1512 unsafe {
1513 llvm::LLVMRustPositionBuilderAtStart(self.llbuilder, llbb);
1514 }
1515 }
1516}
1517impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
1518 fn align_metadata(&mut self, load: &'ll Value, align: Align) {
1519 let md = [llvm::LLVMValueAsMetadata(self.cx.const_u64(align.bytes()))];
1520 self.set_metadata_node(load, llvm::MD_align, &md);
1521 }
1522
1523 fn noundef_metadata(&mut self, load: &'ll Value) {
1524 self.set_metadata_node(load, llvm::MD_noundef, &[]);
1525 }
1526
1527 pub(crate) fn set_unpredictable(&mut self, inst: &'ll Value) {
1528 self.set_metadata_node(inst, llvm::MD_unpredictable, &[]);
1529 }
1530
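// Fills `dest` with `count` copies of `cg_elem` using an index-based loop: a header
// block with a phi over the index, an unsigned-less-than exit test, and a body block
// that stores the element and increments the index.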
1531 fn write_operand_repeatedly_optimized(
1532 &mut self,
1533 cg_elem: OperandRef<'tcx, &'ll Value>,
1534 count: u64,
1535 dest: PlaceRef<'tcx, &'ll Value>,
1536 ) {
1537 let zero = self.const_usize(0);
1538 let count = self.const_usize(count);
1539
1540 let header_bb = self.append_sibling_block("repeat_loop_header");
1541 let body_bb = self.append_sibling_block("repeat_loop_body");
1542 let next_bb = self.append_sibling_block("repeat_loop_next");
1543
1544 self.br(header_bb);
1545
1546 let mut header_bx = Self::build(self.cx, header_bb);
1547 let i = header_bx.phi(self.val_ty(zero), &[zero], &[self.llbb()]);
1548
1549 let keep_going = header_bx.icmp(IntPredicate::IntULT, i, count);
1550 header_bx.cond_br(keep_going, body_bb, next_bb);
1551
1552 let mut body_bx = Self::build(self.cx, body_bb);
1553 let dest_elem = dest.project_index(&mut body_bx, i);
1554 cg_elem.val.store(&mut body_bx, dest_elem);
1555
1556 let next = body_bx.unchecked_uadd(i, self.const_usize(1));
1557 body_bx.br(header_bb);
1558 header_bx.add_incoming_to_phi(i, next, body_bb);
1559
1560 *self = Self::build(self.cx, next_bb);
1561 }
1562
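// Pointer-increment variant of the loop above: walks a raw element pointer from
// `start` to `end` (compared with `!=`) instead of tracking an index.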
1563 fn write_operand_repeatedly_unoptimized(
1564 &mut self,
1565 cg_elem: OperandRef<'tcx, &'ll Value>,
1566 count: u64,
1567 dest: PlaceRef<'tcx, &'ll Value>,
1568 ) {
1569 let zero = self.const_usize(0);
1570 let count = self.const_usize(count);
1571 let start = dest.project_index(self, zero).val.llval;
1572 let end = dest.project_index(self, count).val.llval;
1573
1574 let header_bb = self.append_sibling_block("repeat_loop_header");
1575 let body_bb = self.append_sibling_block("repeat_loop_body");
1576 let next_bb = self.append_sibling_block("repeat_loop_next");
1577
1578 self.br(header_bb);
1579
1580 let mut header_bx = Self::build(self.cx, header_bb);
1581 let current = header_bx.phi(self.val_ty(start), &[start], &[self.llbb()]);
1582
1583 let keep_going = header_bx.icmp(IntPredicate::IntNE, current, end);
1584 header_bx.cond_br(keep_going, body_bb, next_bb);
1585
1586 let mut body_bx = Self::build(self.cx, body_bb);
1587 let align = dest.val.align.restrict_for_offset(dest.layout.field(self.cx(), 0).size);
1588 cg_elem
1589 .val
1590 .store(&mut body_bx, PlaceRef::new_sized_aligned(current, cg_elem.layout, align));
1591
1592 let next = body_bx.inbounds_gep(
1593 self.backend_type(cg_elem.layout),
1594 current,
1595 &[self.const_usize(1)],
1596 );
1597 body_bx.br(header_bb);
1598 header_bx.add_incoming_to_phi(current, next, body_bb);
1599
1600 *self = Self::build(self.cx, next_bb);
1601 }
1602
1603 pub(crate) fn minnum(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
1604 self.call_intrinsic("llvm.minnum", &[self.val_ty(lhs)], &[lhs, rhs])
1605 }
1606
1607 pub(crate) fn maxnum(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
1608 self.call_intrinsic("llvm.maxnum", &[self.val_ty(lhs)], &[lhs, rhs])
1609 }
1610
1611 pub(crate) fn insert_element(
1612 &mut self,
1613 vec: &'ll Value,
1614 elt: &'ll Value,
1615 idx: &'ll Value,
1616 ) -> &'ll Value {
1617 unsafe { llvm::LLVMBuildInsertElement(self.llbuilder, vec, elt, idx, UNNAMED) }
1618 }
1619
1620 pub(crate) fn shuffle_vector(
1621 &mut self,
1622 v1: &'ll Value,
1623 v2: &'ll Value,
1624 mask: &'ll Value,
1625 ) -> &'ll Value {
1626 unsafe { llvm::LLVMBuildShuffleVector(self.llbuilder, v1, v2, mask, UNNAMED) }
1627 }
1628
1629 pub(crate) fn vector_reduce_fadd(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
1630 self.call_intrinsic("llvm.vector.reduce.fadd", &[self.val_ty(src)], &[acc, src])
1631 }
1632 pub(crate) fn vector_reduce_fmul(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
1633 self.call_intrinsic("llvm.vector.reduce.fmul", &[self.val_ty(src)], &[acc, src])
1634 }
1635 pub(crate) fn vector_reduce_fadd_reassoc(
1636 &mut self,
1637 acc: &'ll Value,
1638 src: &'ll Value,
1639 ) -> &'ll Value {
1640 unsafe {
1641 let instr =
1642 self.call_intrinsic("llvm.vector.reduce.fadd", &[self.val_ty(src)], &[acc, src]);
1643 llvm::LLVMRustSetAllowReassoc(instr);
1644 instr
1645 }
1646 }
1647 pub(crate) fn vector_reduce_fmul_reassoc(
1648 &mut self,
1649 acc: &'ll Value,
1650 src: &'ll Value,
1651 ) -> &'ll Value {
1652 unsafe {
1653 let instr =
1654 self.call_intrinsic("llvm.vector.reduce.fmul", &[self.val_ty(src)], &[acc, src]);
1655 llvm::LLVMRustSetAllowReassoc(instr);
1656 instr
1657 }
1658 }
1659 pub(crate) fn vector_reduce_add(&mut self, src: &'ll Value) -> &'ll Value {
1660 self.call_intrinsic("llvm.vector.reduce.add", &[self.val_ty(src)], &[src])
1661 }
1662 pub(crate) fn vector_reduce_mul(&mut self, src: &'ll Value) -> &'ll Value {
1663 self.call_intrinsic("llvm.vector.reduce.mul", &[self.val_ty(src)], &[src])
1664 }
1665 pub(crate) fn vector_reduce_and(&mut self, src: &'ll Value) -> &'ll Value {
1666 self.call_intrinsic("llvm.vector.reduce.and", &[self.val_ty(src)], &[src])
1667 }
1668 pub(crate) fn vector_reduce_or(&mut self, src: &'ll Value) -> &'ll Value {
1669 self.call_intrinsic("llvm.vector.reduce.or", &[self.val_ty(src)], &[src])
1670 }
1671 pub(crate) fn vector_reduce_xor(&mut self, src: &'ll Value) -> &'ll Value {
1672 self.call_intrinsic("llvm.vector.reduce.xor", &[self.val_ty(src)], &[src])
1673 }
1674 pub(crate) fn vector_reduce_fmin(&mut self, src: &'ll Value) -> &'ll Value {
1675 self.call_intrinsic("llvm.vector.reduce.fmin", &[self.val_ty(src)], &[src])
1676 }
1677 pub(crate) fn vector_reduce_fmax(&mut self, src: &'ll Value) -> &'ll Value {
1678 self.call_intrinsic("llvm.vector.reduce.fmax", &[self.val_ty(src)], &[src])
1679 }
1680 pub(crate) fn vector_reduce_min(&mut self, src: &'ll Value, is_signed: bool) -> &'ll Value {
1681 self.call_intrinsic(
1682 if is_signed { "llvm.vector.reduce.smin" } else { "llvm.vector.reduce.umin" },
1683 &[self.val_ty(src)],
1684 &[src],
1685 )
1686 }
1687 pub(crate) fn vector_reduce_max(&mut self, src: &'ll Value, is_signed: bool) -> &'ll Value {
1688 self.call_intrinsic(
1689 if is_signed { "llvm.vector.reduce.smax" } else { "llvm.vector.reduce.umax" },
1690 &[self.val_ty(src)],
1691 &[src],
1692 )
1693 }
1694}
1695impl<'a, 'll, CX: Borrow<SCx<'ll>>> GenericBuilder<'a, 'll, CX> {
1696 pub(crate) fn add_clause(&mut self, landing_pad: &'ll Value, clause: &'ll Value) {
1697 unsafe {
1698 llvm::LLVMAddClause(landing_pad, clause);
1699 }
1700 }
1701
1702 pub(crate) fn catch_ret(
1703 &mut self,
1704 funclet: &Funclet<'ll>,
1705 unwind: &'ll BasicBlock,
1706 ) -> &'ll Value {
1707 let ret = unsafe { llvm::LLVMBuildCatchRet(self.llbuilder, funclet.cleanuppad(), unwind) };
1708 ret.expect("LLVM does not have support for catchret")
1709 }
1710
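// Sanity-checks a call: asserts that `fn_ty` really is a function type, then compares
// each argument's LLVM type with the corresponding parameter type, bitcasting any
// mismatched argument. Returns the arguments unchanged (borrowed) when they all match.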
1711 pub(crate) fn check_call<'b>(
1712 &mut self,
1713 typ: &str,
1714 fn_ty: &'ll Type,
1715 llfn: &'ll Value,
1716 args: &'b [&'ll Value],
1717 ) -> Cow<'b, [&'ll Value]> {
1718 assert!(
1719 self.cx.type_kind(fn_ty) == TypeKind::Function,
1720 "builder::{typ} not passed a function, but {fn_ty:?}"
1721 );
1722
1723 let param_tys = self.cx.func_params_types(fn_ty);
1724
1725 let all_args_match = iter::zip(¶m_tys, args.iter().map(|&v| self.cx.val_ty(v)))
1726 .all(|(expected_ty, actual_ty)| *expected_ty == actual_ty);
1727
1728 if all_args_match {
1729 return Cow::Borrowed(args);
1730 }
1731
1732 let casted_args: Vec<_> = iter::zip(param_tys, args)
1733 .enumerate()
1734 .map(|(i, (expected_ty, &actual_val))| {
1735 let actual_ty = self.cx.val_ty(actual_val);
1736 if expected_ty != actual_ty {
1737 debug!(
1738 "type mismatch in function call of {:?}. \
1739 Expected {:?} for param {}, got {:?}; injecting bitcast",
1740 llfn, expected_ty, i, actual_ty
1741 );
1742 self.bitcast(actual_val, expected_ty)
1743 } else {
1744 actual_val
1745 }
1746 })
1747 .collect();
1748
1749 Cow::Owned(casted_args)
1750 }
1751
1752 pub(crate) fn va_arg(&mut self, list: &'ll Value, ty: &'ll Type) -> &'ll Value {
1753 unsafe { llvm::LLVMBuildVAArg(self.llbuilder, list, ty, UNNAMED) }
1754 }
1755}
1756
1757impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
1758 pub(crate) fn call_intrinsic(
1759 &mut self,
1760 base_name: impl Into<Cow<'static, str>>,
1761 type_params: &[&'ll Type],
1762 args: &[&'ll Value],
1763 ) -> &'ll Value {
1764 let (ty, f) = self.cx.get_intrinsic(base_name.into(), type_params);
1765 self.call(ty, None, None, f, args, None, None)
1766 }
1767
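// Emits `llvm.lifetime.start`/`llvm.lifetime.end`. Zero-sized ranges and builds that do
// not emit lifetime markers are skipped. From LLVM 22 onward the intrinsics take only
// the (cast-stripped) pointer; older versions also receive the size in bytes.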
1768 fn call_lifetime_intrinsic(&mut self, intrinsic: &'static str, ptr: &'ll Value, size: Size) {
1769 let size = size.bytes();
1770 if size == 0 {
1771 return;
1772 }
1773
1774 if !self.cx().sess().emit_lifetime_markers() {
1775 return;
1776 }
1777
1778 if crate::llvm_util::get_version() >= (22, 0, 0) {
1779 let ptr = unsafe { llvm::LLVMRustStripPointerCasts(ptr) };
1782 self.call_intrinsic(intrinsic, &[self.val_ty(ptr)], &[ptr]);
1783 } else {
1784 self.call_intrinsic(intrinsic, &[self.val_ty(ptr)], &[self.cx.const_u64(size), ptr]);
1785 }
1786 }
1787}
1788impl<'a, 'll, CX: Borrow<SCx<'ll>>> GenericBuilder<'a, 'll, CX> {
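// Builds a phi node with one incoming value per predecessor block; `vals` and `bbs`
// must have equal length. For example (hypothetical values), a two-way merge would be
// built as `bx.phi(bx.type_i32(), &[a, b], &[bb_a, bb_b])`.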
1789 pub(crate) fn phi(
1790 &mut self,
1791 ty: &'ll Type,
1792 vals: &[&'ll Value],
1793 bbs: &[&'ll BasicBlock],
1794 ) -> &'ll Value {
1795 assert_eq!(vals.len(), bbs.len());
1796 let phi = unsafe { llvm::LLVMBuildPhi(self.llbuilder, ty, UNNAMED) };
1797 unsafe {
1798 llvm::LLVMAddIncoming(phi, vals.as_ptr(), bbs.as_ptr(), vals.len() as c_uint);
1799 phi
1800 }
1801 }
1802
1803 fn add_incoming_to_phi(&mut self, phi: &'ll Value, val: &'ll Value, bb: &'ll BasicBlock) {
1804 unsafe {
1805 llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
1806 }
1807 }
1808}
1809impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
1810 pub(crate) fn landing_pad(
1811 &mut self,
1812 ty: &'ll Type,
1813 pers_fn: &'ll Value,
1814 num_clauses: usize,
1815 ) -> &'ll Value {
1816 self.set_personality_fn(pers_fn);
1820 unsafe {
1821 llvm::LLVMBuildLandingPad(self.llbuilder, ty, None, num_clauses as c_uint, UNNAMED)
1822 }
1823 }
1824
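// Like `call`, but emits a `callbr` with a default destination and a set of indirect
// destinations (as used for inline assembly with label targets), applying the same
// operand-bundle and CFI handling.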
1825 pub(crate) fn callbr(
1826 &mut self,
1827 llty: &'ll Type,
1828 fn_attrs: Option<&CodegenFnAttrs>,
1829 fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
1830 llfn: &'ll Value,
1831 args: &[&'ll Value],
1832 default_dest: &'ll BasicBlock,
1833 indirect_dest: &[&'ll BasicBlock],
1834 funclet: Option<&Funclet<'ll>>,
1835 instance: Option<Instance<'tcx>>,
1836 ) -> &'ll Value {
1837 debug!("invoke {:?} with args ({:?})", llfn, args);
1838
1839 let args = self.check_call("callbr", llty, llfn, args);
1840 let funclet_bundle = funclet.map(|funclet| funclet.bundle());
1841 let mut bundles: SmallVec<[_; 2]> = SmallVec::new();
1842 if let Some(funclet_bundle) = funclet_bundle {
1843 bundles.push(funclet_bundle);
1844 }
1845
1846 self.cfi_type_test(fn_attrs, fn_abi, instance, llfn);
1848
1849 let kcfi_bundle = self.kcfi_operand_bundle(fn_attrs, fn_abi, instance, llfn);
1851 if let Some(kcfi_bundle) = kcfi_bundle.as_ref().map(|b| b.as_ref()) {
1852 bundles.push(kcfi_bundle);
1853 }
1854
1855 let callbr = unsafe {
1856 llvm::LLVMBuildCallBr(
1857 self.llbuilder,
1858 llty,
1859 llfn,
1860 default_dest,
1861 indirect_dest.as_ptr(),
1862 indirect_dest.len() as c_uint,
1863 args.as_ptr(),
1864 args.len() as c_uint,
1865 bundles.as_ptr(),
1866 bundles.len() as c_uint,
1867 UNNAMED,
1868 )
1869 };
1870 if let Some(fn_abi) = fn_abi {
1871 fn_abi.apply_attrs_callsite(self, callbr);
1872 }
1873 callbr
1874 }
1875
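// For indirect calls under the CFI sanitizer, emits an `llvm.type.test` of the callee
// against the expected type id and branches to a failure block that aborts, unless the
// caller has opted out of CFI.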
1876 fn cfi_type_test(
1878 &mut self,
1879 fn_attrs: Option<&CodegenFnAttrs>,
1880 fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
1881 instance: Option<Instance<'tcx>>,
1882 llfn: &'ll Value,
1883 ) {
1884 let is_indirect_call = unsafe { llvm::LLVMRustIsNonGVFunctionPointerTy(llfn) };
1885 if self.tcx.sess.is_sanitizer_cfi_enabled()
1886 && let Some(fn_abi) = fn_abi
1887 && is_indirect_call
1888 {
1889 if let Some(fn_attrs) = fn_attrs
1890 && fn_attrs.sanitizers.disabled.contains(SanitizerSet::CFI)
1891 {
1892 return;
1893 }
1894
1895 let mut options = cfi::TypeIdOptions::empty();
1896 if self.tcx.sess.is_sanitizer_cfi_generalize_pointers_enabled() {
1897 options.insert(cfi::TypeIdOptions::GENERALIZE_POINTERS);
1898 }
1899 if self.tcx.sess.is_sanitizer_cfi_normalize_integers_enabled() {
1900 options.insert(cfi::TypeIdOptions::NORMALIZE_INTEGERS);
1901 }
1902
1903 let typeid = if let Some(instance) = instance {
1904 cfi::typeid_for_instance(self.tcx, instance, options)
1905 } else {
1906 cfi::typeid_for_fnabi(self.tcx, fn_abi, options)
1907 };
1908 let typeid_metadata = self.cx.create_metadata(typeid.as_bytes());
1909 let dbg_loc = self.get_dbg_loc();
1910
1911 let typeid = self.get_metadata_value(typeid_metadata);
1915 let cond = self.call_intrinsic("llvm.type.test", &[], &[llfn, typeid]);
1916 let bb_pass = self.append_sibling_block("type_test.pass");
1917 let bb_fail = self.append_sibling_block("type_test.fail");
1918 self.cond_br(cond, bb_pass, bb_fail);
1919
1920 self.switch_to_block(bb_fail);
1921 if let Some(dbg_loc) = dbg_loc {
1922 self.set_dbg_loc(dbg_loc);
1923 }
1924 self.abort();
1925 self.unreachable();
1926
1927 self.switch_to_block(bb_pass);
1928 if let Some(dbg_loc) = dbg_loc {
1929 self.set_dbg_loc(dbg_loc);
1930 }
1931 }
1932 }
1933
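// For indirect calls under the KCFI sanitizer, returns a "kcfi" operand bundle carrying
// the 32-bit type id of the expected signature; returns `None` otherwise or when the
// caller has opted out of KCFI.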
1934 fn kcfi_operand_bundle(
1936 &mut self,
1937 fn_attrs: Option<&CodegenFnAttrs>,
1938 fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
1939 instance: Option<Instance<'tcx>>,
1940 llfn: &'ll Value,
1941 ) -> Option<llvm::OperandBundleBox<'ll>> {
1942 let is_indirect_call = unsafe { llvm::LLVMRustIsNonGVFunctionPointerTy(llfn) };
1943 let kcfi_bundle = if self.tcx.sess.is_sanitizer_kcfi_enabled()
1944 && let Some(fn_abi) = fn_abi
1945 && is_indirect_call
1946 {
1947 if let Some(fn_attrs) = fn_attrs
1948 && fn_attrs.sanitizers.disabled.contains(SanitizerSet::KCFI)
1949 {
1950 return None;
1951 }
1952
1953 let mut options = kcfi::TypeIdOptions::empty();
1954 if self.tcx.sess.is_sanitizer_cfi_generalize_pointers_enabled() {
1955 options.insert(kcfi::TypeIdOptions::GENERALIZE_POINTERS);
1956 }
1957 if self.tcx.sess.is_sanitizer_cfi_normalize_integers_enabled() {
1958 options.insert(kcfi::TypeIdOptions::NORMALIZE_INTEGERS);
1959 }
1960
1961 let kcfi_typeid = if let Some(instance) = instance {
1962 kcfi::typeid_for_instance(self.tcx, instance, options)
1963 } else {
1964 kcfi::typeid_for_fnabi(self.tcx, fn_abi, options)
1965 };
1966
1967 Some(llvm::OperandBundleBox::new("kcfi", &[self.const_u32(kcfi_typeid)]))
1968 } else {
1969 None
1970 };
1971 kcfi_bundle
1972 }
1973
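// Emits `llvm.instrprof.increment`, bumping the `index`-th of `num_counters` counters
// for the function identified by `fn_name`/`hash` (used by coverage instrumentation).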
1975 #[instrument(level = "debug", skip(self))]
1976 pub(crate) fn instrprof_increment(
1977 &mut self,
1978 fn_name: &'ll Value,
1979 hash: &'ll Value,
1980 num_counters: &'ll Value,
1981 index: &'ll Value,
1982 ) {
1983 self.call_intrinsic("llvm.instrprof.increment", &[], &[fn_name, hash, num_counters, index]);
1984 }
1985}