1use std::cmp;
2
3use libc::c_uint;
4use rustc_abi::{
5 ArmCall, BackendRepr, CanonAbi, HasDataLayout, InterruptKind, Primitive, Reg, RegKind, Size,
6 X86Call,
7};
8use rustc_codegen_ssa::MemFlags;
9use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
10use rustc_codegen_ssa::mir::place::{PlaceRef, PlaceValue};
11use rustc_codegen_ssa::traits::*;
12use rustc_middle::ty::Ty;
13use rustc_middle::ty::layout::LayoutOf;
14use rustc_middle::{bug, ty};
15use rustc_session::{Session, config};
16use rustc_target::callconv::{
17 ArgAbi, ArgAttribute, ArgAttributes, ArgExtension, CastTarget, FnAbi, PassMode,
18};
19use rustc_target::spec::{Arch, SanitizerSet};
20use smallvec::SmallVec;
21
22use crate::attributes::{self, llfn_attrs_from_instance};
23use crate::builder::Builder;
24use crate::context::CodegenCx;
25use crate::llvm::{self, Attribute, AttributePlace, Type, Value};
26use crate::type_of::LayoutLlvmExt;
27
/// Helpers for translating a set of `ArgAttributes` into LLVM attributes and
/// attaching them either to a function declaration or to a single call site.
trait ArgAttributesExt {
    /// Attaches these attributes to argument/return slot `idx` of `llfn`.
    fn apply_attrs_to_llfn(&self, idx: AttributePlace, cx: &CodegenCx<'_, '_>, llfn: &Value);
    /// Attaches these attributes to argument/return slot `idx` of `callsite`.
    fn apply_attrs_to_callsite(
        &self,
        idx: AttributePlace,
        cx: &CodegenCx<'_, '_>,
        callsite: &Value,
    );
}
37
/// Attributes that affect the ABI itself; these are applied unconditionally,
/// regardless of optimization level (see `get_attrs`).
const ABI_AFFECTING_ATTRIBUTES: [(ArgAttribute, llvm::AttributeKind); 1] =
    [(ArgAttribute::InReg, llvm::AttributeKind::InReg)];

/// Attributes that only aid optimization; `get_attrs` applies them only when
/// optimizations are enabled.
const OPTIMIZATION_ATTRIBUTES: [(ArgAttribute, llvm::AttributeKind); 4] = [
    (ArgAttribute::NoAlias, llvm::AttributeKind::NoAlias),
    (ArgAttribute::NonNull, llvm::AttributeKind::NonNull),
    (ArgAttribute::ReadOnly, llvm::AttributeKind::ReadOnly),
    (ArgAttribute::NoUndef, llvm::AttributeKind::NoUndef),
];

/// `captures(...)`-style attributes; `get_attrs` applies at most one of these
/// (it stops at the first match).
const CAPTURES_ATTRIBUTES: [(ArgAttribute, llvm::AttributeKind); 3] = [
    (ArgAttribute::CapturesNone, llvm::AttributeKind::CapturesNone),
    (ArgAttribute::CapturesAddress, llvm::AttributeKind::CapturesAddress),
    (ArgAttribute::CapturesReadOnly, llvm::AttributeKind::CapturesReadOnly),
];
53
54fn get_attrs<'ll>(this: &ArgAttributes, cx: &CodegenCx<'ll, '_>) -> SmallVec<[&'ll Attribute; 8]> {
55 let mut regular = this.regular;
56
57 let mut attrs = SmallVec::new();
58
59 for (attr, llattr) in ABI_AFFECTING_ATTRIBUTES {
61 if regular.contains(attr) {
62 attrs.push(llattr.create_attr(cx.llcx));
63 }
64 }
65 if let Some(align) = this.pointee_align {
66 attrs.push(llvm::CreateAlignmentAttr(cx.llcx, align.bytes()));
67 }
68 match this.arg_ext {
69 ArgExtension::None => {}
70 ArgExtension::Zext => attrs.push(llvm::AttributeKind::ZExt.create_attr(cx.llcx)),
71 ArgExtension::Sext => attrs.push(llvm::AttributeKind::SExt.create_attr(cx.llcx)),
72 }
73
74 if cx.sess().opts.optimize != config::OptLevel::No {
76 let deref = this.pointee_size.bytes();
77 if deref != 0 {
78 if regular.contains(ArgAttribute::NonNull) {
79 attrs.push(llvm::CreateDereferenceableAttr(cx.llcx, deref));
80 } else {
81 attrs.push(llvm::CreateDereferenceableOrNullAttr(cx.llcx, deref));
82 }
83 regular -= ArgAttribute::NonNull;
84 }
85 for (attr, llattr) in OPTIMIZATION_ATTRIBUTES {
86 if regular.contains(attr) {
87 attrs.push(llattr.create_attr(cx.llcx));
88 }
89 }
90 for (attr, llattr) in CAPTURES_ATTRIBUTES {
91 if regular.contains(attr) {
92 attrs.push(llattr.create_attr(cx.llcx));
93 break;
94 }
95 }
96 } else if cx.tcx.sess.sanitizers().contains(SanitizerSet::MEMORY) {
97 if regular.contains(ArgAttribute::NoUndef) {
101 attrs.push(llvm::AttributeKind::NoUndef.create_attr(cx.llcx));
102 }
103 }
104
105 attrs
106}
107
impl ArgAttributesExt for ArgAttributes {
    fn apply_attrs_to_llfn(&self, idx: AttributePlace, cx: &CodegenCx<'_, '_>, llfn: &Value) {
        // Materialize the LLVM attributes once, then attach them to the declaration.
        let attrs = get_attrs(self, cx);
        attributes::apply_to_llfn(llfn, idx, &attrs);
    }

    fn apply_attrs_to_callsite(
        &self,
        idx: AttributePlace,
        cx: &CodegenCx<'_, '_>,
        callsite: &Value,
    ) {
        // Same as above, but for a single call/invoke instruction.
        let attrs = get_attrs(self, cx);
        attributes::apply_to_callsite(callsite, idx, &attrs);
    }
}
124
/// Conversion from an ABI-level type description to the concrete LLVM type.
pub(crate) trait LlvmType {
    fn llvm_type<'ll>(&self, cx: &CodegenCx<'ll, '_>) -> &'ll Type;
}
128
129impl LlvmType for Reg {
130 fn llvm_type<'ll>(&self, cx: &CodegenCx<'ll, '_>) -> &'ll Type {
131 match self.kind {
132 RegKind::Integer => cx.type_ix(self.size.bits()),
133 RegKind::Float => match self.size.bits() {
134 16 => cx.type_f16(),
135 32 => cx.type_f32(),
136 64 => cx.type_f64(),
137 128 => cx.type_f128(),
138 _ => ::rustc_middle::util::bug::bug_fmt(format_args!("unsupported float: {0:?}",
self))bug!("unsupported float: {:?}", self),
139 },
140 RegKind::Vector => cx.type_vector(cx.type_i8(), self.size.bytes()),
141 }
142 }
143}
144
145impl LlvmType for CastTarget {
146 fn llvm_type<'ll>(&self, cx: &CodegenCx<'ll, '_>) -> &'ll Type {
147 let rest_ll_unit = self.rest.unit.llvm_type(cx);
148 let rest_count = if self.rest.total == Size::ZERO {
149 0
150 } else {
151 match (&(self.rest.unit.size), &(Size::ZERO)) {
(left_val, right_val) => {
if *left_val == *right_val {
let kind = ::core::panicking::AssertKind::Ne;
::core::panicking::assert_failed(kind, &*left_val, &*right_val,
::core::option::Option::Some(format_args!("total size {0:?} cannot be divided into units of zero size",
self.rest.total)));
}
}
};assert_ne!(
152 self.rest.unit.size,
153 Size::ZERO,
154 "total size {:?} cannot be divided into units of zero size",
155 self.rest.total
156 );
157 if !self.rest.total.bytes().is_multiple_of(self.rest.unit.size.bytes()) {
158 match (&self.rest.unit.kind, &RegKind::Integer) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val, &*right_val,
::core::option::Option::Some(format_args!("only int regs can be split")));
}
}
};assert_eq!(self.rest.unit.kind, RegKind::Integer, "only int regs can be split");
159 }
160 self.rest.total.bytes().div_ceil(self.rest.unit.size.bytes())
161 };
162
163 if self.prefix.iter().all(|x| x.is_none()) {
166 if rest_count == 1 && (!self.rest.is_consecutive || self.rest.unit != Reg::i128()) {
170 return rest_ll_unit;
171 }
172
173 return cx.type_array(rest_ll_unit, rest_count);
174 }
175
176 let prefix_args =
178 self.prefix.iter().flat_map(|option_reg| option_reg.map(|reg| reg.llvm_type(cx)));
179 let rest_args = (0..rest_count).map(|_| rest_ll_unit);
180 let args: Vec<_> = prefix_args.chain(rest_args).collect();
181 cx.type_struct(&args, false)
182 }
183}
184
/// Storing helpers on an `ArgAbi`: move an LLVM value (or the current
/// function's parameters) into a Rust place according to the `PassMode`.
trait ArgAbiExt<'ll, 'tcx> {
    /// Stores `val` into `dst` according to this argument's `PassMode`.
    fn store(
        &self,
        bx: &mut Builder<'_, 'll, 'tcx>,
        val: &'ll Value,
        dst: PlaceRef<'tcx, &'ll Value>,
    );
    /// Reads this argument from the current function's parameters starting at
    /// `*idx` and stores it into `dst`, advancing `*idx` past what was consumed.
    fn store_fn_arg(
        &self,
        bx: &mut Builder<'_, 'll, 'tcx>,
        idx: &mut usize,
        dst: PlaceRef<'tcx, &'ll Value>,
    );
}
199
200impl<'ll, 'tcx> ArgAbiExt<'ll, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
201 fn store(
206 &self,
207 bx: &mut Builder<'_, 'll, 'tcx>,
208 val: &'ll Value,
209 dst: PlaceRef<'tcx, &'ll Value>,
210 ) {
211 match &self.mode {
212 PassMode::Ignore => {}
213 PassMode::Indirect { attrs, meta_attrs: None, on_stack: _ } => {
215 let align = attrs.pointee_align.unwrap_or(self.layout.align.abi);
216 OperandValue::Ref(PlaceValue::new_sized(val, align)).store(bx, dst);
217 }
218 PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
220 ::rustc_middle::util::bug::bug_fmt(format_args!("unsized `ArgAbi` cannot be stored"));bug!("unsized `ArgAbi` cannot be stored");
221 }
222 PassMode::Cast { cast, pad_i32: _ } => {
223 let scratch_size = cast.size(bx);
227 let scratch_align = cast.align(bx);
228 let copy_bytes =
235 cmp::min(cast.unaligned_size(bx).bytes(), self.layout.size.bytes());
236 let llscratch = bx.alloca(scratch_size, scratch_align);
238 bx.lifetime_start(llscratch, scratch_size);
239 rustc_codegen_ssa::mir::store_cast(bx, cast, val, llscratch, scratch_align);
241 bx.memcpy(
243 dst.val.llval,
244 self.layout.align.abi,
245 llscratch,
246 scratch_align,
247 bx.const_usize(copy_bytes),
248 MemFlags::empty(),
249 None,
250 );
251 bx.lifetime_end(llscratch, scratch_size);
252 }
253 PassMode::Pair(..) | PassMode::Direct { .. } => {
254 OperandRef::from_immediate_or_packed_pair(bx, val, self.layout).val.store(bx, dst);
255 }
256 }
257 }
258
259 fn store_fn_arg(
260 &self,
261 bx: &mut Builder<'_, 'll, 'tcx>,
262 idx: &mut usize,
263 dst: PlaceRef<'tcx, &'ll Value>,
264 ) {
265 let mut next = || {
266 let val = llvm::get_param(bx.llfn(), *idx as c_uint);
267 *idx += 1;
268 val
269 };
270 match self.mode {
271 PassMode::Ignore => {}
272 PassMode::Pair(..) => {
273 OperandValue::Pair(next(), next()).store(bx, dst);
274 }
275 PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
276 ::rustc_middle::util::bug::bug_fmt(format_args!("unsized `ArgAbi` cannot be stored"));bug!("unsized `ArgAbi` cannot be stored");
277 }
278 PassMode::Direct(_)
279 | PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ }
280 | PassMode::Cast { .. } => {
281 let next_arg = next();
282 self.store(bx, next_arg, dst);
283 }
284 }
285 }
286}
287
impl<'ll, 'tcx> ArgAbiBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
    /// Stores the next function parameter(s), starting at `*idx`, into `dst`;
    /// advances `*idx` past the consumed LLVM parameters.
    fn store_fn_arg(
        &mut self,
        arg_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        idx: &mut usize,
        dst: PlaceRef<'tcx, Self::Value>,
    ) {
        arg_abi.store_fn_arg(self, idx, dst)
    }
    /// Stores `val`, in the ABI form described by `arg_abi`, into `dst`.
    fn store_arg(
        &mut self,
        arg_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        val: &'ll Value,
        dst: PlaceRef<'tcx, &'ll Value>,
    ) {
        arg_abi.store(self, val, dst)
    }
}
306
/// LLVM-specific helpers on a `FnAbi`: type lowering, calling convention, and
/// attribute application for declarations and call sites.
pub(crate) trait FnAbiLlvmExt<'ll, 'tcx> {
    /// Returns the LLVM function type for this signature.
    fn llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type;
    /// Returns the LLVM pointer type used for pointers to this function.
    fn ptr_to_llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type;
    /// Returns the LLVM calling convention for this signature.
    fn llvm_cconv(&self, cx: &CodegenCx<'ll, 'tcx>) -> llvm::CallConv;

    /// Applies ABI-derived attributes to a function declaration
    /// (and, when `instance` is known, instance-derived attributes too).
    fn apply_attrs_llfn(
        &self,
        cx: &CodegenCx<'ll, 'tcx>,
        llfn: &'ll Value,
        instance: Option<ty::Instance<'tcx>>,
    );

    /// Applies ABI-derived attributes to a call/invoke instruction.
    fn apply_attrs_callsite(&self, bx: &mut Builder<'_, 'll, 'tcx>, callsite: &'ll Value);
}
323
324impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
325 fn llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type {
326 let args =
329 if self.c_variadic { &self.args[..self.fixed_count as usize] } else { &self.args };
330
331 let mut llargument_tys = Vec::with_capacity(
333 self.args.len() + if let PassMode::Indirect { .. } = self.ret.mode { 1 } else { 0 },
334 );
335
336 let llreturn_ty = match &self.ret.mode {
337 PassMode::Ignore => cx.type_void(),
338 PassMode::Direct(_) | PassMode::Pair(..) => self.ret.layout.immediate_llvm_type(cx),
339 PassMode::Cast { cast, pad_i32: _ } => cast.llvm_type(cx),
340 PassMode::Indirect { .. } => {
341 llargument_tys.push(cx.type_ptr());
342 cx.type_void()
343 }
344 };
345
346 for arg in args {
347 let llarg_ty = match &arg.mode {
351 PassMode::Ignore => continue,
352 PassMode::Direct(_) => {
353 arg.layout.immediate_llvm_type(cx)
357 }
358 PassMode::Pair(..) => {
359 llargument_tys.push(arg.layout.scalar_pair_element_llvm_type(cx, 0, true));
363 llargument_tys.push(arg.layout.scalar_pair_element_llvm_type(cx, 1, true));
364 continue;
365 }
366 PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
367 let ptr_ty = Ty::new_mut_ptr(cx.tcx, arg.layout.ty);
372 let ptr_layout = cx.layout_of(ptr_ty);
373 llargument_tys.push(ptr_layout.scalar_pair_element_llvm_type(cx, 0, true));
374 llargument_tys.push(ptr_layout.scalar_pair_element_llvm_type(cx, 1, true));
375 continue;
376 }
377 PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ } => cx.type_ptr(),
378 PassMode::Cast { cast, pad_i32 } => {
379 if *pad_i32 {
381 llargument_tys.push(Reg::i32().llvm_type(cx));
382 }
383 cast.llvm_type(cx)
386 }
387 };
388 llargument_tys.push(llarg_ty);
389 }
390
391 if self.c_variadic {
392 cx.type_variadic_func(&llargument_tys, llreturn_ty)
393 } else {
394 cx.type_func(&llargument_tys, llreturn_ty)
395 }
396 }
397
398 fn ptr_to_llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type {
399 cx.type_ptr_ext(cx.data_layout().instruction_address_space)
400 }
401
402 fn llvm_cconv(&self, cx: &CodegenCx<'ll, 'tcx>) -> llvm::CallConv {
403 to_llvm_calling_convention(cx.tcx.sess, self.conv)
404 }
405
406 fn apply_attrs_llfn(
407 &self,
408 cx: &CodegenCx<'ll, 'tcx>,
409 llfn: &'ll Value,
410 instance: Option<ty::Instance<'tcx>>,
411 ) {
412 let mut func_attrs = SmallVec::<[_; 3]>::new();
413 if self.ret.layout.is_uninhabited() {
414 func_attrs.push(llvm::AttributeKind::NoReturn.create_attr(cx.llcx));
415 }
416 if !self.can_unwind {
417 func_attrs.push(llvm::AttributeKind::NoUnwind.create_attr(cx.llcx));
418 }
419 match self.conv {
420 CanonAbi::Interrupt(InterruptKind::RiscvMachine) => {
421 func_attrs.push(llvm::CreateAttrStringValue(cx.llcx, "interrupt", "machine"))
422 }
423 CanonAbi::Interrupt(InterruptKind::RiscvSupervisor) => {
424 func_attrs.push(llvm::CreateAttrStringValue(cx.llcx, "interrupt", "supervisor"))
425 }
426 CanonAbi::Arm(ArmCall::CCmseNonSecureEntry) => {
427 func_attrs.push(llvm::CreateAttrString(cx.llcx, "cmse_nonsecure_entry"))
428 }
429 _ => (),
430 }
431 attributes::apply_to_llfn(llfn, llvm::AttributePlace::Function, &{ func_attrs });
432
433 let mut i = 0;
434 let mut apply = |attrs: &ArgAttributes| {
435 attrs.apply_attrs_to_llfn(llvm::AttributePlace::Argument(i), cx, llfn);
436 i += 1;
437 i - 1
438 };
439
440 let apply_range_attr = |idx: AttributePlace, scalar: rustc_abi::Scalar| {
441 if cx.sess().opts.optimize != config::OptLevel::No
442 && #[allow(non_exhaustive_omitted_patterns)] match scalar.primitive() {
Primitive::Int(..) => true,
_ => false,
}matches!(scalar.primitive(), Primitive::Int(..))
443 && !scalar.is_bool()
447 && !scalar.is_always_valid(cx)
449 {
450 attributes::apply_to_llfn(
451 llfn,
452 idx,
453 &[llvm::CreateRangeAttr(cx.llcx, scalar.size(cx), scalar.valid_range(cx))],
454 );
455 }
456 };
457
458 match &self.ret.mode {
459 PassMode::Direct(attrs) => {
460 attrs.apply_attrs_to_llfn(llvm::AttributePlace::ReturnValue, cx, llfn);
461 if let BackendRepr::Scalar(scalar) = self.ret.layout.backend_repr {
462 apply_range_attr(llvm::AttributePlace::ReturnValue, scalar);
463 }
464 }
465 PassMode::Indirect { attrs, meta_attrs: _, on_stack } => {
466 if !!on_stack { ::core::panicking::panic("assertion failed: !on_stack") };assert!(!on_stack);
467 let i = apply(attrs);
468 let sret = llvm::CreateStructRetAttr(
469 cx.llcx,
470 cx.type_array(cx.type_i8(), self.ret.layout.size.bytes()),
471 );
472 attributes::apply_to_llfn(llfn, llvm::AttributePlace::Argument(i), &[sret]);
473 if cx.sess().opts.optimize != config::OptLevel::No {
474 attributes::apply_to_llfn(
475 llfn,
476 llvm::AttributePlace::Argument(i),
477 &[
478 llvm::AttributeKind::Writable.create_attr(cx.llcx),
479 llvm::AttributeKind::DeadOnUnwind.create_attr(cx.llcx),
480 ],
481 );
482 }
483 }
484 PassMode::Cast { cast, pad_i32: _ } => {
485 cast.attrs.apply_attrs_to_llfn(llvm::AttributePlace::ReturnValue, cx, llfn);
486 }
487 _ => {}
488 }
489 for arg in self.args.iter() {
490 match &arg.mode {
491 PassMode::Ignore => {}
492 PassMode::Indirect { attrs, meta_attrs: None, on_stack: true } => {
493 let i = apply(attrs);
494 let byval = llvm::CreateByValAttr(
495 cx.llcx,
496 cx.type_array(cx.type_i8(), arg.layout.size.bytes()),
497 );
498 attributes::apply_to_llfn(llfn, llvm::AttributePlace::Argument(i), &[byval]);
499 }
500 PassMode::Direct(attrs) => {
501 let i = apply(attrs);
502 if let BackendRepr::Scalar(scalar) = arg.layout.backend_repr {
503 apply_range_attr(llvm::AttributePlace::Argument(i), scalar);
504 }
505 }
506 PassMode::Indirect { attrs, meta_attrs: None, on_stack: false } => {
507 let i = apply(attrs);
508 if cx.sess().opts.optimize != config::OptLevel::No {
509 attributes::apply_to_llfn(
510 llfn,
511 llvm::AttributePlace::Argument(i),
512 &[llvm::AttributeKind::DeadOnReturn.create_attr(cx.llcx)],
513 );
514 }
515 }
516 PassMode::Indirect { attrs, meta_attrs: Some(meta_attrs), on_stack } => {
517 if !!on_stack { ::core::panicking::panic("assertion failed: !on_stack") };assert!(!on_stack);
518 apply(attrs);
519 apply(meta_attrs);
520 }
521 PassMode::Pair(a, b) => {
522 let i = apply(a);
523 let ii = apply(b);
524 if let BackendRepr::ScalarPair(scalar_a, scalar_b) = arg.layout.backend_repr {
525 apply_range_attr(llvm::AttributePlace::Argument(i), scalar_a);
526 let primitive_b = scalar_b.primitive();
527 let scalar_b = if let rustc_abi::Primitive::Int(int, false) = primitive_b
528 && let ty::Ref(_, pointee_ty, _) = *arg.layout.ty.kind()
529 && let ty::Slice(element_ty) = *pointee_ty.kind()
530 && let elem_size = cx.layout_of(element_ty).size
531 && elem_size != rustc_abi::Size::ZERO
532 {
533 if true {
if !scalar_b.is_always_valid(cx) {
::core::panicking::panic("assertion failed: scalar_b.is_always_valid(cx)")
};
};debug_assert!(scalar_b.is_always_valid(cx));
537 let isize_max = int.signed_max() as u64;
538 rustc_abi::Scalar::Initialized {
539 value: primitive_b,
540 valid_range: rustc_abi::WrappingRange {
541 start: 0,
542 end: u128::from(isize_max / elem_size.bytes()),
543 },
544 }
545 } else {
546 scalar_b
547 };
548 apply_range_attr(llvm::AttributePlace::Argument(ii), scalar_b);
549 }
550 }
551 PassMode::Cast { cast, pad_i32 } => {
552 if *pad_i32 {
553 apply(&ArgAttributes::new());
554 }
555 apply(&cast.attrs);
556 }
557 }
558 }
559
560 if let Some(instance) = instance {
562 llfn_attrs_from_instance(
563 cx,
564 cx.tcx,
565 llfn,
566 &cx.tcx.codegen_instance_attrs(instance.def),
567 Some(instance),
568 );
569 }
570 }
571
572 fn apply_attrs_callsite(&self, bx: &mut Builder<'_, 'll, 'tcx>, callsite: &'ll Value) {
573 let mut func_attrs = SmallVec::<[_; 2]>::new();
574 if self.ret.layout.is_uninhabited() {
575 func_attrs.push(llvm::AttributeKind::NoReturn.create_attr(bx.cx.llcx));
576 }
577 if !self.can_unwind {
578 func_attrs.push(llvm::AttributeKind::NoUnwind.create_attr(bx.cx.llcx));
579 }
580 attributes::apply_to_callsite(callsite, llvm::AttributePlace::Function, &{ func_attrs });
581
582 let mut i = 0;
583 let mut apply = |cx: &CodegenCx<'_, '_>, attrs: &ArgAttributes| {
584 attrs.apply_attrs_to_callsite(llvm::AttributePlace::Argument(i), cx, callsite);
585 i += 1;
586 i - 1
587 };
588 match &self.ret.mode {
589 PassMode::Direct(attrs) => {
590 attrs.apply_attrs_to_callsite(llvm::AttributePlace::ReturnValue, bx.cx, callsite);
591 }
592 PassMode::Indirect { attrs, meta_attrs: _, on_stack } => {
593 if !!on_stack { ::core::panicking::panic("assertion failed: !on_stack") };assert!(!on_stack);
594 let i = apply(bx.cx, attrs);
595 let sret = llvm::CreateStructRetAttr(
596 bx.cx.llcx,
597 bx.cx.type_array(bx.cx.type_i8(), self.ret.layout.size.bytes()),
598 );
599 attributes::apply_to_callsite(callsite, llvm::AttributePlace::Argument(i), &[sret]);
600 }
601 PassMode::Cast { cast, pad_i32: _ } => {
602 cast.attrs.apply_attrs_to_callsite(
603 llvm::AttributePlace::ReturnValue,
604 bx.cx,
605 callsite,
606 );
607 }
608 _ => {}
609 }
610 for arg in self.args.iter() {
611 match &arg.mode {
612 PassMode::Ignore => {}
613 PassMode::Indirect { attrs, meta_attrs: None, on_stack: true } => {
614 let i = apply(bx.cx, attrs);
615 let byval = llvm::CreateByValAttr(
616 bx.cx.llcx,
617 bx.cx.type_array(bx.cx.type_i8(), arg.layout.size.bytes()),
618 );
619 attributes::apply_to_callsite(
620 callsite,
621 llvm::AttributePlace::Argument(i),
622 &[byval],
623 );
624 }
625 PassMode::Direct(attrs)
626 | PassMode::Indirect { attrs, meta_attrs: None, on_stack: false } => {
627 apply(bx.cx, attrs);
628 }
629 PassMode::Indirect { attrs, meta_attrs: Some(meta_attrs), on_stack: _ } => {
630 apply(bx.cx, attrs);
631 apply(bx.cx, meta_attrs);
632 }
633 PassMode::Pair(a, b) => {
634 apply(bx.cx, a);
635 apply(bx.cx, b);
636 }
637 PassMode::Cast { cast, pad_i32 } => {
638 if *pad_i32 {
639 apply(bx.cx, &ArgAttributes::new());
640 }
641 apply(bx.cx, &cast.attrs);
642 }
643 }
644 }
645
646 let cconv = self.llvm_cconv(&bx.cx);
647 if cconv != llvm::CCallConv {
648 llvm::SetInstructionCallConv(callsite, cconv);
649 }
650
651 if self.conv == CanonAbi::Arm(ArmCall::CCmseNonSecureCall) {
652 let cmse_nonsecure_call = llvm::CreateAttrString(bx.cx.llcx, "cmse_nonsecure_call");
655 attributes::apply_to_callsite(
656 callsite,
657 llvm::AttributePlace::Function,
658 &[cmse_nonsecure_call],
659 );
660 }
661
662 let element_type_index = unsafe { llvm::LLVMRustGetElementTypeArgIndex(callsite) };
665 if element_type_index >= 0 {
666 let arg_ty = self.args[element_type_index as usize].layout.ty;
667 let pointee_ty = arg_ty.builtin_deref(true).expect("Must be pointer argument");
668 let element_type_attr = unsafe {
669 llvm::LLVMRustCreateElementTypeAttr(bx.llcx, bx.layout_of(pointee_ty).llvm_type(bx))
670 };
671 attributes::apply_to_callsite(
672 callsite,
673 llvm::AttributePlace::Argument(element_type_index as u32),
674 &[element_type_attr],
675 );
676 }
677 }
678}
679
impl AbiBuilderMethods for Builder<'_, '_, '_> {
    /// Returns the LLVM value for parameter `index` of the current function.
    fn get_param(&mut self, index: usize) -> Self::Value {
        llvm::get_param(self.llfn(), index as c_uint)
    }
}
685
686pub(crate) fn to_llvm_calling_convention(sess: &Session, abi: CanonAbi) -> llvm::CallConv {
689 match abi {
690 CanonAbi::C | CanonAbi::Rust => llvm::CCallConv,
691 CanonAbi::RustCold => llvm::PreserveMost,
692 CanonAbi::RustPreserveNone => match &sess.target.arch {
693 Arch::X86_64 | Arch::AArch64 => llvm::PreserveNone,
694 _ => llvm::CCallConv,
695 },
696 CanonAbi::Custom => llvm::CCallConv,
700 CanonAbi::GpuKernel => match &sess.target.arch {
701 Arch::AmdGpu => llvm::AmdgpuKernel,
702 Arch::Nvptx64 => llvm::PtxKernel,
703 arch => {
::core::panicking::panic_fmt(format_args!("Architecture {0} does not support GpuKernel calling convention",
arch));
}panic!("Architecture {arch} does not support GpuKernel calling convention"),
704 },
705 CanonAbi::Interrupt(interrupt_kind) => match interrupt_kind {
706 InterruptKind::Avr => llvm::AvrInterrupt,
707 InterruptKind::AvrNonBlocking => llvm::AvrNonBlockingInterrupt,
708 InterruptKind::Msp430 => llvm::Msp430Intr,
709 InterruptKind::RiscvMachine | InterruptKind::RiscvSupervisor => llvm::CCallConv,
710 InterruptKind::X86 => llvm::X86_Intr,
711 },
712 CanonAbi::Arm(arm_call) => match arm_call {
713 ArmCall::Aapcs => llvm::ArmAapcsCallConv,
714 ArmCall::CCmseNonSecureCall | ArmCall::CCmseNonSecureEntry => llvm::CCallConv,
715 },
716 CanonAbi::X86(x86_call) => match x86_call {
717 X86Call::Fastcall => llvm::X86FastcallCallConv,
718 X86Call::Stdcall => llvm::X86StdcallCallConv,
719 X86Call::SysV64 => llvm::X86_64_SysV,
720 X86Call::Thiscall => llvm::X86_ThisCall,
721 X86Call::Vectorcall => llvm::X86_VectorCall,
722 X86Call::Win64 => llvm::X86_64_Win64,
723 },
724 }
725}