use std::cmp;

use libc::c_uint;
use rustc_abi::{
    ArmCall, BackendRepr, CanonAbi, HasDataLayout, InterruptKind, Primitive, Reg, RegKind, Size,
    X86Call,
};
use rustc_codegen_ssa::MemFlags;
use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
use rustc_codegen_ssa::mir::place::{PlaceRef, PlaceValue};
use rustc_codegen_ssa::traits::*;
use rustc_middle::ty::Ty;
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::{bug, ty};
use rustc_session::{Session, config};
use rustc_target::callconv::{
    ArgAbi, ArgAttribute, ArgAttributes, ArgExtension, CastTarget, FnAbi, PassMode,
};
use rustc_target::spec::{Arch, SanitizerSet};
use smallvec::SmallVec;

use crate::attributes::{self, llfn_attrs_from_instance};
use crate::builder::Builder;
use crate::context::CodegenCx;
use crate::llvm::{self, Attribute, AttributePlace, Type, Value};
use crate::llvm_util;
use crate::type_of::LayoutLlvmExt;

trait ArgAttributesExt {
    fn apply_attrs_to_llfn(&self, idx: AttributePlace, cx: &CodegenCx<'_, '_>, llfn: &Value);
    fn apply_attrs_to_callsite(
        &self,
        idx: AttributePlace,
        cx: &CodegenCx<'_, '_>,
        callsite: &Value,
    );
}

const ABI_AFFECTING_ATTRIBUTES: [(ArgAttribute, llvm::AttributeKind); 1] =
    [(ArgAttribute::InReg, llvm::AttributeKind::InReg)];

const OPTIMIZATION_ATTRIBUTES: [(ArgAttribute, llvm::AttributeKind); 4] = [
    (ArgAttribute::NoAlias, llvm::AttributeKind::NoAlias),
    (ArgAttribute::NonNull, llvm::AttributeKind::NonNull),
    (ArgAttribute::ReadOnly, llvm::AttributeKind::ReadOnly),
    (ArgAttribute::NoUndef, llvm::AttributeKind::NoUndef),
];

fn get_attrs<'ll>(this: &ArgAttributes, cx: &CodegenCx<'ll, '_>) -> SmallVec<[&'ll Attribute; 8]> {
    let mut regular = this.regular;

    let mut attrs = SmallVec::new();

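    // ABI-affecting attributes must always be applied, regardless of optimization level.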
    for (attr, llattr) in ABI_AFFECTING_ATTRIBUTES {
        if regular.contains(attr) {
            attrs.push(llattr.create_attr(cx.llcx));
        }
    }
    if let Some(align) = this.pointee_align {
        attrs.push(llvm::CreateAlignmentAttr(cx.llcx, align.bytes()));
    }
    match this.arg_ext {
        ArgExtension::None => {}
        ArgExtension::Zext => attrs.push(llvm::AttributeKind::ZExt.create_attr(cx.llcx)),
        ArgExtension::Sext => attrs.push(llvm::AttributeKind::SExt.create_attr(cx.llcx)),
    }

    if cx.sess().opts.optimize != config::OptLevel::No {
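        // The remaining attributes only aid optimization, so skip them at `OptLevel::No`.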
        let deref = this.pointee_size.bytes();
        if deref != 0 {
            if regular.contains(ArgAttribute::NonNull) {
                attrs.push(llvm::CreateDereferenceableAttr(cx.llcx, deref));
            } else {
                attrs.push(llvm::CreateDereferenceableOrNullAttr(cx.llcx, deref));
            }
            regular -= ArgAttribute::NonNull;
        }
        for (attr, llattr) in OPTIMIZATION_ATTRIBUTES {
            if regular.contains(attr) {
                attrs.push(llattr.create_attr(cx.llcx));
            }
        }
        if llvm_util::get_version() >= (21, 0, 0) {
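            // The `captures(...)` attribute is only available in LLVM 21+. Apply the first
            // (strongest) variant that is set, so at most one is emitted.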
            const CAPTURES_ATTRIBUTES: [(ArgAttribute, llvm::AttributeKind); 3] = [
                (ArgAttribute::CapturesNone, llvm::AttributeKind::CapturesNone),
                (ArgAttribute::CapturesAddress, llvm::AttributeKind::CapturesAddress),
                (ArgAttribute::CapturesReadOnly, llvm::AttributeKind::CapturesReadOnly),
            ];
            for (attr, llattr) in CAPTURES_ATTRIBUTES {
                if regular.contains(attr) {
                    attrs.push(llattr.create_attr(cx.llcx));
                    break;
                }
            }
        }
    } else if cx.tcx.sess.sanitizers().contains(SanitizerSet::MEMORY) {
        if regular.contains(ArgAttribute::NoUndef) {
            attrs.push(llvm::AttributeKind::NoUndef.create_attr(cx.llcx));
        }
    }

    attrs
}

impl ArgAttributesExt for ArgAttributes {
    fn apply_attrs_to_llfn(&self, idx: AttributePlace, cx: &CodegenCx<'_, '_>, llfn: &Value) {
        let attrs = get_attrs(self, cx);
        attributes::apply_to_llfn(llfn, idx, &attrs);
    }

    fn apply_attrs_to_callsite(
        &self,
        idx: AttributePlace,
        cx: &CodegenCx<'_, '_>,
        callsite: &Value,
    ) {
        let attrs = get_attrs(self, cx);
        attributes::apply_to_callsite(callsite, idx, &attrs);
    }
}

pub(crate) trait LlvmType {
    fn llvm_type<'ll>(&self, cx: &CodegenCx<'ll, '_>) -> &'ll Type;
}

impl LlvmType for Reg {
    fn llvm_type<'ll>(&self, cx: &CodegenCx<'ll, '_>) -> &'ll Type {
        match self.kind {
            RegKind::Integer => cx.type_ix(self.size.bits()),
            RegKind::Float => match self.size.bits() {
                16 => cx.type_f16(),
                32 => cx.type_f32(),
                64 => cx.type_f64(),
                128 => cx.type_f128(),
                _ => bug!("unsupported float: {:?}", self),
            },
            RegKind::Vector => cx.type_vector(cx.type_i8(), self.size.bytes()),
        }
    }
}

impl LlvmType for CastTarget {
    fn llvm_type<'ll>(&self, cx: &CodegenCx<'ll, '_>) -> &'ll Type {
        let rest_ll_unit = self.rest.unit.llvm_type(cx);
        let rest_count = if self.rest.total == Size::ZERO {
            0
        } else {
            assert_ne!(
                self.rest.unit.size,
                Size::ZERO,
                "total size {:?} cannot be divided into units of zero size",
                self.rest.total
            );
            if !self.rest.total.bytes().is_multiple_of(self.rest.unit.size.bytes()) {
                assert_eq!(self.rest.unit.kind, RegKind::Integer, "only int regs can be split");
            }
            self.rest.total.bytes().div_ceil(self.rest.unit.size.bytes())
        };

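        // When there is no prefix, simplify to a single unit or an array of units; this gives
        // the same layout with a simpler LLVM type. A lone unit cannot be used when
        // `is_consecutive` is set and the unit would get split on the target (currently only
        // relevant for i128 registers).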
        if self.prefix.iter().all(|x| x.is_none()) {
            if rest_count == 1 && (!self.rest.is_consecutive || self.rest.unit != Reg::i128()) {
                return rest_ll_unit;
            }

            return cx.type_array(rest_ll_unit, rest_count);
        }

        let prefix_args =
            self.prefix.iter().flat_map(|option_reg| option_reg.map(|reg| reg.llvm_type(cx)));
        let rest_args = (0..rest_count).map(|_| rest_ll_unit);
        let args: Vec<_> = prefix_args.chain(rest_args).collect();
        cx.type_struct(&args, false)
    }
}

trait ArgAbiExt<'ll, 'tcx> {
    fn store(
        &self,
        bx: &mut Builder<'_, 'll, 'tcx>,
        val: &'ll Value,
        dst: PlaceRef<'tcx, &'ll Value>,
    );
    fn store_fn_arg(
        &self,
        bx: &mut Builder<'_, 'll, 'tcx>,
        idx: &mut usize,
        dst: PlaceRef<'tcx, &'ll Value>,
    );
}

impl<'ll, 'tcx> ArgAbiExt<'ll, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
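    /// Stores a value described by this `ArgAbi`, as received from the calling convention,
    /// into the destination place.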
    fn store(
        &self,
        bx: &mut Builder<'_, 'll, 'tcx>,
        val: &'ll Value,
        dst: PlaceRef<'tcx, &'ll Value>,
    ) {
        match &self.mode {
            PassMode::Ignore => {}
            PassMode::Indirect { attrs, meta_attrs: None, on_stack: _ } => {
                let align = attrs.pointee_align.unwrap_or(self.layout.align.abi);
                OperandValue::Ref(PlaceValue::new_sized(val, align)).store(bx, dst);
            }
            PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
                bug!("unsized `ArgAbi` cannot be stored");
            }
            PassMode::Cast { cast, pad_i32: _ } => {
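                // The ABI mandates that the value is passed as a different (cast) struct
                // representation. Spill it to a stack slot with the cast type's size and
                // alignment, then memcpy only the bytes both representations share, since the
                // cast type may be larger or smaller than the Rust type due to trailing
                // padding.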
                let scratch_size = cast.size(bx);
                let scratch_align = cast.align(bx);
                let copy_bytes =
                    cmp::min(cast.unaligned_size(bx).bytes(), self.layout.size.bytes());
                let llscratch = bx.alloca(scratch_size, scratch_align);
                bx.lifetime_start(llscratch, scratch_size);
                rustc_codegen_ssa::mir::store_cast(bx, cast, val, llscratch, scratch_align);
                bx.memcpy(
                    dst.val.llval,
                    self.layout.align.abi,
                    llscratch,
                    scratch_align,
                    bx.const_usize(copy_bytes),
                    MemFlags::empty(),
                    None,
                );
                bx.lifetime_end(llscratch, scratch_size);
            }
            PassMode::Pair(..) | PassMode::Direct { .. } => {
                OperandRef::from_immediate_or_packed_pair(bx, val, self.layout).val.store(bx, dst);
            }
        }
    }

    fn store_fn_arg(
        &self,
        bx: &mut Builder<'_, 'll, 'tcx>,
        idx: &mut usize,
        dst: PlaceRef<'tcx, &'ll Value>,
    ) {
        let mut next = || {
            let val = llvm::get_param(bx.llfn(), *idx as c_uint);
            *idx += 1;
            val
        };
        match self.mode {
            PassMode::Ignore => {}
            PassMode::Pair(..) => {
                OperandValue::Pair(next(), next()).store(bx, dst);
            }
            PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
                bug!("unsized `ArgAbi` cannot be stored");
            }
            PassMode::Direct(_)
            | PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ }
            | PassMode::Cast { .. } => {
                let next_arg = next();
                self.store(bx, next_arg, dst);
            }
        }
    }
}

impl<'ll, 'tcx> ArgAbiBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
    fn store_fn_arg(
        &mut self,
        arg_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        idx: &mut usize,
        dst: PlaceRef<'tcx, Self::Value>,
    ) {
        arg_abi.store_fn_arg(self, idx, dst)
    }
    fn store_arg(
        &mut self,
        arg_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        val: &'ll Value,
        dst: PlaceRef<'tcx, &'ll Value>,
    ) {
        arg_abi.store(self, val, dst)
    }
}

pub(crate) trait FnAbiLlvmExt<'ll, 'tcx> {
    fn llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type;
    fn ptr_to_llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type;
    fn llvm_cconv(&self, cx: &CodegenCx<'ll, 'tcx>) -> llvm::CallConv;

    fn apply_attrs_llfn(
        &self,
        cx: &CodegenCx<'ll, 'tcx>,
        llfn: &'ll Value,
        instance: Option<ty::Instance<'tcx>>,
    );

    fn apply_attrs_callsite(&self, bx: &mut Builder<'_, 'll, 'tcx>, callsite: &'ll Value);
}

impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
    fn llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type {
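        // Ignore "extra" args from the call site for C variadic functions.
        // Only the "fixed" args are part of the LLVM function signature.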
        let args =
            if self.c_variadic { &self.args[..self.fixed_count as usize] } else { &self.args };

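        // This capacity calculation is approximate: `Pair` and unsized indirect arguments
        // push two LLVM types each.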
        let mut llargument_tys = Vec::with_capacity(
            self.args.len() + if let PassMode::Indirect { .. } = self.ret.mode { 1 } else { 0 },
        );

        let llreturn_ty = match &self.ret.mode {
            PassMode::Ignore => cx.type_void(),
            PassMode::Direct(_) | PassMode::Pair(..) => self.ret.layout.immediate_llvm_type(cx),
            PassMode::Cast { cast, pad_i32: _ } => cast.llvm_type(cx),
            PassMode::Indirect { .. } => {
                llargument_tys.push(cx.type_ptr());
                cx.type_void()
            }
        };

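        // The number of LLVM argument types pushed per Rust-level argument below must stay in
        // sync with the argument indices used when applying attributes and lowering calls.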
        for arg in args {
            let llarg_ty = match &arg.mode {
                PassMode::Ignore => continue,
                PassMode::Direct(_) => {
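                    // For scalar layouts the immediate LLVM type is fully determined by the
                    // layout, so ABI-compatible Rust types lower to ABI-compatible LLVM IR.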
                    arg.layout.immediate_llvm_type(cx)
                }
                PassMode::Pair(..) => {
                    llargument_tys.push(arg.layout.scalar_pair_element_llvm_type(cx, 0, true));
                    llargument_tys.push(arg.layout.scalar_pair_element_llvm_type(cx, 1, true));
                    continue;
                }
                PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
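                    // Unsized arguments are passed as the two components of a wide pointer, so
                    // lower them using the layout of `*mut T`.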
                    let ptr_ty = Ty::new_mut_ptr(cx.tcx, arg.layout.ty);
                    let ptr_layout = cx.layout_of(ptr_ty);
                    llargument_tys.push(ptr_layout.scalar_pair_element_llvm_type(cx, 0, true));
                    llargument_tys.push(ptr_layout.scalar_pair_element_llvm_type(cx, 1, true));
                    continue;
                }
                PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ } => cx.type_ptr(),
                PassMode::Cast { cast, pad_i32 } => {
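                    // An extra i32 of padding is inserted before the argument when `pad_i32`
                    // is set.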
                    if *pad_i32 {
                        llargument_tys.push(Reg::i32().llvm_type(cx));
                    }
                    cast.llvm_type(cx)
                }
            };
            llargument_tys.push(llarg_ty);
        }

        if self.c_variadic {
            cx.type_variadic_func(&llargument_tys, llreturn_ty)
        } else {
            cx.type_func(&llargument_tys, llreturn_ty)
        }
    }

    fn ptr_to_llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type {
        cx.type_ptr_ext(cx.data_layout().instruction_address_space)
    }

    fn llvm_cconv(&self, cx: &CodegenCx<'ll, 'tcx>) -> llvm::CallConv {
        to_llvm_calling_convention(cx.tcx.sess, self.conv)
    }

    fn apply_attrs_llfn(
        &self,
        cx: &CodegenCx<'ll, 'tcx>,
        llfn: &'ll Value,
        instance: Option<ty::Instance<'tcx>>,
    ) {
        let mut func_attrs = SmallVec::<[_; 3]>::new();
        if self.ret.layout.is_uninhabited() {
            func_attrs.push(llvm::AttributeKind::NoReturn.create_attr(cx.llcx));
        }
        if !self.can_unwind {
            func_attrs.push(llvm::AttributeKind::NoUnwind.create_attr(cx.llcx));
        }
        match self.conv {
            CanonAbi::Interrupt(InterruptKind::RiscvMachine) => {
                func_attrs.push(llvm::CreateAttrStringValue(cx.llcx, "interrupt", "machine"))
            }
            CanonAbi::Interrupt(InterruptKind::RiscvSupervisor) => {
                func_attrs.push(llvm::CreateAttrStringValue(cx.llcx, "interrupt", "supervisor"))
            }
            CanonAbi::Arm(ArmCall::CCmseNonSecureEntry) => {
                func_attrs.push(llvm::CreateAttrString(cx.llcx, "cmse_nonsecure_entry"))
            }
            _ => (),
        }
        attributes::apply_to_llfn(llfn, llvm::AttributePlace::Function, &{ func_attrs });

        let mut i = 0;
        let mut apply = |attrs: &ArgAttributes| {
            attrs.apply_attrs_to_llfn(llvm::AttributePlace::Argument(i), cx, llfn);
            i += 1;
            i - 1
        };

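        // When optimizing, add an LLVM `range` attribute to integer scalars whose valid range
        // is narrower than the full type (bools are excluded).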
        let apply_range_attr = |idx: AttributePlace, scalar: rustc_abi::Scalar| {
            if cx.sess().opts.optimize != config::OptLevel::No
                && matches!(scalar.primitive(), Primitive::Int(..))
                && !scalar.is_bool()
                && !scalar.is_always_valid(cx)
            {
                attributes::apply_to_llfn(
                    llfn,
                    idx,
                    &[llvm::CreateRangeAttr(cx.llcx, scalar.size(cx), scalar.valid_range(cx))],
                );
            }
        };

        match &self.ret.mode {
            PassMode::Direct(attrs) => {
                attrs.apply_attrs_to_llfn(llvm::AttributePlace::ReturnValue, cx, llfn);
                if let BackendRepr::Scalar(scalar) = self.ret.layout.backend_repr {
                    apply_range_attr(llvm::AttributePlace::ReturnValue, scalar);
                }
            }
            PassMode::Indirect { attrs, meta_attrs: _, on_stack } => {
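                // Indirect return values are passed via an `sret` pointer in the first
                // argument slot. When optimizing, also mark it writable and dead-on-unwind.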
                assert!(!on_stack);
                let i = apply(attrs);
                let sret = llvm::CreateStructRetAttr(
                    cx.llcx,
                    cx.type_array(cx.type_i8(), self.ret.layout.size.bytes()),
                );
                attributes::apply_to_llfn(llfn, llvm::AttributePlace::Argument(i), &[sret]);
                if cx.sess().opts.optimize != config::OptLevel::No {
                    attributes::apply_to_llfn(
                        llfn,
                        llvm::AttributePlace::Argument(i),
                        &[
                            llvm::AttributeKind::Writable.create_attr(cx.llcx),
                            llvm::AttributeKind::DeadOnUnwind.create_attr(cx.llcx),
                        ],
                    );
                }
            }
            PassMode::Cast { cast, pad_i32: _ } => {
                cast.attrs.apply_attrs_to_llfn(llvm::AttributePlace::ReturnValue, cx, llfn);
            }
            _ => {}
        }
        for arg in self.args.iter() {
            match &arg.mode {
                PassMode::Ignore => {}
                PassMode::Indirect { attrs, meta_attrs: None, on_stack: true } => {
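                    // Arguments passed on the stack get the `byval` attribute, sized to the
                    // argument's layout.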
                    let i = apply(attrs);
                    let byval = llvm::CreateByValAttr(
                        cx.llcx,
                        cx.type_array(cx.type_i8(), arg.layout.size.bytes()),
                    );
                    attributes::apply_to_llfn(llfn, llvm::AttributePlace::Argument(i), &[byval]);
                }
                PassMode::Direct(attrs) => {
                    let i = apply(attrs);
                    if let BackendRepr::Scalar(scalar) = arg.layout.backend_repr {
                        apply_range_attr(llvm::AttributePlace::Argument(i), scalar);
                    }
                }
                PassMode::Indirect { attrs, meta_attrs: None, on_stack: false } => {
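                    // On LLVM 21+ and when optimizing, mark indirect arguments as
                    // `dead_on_return`: the caller never reads the pointee after the call.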
                    let i = apply(attrs);
                    if cx.sess().opts.optimize != config::OptLevel::No
                        && llvm_util::get_version() >= (21, 0, 0)
                    {
                        attributes::apply_to_llfn(
                            llfn,
                            llvm::AttributePlace::Argument(i),
                            &[llvm::AttributeKind::DeadOnReturn.create_attr(cx.llcx)],
                        );
                    }
                }
                PassMode::Indirect { attrs, meta_attrs: Some(meta_attrs), on_stack } => {
                    assert!(!on_stack);
                    apply(attrs);
                    apply(meta_attrs);
                }
                PassMode::Pair(a, b) => {
                    let i = apply(a);
                    let ii = apply(b);
                    if let BackendRepr::ScalarPair(scalar_a, scalar_b) = arg.layout.backend_repr {
                        apply_range_attr(llvm::AttributePlace::Argument(i), scalar_a);
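                        // For `&[T]` arguments the second scalar is the slice length, which is
                        // bounded by `isize::MAX / size_of::<T>()`; hand LLVM that tighter
                        // range instead of the full `usize` range.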
                        let primitive_b = scalar_b.primitive();
                        let scalar_b = if let rustc_abi::Primitive::Int(int, false) = primitive_b
                            && let ty::Ref(_, pointee_ty, _) = *arg.layout.ty.kind()
                            && let ty::Slice(element_ty) = *pointee_ty.kind()
                            && let elem_size = cx.layout_of(element_ty).size
                            && elem_size != rustc_abi::Size::ZERO
                        {
                            debug_assert!(scalar_b.is_always_valid(cx));
                            let isize_max = int.signed_max() as u64;
                            rustc_abi::Scalar::Initialized {
                                value: primitive_b,
                                valid_range: rustc_abi::WrappingRange {
                                    start: 0,
                                    end: u128::from(isize_max / elem_size.bytes()),
                                },
                            }
                        } else {
                            scalar_b
                        };
                        apply_range_attr(llvm::AttributePlace::Argument(ii), scalar_b);
                    }
                }
                PassMode::Cast { cast, pad_i32 } => {
                    if *pad_i32 {
                        apply(&ArgAttributes::new());
                    }
                    apply(&cast.attrs);
                }
            }
        }

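        // If this declaration corresponds to an instance, also apply the instance-derived
        // attributes (inline hints, target features, and so on).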
        if let Some(instance) = instance {
            llfn_attrs_from_instance(
                cx,
                cx.tcx,
                llfn,
                &cx.tcx.codegen_instance_attrs(instance.def),
                Some(instance),
            );
        }
    }

    fn apply_attrs_callsite(&self, bx: &mut Builder<'_, 'll, 'tcx>, callsite: &'ll Value) {
        let mut func_attrs = SmallVec::<[_; 2]>::new();
        if self.ret.layout.is_uninhabited() {
            func_attrs.push(llvm::AttributeKind::NoReturn.create_attr(bx.cx.llcx));
        }
        if !self.can_unwind {
            func_attrs.push(llvm::AttributeKind::NoUnwind.create_attr(bx.cx.llcx));
        }
        attributes::apply_to_callsite(callsite, llvm::AttributePlace::Function, &{ func_attrs });

        let mut i = 0;
        let mut apply = |cx: &CodegenCx<'_, '_>, attrs: &ArgAttributes| {
            attrs.apply_attrs_to_callsite(llvm::AttributePlace::Argument(i), cx, callsite);
            i += 1;
            i - 1
        };
        match &self.ret.mode {
            PassMode::Direct(attrs) => {
                attrs.apply_attrs_to_callsite(llvm::AttributePlace::ReturnValue, bx.cx, callsite);
            }
            PassMode::Indirect { attrs, meta_attrs: _, on_stack } => {
                assert!(!on_stack);
                let i = apply(bx.cx, attrs);
                let sret = llvm::CreateStructRetAttr(
                    bx.cx.llcx,
                    bx.cx.type_array(bx.cx.type_i8(), self.ret.layout.size.bytes()),
                );
                attributes::apply_to_callsite(callsite, llvm::AttributePlace::Argument(i), &[sret]);
            }
            PassMode::Cast { cast, pad_i32: _ } => {
                cast.attrs.apply_attrs_to_callsite(
                    llvm::AttributePlace::ReturnValue,
                    bx.cx,
                    callsite,
                );
            }
            _ => {}
        }
        for arg in self.args.iter() {
            match &arg.mode {
                PassMode::Ignore => {}
                PassMode::Indirect { attrs, meta_attrs: None, on_stack: true } => {
                    let i = apply(bx.cx, attrs);
                    let byval = llvm::CreateByValAttr(
                        bx.cx.llcx,
                        bx.cx.type_array(bx.cx.type_i8(), arg.layout.size.bytes()),
                    );
                    attributes::apply_to_callsite(
                        callsite,
                        llvm::AttributePlace::Argument(i),
                        &[byval],
                    );
                }
                PassMode::Direct(attrs)
                | PassMode::Indirect { attrs, meta_attrs: None, on_stack: false } => {
                    apply(bx.cx, attrs);
                }
                PassMode::Indirect { attrs, meta_attrs: Some(meta_attrs), on_stack: _ } => {
                    apply(bx.cx, attrs);
                    apply(bx.cx, meta_attrs);
                }
                PassMode::Pair(a, b) => {
                    apply(bx.cx, a);
                    apply(bx.cx, b);
                }
                PassMode::Cast { cast, pad_i32 } => {
                    if *pad_i32 {
                        apply(bx.cx, &ArgAttributes::new());
                    }
                    apply(bx.cx, &cast.attrs);
                }
            }
        }

        let cconv = self.llvm_cconv(bx.cx);
        if cconv != llvm::CCallConv {
            llvm::SetInstructionCallConv(callsite, cconv);
        }

        if self.conv == CanonAbi::Arm(ArmCall::CCmseNonSecureCall) {
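            // This will probably get ignored on all targets except those supporting the
            // TrustZone-M extension (thumbv8m targets).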
            let cmse_nonsecure_call = llvm::CreateAttrString(bx.cx.llcx, "cmse_nonsecure_call");
            attributes::apply_to_callsite(
                callsite,
                llvm::AttributePlace::Function,
                &[cmse_nonsecure_call],
            );
        }

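        // Some intrinsics require that an `elementtype` attribute (with the pointee type of a
        // pointer argument) is added to the callsite.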
        let element_type_index = unsafe { llvm::LLVMRustGetElementTypeArgIndex(callsite) };
        if element_type_index >= 0 {
            let arg_ty = self.args[element_type_index as usize].layout.ty;
            let pointee_ty = arg_ty.builtin_deref(true).expect("Must be pointer argument");
            let element_type_attr = unsafe {
                llvm::LLVMRustCreateElementTypeAttr(bx.llcx, bx.layout_of(pointee_ty).llvm_type(bx))
            };
            attributes::apply_to_callsite(
                callsite,
                llvm::AttributePlace::Argument(element_type_index as u32),
                &[element_type_attr],
            );
        }
    }
}

impl AbiBuilderMethods for Builder<'_, '_, '_> {
    fn get_param(&mut self, index: usize) -> Self::Value {
        llvm::get_param(self.llfn(), index as c_uint)
    }
}

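/// Maps a `CanonAbi` to the LLVM calling convention used to lower it for the current target.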
pub(crate) fn to_llvm_calling_convention(sess: &Session, abi: CanonAbi) -> llvm::CallConv {
    match abi {
        CanonAbi::C | CanonAbi::Rust => llvm::CCallConv,
        CanonAbi::RustCold => llvm::PreserveMost,
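        // Functions with this calling convention can only be called from assembly, but it is
        // possible to declare an `extern "custom"` block, so the backend still needs a calling
        // convention for declaring foreign functions.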
        CanonAbi::Custom => llvm::CCallConv,
        CanonAbi::GpuKernel => match &sess.target.arch {
            Arch::AmdGpu => llvm::AmdgpuKernel,
            Arch::Nvptx64 => llvm::PtxKernel,
            arch => panic!("Architecture {arch} does not support GpuKernel calling convention"),
        },
        CanonAbi::Interrupt(interrupt_kind) => match interrupt_kind {
            InterruptKind::Avr => llvm::AvrInterrupt,
            InterruptKind::AvrNonBlocking => llvm::AvrNonBlockingInterrupt,
            InterruptKind::Msp430 => llvm::Msp430Intr,
            InterruptKind::RiscvMachine | InterruptKind::RiscvSupervisor => llvm::CCallConv,
            InterruptKind::X86 => llvm::X86_Intr,
        },
        CanonAbi::Arm(arm_call) => match arm_call {
            ArmCall::Aapcs => llvm::ArmAapcsCallConv,
            ArmCall::CCmseNonSecureCall | ArmCall::CCmseNonSecureEntry => llvm::CCallConv,
        },
        CanonAbi::X86(x86_call) => match x86_call {
            X86Call::Fastcall => llvm::X86FastcallCallConv,
            X86Call::Stdcall => llvm::X86StdcallCallConv,
            X86Call::SysV64 => llvm::X86_64_SysV,
            X86Call::Thiscall => llvm::X86_ThisCall,
            X86Call::Vectorcall => llvm::X86_VectorCall,
            X86Call::Win64 => llvm::X86_64_Win64,
        },
    }
}