// rustc_target/callconv/xtensa.rs

use rustc_abi::{BackendRepr, HasDataLayout, Size, TyAbiInterface};

use crate::callconv::{ArgAbi, FnAbi, Reg, Uniform};
use crate::spec::HasTargetSpec;

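// The Xtensa calling convention passes the first argument words in up to six
// 32-bit GPRs and returns values in up to four 32-bit GPRs; anything that
// does not fit in the remaining registers goes to the stack.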
const NUM_ARG_GPRS: u64 = 6;
const NUM_RET_GPRS: u64 = 4;
const MAX_ARG_IN_REGS_SIZE: u64 = NUM_ARG_GPRS * 32;
const MAX_RET_IN_REGS_SIZE: u64 = NUM_RET_GPRS * 32;

fn classify_ret_ty<'a, Ty, C>(cx: &C, arg: &mut ArgAbi<'a, Ty>)
where
    Ty: TyAbiInterface<'a, C> + Copy,
{
    if arg.is_ignore() {
        return;
    }

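    // The return-value rules mirror the argument rules, so defer to
    // `classify_arg_ty` with the smaller return-register budget.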
    let mut arg_gprs_left = NUM_RET_GPRS;
    classify_arg_ty(cx, arg, &mut arg_gprs_left, true);
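    // Return values cannot be passed on the stack; lower to an indirect
    // return and let the backend materialize the hidden return pointer.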
    if let super::PassMode::Indirect { attrs: _, meta_attrs: _, ref mut on_stack } = arg.mode {
        *on_stack = false;
    }
}

fn classify_arg_ty<'a, Ty, C>(
    cx: &C,
    arg: &mut ArgAbi<'a, Ty>,
    arg_gprs_left: &mut u64,
    is_ret: bool,
) where
    Ty: TyAbiInterface<'a, C> + Copy,
{
    assert!(*arg_gprs_left <= NUM_ARG_GPRS, "Arg GPR tracking underflow");

    let max_size = if is_ret { MAX_RET_IN_REGS_SIZE } else { MAX_ARG_IN_REGS_SIZE };

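    // Values that must be passed indirectly in non-Rust ABIs consume a single
    // GPR for the pointer to the actual value.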
    if !is_ret && arg.layout.pass_indirectly_in_non_rustic_abis(cx) {
        *arg_gprs_left = arg_gprs_left.saturating_sub(1);
        arg.make_indirect();
        return;
    }

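    // Zero-sized types occupy no registers or stack slots; skip them.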
    if arg.layout.is_zst() {
        return;
    }

    let size = arg.layout.size.bits();
    let needed_align = arg.layout.align.bits();
    let mut must_use_stack = false;

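    // Count how many 32-bit GPRs the argument occupies.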
    let mut needed_arg_gprs = size.div_ceil(32);
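    // 64-bit-aligned values are passed in aligned register pairs; when an odd
    // number of registers remains, one register is skipped (and still consumed).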
    if needed_align == 64 {
        needed_arg_gprs += *arg_gprs_left % 2;
    }

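    // Fall back to the stack if the value does not fit in the remaining
    // registers or its alignment cannot be honored in the register file.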
    if needed_arg_gprs > *arg_gprs_left
        || needed_align > 128
        || (*arg_gprs_left < (max_size / 32) && needed_align == 128)
    {
        must_use_stack = true;
        needed_arg_gprs = *arg_gprs_left;
    }
    *arg_gprs_left -= needed_arg_gprs;

    if must_use_stack {
        arg.pass_by_stack_offset(None);
    } else if is_xtensa_aggregate(arg) {
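        // Aggregates that fit are coerced to integers: a single i32 when the
        // value is at most one word, otherwise a uniform array of i32 (or i64
        // when 64-bit alignment is required) rounded up to whole 32-bit words.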
        if size <= 32 {
            arg.cast_to(Reg::i32());
        } else {
            let reg = if needed_align == 2 * 32 { Reg::i64() } else { Reg::i32() };
            let total = Size::from_bits(size.div_ceil(32) * 32);
            arg.cast_to(Uniform::new(reg, total));
        }
    } else {
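        // Scalars narrower than a GPR are promoted to the full 32-bit width;
        // anything wider is left for the backend to legalize.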
        if size < 32 {
            arg.extend_integer_width_to(32);
        }
    }
}
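/// Computes how the return value and each argument of `fn_abi` are passed
/// under the Xtensa calling convention.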
pub(crate) fn compute_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>)
where
    Ty: TyAbiInterface<'a, C> + Copy,
    C: HasDataLayout + HasTargetSpec,
{
    if !fn_abi.ret.is_ignore() {
        classify_ret_ty(cx, &mut fn_abi.ret);
    }

    let mut arg_gprs_left = NUM_ARG_GPRS;

    for arg in fn_abi.args.iter_mut() {
        if arg.is_ignore() {
            continue;
        }
        classify_arg_ty(cx, arg, &mut arg_gprs_left, false);
    }
}
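// SIMD vectors are classified like aggregates for argument-passing purposes.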
fn is_xtensa_aggregate<'a, Ty>(arg: &ArgAbi<'a, Ty>) -> bool {
    match arg.layout.backend_repr {
        BackendRepr::SimdVector { .. } => true,
        _ => arg.layout.is_aggregate(),
    }
}