// rustc_target/callconv/x86.rs

use rustc_abi::{
    AddressSpace, Align, BackendRepr, HasDataLayout, Primitive, Reg, RegKind, TyAbiInterface,
    TyAndLayout,
};

use crate::callconv::{ArgAttribute, FnAbi, PassMode};
use crate::spec::{HasTargetSpec, RustcAbi};

#[derive(PartialEq)]
pub(crate) enum Flavor {
    General,
    FastcallOrVectorcall,
}

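/// Per-convention options used when computing the x86-32 ABI: the calling-convention flavor,
/// an optional `regparm` count for passing integer arguments in registers, and whether small
/// struct returns should be forced into registers (`reg_struct_return`).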
pub(crate) struct X86Options {
    pub flavor: Flavor,
    pub regparm: Option<u32>,
    pub reg_struct_return: bool,
}

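/// Computes the ABI for the non-Rust (C-family) calling conventions on 32-bit x86: small
/// aggregate returns may be cast to a register or returned indirectly, aggregate arguments are
/// passed on the stack via `byval`, and small integers are extended to 32 bits.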
pub(crate) fn compute_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>, opts: X86Options)
where
    Ty: TyAbiInterface<'a, C> + Copy,
    C: HasDataLayout + HasTargetSpec,
{
    if !fn_abi.ret.is_ignore() {
        if fn_abi.ret.layout.is_aggregate() && fn_abi.ret.layout.is_sized() {
            let t = cx.target_spec();
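            // Returning a structure: most often this uses a hidden return pointer, but some
            // targets (or the `reg_struct_return` option) return small structs in registers
            // instead.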
            if t.abi_return_struct_as_int || opts.reg_struct_return {
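                // Small single-element float aggregates are returned directly in a
                // floating-point register; other small aggregates go through the integer
                // path below.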
                if fn_abi.ret.layout.is_single_fp_element(cx) {
                    match fn_abi.ret.layout.size.bytes() {
                        4 => fn_abi.ret.cast_to(Reg::f32()),
                        8 => fn_abi.ret.cast_to(Reg::f64()),
                        _ => fn_abi.ret.make_indirect(),
                    }
                } else {
                    match fn_abi.ret.layout.size.bytes() {
                        1 => fn_abi.ret.cast_to(Reg::i8()),
                        2 => fn_abi.ret.cast_to(Reg::i16()),
                        4 => fn_abi.ret.cast_to(Reg::i32()),
                        8 => fn_abi.ret.cast_to(Reg::i64()),
                        _ => fn_abi.ret.make_indirect(),
                    }
                }
            } else {
                fn_abi.ret.make_indirect();
            }
        } else {
            fn_abi.ret.extend_integer_width_to(32);
        }
    }

    for arg in fn_abi.args.iter_mut() {
        if arg.is_ignore() || !arg.layout.is_sized() {
            continue;
        }

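        // Some layouts must always be passed indirectly in non-Rust ABIs; handle that first.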
        if arg.layout.pass_indirectly_in_non_rustic_abis(cx) {
            arg.make_indirect();
            continue;
        }

        let t = cx.target_spec();
        let align_4 = Align::from_bytes(4).unwrap();
        let align_16 = Align::from_bytes(16).unwrap();

        if arg.layout.is_aggregate() {
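            // Compute the alignment of the `byval` argument, roughly following Clang's
            // `X86_32ABIInfo::getTypeStackAlignInBytes`:
            //   1. if the natural alignment is <= 4, use 4;
            //   2. on Darwin, anything containing a vector is aligned to 16;
            //   3. otherwise, use 4.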
            fn contains_vector<'a, Ty, C>(cx: &C, layout: TyAndLayout<'a, Ty>) -> bool
            where
                Ty: TyAbiInterface<'a, C> + Copy,
            {
                match layout.backend_repr {
                    BackendRepr::Scalar(_) | BackendRepr::ScalarPair(..) => false,
                    BackendRepr::SimdVector { .. } => true,
                    BackendRepr::Memory { .. } => {
                        for i in 0..layout.fields.count() {
                            if contains_vector(cx, layout.field(cx, i)) {
                                return true;
                            }
                        }
                        false
                    }
                    BackendRepr::ScalableVector { .. } => {
                        panic!("scalable vectors are unsupported")
                    }
                }
            }

            let byval_align = if arg.layout.align.abi < align_4 {
                // (1.) natural alignment is at most 4
                align_4
            } else if t.is_like_darwin && contains_vector(cx, arg.layout) {
                // (2.) Darwin and the type contains a vector
                align_16
            } else {
                // (3.) everything else
                align_4
            };

            arg.pass_by_stack_offset(Some(byval_align));
        } else {
            arg.extend_integer_width_to(32);
        }
    }

    fill_inregs(cx, fn_abi, opts, false);
}

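/// Marks eligible integer arguments with the `inreg` attribute so that fastcall/vectorcall (and
/// the `regparm` option) pass them in registers, matching Clang's handling of these conventions.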
pub(crate) fn fill_inregs<'a, Ty, C>(
    cx: &C,
    fn_abi: &mut FnAbi<'a, Ty>,
    opts: X86Options,
    rust_abi: bool,
) where
    Ty: TyAbiInterface<'a, C> + Copy,
{
    if opts.flavor != Flavor::FastcallOrVectorcall && opts.regparm.is_none_or(|x| x == 0) {
        return;
    }
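    // Fastcall and vectorcall have two integer registers available for arguments; an explicit
    // `regparm` count overrides that default.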
    let mut free_regs = opts.regparm.unwrap_or(2).into();

    let has_casts = fn_abi.args.iter().any(|arg| matches!(arg.mode, PassMode::Cast { .. }));
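    // Arguments lowered with `PassMode::Cast` are not handled here; when computing the Rust ABI,
    // skip register assignment entirely if any are present.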
    if has_casts && rust_abi {
        return;
    }

    for arg in fn_abi.args.iter_mut() {
        let attrs = match arg.mode {
            PassMode::Ignore | PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ } => {
                continue;
            }
            PassMode::Direct(ref mut attrs) => attrs,
            PassMode::Pair(..)
            | PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ }
            | PassMode::Cast { .. } => {
                unreachable!("x86 shouldn't be passing arguments by {:?}", arg.mode)
            }
        };

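        // The remaining argument is a single scalar-like unit covering the whole layout (checked
        // by the assert below); only integer units are candidates for `inreg`.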
        let unit = arg.layout.homogeneous_aggregate(cx).unwrap().unit().unwrap();
        assert_eq!(unit.size, arg.layout.size);
        if matches!(unit.kind, RegKind::Float | RegKind::Vector) {
            continue;
        }

        let size_in_regs = arg.layout.size.bits().div_ceil(32);

        if size_in_regs == 0 {
            continue;
        }

        if size_in_regs > free_regs {
            break;
        }

        free_regs -= size_in_regs;

        if arg.layout.size.bits() <= 32 && unit.kind == RegKind::Integer {
            attrs.set(ArgAttribute::InReg);
        }

        if free_regs == 0 {
            break;
        }
    }
}

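/// Rust-ABI adjustments for 32-bit x86: float-containing return values are returned in an SSE or
/// integer register (or indirectly) rather than on the x87 floating-point stack, whose loads and
/// stores can quiet signalling NaNs.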
pub(crate) fn compute_rust_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>)
where
    Ty: TyAbiInterface<'a, C> + Copy,
    C: HasDataLayout + HasTargetSpec,
{
    if !fn_abi.ret.is_ignore() {
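        // Does the return value contain a float in its scalar or scalar-pair representation?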
        let has_float = match fn_abi.ret.layout.backend_repr {
            BackendRepr::Scalar(s) => matches!(s.primitive(), Primitive::Float(_)),
            BackendRepr::ScalarPair(s1, s2) => {
                matches!(s1.primitive(), Primitive::Float(_))
                    || matches!(s2.primitive(), Primitive::Float(_))
            }
            _ => false,
        };
        if has_float {
            if cx.target_spec().rustc_abi == Some(RustcAbi::X86Sse2)
                && fn_abi.ret.layout.backend_repr.is_scalar()
                && fn_abi.ret.layout.size.bits() <= 128
            {
                // A single scalar that fits into an SSE register, on a target that guarantees
                // SSE2: return it there, since float values end up in SSE registers for
                // arithmetic anyway.
                fn_abi.ret.cast_to(Reg { kind: RegKind::Vector, size: fn_abi.ret.layout.size });
            } else if fn_abi.ret.layout.size <= Primitive::Pointer(AddressSpace::ZERO).size(cx) {
                // Pointer-sized or smaller: return in an integer register.
                fn_abi.ret.cast_to(Reg { kind: RegKind::Integer, size: fn_abi.ret.layout.size });
            } else {
                // Larger than a pointer: return indirectly.
                fn_abi.ret.make_indirect();
            }
            return;
        }
    }
}