rustc_target/callconv/x86.rs

use rustc_abi::{
    AddressSpace, Align, BackendRepr, HasDataLayout, Primitive, Reg, RegKind, TyAbiInterface,
    TyAndLayout,
};

use crate::callconv::{ArgAttribute, FnAbi, PassMode};
use crate::spec::{HasTargetSpec, RustcAbi};

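/// Which x86 calling-convention family is being lowered: the plain C
/// convention, or `fastcall`/`vectorcall`, which pass some integer arguments
/// in registers.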
#[derive(PartialEq)]
pub(crate) enum Flavor {
    General,
    FastcallOrVectorcall,
}

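/// Options steering the 32-bit x86 ABI computation: the convention flavor, an
/// optional `regparm(N)` count of integer argument registers, and whether
/// small struct returns use registers (GCC's `-freg-struct-return`) instead of
/// a hidden out-pointer.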
pub(crate) struct X86Options {
    pub flavor: Flavor,
    pub regparm: Option<u32>,
    pub reg_struct_return: bool,
}

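/// Computes how return values and arguments are passed for C-family ABIs on
/// 32-bit x86: small aggregates may be returned in registers, other aggregates
/// go indirectly or `byval` on the stack, and `fill_inregs` then marks any
/// register-eligible integer arguments.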
pub(crate) fn compute_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>, opts: X86Options)
where
    Ty: TyAbiInterface<'a, C> + Copy,
    C: HasDataLayout + HasTargetSpec,
{
    if !fn_abi.ret.is_ignore() {
        if fn_abi.ret.layout.is_aggregate() && fn_abi.ret.layout.is_sized() {
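            // Returning a structure. Most often this uses a hidden first
            // argument pointing at caller-allocated memory, but some targets
            // and options return small structs in integer registers instead.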
            let t = cx.target_spec();
            if t.abi_return_struct_as_int || opts.reg_struct_return {
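                // Following Clang, most non-MSVC targets return
                // single-element float aggregates directly in a
                // floating-point register.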
                if fn_abi.ret.layout.is_single_fp_element(cx) {
                    match fn_abi.ret.layout.size.bytes() {
                        4 => fn_abi.ret.cast_to(Reg::f32()),
                        8 => fn_abi.ret.cast_to(Reg::f64()),
                        _ => fn_abi.ret.make_indirect(),
                    }
                } else {
                    match fn_abi.ret.layout.size.bytes() {
                        1 => fn_abi.ret.cast_to(Reg::i8()),
                        2 => fn_abi.ret.cast_to(Reg::i16()),
                        4 => fn_abi.ret.cast_to(Reg::i32()),
                        8 => fn_abi.ret.cast_to(Reg::i64()),
                        _ => fn_abi.ret.make_indirect(),
                    }
                }
            } else {
                fn_abi.ret.make_indirect();
            }
        } else {
            fn_abi.ret.extend_integer_width_to(32);
        }
    }

    for arg in fn_abi.args.iter_mut() {
        if arg.is_ignore() || !arg.layout.is_sized() {
            continue;
        }

        if arg.layout.pass_indirectly_in_non_rustic_abis(cx) {
            arg.make_indirect();
            continue;
        }

        let t = cx.target_spec();
        let align_4 = Align::from_bytes(4).unwrap();
        let align_16 = Align::from_bytes(16).unwrap();

        if arg.layout.is_aggregate() {
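            // Compute the alignment of the `byval` argument. These rules
            // mirror Clang's `X86_32ABIInfo::getTypeStackAlignInBytes`:
            //
            // 1. If the natural alignment of the type is <= 4, the alignment
            //    is 4.
            //
            // 2. Otherwise, on Apple platforms, anything that contains a
            //    vector type is aligned to 16.
            //
            // 3. In all remaining cases, the alignment is 4.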
            fn contains_vector<'a, Ty, C>(cx: &C, layout: TyAndLayout<'a, Ty>) -> bool
            where
                Ty: TyAbiInterface<'a, C> + Copy,
            {
                match layout.backend_repr {
                    BackendRepr::Scalar(_) | BackendRepr::ScalarPair(..) => false,
                    BackendRepr::SimdVector { .. } => true,
                    BackendRepr::Memory { .. } => {
                        for i in 0..layout.fields.count() {
                            if contains_vector(cx, layout.field(cx, i)) {
                                return true;
                            }
                        }
                        false
                    }
                }
            }

            let byval_align = if arg.layout.align.abi < align_4 {
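                // Rule 1: natural alignment <= 4.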
                align_4
            } else if t.is_like_darwin && contains_vector(cx, arg.layout) {
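                // Rule 2: Darwin and the aggregate contains a vector.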
                align_16
            } else {
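                // Rule 3: everything else.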
                align_4
            };

            arg.pass_by_stack_offset(Some(byval_align));
        } else {
            arg.extend_integer_width_to(32);
        }
    }

    fill_inregs(cx, fn_abi, opts, false);
}

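/// Marks integer arguments as `InReg` the way Clang does, so that our
/// `fastcall`/`vectorcall` (and `regparm`) functions stay call-compatible
/// with C/C++ code compiled for the same conventions.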
pub(crate) fn fill_inregs<'a, Ty, C>(
    cx: &C,
    fn_abi: &mut FnAbi<'a, Ty>,
    opts: X86Options,
    rust_abi: bool,
) where
    Ty: TyAbiInterface<'a, C> + Copy,
{
    if opts.flavor != Flavor::FastcallOrVectorcall && opts.regparm.is_none_or(|x| x == 0) {
        return;
    }
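    // `fastcall`/`vectorcall` pass at most two integer arguments in registers
    // (ECX and EDX); an explicit `regparm(N)` overrides that count.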
    let mut free_regs = opts.regparm.unwrap_or(2).into();

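    // Rust-ABI functions may pass arguments with a cast, which never gets
    // `InReg`; skip register assignment entirely in that case.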
    let has_casts = fn_abi.args.iter().any(|arg| matches!(arg.mode, PassMode::Cast { .. }));
    if has_casts && rust_abi {
        return;
    }

    for arg in fn_abi.args.iter_mut() {
        let attrs = match arg.mode {
            PassMode::Ignore | PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ } => {
                continue;
            }
            PassMode::Direct(ref mut attrs) => attrs,
            PassMode::Pair(..)
            | PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ }
            | PassMode::Cast { .. } => {
                unreachable!("x86 shouldn't be passing arguments by {:?}", arg.mode)
            }
        };

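        // A directly-passed argument is a single primitive, so it forms a
        // homogeneous aggregate with exactly one unit covering its full size.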
        let unit = arg.layout.homogeneous_aggregate(cx).unwrap().unit().unwrap();
        assert_eq!(unit.size, arg.layout.size);
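        // `InReg` only makes sense for integer registers; leave float and
        // vector arguments alone.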
        if matches!(unit.kind, RegKind::Float | RegKind::Vector) {
            continue;
        }

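        // Each argument consumes one 32-bit register per 4 bytes of its size.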
        let size_in_regs = arg.layout.size.bits().div_ceil(32);

        if size_in_regs == 0 {
            continue;
        }

        if size_in_regs > free_regs {
            break;
        }

        free_regs -= size_in_regs;

        if arg.layout.size.bits() <= 32 && unit.kind == RegKind::Integer {
            attrs.set(ArgAttribute::InReg);
        }

        if free_regs == 0 {
            break;
        }
    }
}

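/// Rust-ABI adjustments for 32-bit x86, currently concerned with keeping
/// float returns out of the x87 register stack.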
pub(crate) fn compute_rust_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>)
where
    Ty: TyAbiInterface<'a, C> + Copy,
    C: HasDataLayout + HasTargetSpec,
{
    if !fn_abi.ret.is_ignore() {
        let has_float = match fn_abi.ret.layout.backend_repr {
            BackendRepr::Scalar(s) => matches!(s.primitive(), Primitive::Float(_)),
            BackendRepr::ScalarPair(s1, s2) => {
                matches!(s1.primitive(), Primitive::Float(_))
                    || matches!(s2.primitive(), Primitive::Float(_))
            }
            _ => false,
        };
        if has_float {
            if cx.target_spec().rustc_abi == Some(RustcAbi::X86Sse2)
                && fn_abi.ret.layout.backend_repr.is_scalar()
                && fn_abi.ret.layout.size.bits() <= 128
            {
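                // A single scalar that fits in an SSE register on a target
                // with guaranteed SSE2: return it as a vector, since float
                // operations want the value in an XMM register anyway.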
                fn_abi.ret.cast_to(Reg { kind: RegKind::Vector, size: fn_abi.ret.layout.size });
            } else if fn_abi.ret.layout.size <= Primitive::Pointer(AddressSpace::ZERO).size(cx) {
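                // Fits in a general-purpose register: return it as an
                // integer of the same size.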
                fn_abi.ret.cast_to(Reg { kind: RegKind::Integer, size: fn_abi.ret.layout.size });
            } else {
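                // Too large for a single register: return indirectly through
                // a hidden pointer.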
                fn_abi.ret.make_indirect();
            }
            return;
        }
    }
}