// rustc_target/callconv/x86.rs
use rustc_abi::{
    AddressSpace, Align, BackendRepr, ExternAbi, HasDataLayout, Primitive, Reg, RegKind,
    TyAbiInterface, TyAndLayout,
};

use crate::callconv::{ArgAttribute, FnAbi, PassMode};
use crate::spec::{HasTargetSpec, RustcAbi};

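/// The calling-convention flavor being lowered: either the plain C
/// convention (everything passed on the stack) or `fastcall`/`vectorcall`,
/// which pass some integer arguments in registers (see `fill_inregs`).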
#[derive(PartialEq)]
pub(crate) enum Flavor {
    General,
    FastcallOrVectorcall,
}

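/// Target- and flag-dependent options that vary the basic x86 C calling
/// convention.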
pub(crate) struct X86Options {
    pub flavor: Flavor,
    /// Number of integer registers available for passing arguments
    /// (analogous to GCC/Clang's `-mregparm=N`); `None` means the default
    /// for the flavor.
    pub regparm: Option<u32>,
    /// Return small structs in registers rather than via a hidden pointer
    /// (analogous to `-freg-struct-return`).
    pub reg_struct_return: bool,
}

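/// Computes the C-family ABI for 32-bit x86: small aggregate returns may be
/// cast to integer or float registers, other aggregates are returned
/// indirectly or passed `byval` on the stack, and integers narrower than
/// 32 bits are extended.
///
/// For example, when the target sets `abi_return_struct_as_int` (or under
/// `reg_struct_return`), a two-byte aggregate is returned as `Reg::i16`,
/// while any size other than 1, 2, 4, or 8 bytes is returned through a
/// hidden out-pointer (`make_indirect`).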
pub(crate) fn compute_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>, opts: X86Options)
where
    Ty: TyAbiInterface<'a, C> + Copy,
    C: HasDataLayout + HasTargetSpec,
{
    if !fn_abi.ret.is_ignore() {
        if fn_abi.ret.layout.is_aggregate() && fn_abi.ret.layout.is_sized() {
            // Returning a structure. Most often this uses a hidden first
            // argument pointing to caller-allocated memory, but some
            // platforms return small structs in integer registers instead.
            let t = cx.target_spec();
            if t.abi_return_struct_as_int || opts.reg_struct_return {
                // Single-element float aggregates are returned in a
                // floating-point register rather than an integer one.
                if fn_abi.ret.layout.is_single_fp_element(cx) {
                    match fn_abi.ret.layout.size.bytes() {
                        4 => fn_abi.ret.cast_to(Reg::f32()),
                        8 => fn_abi.ret.cast_to(Reg::f64()),
                        _ => fn_abi.ret.make_indirect(),
                    }
                } else {
                    match fn_abi.ret.layout.size.bytes() {
                        1 => fn_abi.ret.cast_to(Reg::i8()),
                        2 => fn_abi.ret.cast_to(Reg::i16()),
                        4 => fn_abi.ret.cast_to(Reg::i32()),
                        8 => fn_abi.ret.cast_to(Reg::i64()),
                        _ => fn_abi.ret.make_indirect(),
                    }
                }
            } else {
                fn_abi.ret.make_indirect();
            }
        } else {
            fn_abi.ret.extend_integer_width_to(32);
        }
    }

    for arg in fn_abi.args.iter_mut() {
        if arg.is_ignore() || !arg.layout.is_sized() {
            continue;
        }

        let t = cx.target_spec();
        let align_4 = Align::from_bytes(4).unwrap();
        let align_16 = Align::from_bytes(16).unwrap();

        if arg.layout.is_aggregate() {
            // Compute the alignment of the `byval` argument. The rules
            // mirror `X86_32ABIInfo::getTypeStackAlignInBytes` in Clang's
            // `TargetInfo.cpp`: everything gets at least 4-byte alignment,
            // and on Apple platforms an aggregate that contains an SSE
            // vector type is aligned to 16 bytes.
            fn contains_vector<'a, Ty, C>(cx: &C, layout: TyAndLayout<'a, Ty>) -> bool
            where
                Ty: TyAbiInterface<'a, C> + Copy,
            {
                match layout.backend_repr {
                    BackendRepr::Scalar(_) | BackendRepr::ScalarPair(..) => false,
                    BackendRepr::SimdVector { .. } => true,
                    BackendRepr::Memory { .. } => {
                        for i in 0..layout.fields.count() {
                            if contains_vector(cx, layout.field(cx, i)) {
                                return true;
                            }
                        }
                        false
                    }
                }
            }

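            // E.g. (hypothetical): an aggregate wrapping a 16-byte SIMD
            // vector is passed `byval` with 16-byte alignment on Apple
            // targets, while the same type elsewhere, or any vector-free
            // aggregate, gets 4-byte alignment.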
            let byval_align = if arg.layout.align.abi < align_4 {
                // Types with natural alignment below 4 bytes are still
                // bumped to 4.
                align_4
            } else if t.is_like_osx && contains_vector(cx, arg.layout) {
                // Apple: an aggregate containing a vector is 16-aligned.
                align_16
            } else {
                align_4
            };

            arg.pass_by_stack_offset(Some(byval_align));
        } else {
            arg.extend_integer_width_to(32);
        }
    }

    fill_inregs(cx, fn_abi, opts, false);
}

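/// Marks up to `regparm` eligible 32-bit integer arguments with the `inreg`
/// attribute, mirroring Clang (`X86_32ABIInfo::updateFreeRegs` in
/// `TargetInfo.cpp`) so that our `fastcall`/`vectorcall` stays compatible
/// with C/C++.
///
/// Illustrative sketch (hypothetical C signature, not from this crate):
/// with the default two free registers, `int __fastcall f(int a, int b,
/// int c)` passes `a` in ECX and `b` in EDX (both marked `inreg` here) and
/// leaves `c` on the stack.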
pub(crate) fn fill_inregs<'a, Ty, C>(
    cx: &C,
    fn_abi: &mut FnAbi<'a, Ty>,
    opts: X86Options,
    rust_abi: bool,
) where
    Ty: TyAbiInterface<'a, C> + Copy,
{
    if opts.flavor != Flavor::FastcallOrVectorcall && opts.regparm.is_none_or(|x| x == 0) {
        return;
    }
    // Two registers for fastcall/vectorcall; an explicit `regparm` value
    // overrides that default.
    let mut free_regs = opts.regparm.unwrap_or(2).into();

    // Types lowered to `PassMode::Cast` never get `InReg`, so bail out for
    // the Rust ABI if any argument uses a cast.
    let has_casts = fn_abi.args.iter().any(|arg| matches!(arg.mode, PassMode::Cast { .. }));
    if has_casts && rust_abi {
        return;
    }

    for arg in fn_abi.args.iter_mut() {
        let attrs = match arg.mode {
            PassMode::Ignore | PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ } => {
                continue;
            }
            PassMode::Direct(ref mut attrs) => attrs,
            PassMode::Pair(..)
            | PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ }
            | PassMode::Cast { .. } => {
                unreachable!("x86 shouldn't be passing arguments by {:?}", arg.mode)
            }
        };

        // At this point the argument must be a primitive of sorts.
        let unit = arg.layout.homogeneous_aggregate(cx).unwrap().unit().unwrap();
        assert_eq!(unit.size, arg.layout.size);
        // Floats and vectors never go in the integer registers.
        if matches!(unit.kind, RegKind::Float | RegKind::Vector) {
            continue;
        }

        // Number of 32-bit registers the value occupies, rounding up:
        // e.g. a 64-bit value takes (64 + 31) / 32 = 2 registers.
        let size_in_regs = (arg.layout.size.bits() + 31) / 32;

        if size_in_regs == 0 {
            continue;
        }

        if size_in_regs > free_regs {
            break;
        }

        free_regs -= size_in_regs;

        if arg.layout.size.bits() <= 32 && unit.kind == RegKind::Integer {
            attrs.set(ArgAttribute::InReg);
        }

        if free_regs == 0 {
            break;
        }
    }
}

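/// Adjusts the Rust ABI so floats are never returned in x87 registers on
/// x86: loading and storing x87 registers quiets signalling NaNs, so scalar
/// float returns are cast to an SSE vector or integer register, or returned
/// indirectly, instead.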
pub(crate) fn compute_rust_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>, abi: ExternAbi)
where
    Ty: TyAbiInterface<'a, C> + Copy,
    C: HasDataLayout + HasTargetSpec,
{
    if !fn_abi.ret.is_ignore()
        // Intrinsics are not "real" functions, so there is no need to
        // change their ABI.
        && abi != ExternAbi::RustIntrinsic
    {
        let has_float = match fn_abi.ret.layout.backend_repr {
            BackendRepr::Scalar(s) => matches!(s.primitive(), Primitive::Float(_)),
            BackendRepr::ScalarPair(s1, s2) => {
                matches!(s1.primitive(), Primitive::Float(_))
                    || matches!(s2.primitive(), Primitive::Float(_))
            }
            // Anything else is not passed via registers on x86 anyway.
            _ => false,
        };
        if has_float {
            if cx.target_spec().rustc_abi == Some(RustcAbi::X86Sse2)
                && fn_abi.ret.layout.backend_repr.is_scalar()
                && fn_abi.ret.layout.size.bits() <= 128
            {
                // A single scalar that fits in an SSE register, on a target
                // whose ABI guarantees SSE2: return it there, since float
                // operations want the value in SSE registers anyway.
                fn_abi.ret.cast_to(Reg { kind: RegKind::Vector, size: fn_abi.ret.layout.size });
            } else if fn_abi.ret.layout.size <= Primitive::Pointer(AddressSpace::DATA).size(cx) {
                // Pointer-sized or smaller: return in an integer register.
                fn_abi.ret.cast_to(Reg { kind: RegKind::Integer, size: fn_abi.ret.layout.size });
            } else {
                // Wider than a pointer: return indirectly.
                fn_abi.ret.make_indirect();
            }
            return;
        }
    }
}
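
// Call-site sketch (hypothetical, simplified): the generic callconv code is
// expected to pick the flavor from the `extern` ABI before delegating here,
// roughly:
//
//     let flavor = match abi {
//         ExternAbi::Fastcall { .. } | ExternAbi::Vectorcall { .. } => {
//             Flavor::FastcallOrVectorcall
//         }
//         _ => Flavor::General,
//     };
//     compute_abi_info(cx, fn_abi, X86Options { flavor, regparm, reg_struct_return });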