use rustc_abi::{
    AddressSpace, Align, BackendRepr, HasDataLayout, Primitive, Reg, RegKind, TyAndLayout,
};

use crate::callconv::{ArgAttribute, FnAbi, PassMode, TyAbiInterface};
use crate::spec::{HasTargetSpec, RustcAbi};
7
/// Which x86 (32-bit) calling-convention family is being computed.
#[derive(PartialEq)]
pub(crate) enum Flavor {
    /// The default calling convention.
    General,
    /// `fastcall`/`vectorcall`: eligible integer arguments are marked `InReg`
    /// (see `fill_inregs`) so they can be passed in registers.
    FastcallOrVectorcall,
}
13
14pub(crate) struct X86Options {
15 pub flavor: Flavor,
16 pub regparm: Option<u32>,
17 pub reg_struct_return: bool,
18}
19
20pub(crate) fn compute_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>, opts: X86Options)
21where
22 Ty: TyAbiInterface<'a, C> + Copy,
23 C: HasDataLayout + HasTargetSpec,
24{
25 if !fn_abi.ret.is_ignore() {
26 if fn_abi.ret.layout.is_aggregate() && fn_abi.ret.layout.is_sized() {
27 let t = cx.target_spec();
35 if t.abi_return_struct_as_int || opts.reg_struct_return {
36 if fn_abi.ret.layout.is_single_fp_element(cx) {
39 match fn_abi.ret.layout.size.bytes() {
40 4 => fn_abi.ret.cast_to(Reg::f32()),
41 8 => fn_abi.ret.cast_to(Reg::f64()),
42 _ => fn_abi.ret.make_indirect(),
43 }
44 } else {
45 match fn_abi.ret.layout.size.bytes() {
46 1 => fn_abi.ret.cast_to(Reg::i8()),
47 2 => fn_abi.ret.cast_to(Reg::i16()),
48 4 => fn_abi.ret.cast_to(Reg::i32()),
49 8 => fn_abi.ret.cast_to(Reg::i64()),
50 _ => fn_abi.ret.make_indirect(),
51 }
52 }
53 } else {
54 fn_abi.ret.make_indirect();
55 }
56 } else {
57 fn_abi.ret.extend_integer_width_to(32);
58 }
59 }
60
61 for arg in fn_abi.args.iter_mut() {
62 if arg.is_ignore() || !arg.layout.is_sized() {
63 continue;
64 }
65
66 if arg.layout.pass_indirectly_in_non_rustic_abis(cx) {
67 arg.make_indirect();
68 continue;
69 }
70
71 let t = cx.target_spec();
72 let align_4 = Align::from_bytes(4).unwrap();
73 let align_16 = Align::from_bytes(16).unwrap();
74
75 if arg.layout.is_aggregate() {
76 fn contains_vector<'a, Ty, C>(cx: &C, layout: TyAndLayout<'a, Ty>) -> bool
91 where
92 Ty: TyAbiInterface<'a, C> + Copy,
93 {
94 match layout.backend_repr {
95 BackendRepr::Scalar(_) | BackendRepr::ScalarPair(..) => false,
96 BackendRepr::SimdVector { .. } => true,
97 BackendRepr::Memory { .. } => {
98 for i in 0..layout.fields.count() {
99 if contains_vector(cx, layout.field(cx, i)) {
100 return true;
101 }
102 }
103 false
104 }
105 BackendRepr::SimdScalableVector { .. } => {
106 {
::core::panicking::panic_fmt(format_args!("scalable vectors are unsupported"));
}panic!("scalable vectors are unsupported")
107 }
108 }
109 }
110
111 let byval_align = if arg.layout.align.abi < align_4 {
112 align_4
114 } else if t.is_like_darwin && contains_vector(cx, arg.layout) {
115 align_16
117 } else {
118 align_4
120 };
121
122 arg.pass_by_stack_offset(Some(byval_align));
123 } else {
124 arg.extend_integer_width_to(32);
125 }
126 }
127
128 fill_inregs(cx, fn_abi, opts, false);
129}
130
131pub(crate) fn fill_inregs<'a, Ty, C>(
132 cx: &C,
133 fn_abi: &mut FnAbi<'a, Ty>,
134 opts: X86Options,
135 rust_abi: bool,
136) where
137 Ty: TyAbiInterface<'a, C> + Copy,
138{
139 if opts.flavor != Flavor::FastcallOrVectorcall && opts.regparm.is_none_or(|x| x == 0) {
140 return;
141 }
142 let mut free_regs = opts.regparm.unwrap_or(2).into();
153
154 let has_casts = fn_abi.args.iter().any(|arg| #[allow(non_exhaustive_omitted_patterns)] match arg.mode {
PassMode::Cast { .. } => true,
_ => false,
}matches!(arg.mode, PassMode::Cast { .. }));
157 if has_casts && rust_abi {
158 return;
159 }
160
161 for arg in fn_abi.args.iter_mut() {
162 let attrs = match arg.mode {
163 PassMode::Ignore | PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ } => {
164 continue;
165 }
166 PassMode::Direct(ref mut attrs) => attrs,
167 PassMode::Pair(..)
168 | PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ }
169 | PassMode::Cast { .. } => {
170 {
::core::panicking::panic_fmt(format_args!("internal error: entered unreachable code: {0}",
format_args!("x86 shouldn\'t be passing arguments by {0:?}",
arg.mode)));
}unreachable!("x86 shouldn't be passing arguments by {:?}", arg.mode)
171 }
172 };
173
174 let unit = arg.layout.homogeneous_aggregate(cx).unwrap().unit().unwrap();
176 match (&unit.size, &arg.layout.size) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val, &*right_val,
::core::option::Option::None);
}
}
};assert_eq!(unit.size, arg.layout.size);
177 if #[allow(non_exhaustive_omitted_patterns)] match unit.kind {
RegKind::Float | RegKind::Vector { .. } => true,
_ => false,
}matches!(unit.kind, RegKind::Float | RegKind::Vector { .. }) {
178 continue;
179 }
180
181 let size_in_regs = arg.layout.size.bits().div_ceil(32);
182
183 if size_in_regs == 0 {
184 continue;
185 }
186
187 if size_in_regs > free_regs {
188 break;
189 }
190
191 free_regs -= size_in_regs;
192
193 if arg.layout.size.bits() <= 32 && unit.kind == RegKind::Integer {
194 attrs.set(ArgAttribute::InReg);
195 }
196
197 if free_regs == 0 {
198 break;
199 }
200 }
201}
202
203pub(crate) fn compute_rust_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>)
204where
205 Ty: TyAbiInterface<'a, C> + Copy,
206 C: HasDataLayout + HasTargetSpec,
207{
208 if !fn_abi.ret.is_ignore() {
212 let has_float = match fn_abi.ret.layout.backend_repr {
213 BackendRepr::Scalar(s) => #[allow(non_exhaustive_omitted_patterns)] match s.primitive() {
Primitive::Float(_) => true,
_ => false,
}matches!(s.primitive(), Primitive::Float(_)),
214 BackendRepr::ScalarPair(s1, s2) => {
215 #[allow(non_exhaustive_omitted_patterns)] match s1.primitive() {
Primitive::Float(_) => true,
_ => false,
}matches!(s1.primitive(), Primitive::Float(_))
216 || #[allow(non_exhaustive_omitted_patterns)] match s2.primitive() {
Primitive::Float(_) => true,
_ => false,
}matches!(s2.primitive(), Primitive::Float(_))
217 }
218 _ => false, };
220 if has_float {
221 if cx.target_spec().rustc_abi == Some(RustcAbi::X86Sse2)
222 && fn_abi.ret.layout.backend_repr.is_scalar()
223 && fn_abi.ret.layout.size.bits() <= 128
224 {
225 fn_abi.ret.cast_to(Reg::opaque_vector(fn_abi.ret.layout.size));
229 } else if fn_abi.ret.layout.size <= Primitive::Pointer(AddressSpace::ZERO).size(cx) {
230 fn_abi.ret.cast_to(Reg { kind: RegKind::Integer, size: fn_abi.ret.layout.size });
232 } else {
233 fn_abi.ret.make_indirect();
235 }
236 return;
237 }
238 }
239}