use rustc_abi::{
    BackendRepr, HasDataLayout, Primitive, Reg, RegKind, Size, TyAbiInterface, TyAndLayout,
    Variants,
};

use crate::callconv::{ArgAbi, CastTarget, FnAbi};

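// Register class of a single "eightbyte" (64-bit chunk) of an argument under
// the System V x86_64 ABI. The variants are ordered from most to least
// general, so merging two classes is simply `min` (see `classify` below).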
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
enum Class {
    Int,
    Sse,
    SseUp,
}

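// Marker result: the value does not fit the register classes above and must
// be passed in memory.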
#[derive(Clone, Copy, Debug)]
struct Memory;

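// The largest vector registers are 512 bits wide (zmm), i.e. eight
// eightbytes.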
const LARGEST_VECTOR_SIZE: usize = 512;
const MAX_EIGHTBYTES: usize = LARGEST_VECTOR_SIZE / 64;

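// Classifies an argument into per-eightbyte register classes following the
// System V x86_64 ABI, or `Err(Memory)` if it must be passed on the stack.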
fn classify_arg<'a, Ty, C>(
    cx: &C,
    arg: &ArgAbi<'a, Ty>,
) -> Result<[Option<Class>; MAX_EIGHTBYTES], Memory>
where
    Ty: TyAbiInterface<'a, C> + Copy,
    C: HasDataLayout,
{
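    // Inner recursion: classify `layout`, located at byte offset `off` within
    // the outermost argument, and merge its classes into `cls`.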
    fn classify<'a, Ty, C>(
        cx: &C,
        layout: TyAndLayout<'a, Ty>,
        cls: &mut [Option<Class>],
        off: Size,
    ) -> Result<(), Memory>
    where
        Ty: TyAbiInterface<'a, C> + Copy,
        C: HasDataLayout,
    {
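        // A field that is not aligned to its ABI alignment forces the whole
        // argument into memory; ZSTs cannot affect the classification.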
        if !off.is_aligned(layout.align.abi) {
            if !layout.is_zst() {
                return Err(Memory);
            }
            return Ok(());
        }

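        // The class contributed by this layout itself; aggregates instead
        // recurse into their fields (and variants) and return early.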
        let mut c = match layout.backend_repr {
            BackendRepr::Uninhabited => return Ok(()),

            BackendRepr::Scalar(scalar) => match scalar.primitive() {
                Primitive::Int(..) | Primitive::Pointer(_) => Class::Int,
                Primitive::Float(_) => Class::Sse,
            },

            BackendRepr::Vector { .. } => Class::Sse,

            BackendRepr::ScalarPair(..) | BackendRepr::Memory { .. } => {
                for i in 0..layout.fields.count() {
                    let field_off = off + layout.fields.offset(i);
                    classify(cx, layout.field(cx, i), cls, field_off)?;
                }

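                // Classify each variant at the same starting offset and merge
                // the results, treating the variants like union fields.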
                match &layout.variants {
                    Variants::Single { .. } | Variants::Empty => {}
                    Variants::Multiple { variants, .. } => {
                        for variant_idx in variants.indices() {
                            classify(cx, layout.for_variant(cx, variant_idx), cls, off)?;
                        }
                    }
                }

                return Ok(());
            }
        };

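        // Record `c` in every eightbyte this value overlaps, merging with any
        // existing class; `min` yields the more general of the two.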
        let first = (off.bytes() / 8) as usize;
        let last = ((off.bytes() + layout.size.bytes() - 1) / 8) as usize;
        for cls in &mut cls[first..=last] {
            *cls = Some(cls.map_or(c, |old| old.min(c)));

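            // Everything after the first eightbyte of an SSE value is the
            // upper half of a vector register, i.e. SseUp.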
            if c == Class::Sse {
                c = Class::SseUp;
            }
        }

        Ok(())
    }

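    // Values spanning more than MAX_EIGHTBYTES eightbytes can never be passed
    // in registers.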
    let n = ((arg.layout.size.bytes() + 7) / 8) as usize;
    if n > MAX_EIGHTBYTES {
        return Err(Memory);
    }

    let mut cls = [None; MAX_EIGHTBYTES];
    classify(cx, arg.layout, &mut cls, Size::ZERO)?;
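    // Post-merger cleanup from the ABI: a value larger than two eightbytes is
    // only register-eligible as a single SSE vector (one Sse eightbyte
    // followed entirely by SseUp), otherwise it goes to memory; in smaller
    // values, an SseUp with no preceding Sse is downgraded to Sse.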
    if n > 2 {
        if cls[0] != Some(Class::Sse) {
            return Err(Memory);
        }
        if cls[1..n].iter().any(|&c| c != Some(Class::SseUp)) {
            return Err(Memory);
        }
    } else {
        let mut i = 0;
        while i < n {
            if cls[i] == Some(Class::SseUp) {
                cls[i] = Some(Class::Sse);
            } else if cls[i] == Some(Class::Sse) {
                i += 1;
                while i != n && cls[i] == Some(Class::SseUp) {
                    i += 1;
                }
            } else {
                i += 1;
            }
        }
    }

    Ok(cls)
}

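// Picks the register for the component starting at eightbyte `*i`, advancing
// `*i` past every eightbyte the component consumes. `size` is the size of the
// remaining part of the value, used to shrink the final register.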
fn reg_component(cls: &[Option<Class>], i: &mut usize, size: Size) -> Option<Reg> {
    if *i >= cls.len() {
        return None;
    }

    match cls[*i] {
        None => None,
        Some(Class::Int) => {
            *i += 1;
            Some(if size.bytes() < 8 { Reg { kind: RegKind::Integer, size } } else { Reg::i64() })
        }
        Some(Class::Sse) => {
            let vec_len =
                1 + cls[*i + 1..].iter().take_while(|&&c| c == Some(Class::SseUp)).count();
            *i += vec_len;
            Some(if vec_len == 1 {
                match size.bytes() {
                    4 => Reg::f32(),
                    _ => Reg::f64(),
                }
            } else {
                Reg { kind: RegKind::Vector, size: Size::from_bytes(8) * (vec_len as u64) }
            })
        }
        Some(c) => unreachable!("reg_component: unhandled class {:?}", c),
    }
}

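// Assembles the `CastTarget` for a register-classified value: the register
// for the low component, plus a second register if the value extends beyond
// it. The assert checks that every eightbyte was consumed.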
fn cast_target(cls: &[Option<Class>], size: Size) -> CastTarget {
    let mut i = 0;
    let lo = reg_component(cls, &mut i, size).unwrap();
    let offset = Size::from_bytes(8) * (i as u64);
    let mut target = CastTarget::from(lo);
    if size > offset {
        if let Some(hi) = reg_component(cls, &mut i, size - offset) {
            target = CastTarget::pair(lo, hi);
        }
    }
    assert_eq!(reg_component(cls, &mut i, Size::ZERO), None);
    target
}

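// Register budget for SysV x86_64: six integer argument registers (RDI, RSI,
// RDX, RCX, R8, R9) and eight SSE argument registers (XMM0 through XMM7).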
const MAX_INT_REGS: usize = 6;
const MAX_SSE_REGS: usize = 8;

pub(crate) fn compute_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>)
where
    Ty: TyAbiInterface<'a, C> + Copy,
    C: HasDataLayout,
{
    let mut int_regs = MAX_INT_REGS;
    let mut sse_regs = MAX_SSE_REGS;

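    // Classifies a single argument or the return value and applies the
    // result, drawing on the shared register budget above.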
    let mut x86_64_arg_or_ret = |arg: &mut ArgAbi<'a, Ty>, is_arg: bool| {
        if !arg.layout.is_sized() {
            // Not something we can classify; leave it untouched.
            return;
        }
        let mut cls_or_mem = classify_arg(cx, arg);

        if is_arg {
            if let Ok(cls) = cls_or_mem {
                let mut needed_int = 0;
                let mut needed_sse = 0;
                for c in cls {
                    match c {
                        Some(Class::Int) => needed_int += 1,
                        Some(Class::Sse) => needed_sse += 1,
                        _ => {}
                    }
                }
                match (int_regs.checked_sub(needed_int), sse_regs.checked_sub(needed_sse)) {
                    (Some(left_int), Some(left_sse)) => {
                        int_regs = left_int;
                        sse_regs = left_sse;
                    }
                    _ => {
                        // Not enough registers left for this argument: only
                        // aggregates are explicitly demoted to memory here;
                        // scalars are left for the backend to spill itself.
                        if arg.layout.is_aggregate() {
                            cls_or_mem = Err(Memory);
                        }
                    }
                }
            }
        }

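        // Apply the classification: memory operands are passed on the stack
        // (returns become a hidden indirect `sret` pointer); register
        // operands are cast to the register components computed above.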
        match cls_or_mem {
            Err(Memory) => {
                if is_arg {
                    arg.pass_by_stack_offset(None);
                } else {
                    // Returning in memory means a hidden `sret` pointer is
                    // passed as the first argument, consuming one integer
                    // register. The return value is classified before any
                    // argument, so the full budget must still be intact here.
                    arg.make_indirect();
                    assert_eq!(int_regs, MAX_INT_REGS);
                    int_regs -= 1;
                }
            }
            Ok(ref cls) => {
                if arg.layout.is_aggregate() {
                    let size = arg.layout.size;
                    arg.cast_to(cast_target(cls, size));
                } else {
                    arg.extend_integer_width_to(32);
                }
            }
        }
    };

    if !fn_abi.ret.is_ignore() {
        x86_64_arg_or_ret(&mut fn_abi.ret, false);
    }

    for arg in fn_abi.args.iter_mut() {
        if arg.is_ignore() {
            continue;
        }
        x86_64_arg_or_ret(arg, true);
    }
}