//! Calling-convention (ABI) classification for the s390x target.
//! (Originally `rustc_target/callconv/s390x.rs`.)
use rustc_abi::{BackendRepr, HasDataLayout, TyAbiInterface};

use crate::callconv::{ArgAbi, FnAbi, Reg, RegKind};
use crate::spec::HasTargetSpec;
9fn classify_ret<Ty>(ret: &mut ArgAbi<'_, Ty>) {
10 let size = ret.layout.size;
11 if size.bits() <= 128 && matches!(ret.layout.backend_repr, BackendRepr::Vector { .. }) {
12 return;
13 }
14 if !ret.layout.is_aggregate() && size.bits() <= 64 {
15 ret.extend_integer_width_to(64);
16 } else {
17 ret.make_indirect();
18 }
19}
20
21fn classify_arg<'a, Ty, C>(cx: &C, arg: &mut ArgAbi<'a, Ty>)
22where
23 Ty: TyAbiInterface<'a, C> + Copy,
24 C: HasDataLayout + HasTargetSpec,
25{
26 if !arg.layout.is_sized() {
27 return;
29 }
30 if arg.is_ignore() {
31 if cx.target_spec().os == "linux"
33 && matches!(&*cx.target_spec().env, "gnu" | "musl" | "uclibc")
34 && arg.layout.is_zst()
35 {
36 arg.make_indirect_from_ignore();
37 }
38 return;
39 }
40
41 let size = arg.layout.size;
42 if size.bits() <= 128 {
43 if let BackendRepr::Vector { .. } = arg.layout.backend_repr {
44 return;
46 }
47
48 if arg.layout.is_single_vector_element(cx, size) {
49 arg.cast_to(Reg { kind: RegKind::Vector, size });
51 return;
52 }
53 }
54 if !arg.layout.is_aggregate() && size.bits() <= 64 {
55 arg.extend_integer_width_to(64);
56 return;
57 }
58
59 if arg.layout.is_single_fp_element(cx) {
60 match size.bytes() {
61 4 => arg.cast_to(Reg::f32()),
62 8 => arg.cast_to(Reg::f64()),
63 _ => arg.make_indirect(),
64 }
65 } else {
66 match size.bytes() {
67 1 => arg.cast_to(Reg::i8()),
68 2 => arg.cast_to(Reg::i16()),
69 4 => arg.cast_to(Reg::i32()),
70 8 => arg.cast_to(Reg::i64()),
71 _ => arg.make_indirect(),
72 }
73 }
74}
75
76pub(crate) fn compute_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>)
77where
78 Ty: TyAbiInterface<'a, C> + Copy,
79 C: HasDataLayout + HasTargetSpec,
80{
81 if !fn_abi.ret.is_ignore() {
82 classify_ret(&mut fn_abi.ret);
83 }
84
85 for arg in fn_abi.args.iter_mut() {
86 classify_arg(cx, arg);
87 }
88}