rustc_target/callconv/
aarch64.rs
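//! AArch64 (AAPCS64) argument and return-value classification for rustc_target, covering the
//! standard AAPCS ABI plus the Darwin and Windows variants selected via [`AbiKind`].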

use std::iter;

use rustc_abi::{BackendRepr, HasDataLayout, Primitive, TyAbiInterface};

use crate::callconv::{ArgAbi, FnAbi, Reg, RegKind, Uniform};
use crate::spec::{Abi, HasTargetSpec, Target};

/// Indicates the variant of the AArch64 ABI we are compiling for.
/// Used to accommodate Apple and Microsoft's deviations from the usual AAPCS ABI.
///
/// Corresponds to Clang's `AArch64ABIInfo::ABIKind`.
#[derive(Copy, Clone, Debug, PartialEq)]
pub(crate) enum AbiKind {
    AAPCS,
    DarwinPCS,
    Win64,
}

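/// Checks whether `arg` is a homogeneous floating-point or short-vector aggregate as defined
/// by AAPCS64: all members share one floating-point or vector unit type and the aggregate
/// spans at most four such units. For example, a struct of three `f64` fields qualifies
/// (unit `f64`, size 24 bytes), while a struct mixing `f32` and `i32` fields does not.
/// Returns the corresponding `Uniform` cast target if the aggregate qualifies.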
#[tracing::instrument(skip(cx), level = "debug")]
fn is_homogeneous_aggregate<'a, Ty, C>(cx: &C, arg: &mut ArgAbi<'a, Ty>) -> Option<Uniform>
where
    Ty: TyAbiInterface<'a, C> + Copy,
    C: HasDataLayout + HasTargetSpec,
{
    arg.layout.homogeneous_aggregate(cx).ok().and_then(|ha| ha.unit()).and_then(|unit| {
        let size = arg.layout.size;

        // Ensure we have at most four uniquely addressable members.
        if size > unit.size.checked_mul(4, cx).unwrap() {
            return None;
        }

        let valid_unit = match unit.kind {
            RegKind::Integer => false,
            // The softfloat ABI treats floats like integers, so they
            // do not get homogeneous aggregate treatment.
            RegKind::Float => cx.target_spec().abi != Abi::SoftFloat,
            RegKind::Vector => size.bits() == 64 || size.bits() == 128,
        };

        valid_unit.then_some(Uniform::consecutive(unit, size))
    })
}

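/// On softfloat targets, rewrites float arguments and return values so that they are passed
/// in integer registers (or indirectly, for pairs wider than a pointer) instead of letting
/// LLVM pick a register class based on target features. For instance, an `f64` scalar is
/// cast to a 64-bit integer register. No-op on hardfloat targets.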
fn softfloat_float_abi<Ty>(target: &Target, arg: &mut ArgAbi<'_, Ty>) {
    if target.abi != Abi::SoftFloat {
        return;
    }
    // Do *not* use the float registers for passing arguments, as that would make LLVM pick the ABI,
    // and its choice depends on whether `neon` instructions are enabled. Instead, we follow the
    // AAPCS "softfloat" ABI, which specifies that floats should be passed as equivalently-sized
    // integers. Nominally this only exists for "R" profile chips, but sometimes people don't want
    // to use hardfloats even if the hardware supports them, so we do this for all softfloat
    // targets.
    if let BackendRepr::Scalar(s) = arg.layout.backend_repr
        && let Primitive::Float(f) = s.primitive()
    {
        arg.cast_to(Reg { kind: RegKind::Integer, size: f.size() });
    } else if let BackendRepr::ScalarPair(s1, s2) = arg.layout.backend_repr
        && (matches!(s1.primitive(), Primitive::Float(_))
            || matches!(s2.primitive(), Primitive::Float(_)))
    {
        // This case can only be reached for the Rust ABI, so we can do whatever we want here as
        // long as it does not depend on target features (i.e., as long as we do not use float
        // registers). So we pass small things in integer registers and large things via pointer
        // indirection. This means we lose the nice "pass it as two arguments" optimization, but we
        // currently just have no way to combine a `PassMode::Cast` with that optimization (and we
        // need a cast since we want to pass the float as an int).
        if arg.layout.size.bits() <= target.pointer_width.into() {
            arg.cast_to(Reg { kind: RegKind::Integer, size: arg.layout.size });
        } else {
            arg.make_indirect();
        }
    }
}

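/// Classifies how a return value is passed back under the ABI variant selected by `kind`:
/// homogeneous float/vector aggregates are returned in SIMD/FP registers, other aggregates
/// of up to 128 bits are returned in general-purpose registers, and anything larger is
/// returned indirectly through a caller-provided buffer.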
#[tracing::instrument(skip(cx), level = "debug")]
fn classify_ret<'a, Ty, C>(cx: &C, ret: &mut ArgAbi<'a, Ty>, kind: AbiKind)
where
    Ty: TyAbiInterface<'a, C> + Copy,
    C: HasDataLayout + HasTargetSpec,
{
    if !ret.layout.is_sized() || ret.layout.is_scalable_vector() {
        // Not touching this...
        return;
    }
    if !ret.layout.is_aggregate() {
        if kind == AbiKind::DarwinPCS {
            // On Darwin, when returning an i8/i16, it must be sign-extended to 32 bits,
            // and likewise a u8/u16 must be zero-extended to 32 bits.
            // See also: <https://developer.apple.com/documentation/xcode/writing-arm64-code-for-apple-platforms#Pass-Arguments-to-Functions-Correctly>
            ret.extend_integer_width_to(32)
        }
        softfloat_float_abi(cx.target_spec(), ret);
        return;
    }
    if let Some(uniform) = is_homogeneous_aggregate(cx, ret) {
        ret.cast_to(uniform);
        return;
    }
    let size = ret.layout.size;
    let bits = size.bits();
    if bits <= 128 {
        ret.cast_to(Uniform::new(Reg::i64(), size));
        return;
    }
    ret.make_indirect();
}

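/// Classifies how a single argument is passed under the ABI variant selected by `kind`:
/// homogeneous float/vector aggregates go in SIMD/FP registers, other aggregates of up to
/// 128 bits are passed in one or two general-purpose registers (as an `i128` when the
/// alignment requires it), and larger aggregates are passed indirectly by pointer.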
#[tracing::instrument(skip(cx), level = "debug")]
fn classify_arg<'a, Ty, C>(cx: &C, arg: &mut ArgAbi<'a, Ty>, kind: AbiKind)
where
    Ty: TyAbiInterface<'a, C> + Copy,
    C: HasDataLayout + HasTargetSpec,
{
    if !arg.layout.is_sized() || arg.layout.is_scalable_vector() {
        // Not touching this...
        return;
    }
    if arg.layout.pass_indirectly_in_non_rustic_abis(cx) {
        arg.make_indirect();
        return;
    }
    if !arg.layout.is_aggregate() {
        if kind == AbiKind::DarwinPCS {
            // On Darwin, when passing an i8/i16, it must be sign-extended to 32 bits,
            // and likewise a u8/u16 must be zero-extended to 32 bits.
            // See also: <https://developer.apple.com/documentation/xcode/writing-arm64-code-for-apple-platforms#Pass-Arguments-to-Functions-Correctly>
            arg.extend_integer_width_to(32);
        }
        softfloat_float_abi(cx.target_spec(), arg);

        return;
    }
    if let Some(uniform) = is_homogeneous_aggregate(cx, arg) {
        arg.cast_to(uniform);
        return;
    }
    let size = arg.layout.size;
    let align = if kind == AbiKind::AAPCS {
        // When passing small aggregates by value, the AAPCS ABI mandates using the unadjusted
        // alignment of the type (not including `repr(align)`).
        // This matches the behavior of `AArch64ABIInfo::classifyArgumentType` in Clang.
        // See: <https://github.com/llvm/llvm-project/blob/5e691a1c9b0ad22689d4a434ddf4fed940e58dec/clang/lib/CodeGen/TargetInfo.cpp#L5816-L5823>
        arg.layout.unadjusted_abi_align
    } else {
        arg.layout.align.abi
    };
    if size.bits() <= 128 {
        if align.bits() == 128 {
            arg.cast_to(Uniform::new(Reg::i128(), size));
        } else {
            arg.cast_to(Uniform::new(Reg::i64(), size));
        }
        return;
    }
    arg.make_indirect();
}

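/// Applies the AArch64 C calling-convention rules to a whole function signature, classifying
/// the return value and every non-ignored argument for the ABI variant selected by `kind`.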
pub(crate) fn compute_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>, kind: AbiKind)
where
    Ty: TyAbiInterface<'a, C> + Copy,
    C: HasDataLayout + HasTargetSpec,
{
    if !fn_abi.ret.is_ignore() {
        classify_ret(cx, &mut fn_abi.ret, kind);
    }

    for arg in fn_abi.args.iter_mut() {
        if arg.is_ignore() {
            continue;
        }
        classify_arg(cx, arg, kind);
    }
}

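/// Applies the AArch64-specific adjustments that also affect the Rust ABI: currently just
/// the softfloat rewrite of float arguments and return values into integer registers.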
pub(crate) fn compute_rust_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>)
where
    Ty: TyAbiInterface<'a, C> + Copy,
    C: HasDataLayout + HasTargetSpec,
{
    for arg in fn_abi.args.iter_mut().chain(iter::once(&mut fn_abi.ret)) {
        softfloat_float_abi(cx.target_spec(), arg);
    }
}