// rustc_ty_utils/abi.rs

1use std::assert_matches::assert_matches;
2use std::iter;
3
4use rustc_abi::Primitive::Pointer;
5use rustc_abi::{BackendRepr, ExternAbi, PointerKind, Scalar, Size};
6use rustc_hir as hir;
7use rustc_hir::lang_items::LangItem;
8use rustc_middle::bug;
9use rustc_middle::middle::deduced_param_attrs::DeducedParamAttrs;
10use rustc_middle::query::Providers;
11use rustc_middle::ty::layout::{
12    FnAbiError, HasTyCtxt, HasTypingEnv, LayoutCx, LayoutOf, TyAndLayout, fn_can_unwind,
13};
14use rustc_middle::ty::{self, InstanceKind, Ty, TyCtxt};
15use rustc_session::config::OptLevel;
16use rustc_span::DUMMY_SP;
17use rustc_span::def_id::DefId;
18use rustc_target::callconv::{
19    AbiMap, ArgAbi, ArgAttribute, ArgAttributes, ArgExtension, FnAbi, PassMode,
20};
21use tracing::debug;
22
23pub(crate) fn provide(providers: &mut Providers) {
24    *providers = Providers { fn_abi_of_fn_ptr, fn_abi_of_instance, ..*providers };
25}
26
// NOTE(eddyb) this is private to avoid using it from outside of
// `fn_abi_of_instance` - any other uses are either too high-level
// for `Instance` (e.g. typeck would use `Ty::fn_sig` instead),
// or should go through `FnAbi` instead, to avoid losing any
// adjustments `fn_abi_of_instance` might be performing.
/// Computes the monomorphic `FnSig` that `fn_abi_of_instance` lowers to an
/// ABI, applying instance-specific signature rewrites (TLS shims, vtable
/// shims, coroutine-closure shims, and the coroutine resume/poll/next
/// signatures) that a plain `tcx.fn_sig` would not reflect.
#[tracing::instrument(level = "debug", skip(tcx, typing_env))]
fn fn_sig_for_fn_abi<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance: ty::Instance<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
) -> ty::FnSig<'tcx> {
    // A thread-local shim takes no arguments and returns a pointer to the
    // underlying thread-local static; it has no HIR signature to consult.
    if let InstanceKind::ThreadLocalShim(..) = instance.def {
        return tcx.mk_fn_sig(
            [],
            tcx.thread_local_ptr_ty(instance.def_id()),
            false,
            hir::Safety::Safe,
            rustc_abi::ExternAbi::Rust,
        );
    }

    let ty = instance.ty(tcx, typing_env);
    match *ty.kind() {
        ty::FnDef(def_id, args) => {
            let mut sig = tcx
                .instantiate_bound_regions_with_erased(tcx.fn_sig(def_id).instantiate(tcx, args));

            // Modify `fn(self, ...)` to `fn(self: *mut Self, ...)`.
            if let ty::InstanceKind::VTableShim(..) = instance.def {
                let mut inputs_and_output = sig.inputs_and_output.to_vec();
                inputs_and_output[0] = Ty::new_mut_ptr(tcx, inputs_and_output[0]);
                sig.inputs_and_output = tcx.mk_type_list(&inputs_and_output);
            }

            sig
        }
        ty::Closure(def_id, args) => {
            // The closure's call signature does not include the environment;
            // prepend the (by-value/by-ref) closure env as the first input.
            let sig = tcx.instantiate_bound_regions_with_erased(args.as_closure().sig());
            let env_ty = tcx.closure_env_ty(
                Ty::new_closure(tcx, def_id, args),
                args.as_closure().kind(),
                tcx.lifetimes.re_erased,
            );

            tcx.mk_fn_sig(
                iter::once(env_ty).chain(sig.inputs().iter().cloned()),
                sig.output(),
                sig.c_variadic,
                sig.safety,
                sig.abi,
            )
        }
        ty::CoroutineClosure(def_id, args) => {
            let coroutine_ty = Ty::new_coroutine_closure(tcx, def_id, args);
            let sig = args.as_coroutine_closure().coroutine_closure_sig();

            // When this `CoroutineClosure` comes from a `ConstructCoroutineInClosureShim`,
            // make sure we respect the `target_kind` in that shim.
            // FIXME(async_closures): This shouldn't be needed, and we should be populating
            // a separate def-id for these bodies.
            let mut coroutine_kind = args.as_coroutine_closure().kind();

            let env_ty =
                if let InstanceKind::ConstructCoroutineInClosureShim { receiver_by_ref, .. } =
                    instance.def
                {
                    // The shim constructs a by-move coroutine, so the returned
                    // coroutine below must be instantiated with the `FnOnce` kind.
                    coroutine_kind = ty::ClosureKind::FnOnce;

                    // Implementations of `FnMut` and `Fn` for coroutine-closures
                    // still take their receiver by ref.
                    if receiver_by_ref {
                        Ty::new_imm_ref(tcx, tcx.lifetimes.re_erased, coroutine_ty)
                    } else {
                        coroutine_ty
                    }
                } else {
                    tcx.closure_env_ty(coroutine_ty, coroutine_kind, tcx.lifetimes.re_erased)
                };

            let sig = tcx.instantiate_bound_regions_with_erased(sig);

            // The output is the coroutine that the closure body returns,
            // instantiated for the (possibly overridden) coroutine kind.
            tcx.mk_fn_sig(
                iter::once(env_ty).chain([sig.tupled_inputs_ty]),
                sig.to_coroutine_given_kind_and_upvars(
                    tcx,
                    args.as_coroutine_closure().parent_args(),
                    tcx.coroutine_for_closure(def_id),
                    coroutine_kind,
                    tcx.lifetimes.re_erased,
                    args.as_coroutine_closure().tupled_upvars_ty(),
                    args.as_coroutine_closure().coroutine_captures_by_ref_ty(),
                ),
                sig.c_variadic,
                sig.safety,
                sig.abi,
            )
        }
        ty::Coroutine(did, args) => {
            let coroutine_kind = tcx.coroutine_kind(did).unwrap();
            let sig = args.as_coroutine().sig();

            // `&mut self`, possibly wrapped in `Pin` below depending on the
            // coroutine flavor.
            let env_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, ty);

            let pin_did = tcx.require_lang_item(LangItem::Pin, DUMMY_SP);
            let pin_adt_ref = tcx.adt_def(pin_did);
            let pin_args = tcx.mk_args(&[env_ty.into()]);
            let env_ty = match coroutine_kind {
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _) => {
                    // Iterator::next doesn't accept a pinned argument,
                    // unlike for all other coroutine kinds.
                    env_ty
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _)
                | hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _)
                | hir::CoroutineKind::Coroutine(_) => Ty::new_adt(tcx, pin_adt_ref, pin_args),
            };

            // The `FnSig` and the `ret_ty` here is for a coroutines main
            // `Coroutine::resume(...) -> CoroutineState` function in case we
            // have an ordinary coroutine, the `Future::poll(...) -> Poll`
            // function in case this is a special coroutine backing an async construct
            // or the `Iterator::next(...) -> Option` function in case this is a
            // special coroutine backing a gen construct.
            let (resume_ty, ret_ty) = match coroutine_kind {
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _) => {
                    // The signature should be `Future::poll(_, &mut Context<'_>) -> Poll<Output>`
                    assert_eq!(sig.yield_ty, tcx.types.unit);

                    let poll_did = tcx.require_lang_item(LangItem::Poll, DUMMY_SP);
                    let poll_adt_ref = tcx.adt_def(poll_did);
                    let poll_args = tcx.mk_args(&[sig.return_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, poll_adt_ref, poll_args);

                    // We have to replace the `ResumeTy` that is used for type and borrow checking
                    // with `&mut Context<'_>` which is used in codegen.
                    #[cfg(debug_assertions)]
                    {
                        if let ty::Adt(resume_ty_adt, _) = sig.resume_ty.kind() {
                            let expected_adt =
                                tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, DUMMY_SP));
                            assert_eq!(*resume_ty_adt, expected_adt);
                        } else {
                            panic!("expected `ResumeTy`, found `{:?}`", sig.resume_ty);
                        };
                    }
                    let context_mut_ref = Ty::new_task_context(tcx);

                    (Some(context_mut_ref), ret_ty)
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _) => {
                    // The signature should be `Iterator::next(_) -> Option<Yield>`
                    let option_did = tcx.require_lang_item(LangItem::Option, DUMMY_SP);
                    let option_adt_ref = tcx.adt_def(option_did);
                    let option_args = tcx.mk_args(&[sig.yield_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, option_adt_ref, option_args);

                    assert_eq!(sig.return_ty, tcx.types.unit);
                    assert_eq!(sig.resume_ty, tcx.types.unit);

                    (None, ret_ty)
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _) => {
                    // The signature should be
                    // `AsyncIterator::poll_next(_, &mut Context<'_>) -> Poll<Option<Output>>`
                    assert_eq!(sig.return_ty, tcx.types.unit);

                    // Yield type is already `Poll<Option<yield_ty>>`
                    let ret_ty = sig.yield_ty;

                    // We have to replace the `ResumeTy` that is used for type and borrow checking
                    // with `&mut Context<'_>` which is used in codegen.
                    #[cfg(debug_assertions)]
                    {
                        if let ty::Adt(resume_ty_adt, _) = sig.resume_ty.kind() {
                            let expected_adt =
                                tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, DUMMY_SP));
                            assert_eq!(*resume_ty_adt, expected_adt);
                        } else {
                            panic!("expected `ResumeTy`, found `{:?}`", sig.resume_ty);
                        };
                    }
                    let context_mut_ref = Ty::new_task_context(tcx);

                    (Some(context_mut_ref), ret_ty)
                }
                hir::CoroutineKind::Coroutine(_) => {
                    // The signature should be `Coroutine::resume(_, Resume) -> CoroutineState<Yield, Return>`
                    let state_did = tcx.require_lang_item(LangItem::CoroutineState, DUMMY_SP);
                    let state_adt_ref = tcx.adt_def(state_did);
                    let state_args = tcx.mk_args(&[sig.yield_ty.into(), sig.return_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, state_adt_ref, state_args);

                    (Some(sig.resume_ty), ret_ty)
                }
            };

            if let Some(resume_ty) = resume_ty {
                tcx.mk_fn_sig(
                    [env_ty, resume_ty],
                    ret_ty,
                    false,
                    hir::Safety::Safe,
                    rustc_abi::ExternAbi::Rust,
                )
            } else {
                // `Iterator::next` doesn't have a `resume` argument.
                tcx.mk_fn_sig(
                    [env_ty],
                    ret_ty,
                    false,
                    hir::Safety::Safe,
                    rustc_abi::ExternAbi::Rust,
                )
            }
        }
        _ => bug!("unexpected type {:?} in Instance::fn_sig", ty),
    }
}
245
246fn fn_abi_of_fn_ptr<'tcx>(
247    tcx: TyCtxt<'tcx>,
248    query: ty::PseudoCanonicalInput<'tcx, (ty::PolyFnSig<'tcx>, &'tcx ty::List<Ty<'tcx>>)>,
249) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
250    let ty::PseudoCanonicalInput { typing_env, value: (sig, extra_args) } = query;
251    fn_abi_new_uncached(
252        &LayoutCx::new(tcx, typing_env),
253        tcx.instantiate_bound_regions_with_erased(sig),
254        extra_args,
255        None,
256    )
257}
258
259fn fn_abi_of_instance<'tcx>(
260    tcx: TyCtxt<'tcx>,
261    query: ty::PseudoCanonicalInput<'tcx, (ty::Instance<'tcx>, &'tcx ty::List<Ty<'tcx>>)>,
262) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
263    let ty::PseudoCanonicalInput { typing_env, value: (instance, extra_args) } = query;
264    fn_abi_new_uncached(
265        &LayoutCx::new(tcx, typing_env),
266        fn_sig_for_fn_abi(tcx, instance, typing_env),
267        extra_args,
268        Some(instance),
269    )
270}
271
// Handle safe Rust thin and wide pointers.
/// Computes backend parameter attributes (`noundef`, `zext`, `nonnull`,
/// `noalias`, `readonly`, dereferenceable size, alignment) for one scalar
/// component of a Rust-ABI argument or return value.
///
/// `offset` locates the scalar within `layout` (relevant for `ScalarPair`s).
/// `drop_target_pointee` is `Some(T)` only for the pointer argument of
/// `drop_in_place::<T>`, which is treated like a mutable reference.
fn arg_attrs_for_rust_scalar<'tcx>(
    cx: LayoutCx<'tcx>,
    scalar: Scalar,
    layout: TyAndLayout<'tcx>,
    offset: Size,
    is_return: bool,
    drop_target_pointee: Option<Ty<'tcx>>,
) -> ArgAttributes {
    let mut attrs = ArgAttributes::new();

    // Booleans are always a noundef i1 that needs to be zero-extended.
    if scalar.is_bool() {
        attrs.ext(ArgExtension::Zext);
        attrs.set(ArgAttribute::NoUndef);
        return attrs;
    }

    // Any scalar whose validity invariant excludes uninit bytes may be
    // marked `noundef`.
    if !scalar.is_uninit_valid() {
        attrs.set(ArgAttribute::NoUndef);
    }

    // Only pointer types handled below.
    let Scalar::Initialized { value: Pointer(_), valid_range } = scalar else { return attrs };

    // Set `nonnull` if the validity range excludes zero, or for the argument to `drop_in_place`,
    // which must be nonnull per its documented safety requirements.
    if !valid_range.contains(0) || drop_target_pointee.is_some() {
        attrs.set(ArgAttribute::NonNull);
    }

    let tcx = cx.tcx();

    if let Some(pointee) = layout.pointee_info_at(&cx, offset) {
        let kind = if let Some(kind) = pointee.safe {
            Some(kind)
        } else if let Some(pointee) = drop_target_pointee {
            // The argument to `drop_in_place` is semantically equivalent to a mutable reference.
            Some(PointerKind::MutableRef { unpin: pointee.is_unpin(tcx, cx.typing_env) })
        } else {
            None
        };
        if let Some(kind) = kind {
            // Clamp to the alignment the target can actually guarantee.
            attrs.pointee_align =
                Some(pointee.align.min(cx.tcx().sess.target.max_reliable_alignment()));

            // `Box` are not necessarily dereferenceable for the entire duration of the function as
            // they can be deallocated at any time. Same for non-frozen shared references (see
            // <https://github.com/rust-lang/rust/pull/98017>), and for mutable references to
            // potentially self-referential types (see
            // <https://github.com/rust-lang/unsafe-code-guidelines/issues/381>). If LLVM had a way
            // to say "dereferenceable on entry" we could use it here.
            attrs.pointee_size = match kind {
                PointerKind::Box { .. }
                | PointerKind::SharedRef { frozen: false }
                | PointerKind::MutableRef { unpin: false } => Size::ZERO,
                PointerKind::SharedRef { frozen: true }
                | PointerKind::MutableRef { unpin: true } => pointee.size,
            };

            // The aliasing rules for `Box<T>` are still not decided, but currently we emit
            // `noalias` for it. This can be turned off using an unstable flag.
            // See https://github.com/rust-lang/unsafe-code-guidelines/issues/326
            let noalias_for_box = tcx.sess.opts.unstable_opts.box_noalias;

            // LLVM prior to version 12 had known miscompiles in the presence of noalias attributes
            // (see #54878), so it was conditionally disabled, but we don't support earlier
            // versions at all anymore. We still support turning it off using -Zmutable-noalias.
            let noalias_mut_ref = tcx.sess.opts.unstable_opts.mutable_noalias;

            // `&T` where `T` contains no `UnsafeCell<U>` is immutable, and can be marked as both
            // `readonly` and `noalias`, as LLVM's definition of `noalias` is based solely on memory
            // dependencies rather than pointer equality. However this only applies to arguments,
            // not return values.
            //
            // `&mut T` and `Box<T>` where `T: Unpin` are unique and hence `noalias`.
            let no_alias = match kind {
                PointerKind::SharedRef { frozen } => frozen,
                PointerKind::MutableRef { unpin } => unpin && noalias_mut_ref,
                PointerKind::Box { unpin, global } => unpin && global && noalias_for_box,
            };
            // We can never add `noalias` in return position; that LLVM attribute has some very surprising semantics
            // (see <https://github.com/rust-lang/unsafe-code-guidelines/issues/385#issuecomment-1368055745>).
            if no_alias && !is_return {
                attrs.set(ArgAttribute::NoAlias);
            }

            if matches!(kind, PointerKind::SharedRef { frozen: true }) && !is_return {
                attrs.set(ArgAttribute::ReadOnly);
                attrs.set(ArgAttribute::CapturesReadOnly);
            }
        }
    }

    attrs
}
368
/// Ensure that the ABI makes basic sense.
///
/// Checks every argument and the return place of `fn_abi` against invariants
/// that backends rely on (ZSTs ignored under rustic ABIs, `PassMode` matching
/// the layout's `BackendRepr`, sized-ness constraints, etc.). Panics on any
/// violation; this is a debugging aid, not error recovery.
fn fn_abi_sanity_check<'tcx>(
    cx: &LayoutCx<'tcx>,
    fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
    spec_abi: ExternAbi,
) {
    // Checks a single `ArgAbi` (argument or return place).
    fn fn_arg_sanity_check<'tcx>(
        cx: &LayoutCx<'tcx>,
        fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
        spec_abi: ExternAbi,
        arg: &ArgAbi<'tcx, Ty<'tcx>>,
    ) {
        let tcx = cx.tcx();

        if spec_abi.is_rustic_abi() {
            if arg.layout.is_zst() {
                // Casting closures to function pointers depends on ZST closure types being
                // omitted entirely in the calling convention.
                assert!(arg.is_ignore());
            }
            if let PassMode::Indirect { on_stack, .. } = arg.mode {
                assert!(!on_stack, "rust abi shouldn't use on_stack");
            }
        } else if arg.layout.pass_indirectly_in_non_rustic_abis(cx) {
            assert_matches!(
                arg.mode,
                PassMode::Indirect { on_stack: false, .. },
                "the {spec_abi} ABI does not implement `#[rustc_pass_indirectly_in_non_rustic_abis]`"
            );
        }

        match &arg.mode {
            PassMode::Ignore => {
                // Only ZSTs may be omitted from the calling convention.
                assert!(arg.layout.is_zst());
            }
            PassMode::Direct(_) => {
                // Here the Rust type is used to determine the actual ABI, so we have to be very
                // careful. Scalar/Vector is fine, since backends will generally use
                // `layout.backend_repr` and ignore everything else. We should just reject
                // `Aggregate` entirely here, but some targets need to be fixed first.
                match arg.layout.backend_repr {
                    BackendRepr::Scalar(_) | BackendRepr::SimdVector { .. } => {}
                    BackendRepr::ScalarPair(..) => {
                        panic!("`PassMode::Direct` used for ScalarPair type {}", arg.layout.ty)
                    }
                    BackendRepr::Memory { sized } => {
                        // For an unsized type we'd only pass the sized prefix, so there is no universe
                        // in which we ever want to allow this.
                        assert!(sized, "`PassMode::Direct` for unsized type in ABI: {:#?}", fn_abi);

                        // This really shouldn't happen even for sized aggregates, since
                        // `immediate_llvm_type` will use `layout.fields` to turn this Rust type into an
                        // LLVM type. This means all sorts of Rust type details leak into the ABI.
                        // The unadjusted ABI however uses Direct for all args. It is ill-specified,
                        // but unfortunately we need it for calling certain LLVM intrinsics.
                        assert!(
                            matches!(spec_abi, ExternAbi::Unadjusted),
                            "`PassMode::Direct` for aggregates only allowed for \"unadjusted\"\n\
                             Problematic type: {:#?}",
                            arg.layout,
                        );
                    }
                }
            }
            PassMode::Pair(_, _) => {
                // Similar to `Direct`, we need to make sure that backends use `layout.backend_repr`
                // and ignore the rest of the layout.
                assert!(
                    matches!(arg.layout.backend_repr, BackendRepr::ScalarPair(..)),
                    "PassMode::Pair for type {}",
                    arg.layout.ty
                );
            }
            PassMode::Cast { .. } => {
                // `Cast` means "transmute to `CastType`"; that only makes sense for sized types.
                assert!(arg.layout.is_sized());
            }
            PassMode::Indirect { meta_attrs: None, .. } => {
                // No metadata, must be sized.
                // Conceptually, unsized arguments must be copied around, which requires dynamically
                // determining their size, which we cannot do without metadata. Consult
                // t-opsem before removing this check.
                assert!(arg.layout.is_sized());
            }
            PassMode::Indirect { meta_attrs: Some(_), on_stack, .. } => {
                // With metadata. Must be unsized and not on the stack.
                assert!(arg.layout.is_unsized() && !on_stack);
                // Also, must not be `extern` type.
                let tail = tcx.struct_tail_for_codegen(arg.layout.ty, cx.typing_env);
                if matches!(tail.kind(), ty::Foreign(..)) {
                    // These types do not have metadata, so having `meta_attrs` is bogus.
                    // Conceptually, unsized arguments must be copied around, which requires dynamically
                    // determining their size. Therefore, we cannot allow `extern` types here. Consult
                    // t-opsem before removing this check.
                    panic!("unsized arguments must not be `extern` types");
                }
            }
        }
    }

    for arg in fn_abi.args.iter() {
        fn_arg_sanity_check(cx, fn_abi, spec_abi, arg);
    }
    // The return place is checked with the same rules as arguments.
    fn_arg_sanity_check(cx, fn_abi, spec_abi, &fn_abi.ret);
}
474
/// Builds a fresh `FnAbi` for `sig` (already monomorphic), arena-allocates
/// it, and returns it. Shared by both `fn_abi_of_fn_ptr` (`instance == None`)
/// and `fn_abi_of_instance`.
///
/// Handles: "rust-call" tuple-untupling, the implicit `#[track_caller]`
/// location argument, the thin-`self` rewrite for virtual calls, C-variadic
/// `extra_args`, and per-ABI adjustment plus a final sanity check.
#[tracing::instrument(level = "debug", skip(cx, instance))]
fn fn_abi_new_uncached<'tcx>(
    cx: &LayoutCx<'tcx>,
    sig: ty::FnSig<'tcx>,
    extra_args: &[Ty<'tcx>],
    instance: Option<ty::Instance<'tcx>>,
) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
    let tcx = cx.tcx();
    // `determined_fn_def_id` is only `Some` when the def-id reliably identifies
    // the function that will actually run (so not for virtual or TLS-shim calls).
    let (caller_location, determined_fn_def_id, is_virtual_call) = if let Some(instance) = instance
    {
        let is_virtual_call = matches!(instance.def, ty::InstanceKind::Virtual(..));
        let is_tls_shim_call = matches!(instance.def, ty::InstanceKind::ThreadLocalShim(_));
        (
            instance.def.requires_caller_location(tcx).then(|| tcx.caller_location_ty()),
            if is_virtual_call || is_tls_shim_call { None } else { Some(instance.def_id()) },
            is_virtual_call,
        )
    } else {
        (None, None, false)
    };
    let sig = tcx.normalize_erasing_regions(cx.typing_env, sig);

    let abi_map = AbiMap::from_target(&tcx.sess.target);
    let conv = abi_map.canonize_abi(sig.abi, sig.c_variadic).unwrap();

    let mut inputs = sig.inputs();
    let extra_args = if sig.abi == ExternAbi::RustCall {
        assert!(!sig.c_variadic && extra_args.is_empty());

        // "rust-call" packs the trailing arguments into a tuple; flatten that
        // tuple's element types into individual ABI arguments.
        if let Some(input) = sig.inputs().last()
            && let ty::Tuple(tupled_arguments) = input.kind()
        {
            inputs = &sig.inputs()[0..sig.inputs().len() - 1];
            tupled_arguments
        } else {
            bug!(
                "argument to function with \"rust-call\" ABI \
                    is not a tuple"
            );
        }
    } else {
        assert!(sig.c_variadic || extra_args.is_empty());
        extra_args
    };

    let is_drop_in_place = determined_fn_def_id.is_some_and(|def_id| {
        tcx.is_lang_item(def_id, LangItem::DropInPlace)
            || tcx.is_lang_item(def_id, LangItem::AsyncDropInPlace)
    });

    // Computes the `ArgAbi` for one input (`arg_idx == Some(i)`) or for the
    // return place (`arg_idx == None`).
    let arg_of = |ty: Ty<'tcx>, arg_idx: Option<usize>| -> Result<_, &'tcx FnAbiError<'tcx>> {
        let span = tracing::debug_span!("arg_of");
        let _entered = span.enter();
        let is_return = arg_idx.is_none();
        let is_drop_target = is_drop_in_place && arg_idx == Some(0);
        let drop_target_pointee = is_drop_target.then(|| match ty.kind() {
            ty::RawPtr(ty, _) => *ty,
            _ => bug!("argument to drop_in_place is not a raw ptr: {:?}", ty),
        });

        let layout = cx.layout_of(ty).map_err(|err| &*tcx.arena.alloc(FnAbiError::Layout(*err)))?;
        let layout = if is_virtual_call && arg_idx == Some(0) {
            // Don't pass the vtable, it's not an argument of the virtual fn.
            // Instead, pass just the data pointer, but give it the type `*const/mut dyn Trait`
            // or `&/&mut dyn Trait` because this is special-cased elsewhere in codegen
            make_thin_self_ptr(cx, layout)
        } else {
            layout
        };

        let mut arg = ArgAbi::new(cx, layout, |layout, scalar, offset| {
            arg_attrs_for_rust_scalar(*cx, scalar, *layout, offset, is_return, drop_target_pointee)
        });

        if arg.layout.is_zst() {
            arg.mode = PassMode::Ignore;
        }

        Ok(arg)
    };

    let mut fn_abi = FnAbi {
        ret: arg_of(sig.output(), None)?,
        args: inputs
            .iter()
            .copied()
            .chain(extra_args.iter().copied())
            .chain(caller_location)
            .enumerate()
            .map(|(i, ty)| arg_of(ty, Some(i)))
            .collect::<Result<_, _>>()?,
        c_variadic: sig.c_variadic,
        fixed_count: inputs.len() as u32,
        conv,
        // FIXME return false for tls shim
        can_unwind: fn_can_unwind(
            tcx,
            // Since `#[rustc_nounwind]` can change unwinding, we cannot infer unwinding by `fn_def_id` for a virtual call.
            determined_fn_def_id,
            sig.abi,
        ),
    };
    fn_abi_adjust_for_abi(
        cx,
        &mut fn_abi,
        sig.abi,
        // If this is a virtual call, we cannot pass the `fn_def_id`, as it might call other
        // functions from vtable. And for a tls shim, passing the `fn_def_id` would refer to
        // the underlying static. Internally, `deduced_param_attrs` attempts to infer attributes
        // by visiting the function body.
        determined_fn_def_id,
    );
    debug!("fn_abi_new_uncached = {:?}", fn_abi);
    fn_abi_sanity_check(cx, &fn_abi, sig.abi);
    Ok(tcx.arena.alloc(fn_abi))
}
591
/// Adjusts an already-built `FnAbi` for the declared `abi`: the "unadjusted"
/// ABI forces everything to `PassMode::Direct`, rustic ABIs get Rust-specific
/// adjustments plus optionally deduced per-parameter attributes, and all other
/// ABIs defer to the target's foreign-ABI rules.
#[tracing::instrument(level = "trace", skip(cx))]
fn fn_abi_adjust_for_abi<'tcx>(
    cx: &LayoutCx<'tcx>,
    fn_abi: &mut FnAbi<'tcx, Ty<'tcx>>,
    abi: ExternAbi,
    fn_def_id: Option<DefId>,
) {
    if abi == ExternAbi::Unadjusted {
        // The "unadjusted" ABI passes aggregates in "direct" mode. That's fragile but needed for
        // some LLVM intrinsics.
        fn unadjust<'tcx>(arg: &mut ArgAbi<'tcx, Ty<'tcx>>) {
            // This still uses `PassMode::Pair` for ScalarPair types. That's unlikely to be intended,
            // but who knows what breaks if we change this now.
            if matches!(arg.layout.backend_repr, BackendRepr::Memory { .. }) {
                assert!(
                    arg.layout.backend_repr.is_sized(),
                    "'unadjusted' ABI does not support unsized arguments"
                );
            }
            arg.make_direct_deprecated();
        }

        unadjust(&mut fn_abi.ret);
        for arg in fn_abi.args.iter_mut() {
            unadjust(arg);
        }
        return;
    }

    let tcx = cx.tcx();

    if abi.is_rustic_abi() {
        fn_abi.adjust_for_rust_abi(cx);
        // Look up the deduced parameter attributes for this function, if we have its def ID and
        // we're optimizing in non-incremental mode. We'll tag its parameters with those attributes
        // as appropriate.
        let deduced =
            if tcx.sess.opts.optimize != OptLevel::No && tcx.sess.opts.incremental.is_none() {
                fn_def_id.map(|fn_def_id| tcx.deduced_param_attrs(fn_def_id)).unwrap_or_default()
            } else {
                &[]
            };
        if !deduced.is_empty() {
            // Index 0 is the return place; arguments follow at 1..
            apply_deduced_attributes(cx, deduced, 0, &mut fn_abi.ret);
            for (arg_idx, arg) in fn_abi.args.iter_mut().enumerate() {
                apply_deduced_attributes(cx, deduced, arg_idx + 1, arg);
            }
        }
    } else {
        fn_abi.adjust_for_foreign_abi(cx, abi);
    }
}
644
645/// Apply deduced optimization attributes to a parameter using an indirect pass mode.
646///
647/// `deduced` is a possibly truncated list of deduced attributes for a return place and arguments.
648/// `idx` the index of the parameter on the list (0 for a return place, and 1.. for arguments).
649fn apply_deduced_attributes<'tcx>(
650    cx: &LayoutCx<'tcx>,
651    deduced: &[DeducedParamAttrs],
652    idx: usize,
653    arg: &mut ArgAbi<'tcx, Ty<'tcx>>,
654) {
655    // Deduction is performed under the assumption of the indirection pass mode.
656    let PassMode::Indirect { ref mut attrs, .. } = arg.mode else {
657        return;
658    };
659    // The default values at the tail of the list are not encoded.
660    let Some(deduced) = deduced.get(idx) else {
661        return;
662    };
663    if deduced.read_only(cx.tcx(), cx.typing_env, arg.layout.ty) {
664        debug!("added deduced ReadOnly attribute");
665        attrs.regular.insert(ArgAttribute::ReadOnly);
666    }
667    if deduced.captures_none(cx.tcx(), cx.typing_env, arg.layout.ty) {
668        debug!("added deduced CapturesNone attribute");
669        attrs.regular.insert(ArgAttribute::CapturesNone);
670    }
671}
672
/// For a virtual call, rewrites the `self` argument's layout to that of a
/// thin pointer (`*mut ()`) while keeping a pointer-to-`Self`-like *type*, so
/// codegen passes only the data pointer (the vtable travels separately).
#[tracing::instrument(level = "debug", skip(cx))]
fn make_thin_self_ptr<'tcx>(
    cx: &(impl HasTyCtxt<'tcx> + HasTypingEnv<'tcx>),
    layout: TyAndLayout<'tcx>,
) -> TyAndLayout<'tcx> {
    let tcx = cx.tcx();
    let wide_pointer_ty = if layout.is_unsized() {
        // unsized `self` is passed as a pointer to `self`
        // FIXME (mikeyhew) change this to use &own if it is ever added to the language
        Ty::new_mut_ptr(tcx, layout.ty)
    } else {
        match layout.backend_repr {
            BackendRepr::ScalarPair(..) | BackendRepr::Scalar(..) => (),
            _ => bug!("receiver type has unsupported layout: {:?}", layout),
        }

        // In the case of Rc<Self>, we need to explicitly pass a *mut RcInner<Self>
        // with a Scalar (not ScalarPair) ABI. This is a hack that is understood
        // elsewhere in the compiler as a method on a `dyn Trait`.
        // To get the type `*mut RcInner<Self>`, we just keep unwrapping newtypes until we
        // get a built-in pointer type
        let mut wide_pointer_layout = layout;
        while !wide_pointer_layout.ty.is_raw_ptr() && !wide_pointer_layout.ty.is_ref() {
            wide_pointer_layout = wide_pointer_layout
                .non_1zst_field(cx)
                .expect("not exactly one non-1-ZST field in a `DispatchFromDyn` type")
                .1
        }

        wide_pointer_layout.ty
    };

    // we now have a type like `*mut RcInner<dyn Trait>`
    // change its layout to that of `*mut ()`, a thin pointer, but keep the same type
    // this is understood as a special case elsewhere in the compiler
    let unit_ptr_ty = Ty::new_mut_ptr(tcx, tcx.types.unit);

    TyAndLayout {
        ty: wide_pointer_ty,

        // NOTE(eddyb) using an empty `ParamEnv`, and `unwrap`-ing the `Result`
        // should always work because the type is always `*mut ()`.
        ..tcx.layout_of(ty::TypingEnv::fully_monomorphized().as_query_input(unit_ptr_ty)).unwrap()
    }
}