1use std::iter;
2
3use rustc_abi::Primitive::Pointer;
4use rustc_abi::{BackendRepr, ExternAbi, PointerKind, Scalar, Size};
5use rustc_hir as hir;
6use rustc_hir::lang_items::LangItem;
7use rustc_middle::bug;
8use rustc_middle::query::Providers;
9use rustc_middle::ty::layout::{
10 FnAbiError, HasTyCtxt, HasTypingEnv, LayoutCx, LayoutOf, TyAndLayout, fn_can_unwind,
11};
12use rustc_middle::ty::{self, InstanceKind, Ty, TyCtxt};
13use rustc_session::config::OptLevel;
14use rustc_span::def_id::DefId;
15use rustc_target::callconv::{
16 ArgAbi, ArgAttribute, ArgAttributes, ArgExtension, Conv, FnAbi, PassMode, RiscvInterruptKind,
17};
18use tracing::debug;
19
20pub(crate) fn provide(providers: &mut Providers) {
21 *providers = Providers { fn_abi_of_fn_ptr, fn_abi_of_instance, ..*providers };
22}
23
24#[tracing::instrument(level = "debug", skip(tcx, typing_env))]
#[tracing::instrument(level = "debug", skip(tcx, typing_env))]
/// Computes the monomorphic `FnSig` used for ABI purposes for `instance`.
///
/// This is not necessarily the signature the user wrote: several shim kinds
/// (vtable shims, RPITIT reify shims, coroutine-closure shims) and coroutine
/// bodies get synthesized or adjusted signatures here.
fn fn_sig_for_fn_abi<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance: ty::Instance<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
) -> ty::FnSig<'tcx> {
    // A thread-local shim takes no arguments and returns a pointer to the
    // thread-local value, using the "unadjusted" ABI.
    if let InstanceKind::ThreadLocalShim(..) = instance.def {
        return tcx.mk_fn_sig(
            [],
            tcx.thread_local_ptr_ty(instance.def_id()),
            false,
            hir::Safety::Safe,
            rustc_abi::ExternAbi::Unadjusted,
        );
    }

    let ty = instance.ty(tcx, typing_env);
    match *ty.kind() {
        ty::FnDef(def_id, args) => {
            // Instantiate the declared signature with the instance's args and
            // erase late-bound regions; ABI computation never needs regions.
            let mut sig = tcx
                .instantiate_bound_regions_with_erased(tcx.fn_sig(def_id).instantiate(tcx, args));

            // A vtable shim receives the receiver as `*mut Self` instead of
            // by value, so rewrite the first input accordingly.
            if let ty::InstanceKind::VTableShim(..) = instance.def {
                let mut inputs_and_output = sig.inputs_and_output.to_vec();
                inputs_and_output[0] = Ty::new_mut_ptr(tcx, inputs_and_output[0]);
                sig.inputs_and_output = tcx.mk_type_list(&inputs_and_output);
            }

            // A reify shim for a return-position-impl-trait-in-trait method
            // returns the value as a `dyn*` of the RPITIT's bounds rather than
            // the concrete hidden type, so rewrite the output type.
            if let ty::InstanceKind::ReifyShim(def_id, _) = instance.def
                && let Some((rpitit_def_id, fn_args)) =
                    tcx.return_position_impl_trait_in_trait_shim_data(def_id)
            {
                let fn_args = fn_args.instantiate(tcx, args);
                // The RPITIT only introduces extra lifetime params on top of
                // the function's own args; erase them all.
                let rpitit_args =
                    fn_args.extend_to(tcx, rpitit_def_id, |param, _| match param.kind {
                        ty::GenericParamDefKind::Lifetime => tcx.lifetimes.re_erased.into(),
                        ty::GenericParamDefKind::Type { .. }
                        | ty::GenericParamDefKind::Const { .. } => {
                            unreachable!("rpitit should have no addition ty/ct")
                        }
                    });
                let dyn_star_ty = Ty::new_dynamic(
                    tcx,
                    tcx.item_bounds_to_existential_predicates(rpitit_def_id, rpitit_args),
                    tcx.lifetimes.re_erased,
                    ty::DynStar,
                );
                let mut inputs_and_output = sig.inputs_and_output.to_vec();
                *inputs_and_output.last_mut().unwrap() = dyn_star_ty;
                sig.inputs_and_output = tcx.mk_type_list(&inputs_and_output);
            }

            sig
        }
        ty::Closure(def_id, args) => {
            // Closures are called with an explicit environment argument
            // prepended to the user-visible inputs.
            let sig = tcx.instantiate_bound_regions_with_erased(args.as_closure().sig());
            let env_ty = tcx.closure_env_ty(
                Ty::new_closure(tcx, def_id, args),
                args.as_closure().kind(),
                tcx.lifetimes.re_erased,
            );

            tcx.mk_fn_sig(
                iter::once(env_ty).chain(sig.inputs().iter().cloned()),
                sig.output(),
                sig.c_variadic,
                sig.safety,
                sig.abi,
            )
        }
        ty::CoroutineClosure(def_id, args) => {
            let coroutine_ty = Ty::new_coroutine_closure(tcx, def_id, args);
            let sig = args.as_coroutine_closure().coroutine_closure_sig();

            let mut coroutine_kind = args.as_coroutine_closure().kind();

            // The "construct coroutine in closure" shim always behaves like a
            // `FnOnce` call; its receiver is either the closure by value or,
            // when `receiver_by_ref`, a shared reference to it.
            let env_ty =
                if let InstanceKind::ConstructCoroutineInClosureShim { receiver_by_ref, .. } =
                    instance.def
                {
                    coroutine_kind = ty::ClosureKind::FnOnce;

                    if receiver_by_ref {
                        Ty::new_imm_ref(tcx, tcx.lifetimes.re_erased, coroutine_ty)
                    } else {
                        coroutine_ty
                    }
                } else {
                    tcx.closure_env_ty(coroutine_ty, coroutine_kind, tcx.lifetimes.re_erased)
                };

            let sig = tcx.instantiate_bound_regions_with_erased(sig);

            // Inputs are env + the tupled arguments; the output is the
            // coroutine type this closure constructs for the given kind.
            tcx.mk_fn_sig(
                iter::once(env_ty).chain([sig.tupled_inputs_ty]),
                sig.to_coroutine_given_kind_and_upvars(
                    tcx,
                    args.as_coroutine_closure().parent_args(),
                    tcx.coroutine_for_closure(def_id),
                    coroutine_kind,
                    tcx.lifetimes.re_erased,
                    args.as_coroutine_closure().tupled_upvars_ty(),
                    args.as_coroutine_closure().coroutine_captures_by_ref_ty(),
                ),
                sig.c_variadic,
                sig.safety,
                sig.abi,
            )
        }
        ty::Coroutine(did, args) => {
            let coroutine_kind = tcx.coroutine_kind(did).unwrap();
            let sig = args.as_coroutine().sig();

            // The resume function's receiver starts as `&mut Self`...
            let env_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, ty);

            let pin_did = tcx.require_lang_item(LangItem::Pin, None);
            let pin_adt_ref = tcx.adt_def(pin_did);
            let pin_args = tcx.mk_args(&[env_ty.into()]);
            // ...and is wrapped in `Pin<&mut Self>` for every coroutine kind
            // except `gen` blocks, which take a plain `&mut Self`.
            let env_ty = match coroutine_kind {
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _) => {
                    env_ty
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _)
                | hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _)
                | hir::CoroutineKind::Coroutine(_) => Ty::new_adt(tcx, pin_adt_ref, pin_args),
            };

            // Pick the resume argument (if any) and the return type based on
            // which surface construct this coroutine desugars.
            let (resume_ty, ret_ty) = match coroutine_kind {
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _) => {
                    // Futures: `fn(Pin<&mut Self>, &mut Context<'_>) -> Poll<T>`.
                    assert_eq!(sig.yield_ty, tcx.types.unit);

                    let poll_did = tcx.require_lang_item(LangItem::Poll, None);
                    let poll_adt_ref = tcx.adt_def(poll_did);
                    let poll_args = tcx.mk_args(&[sig.return_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, poll_adt_ref, poll_args);

                    // Sanity-check that the desugaring stored `ResumeTy` as the
                    // resume type; it is lowered to `&mut Context<'_>` here.
                    #[cfg(debug_assertions)]
                    {
                        if let ty::Adt(resume_ty_adt, _) = sig.resume_ty.kind() {
                            let expected_adt =
                                tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, None));
                            assert_eq!(*resume_ty_adt, expected_adt);
                        } else {
                            panic!("expected `ResumeTy`, found `{:?}`", sig.resume_ty);
                        };
                    }
                    let context_mut_ref = Ty::new_task_context(tcx);

                    (Some(context_mut_ref), ret_ty)
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _) => {
                    // Iterators from `gen`: `fn(&mut Self) -> Option<T>`.
                    let option_did = tcx.require_lang_item(LangItem::Option, None);
                    let option_adt_ref = tcx.adt_def(option_did);
                    let option_args = tcx.mk_args(&[sig.yield_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, option_adt_ref, option_args);

                    assert_eq!(sig.return_ty, tcx.types.unit);
                    assert_eq!(sig.resume_ty, tcx.types.unit);

                    (None, ret_ty)
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _) => {
                    // `async gen`: resumed with a task context, returns the
                    // yield type directly.
                    assert_eq!(sig.return_ty, tcx.types.unit);

                    let ret_ty = sig.yield_ty;

                    // Same `ResumeTy` sanity check as the async case above.
                    #[cfg(debug_assertions)]
                    {
                        if let ty::Adt(resume_ty_adt, _) = sig.resume_ty.kind() {
                            let expected_adt =
                                tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, None));
                            assert_eq!(*resume_ty_adt, expected_adt);
                        } else {
                            panic!("expected `ResumeTy`, found `{:?}`", sig.resume_ty);
                        };
                    }
                    let context_mut_ref = Ty::new_task_context(tcx);

                    (Some(context_mut_ref), ret_ty)
                }
                hir::CoroutineKind::Coroutine(_) => {
                    // Plain coroutines: resumed with the user's resume type,
                    // returning `CoroutineState<Yield, Return>`.
                    let state_did = tcx.require_lang_item(LangItem::CoroutineState, None);
                    let state_adt_ref = tcx.adt_def(state_did);
                    let state_args = tcx.mk_args(&[sig.yield_ty.into(), sig.return_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, state_adt_ref, state_args);

                    (Some(sig.resume_ty), ret_ty)
                }
            };

            if let Some(resume_ty) = resume_ty {
                tcx.mk_fn_sig(
                    [env_ty, resume_ty],
                    ret_ty,
                    false,
                    hir::Safety::Safe,
                    rustc_abi::ExternAbi::Rust,
                )
            } else {
                // `gen` coroutines have no resume argument.
                tcx.mk_fn_sig(
                    [env_ty],
                    ret_ty,
                    false,
                    hir::Safety::Safe,
                    rustc_abi::ExternAbi::Rust,
                )
            }
        }
        _ => bug!("unexpected type {:?} in Instance::fn_sig", ty),
    }
}
267
268#[inline]
269fn conv_from_spec_abi(tcx: TyCtxt<'_>, abi: ExternAbi, c_variadic: bool) -> Conv {
270 use rustc_abi::ExternAbi::*;
271 match tcx.sess.target.adjust_abi(abi, c_variadic) {
272 RustIntrinsic | Rust | RustCall => Conv::Rust,
273
274 RustCold => Conv::PreserveMost,
277
278 System { .. } => bug!("system abi should be selected elsewhere"),
280 EfiApi => bug!("eficall abi should be selected elsewhere"),
281
282 Stdcall { .. } => Conv::X86Stdcall,
283 Fastcall { .. } => Conv::X86Fastcall,
284 Vectorcall { .. } => Conv::X86VectorCall,
285 Thiscall { .. } => Conv::X86ThisCall,
286 C { .. } => Conv::C,
287 Unadjusted => Conv::C,
288 Win64 { .. } => Conv::X86_64Win64,
289 SysV64 { .. } => Conv::X86_64SysV,
290 Aapcs { .. } => Conv::ArmAapcs,
291 CCmseNonSecureCall => Conv::CCmseNonSecureCall,
292 CCmseNonSecureEntry => Conv::CCmseNonSecureEntry,
293 PtxKernel => Conv::GpuKernel,
294 Msp430Interrupt => Conv::Msp430Intr,
295 X86Interrupt => Conv::X86Intr,
296 GpuKernel => Conv::GpuKernel,
297 AvrInterrupt => Conv::AvrInterrupt,
298 AvrNonBlockingInterrupt => Conv::AvrNonBlockingInterrupt,
299 RiscvInterruptM => Conv::RiscvInterrupt { kind: RiscvInterruptKind::Machine },
300 RiscvInterruptS => Conv::RiscvInterrupt { kind: RiscvInterruptKind::Supervisor },
301
302 Cdecl { .. } => Conv::C,
304 }
305}
306
307fn fn_abi_of_fn_ptr<'tcx>(
308 tcx: TyCtxt<'tcx>,
309 query: ty::PseudoCanonicalInput<'tcx, (ty::PolyFnSig<'tcx>, &'tcx ty::List<Ty<'tcx>>)>,
310) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
311 let ty::PseudoCanonicalInput { typing_env, value: (sig, extra_args) } = query;
312 fn_abi_new_uncached(
313 &LayoutCx::new(tcx, typing_env),
314 tcx.instantiate_bound_regions_with_erased(sig),
315 extra_args,
316 None,
317 )
318}
319
320fn fn_abi_of_instance<'tcx>(
321 tcx: TyCtxt<'tcx>,
322 query: ty::PseudoCanonicalInput<'tcx, (ty::Instance<'tcx>, &'tcx ty::List<Ty<'tcx>>)>,
323) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
324 let ty::PseudoCanonicalInput { typing_env, value: (instance, extra_args) } = query;
325 fn_abi_new_uncached(
326 &LayoutCx::new(tcx, typing_env),
327 fn_sig_for_fn_abi(tcx, instance, typing_env),
328 extra_args,
329 Some(instance),
330 )
331}
332
/// Enriches a scalar argument's attributes for the Rust ABI: zero-extension
/// for `bool`, `NoUndef` where uninit is invalid, and — for pointers —
/// `NonNull`, pointee size/align (dereferenceable), `NoAlias`, and
/// `ReadOnly` where the pointer kind permits.
///
/// `drop_target_pointee` is `Some` when this scalar is the pointer argument
/// of `drop_in_place`, in which case it is treated like a mutable reference
/// to the pointee.
fn adjust_for_rust_scalar<'tcx>(
    cx: LayoutCx<'tcx>,
    attrs: &mut ArgAttributes,
    scalar: Scalar,
    layout: TyAndLayout<'tcx>,
    offset: Size,
    is_return: bool,
    drop_target_pointee: Option<Ty<'tcx>>,
) {
    // `bool` is passed as an `i1` that must be zero-extended, and is
    // always initialized.
    if scalar.is_bool() {
        attrs.ext(ArgExtension::Zext);
        attrs.set(ArgAttribute::NoUndef);
        return;
    }

    // Scalars whose validity excludes uninitialized bytes may carry NoUndef.
    if !scalar.is_uninit_valid() {
        attrs.set(ArgAttribute::NoUndef);
    }

    // Everything below applies only to (initialized) pointer scalars.
    let Scalar::Initialized { value: Pointer(_), valid_range } = scalar else { return };

    // Non-null either by valid range, or because drop_in_place's argument
    // is assumed non-null.
    if !valid_range.contains(0) || drop_target_pointee.is_some() {
        attrs.set(ArgAttribute::NonNull);
    }

    let tcx = cx.tcx();

    if let Some(pointee) = layout.pointee_info_at(&cx, offset) {
        let kind = if let Some(kind) = pointee.safe {
            Some(kind)
        } else if let Some(pointee) = drop_target_pointee {
            // The drop target is treated as `&mut` to the pointee.
            Some(PointerKind::MutableRef { unpin: pointee.is_unpin(tcx, cx.typing_env) })
        } else {
            None
        };
        if let Some(kind) = kind {
            attrs.pointee_align = Some(pointee.align);

            // Claim a dereferenceable pointee size only where safe: not for
            // boxes, non-frozen shared refs, or pinned (`!Unpin`) mutable
            // refs (a Size of zero asserts nothing).
            attrs.pointee_size = match kind {
                PointerKind::Box { .. }
                | PointerKind::SharedRef { frozen: false }
                | PointerKind::MutableRef { unpin: false } => Size::ZERO,
                PointerKind::SharedRef { frozen: true }
                | PointerKind::MutableRef { unpin: true } => pointee.size,
            };

            // `-Z box-noalias` gate for `Box`.
            let noalias_for_box = tcx.sess.opts.unstable_opts.box_noalias;

            // `-Z mutable-noalias` gate for `&mut`.
            let noalias_mut_ref = tcx.sess.opts.unstable_opts.mutable_noalias;

            // NoAlias for frozen `&`, `Unpin` `&mut` (behind its flag), and
            // `Unpin` global `Box` (behind its flag).
            let no_alias = match kind {
                PointerKind::SharedRef { frozen } => frozen,
                PointerKind::MutableRef { unpin } => unpin && noalias_mut_ref,
                PointerKind::Box { unpin, global } => unpin && global && noalias_for_box,
            };
            // Return-position pointers never get NoAlias here.
            if no_alias && !is_return {
                attrs.set(ArgAttribute::NoAlias);
            }

            // A frozen shared reference argument is read-only.
            if matches!(kind, PointerKind::SharedRef { frozen: true }) && !is_return {
                attrs.set(ArgAttribute::ReadOnly);
            }
        }
    }
}
424
/// Debug-asserts internal invariants of a freshly computed `FnAbi`:
/// each argument's `PassMode` must be consistent with its layout and with
/// the declared `spec_abi`. Panics on violation.
fn fn_abi_sanity_check<'tcx>(
    cx: &LayoutCx<'tcx>,
    fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
    spec_abi: ExternAbi,
) {
    // Checks a single argument (or the return place).
    fn fn_arg_sanity_check<'tcx>(
        cx: &LayoutCx<'tcx>,
        fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
        spec_abi: ExternAbi,
        arg: &ArgAbi<'tcx, Ty<'tcx>>,
    ) {
        let tcx = cx.tcx();

        // Rust-flavored ABIs: ZSTs are ignored entirely and nothing is
        // passed with the `byval`-style on-stack mode.
        if spec_abi == ExternAbi::Rust
            || spec_abi == ExternAbi::RustCall
            || spec_abi == ExternAbi::RustCold
        {
            if arg.layout.is_zst() {
                assert!(arg.is_ignore());
            }
            if let PassMode::Indirect { on_stack, .. } = arg.mode {
                assert!(!on_stack, "rust abi shouldn't use on_stack");
            }
        }

        match &arg.mode {
            PassMode::Ignore => {
                // Only ZSTs and uninhabited types may be skipped.
                assert!(arg.layout.is_zst() || arg.layout.is_uninhabited());
            }
            PassMode::Direct(_) => {
                match arg.layout.backend_repr {
                    BackendRepr::Uninhabited
                    | BackendRepr::Scalar(_)
                    | BackendRepr::Vector { .. } => {}
                    BackendRepr::ScalarPair(..) => {
                        panic!("`PassMode::Direct` used for ScalarPair type {}", arg.layout.ty)
                    }
                    BackendRepr::Memory { sized } => {
                        // Aggregates passed Direct are only permitted for the
                        // "unadjusted" ABI, or `extern "C"` on wasm targets.
                        assert!(sized, "`PassMode::Direct` for unsized type in ABI: {:#?}", fn_abi);
                        match spec_abi {
                            ExternAbi::Unadjusted => {}
                            ExternAbi::C { unwind: _ }
                                if matches!(&*tcx.sess.target.arch, "wasm32" | "wasm64") => {}
                            _ => {
                                panic!(
                                    "`PassMode::Direct` for aggregates only allowed for \"unadjusted\" functions and on wasm\n\
                                      Problematic type: {:#?}",
                                    arg.layout,
                                );
                            }
                        }
                    }
                }
            }
            PassMode::Pair(_, _) => {
                // Pair mode is exclusively for ScalarPair layouts.
                assert!(
                    matches!(arg.layout.backend_repr, BackendRepr::ScalarPair(..)),
                    "PassMode::Pair for type {}",
                    arg.layout.ty
                );
            }
            PassMode::Cast { .. } => {
                // Casting requires a statically known size.
                assert!(arg.layout.is_sized());
            }
            PassMode::Indirect { meta_attrs: None, .. } => {
                // Indirect without metadata implies a sized (thin) pointee.
                assert!(arg.layout.is_sized());
            }
            PassMode::Indirect { meta_attrs: Some(_), on_stack, .. } => {
                // Indirect with metadata is for unsized types, which can
                // never be passed on the stack; `extern` tails are rejected
                // because their metadata-less pointers can't be reassembled.
                assert!(arg.layout.is_unsized() && !on_stack);
                let tail = tcx.struct_tail_for_codegen(arg.layout.ty, cx.typing_env);
                if matches!(tail.kind(), ty::Foreign(..)) {
                    panic!("unsized arguments must not be `extern` types");
                }
            }
        }
    }

    for arg in fn_abi.args.iter() {
        fn_arg_sanity_check(cx, fn_abi, spec_abi, arg);
    }
    fn_arg_sanity_check(cx, fn_abi, spec_abi, &fn_abi.ret);
}
539
#[tracing::instrument(level = "debug", skip(cx, instance))]
/// Builds a `FnAbi` from a (non-polymorphic) signature, optionally informed
/// by a concrete `instance`: the instance determines whether an implicit
/// caller-location argument is appended, whether this is a virtual call
/// (whose `self` pointer must be made thin), and which `DefId` is used for
/// unwind and deduced-attribute decisions.
fn fn_abi_new_uncached<'tcx>(
    cx: &LayoutCx<'tcx>,
    sig: ty::FnSig<'tcx>,
    extra_args: &[Ty<'tcx>],
    instance: Option<ty::Instance<'tcx>>,
) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
    let tcx = cx.tcx();
    // For virtual calls the concrete DefId is unknown, so it is withheld
    // from downstream consumers (unwind/deduced-attrs).
    let (caller_location, determined_fn_def_id, is_virtual_call) = if let Some(instance) = instance
    {
        let is_virtual_call = matches!(instance.def, ty::InstanceKind::Virtual(..));
        (
            instance.def.requires_caller_location(tcx).then(|| tcx.caller_location_ty()),
            if is_virtual_call { None } else { Some(instance.def_id()) },
            is_virtual_call,
        )
    } else {
        (None, None, false)
    };
    let sig = tcx.normalize_erasing_regions(cx.typing_env, sig);

    let conv = conv_from_spec_abi(cx.tcx(), sig.abi, sig.c_variadic);

    let mut inputs = sig.inputs();
    // "rust-call" functions receive their trailing arguments as a tuple;
    // untuple it here so each element becomes its own ABI argument.
    let extra_args = if sig.abi == ExternAbi::RustCall {
        assert!(!sig.c_variadic && extra_args.is_empty());

        if let Some(input) = sig.inputs().last()
            && let ty::Tuple(tupled_arguments) = input.kind()
        {
            inputs = &sig.inputs()[0..sig.inputs().len() - 1];
            tupled_arguments
        } else {
            bug!(
                "argument to function with \"rust-call\" ABI \
                 is not a tuple"
            );
        }
    } else {
        assert!(sig.c_variadic || extra_args.is_empty());
        extra_args
    };

    // `drop_in_place`'s first argument gets special pointer-attribute
    // treatment (see `adjust_for_rust_scalar`).
    let is_drop_in_place =
        determined_fn_def_id.is_some_and(|def_id| tcx.is_lang_item(def_id, LangItem::DropInPlace));

    // Computes the `ArgAbi` for one input (`arg_idx = Some(i)`) or the
    // return place (`arg_idx = None`).
    let arg_of = |ty: Ty<'tcx>, arg_idx: Option<usize>| -> Result<_, &'tcx FnAbiError<'tcx>> {
        let span = tracing::debug_span!("arg_of");
        let _entered = span.enter();
        let is_return = arg_idx.is_none();
        let is_drop_target = is_drop_in_place && arg_idx == Some(0);
        let drop_target_pointee = is_drop_target.then(|| match ty.kind() {
            ty::RawPtr(ty, _) => *ty,
            _ => bug!("argument to drop_in_place is not a raw ptr: {:?}", ty),
        });

        let layout = cx.layout_of(ty).map_err(|err| &*tcx.arena.alloc(FnAbiError::Layout(*err)))?;
        // Virtual calls receive a wide `self`; only the data pointer is
        // actually passed, so substitute a thin-pointer layout.
        let layout = if is_virtual_call && arg_idx == Some(0) {
            make_thin_self_ptr(cx, layout)
        } else {
            layout
        };

        let mut arg = ArgAbi::new(cx, layout, |layout, scalar, offset| {
            let mut attrs = ArgAttributes::new();
            adjust_for_rust_scalar(
                *cx,
                &mut attrs,
                scalar,
                *layout,
                offset,
                is_return,
                drop_target_pointee,
            );
            attrs
        });

        // ZSTs take no registers or stack space.
        if arg.layout.is_zst() {
            arg.mode = PassMode::Ignore;
        }

        Ok(arg)
    };

    // Argument order: declared inputs, untupled/variadic extras, then the
    // implicit caller-location (if `#[track_caller]` requires one).
    let mut fn_abi = FnAbi {
        ret: arg_of(sig.output(), None)?,
        args: inputs
            .iter()
            .copied()
            .chain(extra_args.iter().copied())
            .chain(caller_location)
            .enumerate()
            .map(|(i, ty)| arg_of(ty, Some(i)))
            .collect::<Result<_, _>>()?,
        c_variadic: sig.c_variadic,
        // `fixed_count` excludes variadic/extra and caller-location args.
        fixed_count: inputs.len() as u32,
        conv,
        can_unwind: fn_can_unwind(
            tcx,
            determined_fn_def_id,
            sig.abi,
        ),
    };
    fn_abi_adjust_for_abi(
        cx,
        &mut fn_abi,
        sig.abi,
        determined_fn_def_id,
    );
    debug!("fn_abi_new_uncached = {:?}", fn_abi);
    fn_abi_sanity_check(cx, &fn_abi, sig.abi);
    Ok(tcx.arena.alloc(fn_abi))
}
660
#[tracing::instrument(level = "trace", skip(cx))]
/// Applies ABI-specific adjustments to the computed `FnAbi` in place:
/// "unadjusted" passes everything directly, Rust-flavored ABIs get the Rust
/// adjustments plus optionally deduced per-parameter attributes, and all
/// other ABIs defer to the target's foreign-ABI lowering.
fn fn_abi_adjust_for_abi<'tcx>(
    cx: &LayoutCx<'tcx>,
    fn_abi: &mut FnAbi<'tcx, Ty<'tcx>>,
    abi: ExternAbi,
    fn_def_id: Option<DefId>,
) {
    if abi == ExternAbi::Unadjusted {
        // Force every argument (and the return) into Direct mode.
        fn unadjust<'tcx>(arg: &mut ArgAbi<'tcx, Ty<'tcx>>) {
            if matches!(arg.layout.backend_repr, BackendRepr::Memory { .. }) {
                assert!(
                    arg.layout.backend_repr.is_sized(),
                    "'unadjusted' ABI does not support unsized arguments"
                );
            }
            arg.make_direct_deprecated();
        }

        unadjust(&mut fn_abi.ret);
        for arg in fn_abi.args.iter_mut() {
            unadjust(arg);
        }
        return;
    }

    let tcx = cx.tcx();

    if abi == ExternAbi::Rust || abi == ExternAbi::RustCall || abi == ExternAbi::RustIntrinsic {
        fn_abi.adjust_for_rust_abi(cx, abi);

        // Deduced parameter attributes are only usable when optimizing and
        // not compiling incrementally (and only when the callee is known).
        let deduced_param_attrs =
            if tcx.sess.opts.optimize != OptLevel::No && tcx.sess.opts.incremental.is_none() {
                fn_def_id.map(|fn_def_id| tcx.deduced_param_attrs(fn_def_id)).unwrap_or_default()
            } else {
                &[]
            };

        for (arg_idx, arg) in fn_abi.args.iter_mut().enumerate() {
            if arg.is_ignore() {
                continue;
            }

            // Only indirectly-passed arguments can take the deduced
            // read-only attribute.
            if let &mut PassMode::Indirect { ref mut attrs, .. } = &mut arg.mode {
                if let Some(deduced_param_attrs) = deduced_param_attrs.get(arg_idx) {
                    if deduced_param_attrs.read_only {
                        attrs.regular.insert(ArgAttribute::ReadOnly);
                        debug!("added deduced read-only attribute");
                    }
                }
            }
        }
    } else {
        fn_abi.adjust_for_foreign_abi(cx, abi);
    }
}
731
#[tracing::instrument(level = "debug", skip(cx))]
/// For a virtual call's `self` argument: produces a layout with the layout
/// *data* of a thin `*mut ()` but the *type* of the underlying wide pointer,
/// since only the data half of a wide `self` pointer is passed.
fn make_thin_self_ptr<'tcx>(
    cx: &(impl HasTyCtxt<'tcx> + HasTypingEnv<'tcx>),
    layout: TyAndLayout<'tcx>,
) -> TyAndLayout<'tcx> {
    let tcx = cx.tcx();
    let wide_pointer_ty = if layout.is_unsized() {
        // `self` is unsized (e.g. `dyn Trait` by value via `arbitrary_self_types`
        // machinery): model it as a raw pointer to it.
        Ty::new_mut_ptr(tcx, layout.ty)
    } else {
        // Otherwise `self` must itself be pointer-like (one or two scalars).
        match layout.backend_repr {
            BackendRepr::ScalarPair(..) | BackendRepr::Scalar(..) => (),
            _ => bug!("receiver type has unsupported layout: {:?}", layout),
        }

        // Peel `DispatchFromDyn` wrappers (each has exactly one non-1-ZST
        // field) until the actual raw pointer / reference is reached.
        let mut wide_pointer_layout = layout;
        while !wide_pointer_layout.ty.is_raw_ptr() && !wide_pointer_layout.ty.is_ref() {
            wide_pointer_layout = wide_pointer_layout
                .non_1zst_field(cx)
                .expect("not exactly one non-1-ZST field in a `DispatchFromDyn` type")
                .1
        }

        wide_pointer_layout.ty
    };

    let unit_ptr_ty = Ty::new_mut_ptr(tcx, tcx.types.unit);

    // Keep the wide pointer type for attribute computation, but take all
    // layout fields from a thin `*mut ()`.
    TyAndLayout {
        ty: wide_pointer_ty,

        ..tcx.layout_of(ty::TypingEnv::fully_monomorphized().as_query_input(unit_ptr_ty)).unwrap()
    }
}