use std::iter;

use rustc_abi::Primitive::Pointer;
use rustc_abi::{BackendRepr, ExternAbi, PointerKind, Scalar, Size};
use rustc_hir as hir;
use rustc_hir::lang_items::LangItem;
use rustc_middle::bug;
use rustc_middle::query::Providers;
use rustc_middle::ty::layout::{
    FnAbiError, HasTyCtxt, HasTypingEnv, LayoutCx, LayoutOf, TyAndLayout, fn_can_unwind,
};
use rustc_middle::ty::{self, InstanceKind, Ty, TyCtxt};
use rustc_session::config::OptLevel;
use rustc_span::def_id::DefId;
use rustc_target::callconv::{
    ArgAbi, ArgAttribute, ArgAttributes, ArgExtension, Conv, FnAbi, PassMode, RiscvInterruptKind,
};
use tracing::debug;

pub(crate) fn provide(providers: &mut Providers) {
    *providers = Providers { fn_abi_of_fn_ptr, fn_abi_of_instance, ..*providers };
}

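/// Computes the `FnSig` used for ABI purposes for the given `instance`, synthesizing
/// signatures for thread-local shims, vtable shims, closures, coroutine-closures, and
/// coroutines as needed.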
#[tracing::instrument(level = "debug", skip(tcx, typing_env))]
fn fn_sig_for_fn_abi<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance: ty::Instance<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
) -> ty::FnSig<'tcx> {
    if let InstanceKind::ThreadLocalShim(..) = instance.def {
        return tcx.mk_fn_sig(
            [],
            tcx.thread_local_ptr_ty(instance.def_id()),
            false,
            hir::Safety::Safe,
            rustc_abi::ExternAbi::Unadjusted,
        );
    }

    let ty = instance.ty(tcx, typing_env);
    match *ty.kind() {
        ty::FnDef(def_id, args) => {
            let mut sig = tcx
                .instantiate_bound_regions_with_erased(tcx.fn_sig(def_id).instantiate(tcx, args));

            // The vtable shim takes `self` by raw pointer instead of by reference.
            if let ty::InstanceKind::VTableShim(..) = instance.def {
                let mut inputs_and_output = sig.inputs_and_output.to_vec();
                inputs_and_output[0] = Ty::new_mut_ptr(tcx, inputs_and_output[0]);
                sig.inputs_and_output = tcx.mk_type_list(&inputs_and_output);
            }

            sig
        }
        ty::Closure(def_id, args) => {
            let sig = tcx.instantiate_bound_regions_with_erased(args.as_closure().sig());
            let env_ty = tcx.closure_env_ty(
                Ty::new_closure(tcx, def_id, args),
                args.as_closure().kind(),
                tcx.lifetimes.re_erased,
            );

            tcx.mk_fn_sig(
                iter::once(env_ty).chain(sig.inputs().iter().cloned()),
                sig.output(),
                sig.c_variadic,
                sig.safety,
                sig.abi,
            )
        }
        ty::CoroutineClosure(def_id, args) => {
            let coroutine_ty = Ty::new_coroutine_closure(tcx, def_id, args);
            let sig = args.as_coroutine_closure().coroutine_closure_sig();

            let mut coroutine_kind = args.as_coroutine_closure().kind();

            // The by-move body shim always consumes the closure environment,
            // optionally behind a shared reference.
            let env_ty =
                if let InstanceKind::ConstructCoroutineInClosureShim { receiver_by_ref, .. } =
                    instance.def
                {
                    coroutine_kind = ty::ClosureKind::FnOnce;

                    if receiver_by_ref {
                        Ty::new_imm_ref(tcx, tcx.lifetimes.re_erased, coroutine_ty)
                    } else {
                        coroutine_ty
                    }
                } else {
                    tcx.closure_env_ty(coroutine_ty, coroutine_kind, tcx.lifetimes.re_erased)
                };

            let sig = tcx.instantiate_bound_regions_with_erased(sig);

            tcx.mk_fn_sig(
                iter::once(env_ty).chain([sig.tupled_inputs_ty]),
                sig.to_coroutine_given_kind_and_upvars(
                    tcx,
                    args.as_coroutine_closure().parent_args(),
                    tcx.coroutine_for_closure(def_id),
                    coroutine_kind,
                    tcx.lifetimes.re_erased,
                    args.as_coroutine_closure().tupled_upvars_ty(),
                    args.as_coroutine_closure().coroutine_captures_by_ref_ty(),
                ),
                sig.c_variadic,
                sig.safety,
                sig.abi,
            )
        }
        ty::Coroutine(did, args) => {
            let coroutine_kind = tcx.coroutine_kind(did).unwrap();
            let sig = args.as_coroutine().sig();

            let env_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, ty);

            let pin_did = tcx.require_lang_item(LangItem::Pin, None);
            let pin_adt_ref = tcx.adt_def(pin_did);
            let pin_args = tcx.mk_args(&[env_ty.into()]);
            let env_ty = match coroutine_kind {
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _) => {
                    // `Iterator::next` takes `&mut Self`, so `gen` coroutines are not
                    // pinned, unlike all other coroutine kinds.
                    env_ty
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _)
                | hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _)
                | hir::CoroutineKind::Coroutine(_) => Ty::new_adt(tcx, pin_adt_ref, pin_args),
            };

            // The resume argument and return type depend on how the coroutine was desugared.
            let (resume_ty, ret_ty) = match coroutine_kind {
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _) => {
                    // `async` bodies resume with `&mut Context<'_>` and return `Poll<T>`.
                    assert_eq!(sig.yield_ty, tcx.types.unit);

                    let poll_did = tcx.require_lang_item(LangItem::Poll, None);
                    let poll_adt_ref = tcx.adt_def(poll_did);
                    let poll_args = tcx.mk_args(&[sig.return_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, poll_adt_ref, poll_args);

                    // Type checking uses `ResumeTy` for the resume argument; codegen
                    // replaces it with `&mut Context<'_>`.
                    #[cfg(debug_assertions)]
                    {
                        if let ty::Adt(resume_ty_adt, _) = sig.resume_ty.kind() {
                            let expected_adt =
                                tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, None));
                            assert_eq!(*resume_ty_adt, expected_adt);
                        } else {
                            panic!("expected `ResumeTy`, found `{:?}`", sig.resume_ty);
                        };
                    }
                    let context_mut_ref = Ty::new_task_context(tcx);

                    (Some(context_mut_ref), ret_ty)
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _) => {
                    // `gen` bodies take no resume argument and return `Option<T>`.
                    let option_did = tcx.require_lang_item(LangItem::Option, None);
                    let option_adt_ref = tcx.adt_def(option_did);
                    let option_args = tcx.mk_args(&[sig.yield_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, option_adt_ref, option_args);

                    assert_eq!(sig.return_ty, tcx.types.unit);
                    assert_eq!(sig.resume_ty, tcx.types.unit);

                    (None, ret_ty)
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _) => {
                    // `async gen` bodies resume with `&mut Context<'_>` (as in the `async`
                    // case above) and return their yield type directly.
                    assert_eq!(sig.return_ty, tcx.types.unit);

                    let ret_ty = sig.yield_ty;

                    #[cfg(debug_assertions)]
                    {
                        if let ty::Adt(resume_ty_adt, _) = sig.resume_ty.kind() {
                            let expected_adt =
                                tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, None));
                            assert_eq!(*resume_ty_adt, expected_adt);
                        } else {
                            panic!("expected `ResumeTy`, found `{:?}`", sig.resume_ty);
                        };
                    }
                    let context_mut_ref = Ty::new_task_context(tcx);

                    (Some(context_mut_ref), ret_ty)
                }
                hir::CoroutineKind::Coroutine(_) => {
                    // Plain coroutines resume with the user-supplied resume type and
                    // return `CoroutineState<Y, R>`.
                    let state_did = tcx.require_lang_item(LangItem::CoroutineState, None);
                    let state_adt_ref = tcx.adt_def(state_did);
                    let state_args = tcx.mk_args(&[sig.yield_ty.into(), sig.return_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, state_adt_ref, state_args);

                    (Some(sig.resume_ty), ret_ty)
                }
            };

            if let Some(resume_ty) = resume_ty {
                tcx.mk_fn_sig(
                    [env_ty, resume_ty],
                    ret_ty,
                    false,
                    hir::Safety::Safe,
                    rustc_abi::ExternAbi::Rust,
                )
            } else {
                // `gen` coroutines do not take a resume argument.
                tcx.mk_fn_sig(
                    [env_ty],
                    ret_ty,
                    false,
                    hir::Safety::Safe,
                    rustc_abi::ExternAbi::Rust,
                )
            }
        }
        _ => bug!("unexpected type {:?} in Instance::fn_sig", ty),
    }
}

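/// Maps an `ExternAbi` (after target-specific adjustment) to the calling convention
/// used by the backend.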
#[inline]
fn conv_from_spec_abi(tcx: TyCtxt<'_>, abi: ExternAbi, c_variadic: bool) -> Conv {
    use rustc_abi::ExternAbi::*;
    match tcx.sess.target.adjust_abi(abi, c_variadic) {
        RustIntrinsic | Rust | RustCall => Conv::Rust,

        RustCold => Conv::PreserveMost,

        System { .. } => bug!("system abi should be selected elsewhere"),
        EfiApi => bug!("eficall abi should be selected elsewhere"),

        Stdcall { .. } => Conv::X86Stdcall,
        Fastcall { .. } => Conv::X86Fastcall,
        Vectorcall { .. } => Conv::X86VectorCall,
        Thiscall { .. } => Conv::X86ThisCall,
        C { .. } => Conv::C,
        Unadjusted => Conv::C,
        Win64 { .. } => Conv::X86_64Win64,
        SysV64 { .. } => Conv::X86_64SysV,
        Aapcs { .. } => Conv::ArmAapcs,
        CCmseNonSecureCall => Conv::CCmseNonSecureCall,
        CCmseNonSecureEntry => Conv::CCmseNonSecureEntry,
        PtxKernel => Conv::GpuKernel,
        Msp430Interrupt => Conv::Msp430Intr,
        X86Interrupt => Conv::X86Intr,
        GpuKernel => Conv::GpuKernel,
        AvrInterrupt => Conv::AvrInterrupt,
        AvrNonBlockingInterrupt => Conv::AvrNonBlockingInterrupt,
        RiscvInterruptM => Conv::RiscvInterrupt { kind: RiscvInterruptKind::Machine },
        RiscvInterruptS => Conv::RiscvInterrupt { kind: RiscvInterruptKind::Supervisor },

        Cdecl { .. } => Conv::C,
    }
}

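/// Query provider computing the `FnAbi` of a bare function pointer with the given
/// signature and extra argument types.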
fn fn_abi_of_fn_ptr<'tcx>(
    tcx: TyCtxt<'tcx>,
    query: ty::PseudoCanonicalInput<'tcx, (ty::PolyFnSig<'tcx>, &'tcx ty::List<Ty<'tcx>>)>,
) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
    let ty::PseudoCanonicalInput { typing_env, value: (sig, extra_args) } = query;
    fn_abi_new_uncached(
        &LayoutCx::new(tcx, typing_env),
        tcx.instantiate_bound_regions_with_erased(sig),
        extra_args,
        None,
    )
}

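/// Query provider computing the `FnAbi` of a monomorphic `Instance`, using
/// `fn_sig_for_fn_abi` to obtain the signature.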
fn fn_abi_of_instance<'tcx>(
    tcx: TyCtxt<'tcx>,
    query: ty::PseudoCanonicalInput<'tcx, (ty::Instance<'tcx>, &'tcx ty::List<Ty<'tcx>>)>,
) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
    let ty::PseudoCanonicalInput { typing_env, value: (instance, extra_args) } = query;
    fn_abi_new_uncached(
        &LayoutCx::new(tcx, typing_env),
        fn_sig_for_fn_abi(tcx, instance, typing_env),
        extra_args,
        Some(instance),
    )
}

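/// Adds Rust-specific attributes (`noundef`, `nonnull`, `noalias`, `readonly`, pointee
/// size and alignment) to a scalar argument or return value.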
fn adjust_for_rust_scalar<'tcx>(
    cx: LayoutCx<'tcx>,
    attrs: &mut ArgAttributes,
    scalar: Scalar,
    layout: TyAndLayout<'tcx>,
    offset: Size,
    is_return: bool,
    drop_target_pointee: Option<Ty<'tcx>>,
) {
    // Booleans are always zero-extended.
    if scalar.is_bool() {
        attrs.ext(ArgExtension::Zext);
        attrs.set(ArgAttribute::NoUndef);
        return;
    }

    if !scalar.is_uninit_valid() {
        attrs.set(ArgAttribute::NoUndef);
    }

    // Only pointer types are candidates for the attributes below.
    let Scalar::Initialized { value: Pointer(_), valid_range } = scalar else { return };

    // The `*mut T` argument of `drop_in_place` is guaranteed to be non-null.
    if !valid_range.contains(0) || drop_target_pointee.is_some() {
        attrs.set(ArgAttribute::NonNull);
    }

    let tcx = cx.tcx();

    if let Some(pointee) = layout.pointee_info_at(&cx, offset) {
        let kind = if let Some(kind) = pointee.safe {
            Some(kind)
        } else if let Some(pointee) = drop_target_pointee {
            // The `*mut T` argument of `drop_in_place` is treated like a mutable reference.
            Some(PointerKind::MutableRef { unpin: pointee.is_unpin(tcx, cx.typing_env) })
        } else {
            None
        };
        if let Some(kind) = kind {
            attrs.pointee_align = Some(pointee.align);

            // Only frozen shared references and `Unpin` mutable references are marked
            // dereferenceable for the full pointee size; for `Box` and the remaining
            // cases the pointee may not stay valid for the whole call.
            attrs.pointee_size = match kind {
                PointerKind::Box { .. }
                | PointerKind::SharedRef { frozen: false }
                | PointerKind::MutableRef { unpin: false } => Size::ZERO,
                PointerKind::SharedRef { frozen: true }
                | PointerKind::MutableRef { unpin: true } => pointee.size,
            };

            let noalias_for_box = tcx.sess.opts.unstable_opts.box_noalias;

            let noalias_mut_ref = tcx.sess.opts.unstable_opts.mutable_noalias;

            // Frozen shared references are `noalias`; `Unpin` mutable references and
            // global, `Unpin` boxes are `noalias` only when the corresponding `-Z`
            // options allow it.
            let no_alias = match kind {
                PointerKind::SharedRef { frozen } => frozen,
                PointerKind::MutableRef { unpin } => unpin && noalias_mut_ref,
                PointerKind::Box { unpin, global } => unpin && global && noalias_for_box,
            };
            // Return values are never marked `noalias` or `readonly`.
            if no_alias && !is_return {
                attrs.set(ArgAttribute::NoAlias);
            }

            if matches!(kind, PointerKind::SharedRef { frozen: true }) && !is_return {
                attrs.set(ArgAttribute::ReadOnly);
            }
        }
    }
}

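/// Debug-checks that each argument's `PassMode` is consistent with its layout and with
/// the declared ABI.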
fn fn_abi_sanity_check<'tcx>(
    cx: &LayoutCx<'tcx>,
    fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
    spec_abi: ExternAbi,
) {
    fn fn_arg_sanity_check<'tcx>(
        cx: &LayoutCx<'tcx>,
        fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
        spec_abi: ExternAbi,
        arg: &ArgAbi<'tcx, Ty<'tcx>>,
    ) {
        let tcx = cx.tcx();

        if spec_abi.is_rustic_abi() {
            if arg.layout.is_zst() {
                // On Rust ABIs, ZST arguments must be ignored entirely.
                assert!(arg.is_ignore());
            }
            if let PassMode::Indirect { on_stack, .. } = arg.mode {
                assert!(!on_stack, "rust abi shouldn't use on_stack");
            }
        }

        match &arg.mode {
            PassMode::Ignore => {
                assert!(arg.layout.is_zst());
            }
            PassMode::Direct(_) => {
                // Check that the mode is compatible with the backend representation.
                match arg.layout.backend_repr {
                    BackendRepr::Scalar(_) | BackendRepr::SimdVector { .. } => {}
                    BackendRepr::ScalarPair(..) => {
                        panic!("`PassMode::Direct` used for ScalarPair type {}", arg.layout.ty)
                    }
                    BackendRepr::Memory { sized } => {
                        assert!(sized, "`PassMode::Direct` for unsized type in ABI: {:#?}", fn_abi);
                        // Aggregates passed `Direct` are only tolerated for "unadjusted"
                        // functions and for the C ABI on wasm.
                        match spec_abi {
                            ExternAbi::Unadjusted => {}
                            ExternAbi::C { unwind: _ }
                                if matches!(&*tcx.sess.target.arch, "wasm32" | "wasm64") => {}
                            _ => {
                                panic!(
                                    "`PassMode::Direct` for aggregates only allowed for \"unadjusted\" functions and on wasm\n\
                                     Problematic type: {:#?}",
                                    arg.layout,
                                );
                            }
                        }
                    }
                }
            }
            PassMode::Pair(_, _) => {
                assert!(
                    matches!(arg.layout.backend_repr, BackendRepr::ScalarPair(..)),
                    "PassMode::Pair for type {}",
                    arg.layout.ty
                );
            }
            PassMode::Cast { .. } => {
                assert!(arg.layout.is_sized());
            }
            PassMode::Indirect { meta_attrs: None, .. } => {
                assert!(arg.layout.is_sized());
            }
            PassMode::Indirect { meta_attrs: Some(_), on_stack, .. } => {
                // With metadata, the argument must be unsized and cannot be passed on the stack.
                assert!(arg.layout.is_unsized() && !on_stack);
                // `extern type` tails have no metadata, so they cannot be passed indirectly.
                let tail = tcx.struct_tail_for_codegen(arg.layout.ty, cx.typing_env);
                if matches!(tail.kind(), ty::Foreign(..)) {
                    panic!("unsized arguments must not be `extern` types");
                }
            }
        }
    }

    for arg in fn_abi.args.iter() {
        fn_arg_sanity_check(cx, fn_abi, spec_abi, arg);
    }
    fn_arg_sanity_check(cx, fn_abi, spec_abi, &fn_abi.ret);
}

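/// Builds a `FnAbi` from a normalized signature: untuples "rust-call" arguments, appends
/// the caller-location argument if required, computes per-argument attributes, and applies
/// ABI-specific adjustments before sanity-checking the result.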
#[tracing::instrument(level = "debug", skip(cx, instance))]
fn fn_abi_new_uncached<'tcx>(
    cx: &LayoutCx<'tcx>,
    sig: ty::FnSig<'tcx>,
    extra_args: &[Ty<'tcx>],
    instance: Option<ty::Instance<'tcx>>,
) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
    let tcx = cx.tcx();
    let (caller_location, determined_fn_def_id, is_virtual_call) = if let Some(instance) = instance
    {
        let is_virtual_call = matches!(instance.def, ty::InstanceKind::Virtual(..));
        (
            instance.def.requires_caller_location(tcx).then(|| tcx.caller_location_ty()),
            // For virtual calls the concrete callee is unknown, so its `DefId` must not
            // be used for attribute deduction or unwind information.
            if is_virtual_call { None } else { Some(instance.def_id()) },
            is_virtual_call,
        )
    } else {
        (None, None, false)
    };
    let sig = tcx.normalize_erasing_regions(cx.typing_env, sig);

    let conv = conv_from_spec_abi(cx.tcx(), sig.abi, sig.c_variadic);

    let mut inputs = sig.inputs();
    let extra_args = if sig.abi == ExternAbi::RustCall {
        assert!(!sig.c_variadic && extra_args.is_empty());

        if let Some(input) = sig.inputs().last()
            && let ty::Tuple(tupled_arguments) = input.kind()
        {
            // For "rust-call" functions, untuple the trailing tuple argument.
            inputs = &sig.inputs()[0..sig.inputs().len() - 1];
            tupled_arguments
        } else {
            bug!(
                "argument to function with \"rust-call\" ABI \
                 is not a tuple"
            );
        }
    } else {
        assert!(sig.c_variadic || extra_args.is_empty());
        extra_args
    };

    let is_drop_in_place =
        determined_fn_def_id.is_some_and(|def_id| tcx.is_lang_item(def_id, LangItem::DropInPlace));

    let arg_of = |ty: Ty<'tcx>, arg_idx: Option<usize>| -> Result<_, &'tcx FnAbiError<'tcx>> {
        let span = tracing::debug_span!("arg_of");
        let _entered = span.enter();
        let is_return = arg_idx.is_none();
        let is_drop_target = is_drop_in_place && arg_idx == Some(0);
        let drop_target_pointee = is_drop_target.then(|| match ty.kind() {
            ty::RawPtr(ty, _) => *ty,
            _ => bug!("argument to drop_in_place is not a raw ptr: {:?}", ty),
        });

        let layout = cx.layout_of(ty).map_err(|err| &*tcx.arena.alloc(FnAbiError::Layout(*err)))?;
        let layout = if is_virtual_call && arg_idx == Some(0) {
            // Don't pass the vtable: it is not an argument of the virtual fn.
            make_thin_self_ptr(cx, layout)
        } else {
            layout
        };

        let mut arg = ArgAbi::new(cx, layout, |layout, scalar, offset| {
            let mut attrs = ArgAttributes::new();
            adjust_for_rust_scalar(
                *cx,
                &mut attrs,
                scalar,
                *layout,
                offset,
                is_return,
                drop_target_pointee,
            );
            attrs
        });

        if arg.layout.is_zst() {
            arg.mode = PassMode::Ignore;
        }

        Ok(arg)
    };

    let mut fn_abi = FnAbi {
        ret: arg_of(sig.output(), None)?,
        args: inputs
            .iter()
            .copied()
            .chain(extra_args.iter().copied())
            .chain(caller_location)
            .enumerate()
            .map(|(i, ty)| arg_of(ty, Some(i)))
            .collect::<Result<_, _>>()?,
        c_variadic: sig.c_variadic,
        fixed_count: inputs.len() as u32,
        conv,
        can_unwind: fn_can_unwind(tcx, determined_fn_def_id, sig.abi),
    };
    fn_abi_adjust_for_abi(cx, &mut fn_abi, sig.abi, determined_fn_def_id);
    debug!("fn_abi_new_uncached = {:?}", fn_abi);
    fn_abi_sanity_check(cx, &fn_abi, sig.abi);
    Ok(tcx.arena.alloc(fn_abi))
}

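/// Applies ABI-specific adjustments to an already-built `FnAbi`: "unadjusted" passes
/// everything `Direct`, Rust ABIs get Rust-specific tweaks plus deduced parameter
/// attributes, and all other ABIs defer to the target's foreign-ABI rules.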
#[tracing::instrument(level = "trace", skip(cx))]
fn fn_abi_adjust_for_abi<'tcx>(
    cx: &LayoutCx<'tcx>,
    fn_abi: &mut FnAbi<'tcx, Ty<'tcx>>,
    abi: ExternAbi,
    fn_def_id: Option<DefId>,
) {
    if abi == ExternAbi::Unadjusted {
        // The "unadjusted" ABI passes everything `Direct`, including aggregates.
        fn unadjust<'tcx>(arg: &mut ArgAbi<'tcx, Ty<'tcx>>) {
            if matches!(arg.layout.backend_repr, BackendRepr::Memory { .. }) {
                assert!(
                    arg.layout.backend_repr.is_sized(),
                    "'unadjusted' ABI does not support unsized arguments"
                );
            }
            arg.make_direct_deprecated();
        }

        unadjust(&mut fn_abi.ret);
        for arg in fn_abi.args.iter_mut() {
            unadjust(arg);
        }
        return;
    }

    let tcx = cx.tcx();

    if abi.is_rustic_abi() {
        fn_abi.adjust_for_rust_abi(cx, abi);

        // Look up deduced parameter attributes for the callee, but only when optimizing
        // and not under incremental compilation.
        let deduced_param_attrs =
            if tcx.sess.opts.optimize != OptLevel::No && tcx.sess.opts.incremental.is_none() {
                fn_def_id.map(|fn_def_id| tcx.deduced_param_attrs(fn_def_id)).unwrap_or_default()
            } else {
                &[]
            };

        for (arg_idx, arg) in fn_abi.args.iter_mut().enumerate() {
            if arg.is_ignore() {
                continue;
            }

            // Attach deduced attributes to indirectly-passed arguments.
            if let &mut PassMode::Indirect { ref mut attrs, .. } = &mut arg.mode {
                if let Some(deduced_param_attrs) = deduced_param_attrs.get(arg_idx) {
                    if deduced_param_attrs.read_only {
                        attrs.regular.insert(ArgAttribute::ReadOnly);
                        debug!("added deduced read-only attribute");
                    }
                }
            }
        }
    } else {
        fn_abi.adjust_for_foreign_abi(cx, abi);
    }
}

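/// Replaces the layout of a `dyn Trait` receiver (a wide pointer, possibly wrapped in
/// newtypes such as `Pin` or `Rc`) with that of a thin pointer, keeping the original type.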
#[tracing::instrument(level = "debug", skip(cx))]
fn make_thin_self_ptr<'tcx>(
    cx: &(impl HasTyCtxt<'tcx> + HasTypingEnv<'tcx>),
    layout: TyAndLayout<'tcx>,
) -> TyAndLayout<'tcx> {
    let tcx = cx.tcx();
    let wide_pointer_ty = if layout.is_unsized() {
        // Unsized `self` is passed as a pointer to `self`.
        Ty::new_mut_ptr(tcx, layout.ty)
    } else {
        match layout.backend_repr {
            BackendRepr::ScalarPair(..) | BackendRepr::Scalar(..) => (),
            _ => bug!("receiver type has unsupported layout: {:?}", layout),
        }

        // The receiver is a wide pointer wrapped in newtypes; keep unwrapping the single
        // non-1-ZST field until a built-in pointer type is reached.
        let mut wide_pointer_layout = layout;
        while !wide_pointer_layout.ty.is_raw_ptr() && !wide_pointer_layout.ty.is_ref() {
            wide_pointer_layout = wide_pointer_layout
                .non_1zst_field(cx)
                .expect("not exactly one non-1-ZST field in a `DispatchFromDyn` type")
                .1
        }

        wide_pointer_layout.ty
    };

    // Keep the wide pointer type, but give it the layout of a thin pointer (`*mut ()`).
    let unit_ptr_ty = Ty::new_mut_ptr(tcx, tcx.types.unit);

    TyAndLayout {
        ty: wide_pointer_ty,

        // `*mut ()` always has a layout in a fully monomorphized environment, so the
        // `unwrap` cannot fail.
        ..tcx.layout_of(ty::TypingEnv::fully_monomorphized().as_query_input(unit_ptr_ty)).unwrap()
    }
}