use std::assert_matches::assert_matches;
use std::iter;

use rustc_abi::Primitive::Pointer;
use rustc_abi::{BackendRepr, ExternAbi, PointerKind, Scalar, Size};
use rustc_hir as hir;
use rustc_hir::lang_items::LangItem;
use rustc_middle::bug;
use rustc_middle::middle::deduced_param_attrs::DeducedParamAttrs;
use rustc_middle::query::Providers;
use rustc_middle::ty::layout::{
    FnAbiError, HasTyCtxt, HasTypingEnv, LayoutCx, LayoutOf, TyAndLayout, fn_can_unwind,
};
use rustc_middle::ty::{self, InstanceKind, Ty, TyCtxt};
use rustc_session::config::OptLevel;
use rustc_span::DUMMY_SP;
use rustc_span::def_id::DefId;
use rustc_target::callconv::{
    AbiMap, ArgAbi, ArgAttribute, ArgAttributes, ArgExtension, FnAbi, PassMode,
};
use tracing::debug;

pub(crate) fn provide(providers: &mut Providers) {
    *providers = Providers { fn_abi_of_fn_ptr, fn_abi_of_instance, ..*providers };
}

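/// Computes the `FnSig` that is used to determine the call ABI of `instance`.
/// Shims and coroutines get signatures synthesized here that differ from their
/// source-level signature (e.g. coroutines are described by the signature of their
/// `resume`/`poll`/`next` entry point).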
#[tracing::instrument(level = "debug", skip(tcx, typing_env))]
fn fn_sig_for_fn_abi<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance: ty::Instance<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
) -> ty::FnSig<'tcx> {
    if let InstanceKind::ThreadLocalShim(..) = instance.def {
        return tcx.mk_fn_sig(
            [],
            tcx.thread_local_ptr_ty(instance.def_id()),
            false,
            hir::Safety::Safe,
            rustc_abi::ExternAbi::Rust,
        );
    }

    let ty = instance.ty(tcx, typing_env);
    match *ty.kind() {
        ty::FnDef(def_id, args) => {
            let mut sig = tcx
                .instantiate_bound_regions_with_erased(tcx.fn_sig(def_id).instantiate(tcx, args));

            // Modify `fn(self, ...)` to `fn(self: *mut Self, ...)` for vtable shims.
            if let ty::InstanceKind::VTableShim(..) = instance.def {
                let mut inputs_and_output = sig.inputs_and_output.to_vec();
                inputs_and_output[0] = Ty::new_mut_ptr(tcx, inputs_and_output[0]);
                sig.inputs_and_output = tcx.mk_type_list(&inputs_and_output);
            }

            sig
        }
        ty::Closure(def_id, args) => {
            let sig = tcx.instantiate_bound_regions_with_erased(args.as_closure().sig());
            let env_ty = tcx.closure_env_ty(
                Ty::new_closure(tcx, def_id, args),
                args.as_closure().kind(),
                tcx.lifetimes.re_erased,
            );

            tcx.mk_fn_sig(
                iter::once(env_ty).chain(sig.inputs().iter().cloned()),
                sig.output(),
                sig.c_variadic,
                sig.safety,
                sig.abi,
            )
        }
        ty::CoroutineClosure(def_id, args) => {
            let coroutine_ty = Ty::new_coroutine_closure(tcx, def_id, args);
            let sig = args.as_coroutine_closure().coroutine_closure_sig();

            let mut coroutine_kind = args.as_coroutine_closure().kind();

            let env_ty =
                if let InstanceKind::ConstructCoroutineInClosureShim { receiver_by_ref, .. } =
                    instance.def
                {
                    coroutine_kind = ty::ClosureKind::FnOnce;

                    // The "by-ref" constructor shim borrows the coroutine-closure,
                    // while the "by-move" shim consumes it.
                    if receiver_by_ref {
                        Ty::new_imm_ref(tcx, tcx.lifetimes.re_erased, coroutine_ty)
                    } else {
                        coroutine_ty
                    }
                } else {
                    tcx.closure_env_ty(coroutine_ty, coroutine_kind, tcx.lifetimes.re_erased)
                };

            let sig = tcx.instantiate_bound_regions_with_erased(sig);

            tcx.mk_fn_sig(
                iter::once(env_ty).chain([sig.tupled_inputs_ty]),
                sig.to_coroutine_given_kind_and_upvars(
                    tcx,
                    args.as_coroutine_closure().parent_args(),
                    tcx.coroutine_for_closure(def_id),
                    coroutine_kind,
                    tcx.lifetimes.re_erased,
                    args.as_coroutine_closure().tupled_upvars_ty(),
                    args.as_coroutine_closure().coroutine_captures_by_ref_ty(),
                ),
                sig.c_variadic,
                sig.safety,
                sig.abi,
            )
        }
        ty::Coroutine(did, args) => {
            let coroutine_kind = tcx.coroutine_kind(did).unwrap();
            let sig = args.as_coroutine().sig();

            let env_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, ty);

            let pin_did = tcx.require_lang_item(LangItem::Pin, DUMMY_SP);
            let pin_adt_ref = tcx.adt_def(pin_did);
            let pin_args = tcx.mk_args(&[env_ty.into()]);
            let env_ty = match coroutine_kind {
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _) => {
                    // `gen` blocks are resumed through `Iterator::next(&mut self)`,
                    // so their environment is not pinned.
                    env_ty
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _)
                | hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _)
                | hir::CoroutineKind::Coroutine(_) => Ty::new_adt(tcx, pin_adt_ref, pin_args),
            };

            let (resume_ty, ret_ty) = match coroutine_kind {
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _) => {
                    // The signature should be
                    // `Future::poll(Pin<&mut Self>, &mut Context<'_>) -> Poll<Output>`.
                    assert_eq!(sig.yield_ty, tcx.types.unit);

                    let poll_did = tcx.require_lang_item(LangItem::Poll, DUMMY_SP);
                    let poll_adt_ref = tcx.adt_def(poll_did);
                    let poll_args = tcx.mk_args(&[sig.return_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, poll_adt_ref, poll_args);

                    // The `ResumeTy` used during type checking stands in for the
                    // `&mut Context<'_>` that is actually passed at runtime.
                    #[cfg(debug_assertions)]
                    {
                        if let ty::Adt(resume_ty_adt, _) = sig.resume_ty.kind() {
                            let expected_adt =
                                tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, DUMMY_SP));
                            assert_eq!(*resume_ty_adt, expected_adt);
                        } else {
                            panic!("expected `ResumeTy`, found `{:?}`", sig.resume_ty);
                        };
                    }
                    let context_mut_ref = Ty::new_task_context(tcx);

                    (Some(context_mut_ref), ret_ty)
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _) => {
                    // The signature should be `Iterator::next(&mut Self) -> Option<Item>`.
                    let option_did = tcx.require_lang_item(LangItem::Option, DUMMY_SP);
                    let option_adt_ref = tcx.adt_def(option_did);
                    let option_args = tcx.mk_args(&[sig.yield_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, option_adt_ref, option_args);

                    assert_eq!(sig.return_ty, tcx.types.unit);
                    assert_eq!(sig.resume_ty, tcx.types.unit);

                    (None, ret_ty)
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _) => {
                    // The signature should be
                    // `AsyncIterator::poll_next(Pin<&mut Self>, &mut Context<'_>) -> ...`.
                    assert_eq!(sig.return_ty, tcx.types.unit);

                    // The coroutine's yield type already has the shape returned by `poll_next`.
                    let ret_ty = sig.yield_ty;

                    // As for `async`, the `ResumeTy` stands in for `&mut Context<'_>`.
                    #[cfg(debug_assertions)]
                    {
                        if let ty::Adt(resume_ty_adt, _) = sig.resume_ty.kind() {
                            let expected_adt =
                                tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, DUMMY_SP));
                            assert_eq!(*resume_ty_adt, expected_adt);
                        } else {
                            panic!("expected `ResumeTy`, found `{:?}`", sig.resume_ty);
                        };
                    }
                    let context_mut_ref = Ty::new_task_context(tcx);

                    (Some(context_mut_ref), ret_ty)
                }
                hir::CoroutineKind::Coroutine(_) => {
                    // The signature should be
                    // `Coroutine::resume(Pin<&mut Self>, Resume) -> CoroutineState<Yield, Return>`.
                    let state_did = tcx.require_lang_item(LangItem::CoroutineState, DUMMY_SP);
                    let state_adt_ref = tcx.adt_def(state_did);
                    let state_args = tcx.mk_args(&[sig.yield_ty.into(), sig.return_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, state_adt_ref, state_args);

                    (Some(sig.resume_ty), ret_ty)
                }
            };

            if let Some(resume_ty) = resume_ty {
                tcx.mk_fn_sig(
                    [env_ty, resume_ty],
                    ret_ty,
                    false,
                    hir::Safety::Safe,
                    rustc_abi::ExternAbi::Rust,
                )
            } else {
                // `Iterator::next` does not take a resume argument.
                tcx.mk_fn_sig(
                    [env_ty],
                    ret_ty,
                    false,
                    hir::Safety::Safe,
                    rustc_abi::ExternAbi::Rust,
                )
            }
        }
        _ => bug!("unexpected type {:?} in Instance::fn_sig", ty),
    }
}

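/// Query provider: computes the `FnAbi` of a function pointer with signature `sig`,
/// with the given extra argument types appended at the call site (e.g. for
/// C-variadic calls).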
fn fn_abi_of_fn_ptr<'tcx>(
    tcx: TyCtxt<'tcx>,
    query: ty::PseudoCanonicalInput<'tcx, (ty::PolyFnSig<'tcx>, &'tcx ty::List<Ty<'tcx>>)>,
) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
    let ty::PseudoCanonicalInput { typing_env, value: (sig, extra_args) } = query;
    fn_abi_new_uncached(
        &LayoutCx::new(tcx, typing_env),
        tcx.instantiate_bound_regions_with_erased(sig),
        extra_args,
        None,
    )
}

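/// Query provider: computes the `FnAbi` of a monomorphic `Instance`, starting from
/// the signature produced by `fn_sig_for_fn_abi`.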
fn fn_abi_of_instance<'tcx>(
    tcx: TyCtxt<'tcx>,
    query: ty::PseudoCanonicalInput<'tcx, (ty::Instance<'tcx>, &'tcx ty::List<Ty<'tcx>>)>,
) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
    let ty::PseudoCanonicalInput { typing_env, value: (instance, extra_args) } = query;
    fn_abi_new_uncached(
        &LayoutCx::new(tcx, typing_env),
        fn_sig_for_fn_abi(tcx, instance, typing_env),
        extra_args,
        Some(instance),
    )
}

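/// Computes the argument attributes (`NoUndef`, `NonNull`, `NoAlias`, `ReadOnly`,
/// pointee size/alignment, ...) that the Rust ABI attaches to a scalar component of
/// an argument or return value, based on the scalar's validity invariants and, for
/// pointers, on what is known about the pointee.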
fn arg_attrs_for_rust_scalar<'tcx>(
    cx: LayoutCx<'tcx>,
    scalar: Scalar,
    layout: TyAndLayout<'tcx>,
    offset: Size,
    is_return: bool,
    drop_target_pointee: Option<Ty<'tcx>>,
) -> ArgAttributes {
    let mut attrs = ArgAttributes::new();

    // Booleans are only ever 0 or 1: zero-extend them and mark them `noundef`.
    if scalar.is_bool() {
        attrs.ext(ArgExtension::Zext);
        attrs.set(ArgAttribute::NoUndef);
        return attrs;
    }

    // Scalars which can never be uninitialized are `noundef`.
    if !scalar.is_uninit_valid() {
        attrs.set(ArgAttribute::NoUndef);
    }

    // Only initialized pointer scalars are eligible for the attributes below.
    let Scalar::Initialized { value: Pointer(_), valid_range } = scalar else { return attrs };

    // The pointer argument of `drop_in_place` is guaranteed to be non-null even
    // though its type is a raw pointer.
    if !valid_range.contains(0) || drop_target_pointee.is_some() {
        attrs.set(ArgAttribute::NonNull);
    }

    let tcx = cx.tcx();

    if let Some(pointee) = layout.pointee_info_at(&cx, offset) {
        let kind = if let Some(kind) = pointee.safe {
            Some(kind)
        } else if let Some(pointee) = drop_target_pointee {
            // The argument of `drop_in_place` behaves like a mutable reference to its pointee.
            Some(PointerKind::MutableRef { unpin: pointee.is_unpin(tcx, cx.typing_env) })
        } else {
            None
        };
        if let Some(kind) = kind {
            attrs.pointee_align =
                Some(pointee.align.min(cx.tcx().sess.target.max_reliable_alignment()));

            // Only report a dereferenceable pointee size where it is known to be sound:
            // never for return values, and only for frozen shared references and
            // `Unpin` mutable references.
            attrs.pointee_size = match kind {
                _ if is_return => Size::ZERO,
                PointerKind::Box { .. }
                | PointerKind::SharedRef { frozen: false }
                | PointerKind::MutableRef { unpin: false } => Size::ZERO,
                PointerKind::SharedRef { frozen: true }
                | PointerKind::MutableRef { unpin: true } => pointee.size,
            };

            // `-Zbox-noalias` and `-Zmutable-noalias` allow turning these off when
            // debugging suspected `noalias`-related miscompilations.
            let noalias_for_box = tcx.sess.opts.unstable_opts.box_noalias;
            let noalias_mut_ref = tcx.sess.opts.unstable_opts.mutable_noalias;

            let no_alias = match kind {
                PointerKind::SharedRef { frozen } => frozen,
                PointerKind::MutableRef { unpin } => unpin && noalias_mut_ref,
                PointerKind::Box { unpin, global } => unpin && global && noalias_for_box,
            };
            // `noalias` is only applied to arguments, never to return values.
            if no_alias && !is_return {
                attrs.set(ArgAttribute::NoAlias);
            }

            // Shared references to `Freeze` data cannot be written through.
            if matches!(kind, PointerKind::SharedRef { frozen: true }) && !is_return {
                attrs.set(ArgAttribute::ReadOnly);
                attrs.set(ArgAttribute::CapturesReadOnly);
            }
        }
    }

    attrs
}

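/// Asserts invariants that every computed `FnAbi` must uphold, e.g. that ZST
/// arguments are ignored under rustic ABIs and that each argument's `PassMode` is
/// consistent with its layout.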
fn fn_abi_sanity_check<'tcx>(
    cx: &LayoutCx<'tcx>,
    fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
    spec_abi: ExternAbi,
) {
    fn fn_arg_sanity_check<'tcx>(
        cx: &LayoutCx<'tcx>,
        fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
        spec_abi: ExternAbi,
        arg: &ArgAbi<'tcx, Ty<'tcx>>,
    ) {
        let tcx = cx.tcx();

        if spec_abi.is_rustic_abi() {
            if arg.layout.is_zst() {
                assert!(arg.is_ignore());
            }
            if let PassMode::Indirect { on_stack, .. } = arg.mode {
                assert!(!on_stack, "rust abi shouldn't use on_stack");
            }
        } else if arg.layout.pass_indirectly_in_non_rustic_abis(cx) {
            assert_matches!(
                arg.mode,
                PassMode::Indirect { on_stack: false, .. },
                "the {spec_abi} ABI does not implement `#[rustc_pass_indirectly_in_non_rustic_abis]`"
            );
        }

        match &arg.mode {
            PassMode::Ignore => {
                assert!(arg.layout.is_zst());
            }
            PassMode::Direct(_) => {
                match arg.layout.backend_repr {
                    BackendRepr::Scalar(_)
                    | BackendRepr::SimdVector { .. }
                    | BackendRepr::ScalableVector { .. } => {}
                    BackendRepr::ScalarPair(..) => {
                        panic!("`PassMode::Direct` used for ScalarPair type {}", arg.layout.ty)
                    }
                    BackendRepr::Memory { sized } => {
                        // `PassMode::Direct` must never be used for unsized types, and for
                        // sized aggregates it is only legal under the "unadjusted" ABI.
                        assert!(sized, "`PassMode::Direct` for unsized type in ABI: {:#?}", fn_abi);

                        assert!(
                            matches!(spec_abi, ExternAbi::Unadjusted),
                            "`PassMode::Direct` for aggregates only allowed for \"unadjusted\"\n\
                             Problematic type: {:#?}",
                            arg.layout,
                        );
                    }
                }
            }
            PassMode::Pair(_, _) => {
                assert!(
                    matches!(arg.layout.backend_repr, BackendRepr::ScalarPair(..)),
                    "PassMode::Pair for type {}",
                    arg.layout.ty
                );
            }
            PassMode::Cast { .. } => {
                assert!(arg.layout.is_sized());
            }
            PassMode::Indirect { meta_attrs: None, .. } => {
                assert!(arg.layout.is_sized());
            }
            PassMode::Indirect { meta_attrs: Some(_), on_stack, .. } => {
                assert!(arg.layout.is_unsized() && !on_stack);
                // `extern` types are unsized but carry no metadata, so they cannot be
                // passed as arguments at all.
                let tail = tcx.struct_tail_for_codegen(arg.layout.ty, cx.typing_env);
                if matches!(tail.kind(), ty::Foreign(..)) {
                    panic!("unsized arguments must not be `extern` types");
                }
            }
        }
    }

    for arg in fn_abi.args.iter() {
        fn_arg_sanity_check(cx, fn_abi, spec_abi, arg);
    }
    fn_arg_sanity_check(cx, fn_abi, spec_abi, &fn_abi.ret);
}

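/// Builds a `FnAbi` from scratch for `sig` (and, if known, the concrete `instance`):
/// canonicalizes the calling convention for the target, untuples "rust-call"
/// arguments, appends the implicit caller-location argument for `#[track_caller]`,
/// computes per-argument attributes, and applies the ABI-specific adjustments.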
#[tracing::instrument(level = "debug", skip(cx, instance))]
fn fn_abi_new_uncached<'tcx>(
    cx: &LayoutCx<'tcx>,
    sig: ty::FnSig<'tcx>,
    extra_args: &[Ty<'tcx>],
    instance: Option<ty::Instance<'tcx>>,
) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
    let tcx = cx.tcx();
    let (caller_location, determined_fn_def_id, is_virtual_call) = if let Some(instance) = instance
    {
        let is_virtual_call = matches!(instance.def, ty::InstanceKind::Virtual(..));
        let is_tls_shim_call = matches!(instance.def, ty::InstanceKind::ThreadLocalShim(_));
        (
            instance.def.requires_caller_location(tcx).then(|| tcx.caller_location_ty()),
            // For virtual calls and TLS shims, the `DefId` does not identify the
            // function that will actually run, so don't use it to refine the ABI.
            if is_virtual_call || is_tls_shim_call { None } else { Some(instance.def_id()) },
            is_virtual_call,
        )
    } else {
        (None, None, false)
    };
    let sig = tcx.normalize_erasing_regions(cx.typing_env, sig);

    let abi_map = AbiMap::from_target(&tcx.sess.target);
    let conv = abi_map.canonize_abi(sig.abi, sig.c_variadic).unwrap();

    let mut inputs = sig.inputs();
    let extra_args = if sig.abi == ExternAbi::RustCall {
        assert!(!sig.c_variadic && extra_args.is_empty());

        // "rust-call" functions tuple their non-`self` arguments; untuple the final
        // argument into individual ABI arguments.
        if let Some(input) = sig.inputs().last()
            && let ty::Tuple(tupled_arguments) = input.kind()
        {
            inputs = &sig.inputs()[0..sig.inputs().len() - 1];
            tupled_arguments
        } else {
            bug!(
                "argument to function with \"rust-call\" ABI \
                 is not a tuple"
            );
        }
    } else {
        assert!(sig.c_variadic || extra_args.is_empty());
        extra_args
    };

    let is_drop_in_place = determined_fn_def_id.is_some_and(|def_id| {
        tcx.is_lang_item(def_id, LangItem::DropInPlace)
            || tcx.is_lang_item(def_id, LangItem::AsyncDropInPlace)
    });

    let arg_of = |ty: Ty<'tcx>, arg_idx: Option<usize>| -> Result<_, &'tcx FnAbiError<'tcx>> {
        let span = tracing::debug_span!("arg_of");
        let _entered = span.enter();
        let is_return = arg_idx.is_none();
        let is_drop_target = is_drop_in_place && arg_idx == Some(0);
        let drop_target_pointee = is_drop_target.then(|| match ty.kind() {
            ty::RawPtr(ty, _) => *ty,
            _ => bug!("argument to drop_in_place is not a raw ptr: {:?}", ty),
        });

        let layout = cx.layout_of(ty).map_err(|err| &*tcx.arena.alloc(FnAbiError::Layout(*err)))?;
        let layout = if is_virtual_call && arg_idx == Some(0) {
            // For virtual calls, the receiver is passed as a thin pointer; the vtable
            // is supplied separately by the caller.
            make_thin_self_ptr(cx, layout)
        } else {
            layout
        };

        let mut arg = ArgAbi::new(cx, layout, |layout, scalar, offset| {
            arg_attrs_for_rust_scalar(*cx, scalar, *layout, offset, is_return, drop_target_pointee)
        });

        if arg.layout.is_zst() {
            arg.mode = PassMode::Ignore;
        }

        Ok(arg)
    };

    let mut fn_abi = FnAbi {
        ret: arg_of(sig.output(), None)?,
        args: inputs
            .iter()
            .copied()
            .chain(extra_args.iter().copied())
            .chain(caller_location)
            .enumerate()
            .map(|(i, ty)| arg_of(ty, Some(i)))
            .collect::<Result<_, _>>()?,
        c_variadic: sig.c_variadic,
        fixed_count: inputs.len() as u32,
        conv,
        can_unwind: fn_can_unwind(tcx, determined_fn_def_id, sig.abi),
    };
    fn_abi_adjust_for_abi(cx, &mut fn_abi, sig.abi, determined_fn_def_id);
    debug!("fn_abi_new_uncached = {:?}", fn_abi);
    fn_abi_sanity_check(cx, &fn_abi, sig.abi);
    Ok(tcx.arena.alloc(fn_abi))
}

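/// Applies calling-convention-specific adjustments to a freshly computed `FnAbi`:
/// the "unadjusted" ABI passes everything `Direct`, rustic ABIs get the Rust-specific
/// adjustments plus optimization-deduced parameter attributes, and all other ABIs
/// defer to the target's foreign-ABI rules.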
#[tracing::instrument(level = "trace", skip(cx))]
fn fn_abi_adjust_for_abi<'tcx>(
    cx: &LayoutCx<'tcx>,
    fn_abi: &mut FnAbi<'tcx, Ty<'tcx>>,
    abi: ExternAbi,
    fn_def_id: Option<DefId>,
) {
    if abi == ExternAbi::Unadjusted {
        // The "unadjusted" ABI passes every argument and the return value directly.
        fn unadjust<'tcx>(arg: &mut ArgAbi<'tcx, Ty<'tcx>>) {
            if matches!(arg.layout.backend_repr, BackendRepr::Memory { .. }) {
                assert!(
                    arg.layout.backend_repr.is_sized(),
                    "'unadjusted' ABI does not support unsized arguments"
                );
            }
            arg.make_direct_deprecated();
        }

        unadjust(&mut fn_abi.ret);
        for arg in fn_abi.args.iter_mut() {
            unadjust(arg);
        }
        return;
    }

    let tcx = cx.tcx();

    if abi.is_rustic_abi() {
        fn_abi.adjust_for_rust_abi(cx);

        // Deduced parameter attributes are only applied when optimizing and outside
        // of incremental compilation.
        let deduced =
            if tcx.sess.opts.optimize != OptLevel::No && tcx.sess.opts.incremental.is_none() {
                fn_def_id.map(|fn_def_id| tcx.deduced_param_attrs(fn_def_id)).unwrap_or_default()
            } else {
                &[]
            };
        if !deduced.is_empty() {
            apply_deduced_attributes(cx, deduced, 0, &mut fn_abi.ret);
            for (arg_idx, arg) in fn_abi.args.iter_mut().enumerate() {
                apply_deduced_attributes(cx, deduced, arg_idx + 1, arg);
            }
        }
    } else {
        fn_abi.adjust_for_foreign_abi(cx, abi);
    }
}

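/// Applies the attributes deduced by `deduced_param_attrs` (currently `ReadOnly` and
/// `CapturesNone`) to an indirectly passed argument. Index 0 is the return place;
/// argument `i` uses index `i + 1`.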
fn apply_deduced_attributes<'tcx>(
    cx: &LayoutCx<'tcx>,
    deduced: &[DeducedParamAttrs],
    idx: usize,
    arg: &mut ArgAbi<'tcx, Ty<'tcx>>,
) {
    // Only indirectly passed arguments can carry these attributes.
    let PassMode::Indirect { ref mut attrs, .. } = arg.mode else {
        return;
    };
    // The deduced attributes may cover fewer parameters than the ABI has.
    let Some(deduced) = deduced.get(idx) else {
        return;
    };
    if deduced.read_only(cx.tcx(), cx.typing_env, arg.layout.ty) {
        debug!("added deduced ReadOnly attribute");
        attrs.regular.insert(ArgAttribute::ReadOnly);
    }
    if deduced.captures_none(cx.tcx(), cx.typing_env, arg.layout.ty) {
        debug!("added deduced CapturesNone attribute");
        attrs.regular.insert(ArgAttribute::CapturesNone);
    }
}

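/// Replaces the layout of a wide (e.g. `&dyn Trait`) `self` argument in a virtual
/// call with that of a thin pointer: the vtable part is passed separately, so only
/// the data pointer is part of the call ABI.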
#[tracing::instrument(level = "debug", skip(cx))]
fn make_thin_self_ptr<'tcx>(
    cx: &(impl HasTyCtxt<'tcx> + HasTypingEnv<'tcx>),
    layout: TyAndLayout<'tcx>,
) -> TyAndLayout<'tcx> {
    let tcx = cx.tcx();
    let wide_pointer_ty = if layout.is_unsized() {
        // An unsized `self` is itself passed by pointer, so build a pointer to it.
        Ty::new_mut_ptr(tcx, layout.ty)
    } else {
        match layout.backend_repr {
            BackendRepr::ScalarPair(..) | BackendRepr::Scalar(..) => (),
            _ => bug!("receiver type has unsupported layout: {:?}", layout),
        }

        // Descend through newtype wrappers (`Pin<&mut Self>`, custom `DispatchFromDyn`
        // receivers, ...) until the underlying pointer or reference is reached.
        let mut wide_pointer_layout = layout;
        while !wide_pointer_layout.ty.is_raw_ptr() && !wide_pointer_layout.ty.is_ref() {
            wide_pointer_layout = wide_pointer_layout
                .non_1zst_field(cx)
                .expect("not exactly one non-1-ZST field in a `DispatchFromDyn` type")
                .1
        }

        wide_pointer_layout.ty
    };

    // Keep the wide pointer *type*, but use the layout of a thin pointer (`*mut ()`).
    let unit_ptr_ty = Ty::new_mut_ptr(tcx, tcx.types.unit);

    TyAndLayout {
        ty: wide_pointer_ty,
        ..tcx.layout_of(ty::TypingEnv::fully_monomorphized().as_query_input(unit_ptr_ty)).unwrap()
    }
}