use std::assert_matches::assert_matches;
use std::iter;

use rustc_abi::Primitive::Pointer;
use rustc_abi::{BackendRepr, ExternAbi, PointerKind, Scalar, Size};
use rustc_hir as hir;
use rustc_hir::lang_items::LangItem;
use rustc_middle::bug;
use rustc_middle::middle::deduced_param_attrs::DeducedParamAttrs;
use rustc_middle::query::Providers;
use rustc_middle::ty::layout::{
    FnAbiError, HasTyCtxt, HasTypingEnv, LayoutCx, LayoutOf, TyAndLayout, fn_can_unwind,
};
use rustc_middle::ty::{self, InstanceKind, Ty, TyCtxt};
use rustc_session::config::OptLevel;
use rustc_span::DUMMY_SP;
use rustc_span::def_id::DefId;
use rustc_target::callconv::{
    AbiMap, ArgAbi, ArgAttribute, ArgAttributes, ArgExtension, FnAbi, PassMode,
};
use tracing::debug;

pub(crate) fn provide(providers: &mut Providers) {
    *providers = Providers { fn_abi_of_fn_ptr, fn_abi_of_instance, ..*providers };
}

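/// Computes the `ty::FnSig` that the ABI of `instance` should be based on, mapping
/// closures, coroutine-closures, coroutines, and the various shims to the signature
/// their generated body is actually called with.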
#[tracing::instrument(level = "debug", skip(tcx, typing_env))]
fn fn_sig_for_fn_abi<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance: ty::Instance<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
) -> ty::FnSig<'tcx> {
    if let InstanceKind::ThreadLocalShim(..) = instance.def {
        return tcx.mk_fn_sig(
            [],
            tcx.thread_local_ptr_ty(instance.def_id()),
            false,
            hir::Safety::Safe,
            rustc_abi::ExternAbi::Rust,
        );
    }

    let ty = instance.ty(tcx, typing_env);
    match *ty.kind() {
        ty::FnDef(def_id, args) => {
            let mut sig = tcx
                .instantiate_bound_regions_with_erased(tcx.fn_sig(def_id).instantiate(tcx, args));

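            // For a `VTableShim`, the receiver is passed as a raw pointer: wrap the first
            // input (`self`) in `*mut`.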
            if let ty::InstanceKind::VTableShim(..) = instance.def {
                let mut inputs_and_output = sig.inputs_and_output.to_vec();
                inputs_and_output[0] = Ty::new_mut_ptr(tcx, inputs_and_output[0]);
                sig.inputs_and_output = tcx.mk_type_list(&inputs_and_output);
            }

            sig
        }
        ty::Closure(def_id, args) => {
            let sig = tcx.instantiate_bound_regions_with_erased(args.as_closure().sig());
            let env_ty = tcx.closure_env_ty(
                Ty::new_closure(tcx, def_id, args),
                args.as_closure().kind(),
                tcx.lifetimes.re_erased,
            );

            tcx.mk_fn_sig(
                iter::once(env_ty).chain(sig.inputs().iter().cloned()),
                sig.output(),
                sig.c_variadic,
                sig.safety,
                sig.abi,
            )
        }
        ty::CoroutineClosure(def_id, args) => {
            let coroutine_ty = Ty::new_coroutine_closure(tcx, def_id, args);
            let sig = args.as_coroutine_closure().coroutine_closure_sig();

            let mut coroutine_kind = args.as_coroutine_closure().kind();

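            // When lowering the `ConstructCoroutineInClosureShim`, the coroutine is always
            // constructed through the `FnOnce` environment: the shim takes the
            // coroutine-closure itself, by reference or by value depending on
            // `receiver_by_ref`.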
            let env_ty =
                if let InstanceKind::ConstructCoroutineInClosureShim { receiver_by_ref, .. } =
                    instance.def
                {
                    coroutine_kind = ty::ClosureKind::FnOnce;

                    if receiver_by_ref {
                        Ty::new_imm_ref(tcx, tcx.lifetimes.re_erased, coroutine_ty)
                    } else {
                        coroutine_ty
                    }
                } else {
                    tcx.closure_env_ty(coroutine_ty, coroutine_kind, tcx.lifetimes.re_erased)
                };

            let sig = tcx.instantiate_bound_regions_with_erased(sig);

            tcx.mk_fn_sig(
                iter::once(env_ty).chain([sig.tupled_inputs_ty]),
                sig.to_coroutine_given_kind_and_upvars(
                    tcx,
                    args.as_coroutine_closure().parent_args(),
                    tcx.coroutine_for_closure(def_id),
                    coroutine_kind,
                    tcx.lifetimes.re_erased,
                    args.as_coroutine_closure().tupled_upvars_ty(),
                    args.as_coroutine_closure().coroutine_captures_by_ref_ty(),
                ),
                sig.c_variadic,
                sig.safety,
                sig.abi,
            )
        }
        ty::Coroutine(did, args) => {
            let coroutine_kind = tcx.coroutine_kind(did).unwrap();
            let sig = args.as_coroutine().sig();

            let env_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, ty);

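            // `gen` blocks are resumed through a plain `&mut Self` (their resume function is
            // `Iterator::next`); every other coroutine kind is resumed through `Pin<&mut Self>`.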
            let pin_did = tcx.require_lang_item(LangItem::Pin, DUMMY_SP);
            let pin_adt_ref = tcx.adt_def(pin_did);
            let pin_args = tcx.mk_args(&[env_ty.into()]);
            let env_ty = match coroutine_kind {
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _) => env_ty,
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _)
                | hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _)
                | hir::CoroutineKind::Coroutine(_) => Ty::new_adt(tcx, pin_adt_ref, pin_args),
            };

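            // Each flavor of coroutine resumes with a different signature:
            //   async      -> fn(Pin<&mut Self>, &mut Context<'_>) -> Poll<Return>
            //   gen        -> fn(&mut Self) -> Option<Yield>
            //   async gen  -> fn(Pin<&mut Self>, &mut Context<'_>) -> <yield type>
            //   coroutine  -> fn(Pin<&mut Self>, Resume) -> CoroutineState<Yield, Return>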
            let (resume_ty, ret_ty) = match coroutine_kind {
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _) => {
                    assert_eq!(sig.yield_ty, tcx.types.unit);

                    let poll_did = tcx.require_lang_item(LangItem::Poll, DUMMY_SP);
                    let poll_adt_ref = tcx.adt_def(poll_did);
                    let poll_args = tcx.mk_args(&[sig.return_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, poll_adt_ref, poll_args);

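                    // Type and borrow checking use the `ResumeTy` lang item for the resume
                    // argument; for ABI purposes it is replaced by `&mut Context<'_>` below.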
                    #[cfg(debug_assertions)]
                    {
                        if let ty::Adt(resume_ty_adt, _) = sig.resume_ty.kind() {
                            let expected_adt =
                                tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, DUMMY_SP));
                            assert_eq!(*resume_ty_adt, expected_adt);
                        } else {
                            panic!("expected `ResumeTy`, found `{:?}`", sig.resume_ty);
                        };
                    }
                    let context_mut_ref = Ty::new_task_context(tcx);

                    (Some(context_mut_ref), ret_ty)
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _) => {
                    let option_did = tcx.require_lang_item(LangItem::Option, DUMMY_SP);
                    let option_adt_ref = tcx.adt_def(option_did);
                    let option_args = tcx.mk_args(&[sig.yield_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, option_adt_ref, option_args);

                    assert_eq!(sig.return_ty, tcx.types.unit);
                    assert_eq!(sig.resume_ty, tcx.types.unit);

                    (None, ret_ty)
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _) => {
                    assert_eq!(sig.return_ty, tcx.types.unit);

                    let ret_ty = sig.yield_ty;

                    #[cfg(debug_assertions)]
                    {
                        if let ty::Adt(resume_ty_adt, _) = sig.resume_ty.kind() {
                            let expected_adt =
                                tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, DUMMY_SP));
                            assert_eq!(*resume_ty_adt, expected_adt);
                        } else {
                            panic!("expected `ResumeTy`, found `{:?}`", sig.resume_ty);
                        };
                    }
                    let context_mut_ref = Ty::new_task_context(tcx);

                    (Some(context_mut_ref), ret_ty)
                }
                hir::CoroutineKind::Coroutine(_) => {
                    let state_did = tcx.require_lang_item(LangItem::CoroutineState, DUMMY_SP);
                    let state_adt_ref = tcx.adt_def(state_did);
                    let state_args = tcx.mk_args(&[sig.yield_ty.into(), sig.return_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, state_adt_ref, state_args);

                    (Some(sig.resume_ty), ret_ty)
                }
            };

            if let Some(resume_ty) = resume_ty {
                tcx.mk_fn_sig(
                    [env_ty, resume_ty],
                    ret_ty,
                    false,
                    hir::Safety::Safe,
                    rustc_abi::ExternAbi::Rust,
                )
            } else {
                tcx.mk_fn_sig(
                    [env_ty],
                    ret_ty,
                    false,
                    hir::Safety::Safe,
                    rustc_abi::ExternAbi::Rust,
                )
            }
        }
        _ => bug!("unexpected type {:?} in Instance::fn_sig", ty),
    }
}

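/// Query provider for `fn_abi_of_fn_ptr`: computes the `FnAbi` of a function pointer
/// with signature `sig` and with `extra_args` as additional trailing arguments.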
fn fn_abi_of_fn_ptr<'tcx>(
    tcx: TyCtxt<'tcx>,
    query: ty::PseudoCanonicalInput<'tcx, (ty::PolyFnSig<'tcx>, &'tcx ty::List<Ty<'tcx>>)>,
) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
    let ty::PseudoCanonicalInput { typing_env, value: (sig, extra_args) } = query;
    fn_abi_new_uncached(
        &LayoutCx::new(tcx, typing_env),
        tcx.instantiate_bound_regions_with_erased(sig),
        extra_args,
        None,
    )
}

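/// Query provider for `fn_abi_of_instance`: computes the `FnAbi` of a concrete callee
/// `instance`, starting from the signature produced by `fn_sig_for_fn_abi`.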
fn fn_abi_of_instance<'tcx>(
    tcx: TyCtxt<'tcx>,
    query: ty::PseudoCanonicalInput<'tcx, (ty::Instance<'tcx>, &'tcx ty::List<Ty<'tcx>>)>,
) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
    let ty::PseudoCanonicalInput { typing_env, value: (instance, extra_args) } = query;
    fn_abi_new_uncached(
        &LayoutCx::new(tcx, typing_env),
        fn_sig_for_fn_abi(tcx, instance, typing_env),
        extra_args,
        Some(instance),
    )
}

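/// Computes the `ArgAttributes` for one scalar component of an argument or return value:
/// `zext` for `bool`, `noundef` whenever uninitialized bits are invalid, and for pointers
/// the `nonnull`, alignment, `dereferenceable`, `noalias`, and `readonly` attributes
/// derived from the pointee.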
fn arg_attrs_for_rust_scalar<'tcx>(
    cx: LayoutCx<'tcx>,
    scalar: Scalar,
    layout: TyAndLayout<'tcx>,
    offset: Size,
    is_return: bool,
    drop_target_pointee: Option<Ty<'tcx>>,
) -> ArgAttributes {
    let mut attrs = ArgAttributes::new();

    if scalar.is_bool() {
        attrs.ext(ArgExtension::Zext);
        attrs.set(ArgAttribute::NoUndef);
        return attrs;
    }

    if !scalar.is_uninit_valid() {
        attrs.set(ArgAttribute::NoUndef);
    }

    let Scalar::Initialized { value: Pointer(_), valid_range } = scalar else { return attrs };

    if !valid_range.contains(0) || drop_target_pointee.is_some() {
        attrs.set(ArgAttribute::NonNull);
    }

    let tcx = cx.tcx();

    if let Some(pointee) = layout.pointee_info_at(&cx, offset) {
        let kind = if let Some(kind) = pointee.safe {
            Some(kind)
        } else if let Some(pointee) = drop_target_pointee {
            Some(PointerKind::MutableRef { unpin: pointee.is_unpin(tcx, cx.typing_env) })
        } else {
            None
        };
        if let Some(kind) = kind {
            attrs.pointee_align =
                Some(pointee.align.min(cx.tcx().sess.target.max_reliable_alignment()));

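            // Frozen shared references and `Unpin` mutable references get a
            // `dereferenceable` pointee size; `Box` and the non-frozen / non-`Unpin` cases
            // only get the alignment computed above.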
            attrs.pointee_size = match kind {
                PointerKind::Box { .. }
                | PointerKind::SharedRef { frozen: false }
                | PointerKind::MutableRef { unpin: false } => Size::ZERO,
                PointerKind::SharedRef { frozen: true }
                | PointerKind::MutableRef { unpin: true } => pointee.size,
            };

            let noalias_for_box = tcx.sess.opts.unstable_opts.box_noalias;

            let noalias_mut_ref = tcx.sess.opts.unstable_opts.mutable_noalias;

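            // `noalias` applies to frozen shared references, to `Unpin` mutable references
            // (gated on `-Zmutable-noalias`), and to `Unpin`, globally-allocated boxes
            // (gated on `-Zbox-noalias`). It is never applied to the return place.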
            let no_alias = match kind {
                PointerKind::SharedRef { frozen } => frozen,
                PointerKind::MutableRef { unpin } => unpin && noalias_mut_ref,
                PointerKind::Box { unpin, global } => unpin && global && noalias_for_box,
            };
            if no_alias && !is_return {
                attrs.set(ArgAttribute::NoAlias);
            }

            if matches!(kind, PointerKind::SharedRef { frozen: true }) && !is_return {
                attrs.set(ArgAttribute::ReadOnly);
                attrs.set(ArgAttribute::CapturesReadOnly);
            }
        }
    }

    attrs
}

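/// Asserts that every argument (and the return place) of a computed `FnAbi` uses a
/// `PassMode` that is consistent with its layout and with the declared ABI.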
fn fn_abi_sanity_check<'tcx>(
    cx: &LayoutCx<'tcx>,
    fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
    spec_abi: ExternAbi,
) {
    fn fn_arg_sanity_check<'tcx>(
        cx: &LayoutCx<'tcx>,
        fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
        spec_abi: ExternAbi,
        arg: &ArgAbi<'tcx, Ty<'tcx>>,
    ) {
        let tcx = cx.tcx();

        if spec_abi.is_rustic_abi() {
            if arg.layout.is_zst() {
                assert!(arg.is_ignore());
            }
            if let PassMode::Indirect { on_stack, .. } = arg.mode {
                assert!(!on_stack, "rust abi shouldn't use on_stack");
            }
        } else if arg.layout.pass_indirectly_in_non_rustic_abis(cx) {
            assert_matches!(
                arg.mode,
                PassMode::Indirect { on_stack: false, .. },
                "the {spec_abi} ABI does not implement `#[rustc_pass_indirectly_in_non_rustic_abis]`"
            );
        }

        match &arg.mode {
            PassMode::Ignore => {
                assert!(arg.layout.is_zst());
            }
            PassMode::Direct(_) => {
                match arg.layout.backend_repr {
                    BackendRepr::Scalar(_) | BackendRepr::SimdVector { .. } => {}
                    BackendRepr::ScalarPair(..) => {
                        panic!("`PassMode::Direct` used for ScalarPair type {}", arg.layout.ty)
                    }
                    BackendRepr::Memory { sized } => {
                        assert!(sized, "`PassMode::Direct` for unsized type in ABI: {:#?}", fn_abi);

                        assert!(
                            matches!(spec_abi, ExternAbi::Unadjusted),
                            "`PassMode::Direct` for aggregates only allowed for \"unadjusted\"\n\
                             Problematic type: {:#?}",
                            arg.layout,
                        );
                    }
                }
            }
            PassMode::Pair(_, _) => {
                assert!(
                    matches!(arg.layout.backend_repr, BackendRepr::ScalarPair(..)),
                    "PassMode::Pair for type {}",
                    arg.layout.ty
                );
            }
            PassMode::Cast { .. } => {
                assert!(arg.layout.is_sized());
            }
            PassMode::Indirect { meta_attrs: None, .. } => {
                assert!(arg.layout.is_sized());
            }
            PassMode::Indirect { meta_attrs: Some(_), on_stack, .. } => {
                assert!(arg.layout.is_unsized() && !on_stack);
                let tail = tcx.struct_tail_for_codegen(arg.layout.ty, cx.typing_env);
                if matches!(tail.kind(), ty::Foreign(..)) {
                    panic!("unsized arguments must not be `extern` types");
                }
            }
        }
    }

    for arg in fn_abi.args.iter() {
        fn_arg_sanity_check(cx, fn_abi, spec_abi, arg);
    }
    fn_arg_sanity_check(cx, fn_abi, spec_abi, &fn_abi.ret);
}

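/// Builds a fresh `FnAbi` for `sig`. `instance` is `Some` when the concrete callee is
/// known; that enables the implicit `#[track_caller]` location argument, the thin-`self`
/// rewrite for virtual calls, and the use of deduced parameter attributes.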
#[tracing::instrument(level = "debug", skip(cx, instance))]
fn fn_abi_new_uncached<'tcx>(
    cx: &LayoutCx<'tcx>,
    sig: ty::FnSig<'tcx>,
    extra_args: &[Ty<'tcx>],
    instance: Option<ty::Instance<'tcx>>,
) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
    let tcx = cx.tcx();
    let (caller_location, determined_fn_def_id, is_virtual_call) = if let Some(instance) = instance
    {
        let is_virtual_call = matches!(instance.def, ty::InstanceKind::Virtual(..));
        let is_tls_shim_call = matches!(instance.def, ty::InstanceKind::ThreadLocalShim(_));
        (
            instance.def.requires_caller_location(tcx).then(|| tcx.caller_location_ty()),
            if is_virtual_call || is_tls_shim_call { None } else { Some(instance.def_id()) },
            is_virtual_call,
        )
    } else {
        (None, None, false)
    };
    let sig = tcx.normalize_erasing_regions(cx.typing_env, sig);

    let abi_map = AbiMap::from_target(&tcx.sess.target);
    let conv = abi_map.canonize_abi(sig.abi, sig.c_variadic).unwrap();

    let mut inputs = sig.inputs();
    let extra_args = if sig.abi == ExternAbi::RustCall {
        assert!(!sig.c_variadic && extra_args.is_empty());

        if let Some(input) = sig.inputs().last()
            && let ty::Tuple(tupled_arguments) = input.kind()
        {
            inputs = &sig.inputs()[0..sig.inputs().len() - 1];
            tupled_arguments
        } else {
            bug!(
                "argument to function with \"rust-call\" ABI \
                 is not a tuple"
            );
        }
    } else {
        assert!(sig.c_variadic || extra_args.is_empty());
        extra_args
    };

    let is_drop_in_place = determined_fn_def_id.is_some_and(|def_id| {
        tcx.is_lang_item(def_id, LangItem::DropInPlace)
            || tcx.is_lang_item(def_id, LangItem::AsyncDropInPlace)
    });

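    // Computes the `ArgAbi` for a single value: `arg_idx` is `None` for the return place
    // and `Some(i)` for the `i`-th entry of the final (untupled) argument list.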
    let arg_of = |ty: Ty<'tcx>, arg_idx: Option<usize>| -> Result<_, &'tcx FnAbiError<'tcx>> {
        let span = tracing::debug_span!("arg_of");
        let _entered = span.enter();
        let is_return = arg_idx.is_none();
        let is_drop_target = is_drop_in_place && arg_idx == Some(0);
        let drop_target_pointee = is_drop_target.then(|| match ty.kind() {
            ty::RawPtr(ty, _) => *ty,
            _ => bug!("argument to drop_in_place is not a raw ptr: {:?}", ty),
        });

        let layout = cx.layout_of(ty).map_err(|err| &*tcx.arena.alloc(FnAbiError::Layout(*err)))?;
        let layout = if is_virtual_call && arg_idx == Some(0) {
            make_thin_self_ptr(cx, layout)
        } else {
            layout
        };

        let mut arg = ArgAbi::new(cx, layout, |layout, scalar, offset| {
            arg_attrs_for_rust_scalar(*cx, scalar, *layout, offset, is_return, drop_target_pointee)
        });

        if arg.layout.is_zst() {
            arg.mode = PassMode::Ignore;
        }

        Ok(arg)
    };

    let mut fn_abi = FnAbi {
        ret: arg_of(sig.output(), None)?,
        args: inputs
            .iter()
            .copied()
            .chain(extra_args.iter().copied())
            .chain(caller_location)
            .enumerate()
            .map(|(i, ty)| arg_of(ty, Some(i)))
            .collect::<Result<_, _>>()?,
        c_variadic: sig.c_variadic,
        fixed_count: inputs.len() as u32,
        conv,
        can_unwind: fn_can_unwind(tcx, determined_fn_def_id, sig.abi),
    };
    fn_abi_adjust_for_abi(cx, &mut fn_abi, sig.abi, determined_fn_def_id);
    debug!("fn_abi_new_uncached = {:?}", fn_abi);
    fn_abi_sanity_check(cx, &fn_abi, sig.abi);
    Ok(tcx.arena.alloc(fn_abi))
}

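/// Adjusts a freshly built `FnAbi` for the declared ABI: the "unadjusted" ABI forces
/// every value into `PassMode::Direct`, rustic ABIs get the Rust-specific argument
/// passing rules plus any deduced parameter attributes, and all other ABIs are lowered
/// according to the target calling convention.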
#[tracing::instrument(level = "trace", skip(cx))]
fn fn_abi_adjust_for_abi<'tcx>(
    cx: &LayoutCx<'tcx>,
    fn_abi: &mut FnAbi<'tcx, Ty<'tcx>>,
    abi: ExternAbi,
    fn_def_id: Option<DefId>,
) {
    if abi == ExternAbi::Unadjusted {
        fn unadjust<'tcx>(arg: &mut ArgAbi<'tcx, Ty<'tcx>>) {
            if matches!(arg.layout.backend_repr, BackendRepr::Memory { .. }) {
                assert!(
                    arg.layout.backend_repr.is_sized(),
                    "'unadjusted' ABI does not support unsized arguments"
                );
            }
            arg.make_direct_deprecated();
        }

        unadjust(&mut fn_abi.ret);
        for arg in fn_abi.args.iter_mut() {
            unadjust(arg);
        }
        return;
    }

    let tcx = cx.tcx();

    if abi.is_rustic_abi() {
        fn_abi.adjust_for_rust_abi(cx);
        let deduced =
            if tcx.sess.opts.optimize != OptLevel::No && tcx.sess.opts.incremental.is_none() {
                fn_def_id.map(|fn_def_id| tcx.deduced_param_attrs(fn_def_id)).unwrap_or_default()
            } else {
                &[]
            };
        if !deduced.is_empty() {
            apply_deduced_attributes(cx, deduced, 0, &mut fn_abi.ret);
            for (arg_idx, arg) in fn_abi.args.iter_mut().enumerate() {
                apply_deduced_attributes(cx, deduced, arg_idx + 1, arg);
            }
        }
    } else {
        fn_abi.adjust_for_foreign_abi(cx, abi);
    }
}

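/// Applies deduced parameter attributes (`DeducedParamAttrs`) to a value that is passed
/// indirectly; the caller uses `idx == 0` for the return place and `arg_idx + 1` for
/// arguments. This can currently add `ReadOnly` and `CapturesNone`.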
fn apply_deduced_attributes<'tcx>(
    cx: &LayoutCx<'tcx>,
    deduced: &[DeducedParamAttrs],
    idx: usize,
    arg: &mut ArgAbi<'tcx, Ty<'tcx>>,
) {
    let PassMode::Indirect { ref mut attrs, .. } = arg.mode else {
        return;
    };
    let Some(deduced) = deduced.get(idx) else {
        return;
    };
    if deduced.read_only(cx.tcx(), cx.typing_env, arg.layout.ty) {
        debug!("added deduced ReadOnly attribute");
        attrs.regular.insert(ArgAttribute::ReadOnly);
    }
    if deduced.captures_none(cx.tcx(), cx.typing_env, arg.layout.ty) {
        debug!("added deduced CapturesNone attribute");
        attrs.regular.insert(ArgAttribute::CapturesNone);
    }
}

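/// In a virtual call, `self` must be passed as a thin pointer. Returns a `TyAndLayout`
/// whose layout is that of `*mut ()` but whose `ty` is still the wide pointer type found
/// inside `layout` (possibly nested inside a `DispatchFromDyn` receiver), so argument
/// attributes can still be derived from it.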
#[tracing::instrument(level = "debug", skip(cx))]
fn make_thin_self_ptr<'tcx>(
    cx: &(impl HasTyCtxt<'tcx> + HasTypingEnv<'tcx>),
    layout: TyAndLayout<'tcx>,
) -> TyAndLayout<'tcx> {
    let tcx = cx.tcx();
    let wide_pointer_ty = if layout.is_unsized() {
        Ty::new_mut_ptr(tcx, layout.ty)
    } else {
        match layout.backend_repr {
            BackendRepr::ScalarPair(..) | BackendRepr::Scalar(..) => (),
            _ => bug!("receiver type has unsupported layout: {:?}", layout),
        }

        let mut wide_pointer_layout = layout;
        while !wide_pointer_layout.ty.is_raw_ptr() && !wide_pointer_layout.ty.is_ref() {
            wide_pointer_layout = wide_pointer_layout
                .non_1zst_field(cx)
                .expect("not exactly one non-1-ZST field in a `DispatchFromDyn` type")
                .1
        }

        wide_pointer_layout.ty
    };

    let unit_ptr_ty = Ty::new_mut_ptr(tcx, tcx.types.unit);

    TyAndLayout {
        ty: wide_pointer_ty,
        ..tcx.layout_of(ty::TypingEnv::fully_monomorphized().as_query_input(unit_ptr_ty)).unwrap()
    }
}