//! Manages calling a concrete function (with known MIR body) with argument passing,
//! and returning the return value to the caller.

use std::borrow::Cow;

use either::{Left, Right};
use rustc_abi::{self as abi, ExternAbi, FieldIdx, Integer, VariantIdx};
use rustc_data_structures::assert_matches;
use rustc_errors::msg;
use rustc_hir::def_id::DefId;
use rustc_middle::ty::layout::{IntegerExt, TyAndLayout};
use rustc_middle::ty::{self, AdtDef, Instance, Ty, VariantDef};
use rustc_middle::{bug, mir, span_bug};
use rustc_span::sym;
use rustc_target::callconv::{ArgAbi, FnAbi, PassMode};
use tracing::field::Empty;
use tracing::{info, instrument, trace};

use super::{
    CtfeProvenance, FnVal, ImmTy, InterpCx, InterpResult, MPlaceTy, Machine, OpTy, PlaceTy,
    Projectable, Provenance, ReturnAction, ReturnContinuation, Scalar, StackPopInfo, interp_ok,
    throw_ub, throw_ub_custom, throw_unsup_format,
};
use crate::enter_trace_span;
use crate::interpret::EnteredTraceSpan;

/// An argument passed to a function.
#[derive(Clone, Debug)]
pub enum FnArg<'tcx, Prov: Provenance = CtfeProvenance> {
    /// Pass a copy of the given operand.
    Copy(OpTy<'tcx, Prov>),
    /// Allow for the argument to be passed in-place: destroy the value originally stored at that
    /// place and make the place inaccessible for the duration of the function call. This *must* be
    /// an in-memory place so that we can do the proper alias checks.
    InPlace(MPlaceTy<'tcx, Prov>),
}

impl<'tcx, Prov: Provenance> FnArg<'tcx, Prov> {
    pub fn layout(&self) -> &TyAndLayout<'tcx> {
        match self {
            FnArg::Copy(op) => &op.layout,
            FnArg::InPlace(mplace) => &mplace.layout,
        }
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    /// Make a copy of the given fn_arg. Any `InPlace` is degenerated to a copy; no protection of
    /// the original memory occurs.
    pub fn copy_fn_arg(&self, arg: &FnArg<'tcx, M::Provenance>) -> OpTy<'tcx, M::Provenance> {
        match arg {
            FnArg::Copy(op) => op.clone(),
            FnArg::InPlace(mplace) => mplace.clone().into(),
        }
    }

    /// Make a copy of the given fn_args. Any `InPlace` is degenerated to a copy; no protection of
    /// the original memory occurs.
    pub fn copy_fn_args(
        &self,
        args: &[FnArg<'tcx, M::Provenance>],
    ) -> Vec<OpTy<'tcx, M::Provenance>> {
        args.iter().map(|fn_arg| self.copy_fn_arg(fn_arg)).collect()
    }

    /// Helper function for argument untupling.
    pub(super) fn fn_arg_field(
        &self,
        arg: &FnArg<'tcx, M::Provenance>,
        field: FieldIdx,
    ) -> InterpResult<'tcx, FnArg<'tcx, M::Provenance>> {
        interp_ok(match arg {
            FnArg::Copy(op) => FnArg::Copy(self.project_field(op, field)?),
            FnArg::InPlace(mplace) => FnArg::InPlace(self.project_field(mplace, field)?),
        })
    }

    /// Find the wrapped inner type of a transparent wrapper.
    /// Must not be called on 1-ZST (as they don't have a uniquely defined "wrapped field").
    ///
    /// We work with `TyAndLayout` here since that makes it much easier to iterate over all fields.
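    ///
    /// For example (a sketch, with a hypothetical wrapper type): `#[repr(transparent)]
    /// struct Meters(f64);` unfolds to `f64`, and nested transparent wrappers unfold
    /// recursively until `may_unfold` says to stop or the type is no longer transparent.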
    fn unfold_transparent(
        &self,
        layout: TyAndLayout<'tcx>,
        may_unfold: impl Fn(AdtDef<'tcx>) -> bool,
    ) -> TyAndLayout<'tcx> {
        match layout.ty.kind() {
            ty::Adt(adt_def, _) if adt_def.repr().transparent() && may_unfold(*adt_def) => {
                assert!(!adt_def.is_enum());
                // Find the non-1-ZST field, and recurse.
                let (_, field) = layout.non_1zst_field(self).unwrap();
                self.unfold_transparent(field, may_unfold)
            }
            ty::Pat(base, _) => self.layout_of(*base).expect(
                "if the layout of a pattern type could be computed, so can the layout of its base",
            ),
            // Not a transparent type, no further unfolding.
            _ => layout,
        }
    }

    /// Unwrap types that are guaranteed the null-pointer optimization (NPO).
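    ///
    /// For example, `Option<&T>` unfolds to `&T`, and `Option<NonNull<T>>` unfolds through
    /// the `#[rustc_nonnull_optimization_guaranteed]` wrapper down to the underlying pointer
    /// type (a sketch of the guaranteed cases handled below).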
    fn unfold_npo(&self, layout: TyAndLayout<'tcx>) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
        // Check if this is an option-like type wrapping some type.
        let ty::Adt(def, args) = layout.ty.kind() else {
            // Not an ADT, so definitely no NPO.
            return interp_ok(layout);
        };
        if def.variants().len() != 2 {
            // Not a 2-variant enum, so no NPO.
            return interp_ok(layout);
        }
        assert!(def.is_enum());

        let all_fields_1zst = |variant: &VariantDef| -> InterpResult<'tcx, _> {
            for field in &variant.fields {
                let ty = field.ty(*self.tcx, args);
                let layout = self.layout_of(ty)?;
                if !layout.is_1zst() {
                    return interp_ok(false);
                }
            }
            interp_ok(true)
        };

        // If one variant consists entirely of 1-ZST, then the other variant
        // is the only "relevant" one for this check.
        let var0 = VariantIdx::from_u32(0);
        let var1 = VariantIdx::from_u32(1);
        let relevant_variant = if all_fields_1zst(def.variant(var0))? {
            def.variant(var1)
        } else if all_fields_1zst(def.variant(var1))? {
            def.variant(var0)
        } else {
            // No variant is all-1-ZST, so no NPO.
            return interp_ok(layout);
        };
        // The "relevant" variant must have exactly one field, and its type is the "inner" type.
        if relevant_variant.fields.len() != 1 {
            return interp_ok(layout);
        }
        let inner = relevant_variant.fields[FieldIdx::from_u32(0)].ty(*self.tcx, args);
        let inner = self.layout_of(inner)?;

        // Check if the inner type is one of the NPO-guaranteed ones.
        // For that we first unpeel transparent *structs* (but not unions).
        let is_npo = |def: AdtDef<'tcx>| {
            self.tcx.has_attr(def.did(), sym::rustc_nonnull_optimization_guaranteed)
        };
        let inner = self.unfold_transparent(inner, /* may_unfold */ |def| {
            // Stop at NPO types so that we don't miss that attribute in the check below!
            def.is_struct() && !is_npo(def)
        });
        interp_ok(match inner.ty.kind() {
            ty::Ref(..) | ty::FnPtr(..) => {
                // `Option<&T>` behaves like `&T`, and same for `fn()`.
                inner
            }
            ty::Adt(def, _) if is_npo(*def) => {
                // Once we found a `nonnull_optimization_guaranteed` type, further strip off
                // newtype structs from it to find the underlying ABI type.
                self.unfold_transparent(inner, /* may_unfold */ |def| def.is_struct())
            }
            _ => {
                // Everything else we do not unfold.
                layout
            }
        })
    }

    /// Check if these two layouts look like they are fn-ABI-compatible.
    /// (We also compare the `PassMode`, so this doesn't have to check everything. But it turns out
    /// that only checking the `PassMode` is insufficient.)
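    ///
    /// For example (a sketch of cases accepted below): `usize` vs. `u64` on a 64-bit
    /// target, thin pointers like `&T` vs. `*mut T`, and `char` vs. `u32`.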
    fn layout_compat(
        &self,
        caller: TyAndLayout<'tcx>,
        callee: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, bool> {
        // Fast path: equal types are definitely compatible.
        if caller.ty == callee.ty {
            return interp_ok(true);
        }
        // 1-ZST are compatible with all 1-ZST (and with nothing else).
        if caller.is_1zst() || callee.is_1zst() {
            return interp_ok(caller.is_1zst() && callee.is_1zst());
        }
        // Unfold newtypes and NPO optimizations.
        let unfold = |layout: TyAndLayout<'tcx>| {
            self.unfold_npo(self.unfold_transparent(layout, /* may_unfold */ |_def| true))
        };
        let caller = unfold(caller)?;
        let callee = unfold(callee)?;
        // Now see if these inner types are compatible.

        // Compatible pointer types. For thin pointers, we have to accept even non-`repr(transparent)`
        // things as compatible due to `DispatchFromDyn`. For instance, `Rc<i32>` and `*mut i32`
        // must be compatible. So we just accept everything with Pointer ABI as compatible,
        // even if this will accept some code that is not stably guaranteed to work.
        // This also handles function pointers.
        let thin_pointer = |layout: TyAndLayout<'tcx>| match layout.backend_repr {
            abi::BackendRepr::Scalar(s) => match s.primitive() {
                abi::Primitive::Pointer(addr_space) => Some(addr_space),
                _ => None,
            },
            _ => None,
        };
        if let (Some(caller), Some(callee)) = (thin_pointer(caller), thin_pointer(callee)) {
            return interp_ok(caller == callee);
        }
        // For wide pointers we have to get the pointee type.
        let pointee_ty = |ty: Ty<'tcx>| -> InterpResult<'tcx, Option<Ty<'tcx>>> {
            // We cannot use `builtin_deref` here since we need to reject `Box<T, MyAlloc>`.
            interp_ok(Some(match ty.kind() {
                ty::Ref(_, ty, _) => *ty,
                ty::RawPtr(ty, _) => *ty,
                // We only accept `Box` with the default allocator.
                _ if ty.is_box_global(*self.tcx) => ty.expect_boxed_ty(),
                _ => return interp_ok(None),
            }))
        };
        if let (Some(caller), Some(callee)) = (pointee_ty(caller.ty)?, pointee_ty(callee.ty)?) {
            // This is okay if they have the same metadata type.
            let meta_ty = |ty: Ty<'tcx>| {
                // Even if `ty` is normalized, the search for the unsized tail will project
                // to fields, which can yield non-normalized types. So we need to provide a
                // normalization function.
                let normalize = |ty| self.tcx.normalize_erasing_regions(self.typing_env, ty);
                ty.ptr_metadata_ty(*self.tcx, normalize)
            };
            return interp_ok(meta_ty(caller) == meta_ty(callee));
        }

        // Compatible integer types (in particular, usize vs ptr-sized-u32/u64).
        // `char` counts as `u32`.
        let int_ty = |ty: Ty<'tcx>| {
            Some(match ty.kind() {
                ty::Int(ity) => (Integer::from_int_ty(&self.tcx, *ity), /* signed */ true),
                ty::Uint(uty) => (Integer::from_uint_ty(&self.tcx, *uty), /* signed */ false),
                ty::Char => (Integer::I32, /* signed */ false),
                _ => return None,
            })
        };
        if let (Some(caller), Some(callee)) = (int_ty(caller.ty), int_ty(callee.ty)) {
            // This is okay if they are the same integer type.
            return interp_ok(caller == callee);
        }

        // Fall back to exact equality.
        interp_ok(caller == callee)
    }

    /// Returns a `bool` saying whether the two arguments are ABI-compatible.
    pub fn check_argument_compat(
        &self,
        caller_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        callee_abi: &ArgAbi<'tcx, Ty<'tcx>>,
    ) -> InterpResult<'tcx, bool> {
        // We do not want to accept things as ABI-compatible that just "happen to be" compatible on the current target,
        // so we implement a type-based check that reflects the guaranteed rules for ABI compatibility.
        if self.layout_compat(caller_abi.layout, callee_abi.layout)? {
            // Ensure that our checks imply actual ABI compatibility for this concrete call.
            // (This can fail e.g. if `#[rustc_nonnull_optimization_guaranteed]` is used incorrectly.)
            assert!(caller_abi.eq_abi(callee_abi));
            interp_ok(true)
        } else {
            trace!(
                "check_argument_compat: incompatible ABIs:\ncaller: {:?}\ncallee: {:?}",
                caller_abi, callee_abi
            );
            interp_ok(false)
        }
    }

    /// Initialize a single callee argument, checking the types for compatibility.
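    ///
    /// This consumes one entry from `caller_args` (unless the callee argument is
    /// `PassMode::Ignore`), checks ABI compatibility, makes the callee local live if
    /// needed, and copies the value into place.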
    fn pass_argument<'x, 'y>(
        &mut self,
        caller_args: &mut impl Iterator<
            Item = (&'x FnArg<'tcx, M::Provenance>, &'y ArgAbi<'tcx, Ty<'tcx>>),
        >,
        callee_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        callee_arg_idx: usize,
        callee_arg: &mir::Place<'tcx>,
        callee_ty: Ty<'tcx>,
        already_live: bool,
    ) -> InterpResult<'tcx>
    where
        'tcx: 'x,
        'tcx: 'y,
    {
        assert_eq!(callee_ty, callee_abi.layout.ty);
        if callee_abi.mode == PassMode::Ignore {
            // This one is skipped. Still must be made live though!
            if !already_live {
                self.storage_live(callee_arg.as_local().unwrap())?;
            }
            return interp_ok(());
        }
        // Find next caller arg.
        let Some((caller_arg, caller_abi)) = caller_args.next() else {
            throw_ub_custom!(msg!("calling a function with fewer arguments than it requires"));
        };
        assert_eq!(caller_arg.layout().layout, caller_abi.layout.layout);
        // Sadly we cannot assert that `caller_arg.layout().ty` and `caller_abi.layout.ty` are
        // equal; in closures the types sometimes differ. We just hope that `caller_abi` is the
        // right type to print to the user.

        // Check compatibility
        if !self.check_argument_compat(caller_abi, callee_abi)? {
            throw_ub!(AbiMismatchArgument {
                arg_idx: callee_arg_idx,
                caller_ty: caller_abi.layout.ty,
                callee_ty: callee_abi.layout.ty
            });
        }
        // We work with a copy of the argument for now; if this is in-place argument passing, we
        // will later protect the source it comes from. This means the callee cannot observe if we
        // did in-place or by-copy argument passing, except for pointer equality tests.
        let caller_arg_copy = self.copy_fn_arg(caller_arg);
        if !already_live {
            let local = callee_arg.as_local().unwrap();
            let meta = caller_arg_copy.meta();
            // `check_argument_compat` ensures that if metadata is needed, both have the same type,
            // so we know they will use the metadata the same way.
            assert!(!meta.has_meta() || caller_arg_copy.layout.ty == callee_ty);

            self.storage_live_dyn(local, meta)?;
        }
        // Now we can finally actually evaluate the callee place.
        let callee_arg = self.eval_place(*callee_arg)?;
        // We allow some transmutes here.
        // FIXME: Depending on the PassMode, this should reset some padding to uninitialized. (This
        // is true for all `copy_op`, but there are a lot of special cases for argument passing
        // specifically.)
        self.copy_op_allow_transmute(&caller_arg_copy, &callee_arg)?;
        // If this was an in-place pass, protect the place it comes from for the duration of the call.
        if let FnArg::InPlace(mplace) = caller_arg {
            M::protect_in_place_function_argument(self, mplace)?;
        }
        interp_ok(())
    }

    /// The main entry point for creating a new stack frame: performs ABI checks and initializes
    /// arguments.
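    ///
    /// On success, a new frame has been pushed and all argument locals are initialized; on
    /// error, the partially-initialized frame is popped again so the backtrace stays accurate.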
    #[instrument(skip(self), level = "trace")]
    pub fn init_stack_frame(
        &mut self,
        instance: Instance<'tcx>,
        body: &'tcx mir::Body<'tcx>,
        caller_fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx, M::Provenance>],
        with_caller_location: bool,
        destination: &PlaceTy<'tcx, M::Provenance>,
        mut cont: ReturnContinuation,
    ) -> InterpResult<'tcx> {
        let _trace = enter_trace_span!(M, step::init_stack_frame, %instance, tracing_separate_thread = Empty);

        // Compute callee information.
        // FIXME: for variadic support, do we have to somehow determine callee's extra_args?
        let callee_fn_abi = self.fn_abi_of_instance(instance, ty::List::empty())?;

        if callee_fn_abi.c_variadic || caller_fn_abi.c_variadic {
            throw_unsup_format!("calling a c-variadic function is not supported");
        }

        if caller_fn_abi.conv != callee_fn_abi.conv {
            throw_ub_custom!(
                rustc_errors::msg!(
                    "calling a function with calling convention \"{$callee_conv}\" using calling convention \"{$caller_conv}\""
                ),
                callee_conv = format!("{}", callee_fn_abi.conv),
                caller_conv = format!("{}", caller_fn_abi.conv),
            )
        }

        // Check that all target features required by the callee (i.e., from
        // the attribute `#[target_feature(enable = ...)]`) are enabled at
        // compile time.
        M::check_fn_target_features(self, instance)?;

        if !callee_fn_abi.can_unwind {
            // The callee cannot unwind, so force the `Unreachable` unwind handling.
            match &mut cont {
                ReturnContinuation::Stop { .. } => {}
                ReturnContinuation::Goto { unwind, .. } => {
                    *unwind = mir::UnwindAction::Unreachable;
                }
            }
        }

        // *Before* pushing the new frame, determine whether the return destination is in memory.
        // Need to use `place_to_op` to be *sure* we get the mplace if there is one.
        let destination_mplace = self.place_to_op(destination)?.as_mplace_or_imm().left();

        // Push the "raw" frame -- this leaves locals uninitialized.
        self.push_stack_frame_raw(instance, body, destination, cont)?;

        // If an error is raised here, pop the frame again to get an accurate backtrace.
        // To this end, we wrap it all in a `try` block.
        let res: InterpResult<'tcx> = try {
            trace!(
                "caller ABI: {:#?}, args: {:#?}",
                caller_fn_abi,
                args.iter()
                    .map(|arg| (
                        arg.layout().ty,
                        match arg {
                            FnArg::Copy(op) => format!("copy({op:?})"),
                            FnArg::InPlace(mplace) => format!("in-place({mplace:?})"),
                        }
                    ))
                    .collect::<Vec<_>>()
            );
            trace!(
                "spread_arg: {:?}, locals: {:#?}",
                body.spread_arg,
                body.args_iter()
                    .map(|local| (
                        local,
                        self.layout_of_local(self.frame(), local, None).unwrap().ty,
                    ))
                    .collect::<Vec<_>>()
            );

            // In principle, we have two iterators: Where the arguments come from, and where
            // they go to.

            // The "where they come from" part is easy, we expect the caller to do any special handling
            // that might be required here (e.g. for untupling).
            // If `with_caller_location` is set we pretend there is an extra argument (that
            // we will not pass; our `caller_location` intrinsic implementation walks the stack instead).
            assert_eq!(
                args.len() + if with_caller_location { 1 } else { 0 },
                caller_fn_abi.args.len(),
                "mismatch between caller ABI and caller arguments",
            );
            let mut caller_args = args
                .iter()
                .zip(caller_fn_abi.args.iter())
                .filter(|arg_and_abi| !matches!(arg_and_abi.1.mode, PassMode::Ignore));

            // Now we have to spread them out across the callee's locals,
            // taking into account the `spread_arg`. If we could write
            // this as a single iterator (that handles `spread_arg`), then
            // `pass_argument` would be the loop body. It takes care to
            // not advance `caller_iter` for ignored arguments.
            let mut callee_args_abis = callee_fn_abi.args.iter().enumerate();
            for local in body.args_iter() {
                // Construct the destination place for this argument. At this point all
                // locals are still dead, so we cannot construct a `PlaceTy`.
                let dest = mir::Place::from(local);
                // `layout_of_local` does more than just the instantiation we need to get the
                // type, but the result gets cached so this avoids calling the instantiation
                // query *again* the next time this local is accessed.
                let ty = self.layout_of_local(self.frame(), local, None)?.ty;
                if Some(local) == body.spread_arg {
                    // Make the local live once, then fill in the value field by field.
                    self.storage_live(local)?;
                    // Must be a tuple
                    let ty::Tuple(fields) = ty.kind() else {
                        span_bug!(self.cur_span(), "non-tuple type for `spread_arg`: {ty}")
                    };
                    for (i, field_ty) in fields.iter().enumerate() {
                        let dest = dest.project_deeper(
                            &[mir::ProjectionElem::Field(FieldIdx::from_usize(i), field_ty)],
                            *self.tcx,
                        );
                        let (idx, callee_abi) = callee_args_abis.next().unwrap();
                        self.pass_argument(
                            &mut caller_args,
                            callee_abi,
                            idx,
                            &dest,
                            field_ty,
                            /* already_live */ true,
                        )?;
                    }
                } else {
                    // Normal argument. Cannot mark it as live yet, it might be unsized!
                    let (idx, callee_abi) = callee_args_abis.next().unwrap();
                    self.pass_argument(
                        &mut caller_args,
                        callee_abi,
                        idx,
                        &dest,
                        ty,
                        /* already_live */ false,
                    )?;
                }
            }
            // If the callee needs a caller location, pretend we consume one more argument from the ABI.
            if instance.def.requires_caller_location(*self.tcx) {
                callee_args_abis.next().unwrap();
            }
            // Now we should have no more caller args or callee arg ABIs
            assert!(
                callee_args_abis.next().is_none(),
                "mismatch between callee ABI and callee body arguments"
            );
            if caller_args.next().is_some() {
                throw_ub_custom!(msg!("calling a function with more arguments than it expected"));
            }
            // Don't forget to check the return type!
            if !self.check_argument_compat(&caller_fn_abi.ret, &callee_fn_abi.ret)? {
                throw_ub!(AbiMismatchReturn {
                    caller_ty: caller_fn_abi.ret.layout.ty,
                    callee_ty: callee_fn_abi.ret.layout.ty
                });
            }

            // Protect return place for in-place return value passing.
            // We only need to protect anything if this is actually an in-memory place.
            if let Some(mplace) = destination_mplace {
                M::protect_in_place_function_argument(self, &mplace)?;
            }

            // Don't forget to mark "initially live" locals as live.
            self.storage_live_for_always_live_locals()?;
        };
        res.inspect_err_kind(|_| {
            // Don't show the incomplete stack frame in the error stacktrace.
            self.stack_mut().pop();
        })
    }

    /// Initiate a call to this function -- pushing the stack frame and initializing the arguments.
    ///
    /// `caller_fn_abi` is used to determine if all the arguments are passed the proper way.
    /// However, we also need `caller_abi` to determine if we need to do untupling of arguments.
    ///
    /// `with_caller_location` indicates whether the caller passed a caller location. Miri
    /// implements caller locations without argument passing, but to match `FnAbi` we need to know
    /// when those arguments are present.
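    ///
    /// For example, a closure call `f(1, 2)` arrives here with `caller_abi == ExternAbi::RustCall`
    /// and the two arguments packed into one trailing tuple, which is untupled below before the
    /// stack frame is initialized (a sketch of the `rust-call` handling in this function).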
    pub(super) fn init_fn_call(
        &mut self,
        fn_val: FnVal<'tcx, M::ExtraFnVal>,
        (caller_abi, caller_fn_abi): (ExternAbi, &FnAbi<'tcx, Ty<'tcx>>),
        args: &[FnArg<'tcx, M::Provenance>],
        with_caller_location: bool,
        destination: &PlaceTy<'tcx, M::Provenance>,
        target: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        let _trace = enter_trace_span!(M, step::init_fn_call, tracing_separate_thread = Empty, ?fn_val)
            .or_if_tracing_disabled(|| trace!("init_fn_call: {:#?}", fn_val));

        let instance = match fn_val {
            FnVal::Instance(instance) => instance,
            FnVal::Other(extra) => {
                return M::call_extra_fn(
                    self,
                    extra,
                    caller_fn_abi,
                    args,
                    destination,
                    target,
                    unwind,
                );
            }
        };

        match instance.def {
            ty::InstanceKind::Intrinsic(def_id) => {
                assert!(self.tcx.intrinsic(def_id).is_some());
                // FIXME: Should `InPlace` arguments be reset to uninit?
                if let Some(fallback) = M::call_intrinsic(
                    self,
                    instance,
                    &self.copy_fn_args(args),
                    destination,
                    target,
                    unwind,
                )? {
                    assert!(!self.tcx.intrinsic(fallback.def_id()).unwrap().must_be_overridden);
                    assert_matches!(fallback.def, ty::InstanceKind::Item(_));
                    return self.init_fn_call(
                        FnVal::Instance(fallback),
                        (caller_abi, caller_fn_abi),
                        args,
                        with_caller_location,
                        destination,
                        target,
                        unwind,
                    );
                } else {
                    interp_ok(())
                }
            }
            ty::InstanceKind::VTableShim(..)
            | ty::InstanceKind::ReifyShim(..)
            | ty::InstanceKind::ClosureOnceShim { .. }
            | ty::InstanceKind::ConstructCoroutineInClosureShim { .. }
            | ty::InstanceKind::FnPtrShim(..)
            | ty::InstanceKind::DropGlue(..)
            | ty::InstanceKind::CloneShim(..)
            | ty::InstanceKind::FnPtrAddrShim(..)
            | ty::InstanceKind::ThreadLocalShim(..)
            | ty::InstanceKind::AsyncDropGlueCtorShim(..)
            | ty::InstanceKind::AsyncDropGlue(..)
            | ty::InstanceKind::FutureDropPollShim(..)
            | ty::InstanceKind::Item(_) => {
                // We need MIR for this fn.
                // Note that this can be an intrinsic, if we are executing its fallback body.
                let Some((body, instance)) = M::find_mir_or_eval_fn(
                    self,
                    instance,
                    caller_fn_abi,
                    args,
                    destination,
                    target,
                    unwind,
                )?
                else {
                    return interp_ok(());
                };

                // Special handling for the closure ABI: untuple the last argument.
                let args: Cow<'_, [FnArg<'tcx, M::Provenance>]> =
                    if caller_abi == ExternAbi::RustCall && !args.is_empty() {
                        // Untuple
                        let (untuple_arg, args) = args.split_last().unwrap();
                        trace!("init_fn_call: Will pass last argument by untupling");
                        Cow::from(
                            args.iter()
                                .map(|a| interp_ok(a.clone()))
                                .chain((0..untuple_arg.layout().fields.count()).map(|i| {
                                    self.fn_arg_field(untuple_arg, FieldIdx::from_usize(i))
                                }))
                                .collect::<InterpResult<'_, Vec<_>>>()?,
                        )
                    } else {
                        // Plain arg passing
                        Cow::from(args)
                    };

                self.init_stack_frame(
                    instance,
                    body,
                    caller_fn_abi,
                    &args,
                    with_caller_location,
                    destination,
                    ReturnContinuation::Goto { ret: target, unwind },
                )
            }
            // `InstanceKind::Virtual` does not have callable MIR. Calls to `Virtual` instances must be
            // codegen'd / interpreted as virtual calls through the vtable.
            ty::InstanceKind::Virtual(def_id, idx) => {
                let mut args = args.to_vec();
                // We have to implement all "dyn-compatible receivers". So we have to go search for a
                // pointer or `dyn Trait` type, but it could be wrapped in newtypes. So recursively
                // unwrap those newtypes until we are there.
                // An `InPlace` does nothing here, we keep the original receiver intact. We can't
                // really pass the argument in-place anyway, and we are constructing a new
                // `Immediate` receiver.
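                // For example (a sketch): for a receiver of type `Pin<&mut dyn Trait>`, we
                // unwrap the `Pin` newtype to find the `&mut dyn Trait` pointer inside.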
                let mut receiver = self.copy_fn_arg(&args[0]);
                let receiver_place = loop {
                    match receiver.layout.ty.kind() {
                        ty::Ref(..) | ty::RawPtr(..) => {
                            // We do *not* use `deref_pointer` here: we don't want to conceptually
                            // create a place that must be dereferenceable, since the receiver might
                            // be a raw pointer and (for `*const dyn Trait`) we don't need to
                            // actually access memory to resolve this method.
                            // Also see <https://github.com/rust-lang/miri/issues/2786>.
                            let val = self.read_immediate(&receiver)?;
                            break self.ref_to_mplace(&val)?;
                        }
                        ty::Dynamic(..) => break receiver.assert_mem_place(), // no immediate unsized values
                        _ => {
                            // Not there yet, search for the only non-ZST field.
                            // (The rules for `DispatchFromDyn` ensure there's exactly one such field.)
                            let (idx, _) = receiver.layout.non_1zst_field(self).expect(
                                "not exactly one non-1-ZST field in a `DispatchFromDyn` type",
                            );
                            receiver = self.project_field(&receiver, idx)?;
                        }
                    }
                };

                // Obtain the underlying trait we are working on, and the adjusted receiver argument.
                // Doesn't have to be a `dyn Trait`, but the unsized tail must be `dyn Trait`.
                // (For that reason we also cannot use `unpack_dyn_trait`.)
                let receiver_tail =
                    self.tcx.struct_tail_for_codegen(receiver_place.layout.ty, self.typing_env);
                let ty::Dynamic(receiver_trait, _) = receiver_tail.kind() else {
                    span_bug!(self.cur_span(), "dynamic call on non-`dyn` type {}", receiver_tail)
                };
                assert!(receiver_place.layout.is_unsized());

                // Get the required information from the vtable.
                let vptr = receiver_place.meta().unwrap_meta().to_pointer(self)?;
                let dyn_ty = self.get_ptr_vtable_ty(vptr, Some(receiver_trait))?;
                let adjusted_recv = receiver_place.ptr();

                // Now determine the actual method to call. Usually we use the easy way of just
                // looking up the method at index `idx`.
                let vtable_entries = self.vtable_entries(receiver_trait.principal(), dyn_ty);
                let Some(ty::VtblEntry::Method(fn_inst)) = vtable_entries.get(idx).copied() else {
                    // FIXME(fee1-dead) these could be variants of the UB info enum instead of this
                    throw_ub_custom!(msg!(
                        "`dyn` call trying to call something that is not a method"
                    ));
                };
                trace!("Virtual call dispatches to {fn_inst:#?}");
                // We can also do the lookup based on `def_id` and `dyn_ty`, and check that that
                // produces the same result.
                self.assert_virtual_instance_matches_concrete(dyn_ty, def_id, instance, fn_inst);

                // Adjust receiver argument. Layout can be any (thin) ptr.
                let receiver_ty = Ty::new_mut_ptr(self.tcx.tcx, dyn_ty);
                args[0] = FnArg::Copy(
                    ImmTy::from_immediate(
                        Scalar::from_maybe_pointer(adjusted_recv, self).into(),
                        self.layout_of(receiver_ty)?,
                    )
                    .into(),
                );
                trace!("Patched receiver operand to {:#?}", args[0]);
                // Need to also adjust the type in the ABI. Strangely, the layout there is actually
                // already fine! Just the type is bogus. This is due to what `force_thin_self_ptr`
                // does in `fn_abi_new_uncached`; supposedly, codegen relies on having the bogus
                // type, so we just patch this up locally.
                let mut caller_fn_abi = caller_fn_abi.clone();
                caller_fn_abi.args[0].layout.ty = receiver_ty;

                // recurse with concrete function
                self.init_fn_call(
                    FnVal::Instance(fn_inst),
                    (caller_abi, &caller_fn_abi),
                    &args,
                    with_caller_location,
                    destination,
                    target,
                    unwind,
                )
            }
        }
    }

    fn assert_virtual_instance_matches_concrete(
        &self,
        dyn_ty: Ty<'tcx>,
        def_id: DefId,
        virtual_instance: ty::Instance<'tcx>,
        concrete_instance: ty::Instance<'tcx>,
    ) {
        let tcx = *self.tcx;

        let trait_def_id = tcx.parent(def_id);
        let virtual_trait_ref = ty::TraitRef::from_assoc(tcx, trait_def_id, virtual_instance.args);
        let existential_trait_ref = ty::ExistentialTraitRef::erase_self_ty(tcx, virtual_trait_ref);
        let concrete_trait_ref = existential_trait_ref.with_self_ty(tcx, dyn_ty);

        let concrete_method = {
            let _trace = enter_trace_span!(M, resolve::expect_resolve_for_vtable, ?def_id);
            Instance::expect_resolve_for_vtable(
                tcx,
                self.typing_env,
                def_id,
                virtual_instance.args.rebase_onto(tcx, trait_def_id, concrete_trait_ref.args),
                self.cur_span(),
            )
        };
        assert_eq!(concrete_instance, concrete_method);
    }

    /// Initiate a tail call to this function -- popping the current stack frame, pushing the new
    /// stack frame and initializing the arguments.
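    ///
    /// A minimal sketch of guest code that would take this path (it requires the unstable
    /// `explicit_tail_calls` feature; illustrative only):
    ///
    /// ```ignore (illustrative)
    /// fn count(n: u64, acc: u64) -> u64 {
    ///     if n == 0 { return acc; }
    ///     become count(n - 1, acc + n) // replaces the current frame instead of pushing one
    /// }
    /// ```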
    pub(super) fn init_fn_tail_call(
        &mut self,
        fn_val: FnVal<'tcx, M::ExtraFnVal>,
        (caller_abi, caller_fn_abi): (ExternAbi, &FnAbi<'tcx, Ty<'tcx>>),
        args: &[FnArg<'tcx, M::Provenance>],
        with_caller_location: bool,
    ) -> InterpResult<'tcx> {
        trace!("init_fn_tail_call: {:#?}", fn_val);

        // This is the "canonical" implementation of tail calls:
        // a pop of the current stack frame, followed by a normal call
        // which pushes a new stack frame, with the return address from
        // the popped stack frame.
        //
        // Note that we are using `pop_stack_frame_raw` and not `return_from_current_stack_frame`,
        // as the latter "executes" the goto to the return block, but we don't want that;
        // only the tail called function should return to the current return block.
        let StackPopInfo { return_action, return_cont, return_place } =
            self.pop_stack_frame_raw(false, |_this, _return_place| {
                // This function's return value is just discarded; the tail-callee will fill in the return place instead.
                interp_ok(())
            })?;
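
        // Schematically (illustrative), for a stack `[caller, f]` where `f` tail-calls `g`:
        //
        //     pop `f`  -> [caller]      (f's return value is discarded)
        //     push `g` -> [caller, g]   (g inherits f's `Goto { ret, unwind }` continuation)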

        assert_eq!(return_action, ReturnAction::Normal);

        // Take the "stack pop cleanup" info, and use that to initiate the next call.
        let ReturnContinuation::Goto { ret, unwind } = return_cont else {
            bug!("can't tailcall as root");
        };

        // FIXME(explicit_tail_calls):
        // we should check whether both caller and callee can or can't unwind,
        // see <https://github.com/rust-lang/rust/pull/113128#issuecomment-1614979803>

        self.init_fn_call(
            fn_val,
            (caller_abi, caller_fn_abi),
            args,
            with_caller_location,
            &return_place,
            ret,
            unwind,
        )
    }

    pub(super) fn init_drop_in_place_call(
        &mut self,
        place: &PlaceTy<'tcx, M::Provenance>,
        instance: ty::Instance<'tcx>,
        target: mir::BasicBlock,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        trace!("init_drop_in_place_call: {:?},\n instance={:?}", place, instance);
        // We take the address of the object. This may well be unaligned, which is fine
        // for us here. However, unaligned accesses will probably make the actual drop
        // implementation fail -- a problem shared by rustc.
        let place = self.force_allocation(place)?;

        // We behave a bit differently from codegen here.
        // Codegen creates an `InstanceKind::Virtual` with index 0 (the slot of the drop method) and
        // then dispatches that to the normal call machinery. However, our call machinery currently
        // only supports calling `VtblEntry::Method`; it would choke on a `MetadataDropInPlace`. So
        // instead we do the virtual call stuff ourselves. It's easier here than in `eval_fn_call`
        // since we can just get a place of the underlying type and use `mplace_to_ref`.
        let place = match place.layout.ty.kind() {
            ty::Dynamic(data, _) => {
                // Dropping a trait object. Need to find the actual drop fn.
                self.unpack_dyn_trait(&place, data)?
            }
            _ => {
                debug_assert_eq!(
                    instance,
                    ty::Instance::resolve_drop_in_place(*self.tcx, place.layout.ty)
                );
                place
            }
        };
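
        // A minimal sketch (hypothetical guest code) that reaches the `ty::Dynamic` arm above:
        //
        //     let b: Box<dyn std::any::Any> = Box::new(0u8);
        //     drop(b); // runs `drop_in_place::<dyn Any>`, which must go through the vtable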
        let instance = {
            let _trace =
                enter_trace_span!(M, resolve::resolve_drop_in_place, ty = ?place.layout.ty);
            ty::Instance::resolve_drop_in_place(*self.tcx, place.layout.ty)
        };
        let fn_abi = self.fn_abi_of_instance(instance, ty::List::empty())?;

        let arg = self.mplace_to_ref(&place)?;
        let ret = MPlaceTy::fake_alloc_zst(self.layout_of(self.tcx.types.unit)?);

        self.init_fn_call(
            FnVal::Instance(instance),
            (ExternAbi::Rust, fn_abi),
            &[FnArg::Copy(arg.into())],
            false,
            &ret.into(),
            Some(target),
            unwind,
        )
    }

    /// Pops the current frame from the stack, copies the return value to the caller, deallocates
    /// the memory for allocated locals, and jumps to an appropriate place.
    ///
    /// If `unwinding` is `false`, then we are performing a normal return
    /// from a function. In this case, we jump back into the frame of the caller,
    /// and continue execution as normal.
    ///
    /// If `unwinding` is `true`, then we are in the middle of a panic,
    /// and need to unwind this frame. In this case, we jump to the
    /// `cleanup` block for the function, which is responsible for running
    /// `Drop` impls for any locals that have been initialized at this point.
    /// The cleanup block ends with a special `Resume` terminator, which will
    /// cause us to continue unwinding.
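    ///
    /// A minimal sketch (illustrative only) of guest code whose frame is popped along the
    /// unwinding path, running the `Drop` impl from the cleanup block:
    ///
    /// ```ignore (illustrative)
    /// struct Noisy;
    /// impl Drop for Noisy {
    ///     fn drop(&mut self) { /* executed while unwinding `f` */ }
    /// }
    /// fn f() {
    ///     let _n = Noisy;
    ///     panic!(); // unwinds: the cleanup block drops `_n`, then `Resume` continues unwinding
    /// }
    /// ```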
    #[instrument(skip(self), level = "trace")]
    pub(super) fn return_from_current_stack_frame(
        &mut self,
        unwinding: bool,
    ) -> InterpResult<'tcx> {
        info!(
            "popping stack frame ({})",
            if unwinding { "during unwinding" } else { "returning from function" }
        );

        // Check `unwinding`.
        assert_eq!(
            unwinding,
            match self.frame().loc {
                Left(loc) => self.body().basic_blocks[loc.block].is_cleanup,
                Right(_) => true,
            }
        );
        if unwinding && self.frame_idx() == 0 {
            throw_ub_custom!(msg!("unwinding past the topmost frame of the stack"));
        }

        // Get out the return value. Must happen *before* the frame is popped as we have to get the
        // local's value out.
        let return_op =
            self.local_to_op(mir::RETURN_PLACE, None).expect("return place should always be live");
        // Do the actual pop + copy.
        let stack_pop_info = self.pop_stack_frame_raw(unwinding, |this, return_place| {
            this.copy_op_allow_transmute(&return_op, return_place)?;
            trace!("return value: {:?}", this.dump_place(return_place));
            interp_ok(())
        })?;

        match stack_pop_info.return_action {
            ReturnAction::Normal => {}
            ReturnAction::NoJump => {
                // The hook already did everything.
                return interp_ok(());
            }
            ReturnAction::NoCleanup => {
                // If we are not doing cleanup, also skip everything else.
                assert!(self.stack().is_empty(), "only the topmost frame should ever be leaked");
                assert!(!unwinding, "tried to skip cleanup during unwinding");
                // Don't jump anywhere.
                return interp_ok(());
            }
        }

        // Normal return, figure out where to jump.
        if unwinding {
            // Follow the unwind edge.
            match stack_pop_info.return_cont {
                ReturnContinuation::Goto { unwind, .. } => {
                    // This must be the very last thing that happens, since it can in fact push a new stack frame.
                    self.unwind_to_block(unwind)
                }
                ReturnContinuation::Stop { .. } => {
                    panic!("encountered ReturnContinuation::Stop when unwinding!")
                }
            }
        } else {
            // Follow the normal return edge.
            match stack_pop_info.return_cont {
                ReturnContinuation::Goto { ret, .. } => self.return_to_block(ret),
                ReturnContinuation::Stop { .. } => {
                    assert!(
                        self.stack().is_empty(),
                        "only the bottommost frame can have ReturnContinuation::Stop"
                    );
                    interp_ok(())
                }
            }
        }
    }
}