//! Manages calling a concrete function (with known MIR body) with argument passing,
//! and returning the return value to the caller.
use std::borrow::Cow;

use either::{Left, Right};
use rustc_abi::{self as abi, ExternAbi, FieldIdx, Integer, VariantIdx};
use rustc_data_structures::assert_matches;
use rustc_hir::def_id::DefId;
use rustc_middle::ty::layout::{IntegerExt, TyAndLayout};
use rustc_middle::ty::{self, AdtDef, Instance, Ty, VariantDef};
use rustc_middle::{bug, mir, span_bug};
use rustc_span::sym;
use rustc_target::callconv::{ArgAbi, FnAbi, PassMode};
use tracing::field::Empty;
use tracing::{info, instrument, trace};

use super::{
    CtfeProvenance, FnVal, ImmTy, InterpCx, InterpResult, MPlaceTy, Machine, OpTy, PlaceTy,
    Projectable, Provenance, ReturnAction, ReturnContinuation, Scalar, StackPopInfo, interp_ok,
    throw_ub, throw_ub_custom, throw_unsup_format,
};
use crate::interpret::EnteredTraceSpan;
use crate::{enter_trace_span, fluent_generated as fluent};

/// An argument passed to a function.
#[derive(Clone, Debug)]
pub enum FnArg<'tcx, Prov: Provenance = CtfeProvenance> {
    /// Pass a copy of the given operand.
    Copy(OpTy<'tcx, Prov>),
    /// Allow for the argument to be passed in-place: destroy the value originally stored at that
    /// place and make the place inaccessible for the duration of the function call. This *must* be
    /// an in-memory place so that we can do the proper alias checks.
    InPlace(MPlaceTy<'tcx, Prov>),
}
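
// A minimal sketch (illustrative only, not interpreter code; `BigStruct` is a
// hypothetical type) of what the two modes mean: with `Copy` the callee works on a
// fresh copy of the value, while with `InPlace` the callee may reuse the caller's
// storage, which is only sound because that storage is protected (made inaccessible)
// for the duration of the call:
//
//     fn callee(x: BigStruct) { /* may read `x` straight from the caller's slot */ }
//     let arg = BigStruct::new();
//     callee(arg); // `arg`'s storage is destroyed and protected during the call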

impl<'tcx, Prov: Provenance> FnArg<'tcx, Prov> {
    pub fn layout(&self) -> &TyAndLayout<'tcx> {
        match self {
            FnArg::Copy(op) => &op.layout,
            FnArg::InPlace(mplace) => &mplace.layout,
        }
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    /// Make a copy of the given fn_arg. Any `InPlace` argument is degenerated to a copy;
    /// no protection of the original memory occurs.
    pub fn copy_fn_arg(&self, arg: &FnArg<'tcx, M::Provenance>) -> OpTy<'tcx, M::Provenance> {
        match arg {
            FnArg::Copy(op) => op.clone(),
            FnArg::InPlace(mplace) => mplace.clone().into(),
        }
    }

    /// Make a copy of the given fn_args. Any `InPlace` arguments are degenerated to copies;
    /// no protection of the original memory occurs.
    pub fn copy_fn_args(
        &self,
        args: &[FnArg<'tcx, M::Provenance>],
    ) -> Vec<OpTy<'tcx, M::Provenance>> {
        args.iter().map(|fn_arg| self.copy_fn_arg(fn_arg)).collect()
    }

    /// Helper function for argument untupling.
    pub(super) fn fn_arg_field(
        &self,
        arg: &FnArg<'tcx, M::Provenance>,
        field: FieldIdx,
    ) -> InterpResult<'tcx, FnArg<'tcx, M::Provenance>> {
        interp_ok(match arg {
            FnArg::Copy(op) => FnArg::Copy(self.project_field(op, field)?),
            FnArg::InPlace(mplace) => FnArg::InPlace(self.project_field(mplace, field)?),
        })
    }
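
    // Background sketch (illustrative, not interpreter code): untupling supports the
    // `"rust-call"` ABI used by closures, where the caller bundles all arguments into
    // one tuple while the callee body expects one local per element. `fn_arg_field`
    // projects out each tuple field, conceptually like:
    //
    //     fn call2<F: Fn(i32, i32) -> i32>(f: F, args: (i32, i32)) -> i32 {
    //         f(args.0, args.1) // one projected field per callee argument
    //     }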

    /// Find the wrapped inner type of a transparent wrapper.
    /// Must not be called on 1-ZST (as they don't have a uniquely defined "wrapped field").
    ///
    /// We work with `TyAndLayout` here since that makes it much easier to iterate over all fields.
    fn unfold_transparent(
        &self,
        layout: TyAndLayout<'tcx>,
        may_unfold: impl Fn(AdtDef<'tcx>) -> bool,
    ) -> TyAndLayout<'tcx> {
        match layout.ty.kind() {
            ty::Adt(adt_def, _) if adt_def.repr().transparent() && may_unfold(*adt_def) => {
                assert!(!adt_def.is_enum());
                // Find the non-1-ZST field, and recurse.
                let (_, field) = layout.non_1zst_field(self).unwrap();
                self.unfold_transparent(field, may_unfold)
            }
            // Not a transparent type, no further unfolding.
            _ => layout,
        }
    }
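
    // Illustration (standalone Rust, not interpreter code; `Meters` is hypothetical):
    // a `repr(transparent)` wrapper is layout- and ABI-equivalent to its single
    // non-1-ZST field, which is exactly what this unfolding exploits:
    //
    //     #[repr(transparent)]
    //     struct Meters(u64); // unfolds to `u64`
    //
    //     fn main() {
    //         assert_eq!(std::mem::size_of::<Meters>(), std::mem::size_of::<u64>());
    //         assert_eq!(std::mem::align_of::<Meters>(), std::mem::align_of::<u64>());
    //     }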

    /// Unwrap types that are guaranteed the null-pointer optimization (NPO).
    fn unfold_npo(&self, layout: TyAndLayout<'tcx>) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
        // Check if this is an option-like type wrapping some type.
        let ty::Adt(def, args) = layout.ty.kind() else {
            // Not an ADT, so definitely no NPO.
            return interp_ok(layout);
        };
        if def.variants().len() != 2 {
            // Not a 2-variant enum, so no NPO.
            return interp_ok(layout);
        }
        assert!(def.is_enum());

        let all_fields_1zst = |variant: &VariantDef| -> InterpResult<'tcx, _> {
            for field in &variant.fields {
                let ty = field.ty(*self.tcx, args);
                let layout = self.layout_of(ty)?;
                if !layout.is_1zst() {
                    return interp_ok(false);
                }
            }
            interp_ok(true)
        };

        // If one variant consists entirely of 1-ZSTs, then the other variant
        // is the only "relevant" one for this check.
        let var0 = VariantIdx::from_u32(0);
        let var1 = VariantIdx::from_u32(1);
        let relevant_variant = if all_fields_1zst(def.variant(var0))? {
            def.variant(var1)
        } else if all_fields_1zst(def.variant(var1))? {
            def.variant(var0)
        } else {
            // No variant is all-1-ZST, so no NPO.
            return interp_ok(layout);
        };
        // The "relevant" variant must have exactly one field, and its type is the "inner" type.
        if relevant_variant.fields.len() != 1 {
            return interp_ok(layout);
        }
        let inner = relevant_variant.fields[FieldIdx::from_u32(0)].ty(*self.tcx, args);
        let inner = self.layout_of(inner)?;

        // Check if the inner type is one of the NPO-guaranteed ones.
        // For that we first unpeel transparent *structs* (but not unions).
        let is_npo = |def: AdtDef<'tcx>| {
            self.tcx.has_attr(def.did(), sym::rustc_nonnull_optimization_guaranteed)
        };
        let inner = self.unfold_transparent(inner, /* may_unfold */ |def| {
            // Stop at NPO types so that we don't miss that attribute in the check below!
            def.is_struct() && !is_npo(def)
        });
        interp_ok(match inner.ty.kind() {
            ty::Ref(..) | ty::FnPtr(..) => {
                // `Option<&T>` behaves like `&T`, and same for `fn()`.
                inner
            }
            ty::Adt(def, _) if is_npo(*def) => {
                // Once we found a `nonnull_optimization_guaranteed` type, further strip off
                // newtype structs from it to find the underlying ABI type.
                self.unfold_transparent(inner, /* may_unfold */ |def| def.is_struct())
            }
            _ => {
                // Everything else we do not unfold.
                layout
            }
        })
    }
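
    // Illustration (standalone Rust, not interpreter code): the layout guarantees this
    // check relies on. The "none" variant is stored in the niche of the non-nullable
    // inner type, so the option-like wrapper has the same size and ABI as the inner type:
    //
    //     use std::num::NonZeroU32;
    //     use std::ptr::NonNull;
    //
    //     fn main() {
    //         assert_eq!(std::mem::size_of::<Option<&u8>>(), std::mem::size_of::<&u8>());
    //         assert_eq!(std::mem::size_of::<Option<NonNull<u8>>>(), std::mem::size_of::<*mut u8>());
    //         assert_eq!(std::mem::size_of::<Option<NonZeroU32>>(), std::mem::size_of::<u32>());
    //     }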

    /// Check if these two layouts look like they are fn-ABI-compatible.
    /// (We also compare the `PassMode`, so this doesn't have to check everything. But it turns out
    /// that only checking the `PassMode` is insufficient.)
    fn layout_compat(
        &self,
        caller: TyAndLayout<'tcx>,
        callee: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, bool> {
        // Fast path: equal types are definitely compatible.
        if caller.ty == callee.ty {
            return interp_ok(true);
        }
        // 1-ZSTs are compatible with all 1-ZSTs (and with nothing else).
        if caller.is_1zst() || callee.is_1zst() {
            return interp_ok(caller.is_1zst() && callee.is_1zst());
        }
        // Unfold newtypes and NPO optimizations.
        let unfold = |layout: TyAndLayout<'tcx>| {
            self.unfold_npo(self.unfold_transparent(layout, /* may_unfold */ |_def| true))
        };
        let caller = unfold(caller)?;
        let callee = unfold(callee)?;
        // Now see if these inner types are compatible.

        // Compatible pointer types. For thin pointers, we have to accept even non-`repr(transparent)`
        // things as compatible due to `DispatchFromDyn`. For instance, `Rc<i32>` and `*mut i32`
        // must be compatible. So we just accept everything with Pointer ABI as compatible,
        // even if this will accept some code that is not stably guaranteed to work.
        // This also handles function pointers.
        let thin_pointer = |layout: TyAndLayout<'tcx>| match layout.backend_repr {
            abi::BackendRepr::Scalar(s) => match s.primitive() {
                abi::Primitive::Pointer(addr_space) => Some(addr_space),
                _ => None,
            },
            _ => None,
        };
        if let (Some(caller), Some(callee)) = (thin_pointer(caller), thin_pointer(callee)) {
            return interp_ok(caller == callee);
        }
        // For wide pointers we have to get the pointee type.
        let pointee_ty = |ty: Ty<'tcx>| -> InterpResult<'tcx, Option<Ty<'tcx>>> {
            // We cannot use `builtin_deref` here since we need to reject `Box<T, MyAlloc>`.
            interp_ok(Some(match ty.kind() {
                ty::Ref(_, ty, _) => *ty,
                ty::RawPtr(ty, _) => *ty,
                // We only accept `Box` with the default allocator.
                _ if ty.is_box_global(*self.tcx) => ty.expect_boxed_ty(),
                _ => return interp_ok(None),
            }))
        };
        if let (Some(caller), Some(callee)) = (pointee_ty(caller.ty)?, pointee_ty(callee.ty)?) {
            // This is okay if they have the same metadata type.
            let meta_ty = |ty: Ty<'tcx>| {
                // Even if `ty` is normalized, the search for the unsized tail will project
                // to fields, which can yield non-normalized types. So we need to provide a
                // normalization function.
                let normalize = |ty| self.tcx.normalize_erasing_regions(self.typing_env, ty);
                ty.ptr_metadata_ty(*self.tcx, normalize)
            };
            return interp_ok(meta_ty(caller) == meta_ty(callee));
        }

        // Compatible integer types (in particular, usize vs ptr-sized-u32/u64).
        // `char` counts as `u32`.
        let int_ty = |ty: Ty<'tcx>| {
            Some(match ty.kind() {
                ty::Int(ity) => (Integer::from_int_ty(&self.tcx, *ity), /* signed */ true),
                ty::Uint(uty) => (Integer::from_uint_ty(&self.tcx, *uty), /* signed */ false),
                ty::Char => (Integer::I32, /* signed */ false),
                _ => return None,
            })
        };
        if let (Some(caller), Some(callee)) = (int_ty(caller.ty), int_ty(callee.ty)) {
            // This is okay if they are the same integer type.
            return interp_ok(caller == callee);
        }

        // Fall back to exact equality.
        interp_ok(caller == callee)
    }
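
    // Illustration (standalone Rust, not interpreter code): pairs of types this check
    // accepts, mirroring the documented ABI-compatibility rules for function pointers.
    // Calling through a transmuted fn pointer is sound exactly when the signatures are
    // ABI-compatible:
    //
    //     fn takes_ref(x: &u8) -> u8 { *x }
    //
    //     fn main() {
    //         // `&u8` and `*const u8` are ABI-compatible (same thin-pointer ABI).
    //         let f: fn(*const u8) -> u8 =
    //             unsafe { std::mem::transmute(takes_ref as fn(&u8) -> u8) };
    //         assert_eq!(f(&42), 42);
    //     }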

    /// Returns a `bool` saying whether the two arguments are ABI-compatible.
    pub fn check_argument_compat(
        &self,
        caller_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        callee_abi: &ArgAbi<'tcx, Ty<'tcx>>,
    ) -> InterpResult<'tcx, bool> {
        // We do not want to accept things as ABI-compatible that just "happen to be" compatible on the current target,
        // so we implement a type-based check that reflects the guaranteed rules for ABI compatibility.
        if self.layout_compat(caller_abi.layout, callee_abi.layout)? {
            // Ensure that our checks imply actual ABI compatibility for this concrete call.
            // (This can fail e.g. if `#[rustc_nonnull_optimization_guaranteed]` is used incorrectly.)
            assert!(caller_abi.eq_abi(callee_abi));
            interp_ok(true)
        } else {
            trace!(
                "check_argument_compat: incompatible ABIs:\ncaller: {:?}\ncallee: {:?}",
                caller_abi, callee_abi
            );
            interp_ok(false)
        }
    }

    /// Initialize a single callee argument, checking the types for compatibility.
    fn pass_argument<'x, 'y>(
        &mut self,
        caller_args: &mut impl Iterator<
            Item = (&'x FnArg<'tcx, M::Provenance>, &'y ArgAbi<'tcx, Ty<'tcx>>),
        >,
        callee_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        callee_arg_idx: usize,
        callee_arg: &mir::Place<'tcx>,
        callee_ty: Ty<'tcx>,
        already_live: bool,
    ) -> InterpResult<'tcx>
    where
        'tcx: 'x,
        'tcx: 'y,
    {
        assert_eq!(callee_ty, callee_abi.layout.ty);
        if callee_abi.mode == PassMode::Ignore {
            // This one is skipped. Still must be made live though!
            if !already_live {
                self.storage_live(callee_arg.as_local().unwrap())?;
            }
            return interp_ok(());
        }
        // Find next caller arg.
        let Some((caller_arg, caller_abi)) = caller_args.next() else {
            throw_ub_custom!(fluent::const_eval_not_enough_caller_args);
        };
        assert_eq!(caller_arg.layout().layout, caller_abi.layout.layout);
        // Sadly we cannot assert that `caller_arg.layout().ty` and `caller_abi.layout.ty` are
        // equal; in closures the types sometimes differ. We just hope that `caller_abi` is the
        // right type to print to the user.

        // Check compatibility
        if !self.check_argument_compat(caller_abi, callee_abi)? {
            throw_ub!(AbiMismatchArgument {
                arg_idx: callee_arg_idx,
                caller_ty: caller_abi.layout.ty,
                callee_ty: callee_abi.layout.ty
            });
        }
        // We work with a copy of the argument for now; if this is in-place argument passing, we
        // will later protect the source it comes from. This means the callee cannot observe whether
        // we did in-place or by-copy argument passing, except for pointer equality tests.
        let caller_arg_copy = self.copy_fn_arg(caller_arg);
        if !already_live {
            let local = callee_arg.as_local().unwrap();
            let meta = caller_arg_copy.meta();
            // `check_argument_compat` ensures that if metadata is needed, both have the same type,
            // so we know they will use the metadata the same way.
            assert!(!meta.has_meta() || caller_arg_copy.layout.ty == callee_ty);

            self.storage_live_dyn(local, meta)?;
        }
        // Now we can finally actually evaluate the callee place.
        let callee_arg = self.eval_place(*callee_arg)?;
        // We allow some transmutes here.
        // FIXME: Depending on the PassMode, this should reset some padding to uninitialized. (This
        // is true for all `copy_op`, but there are a lot of special cases for argument passing
        // specifically.)
        self.copy_op_allow_transmute(&caller_arg_copy, &callee_arg)?;
        // If this was an in-place pass, protect the place it comes from for the duration of the call.
        if let FnArg::InPlace(mplace) = caller_arg {
            M::protect_in_place_function_argument(self, mplace)?;
        }
        interp_ok(())
    }

    /// The main entry point for creating a new stack frame: performs ABI checks and initializes
    /// arguments.
    #[instrument(skip(self), level = "trace")]
    pub fn init_stack_frame(
        &mut self,
        instance: Instance<'tcx>,
        body: &'tcx mir::Body<'tcx>,
        caller_fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx, M::Provenance>],
        with_caller_location: bool,
        destination: &PlaceTy<'tcx, M::Provenance>,
        mut cont: ReturnContinuation,
    ) -> InterpResult<'tcx> {
        let _trace = enter_trace_span!(M, step::init_stack_frame, %instance, tracing_separate_thread = Empty);

        // Compute callee information.
        // FIXME: for variadic support, do we have to somehow determine the callee's `extra_args`?
        let callee_fn_abi = self.fn_abi_of_instance(instance, ty::List::empty())?;

        if callee_fn_abi.c_variadic || caller_fn_abi.c_variadic {
            throw_unsup_format!("calling a c-variadic function is not supported");
        }

        if caller_fn_abi.conv != callee_fn_abi.conv {
            throw_ub_custom!(
                fluent::const_eval_incompatible_calling_conventions,
                callee_conv = format!("{}", callee_fn_abi.conv),
                caller_conv = format!("{}", caller_fn_abi.conv),
            )
        }

        // Check that all target features required by the callee (i.e., from
        // the attribute `#[target_feature(enable = ...)]`) are enabled at
        // compile time.
        M::check_fn_target_features(self, instance)?;

        if !callee_fn_abi.can_unwind {
            // The callee cannot unwind, so force the `Unreachable` unwind handling.
            match &mut cont {
                ReturnContinuation::Stop { .. } => {}
                ReturnContinuation::Goto { unwind, .. } => {
                    *unwind = mir::UnwindAction::Unreachable;
                }
            }
        }

        // *Before* pushing the new frame, determine whether the return destination is in memory.
        // Need to use `place_to_op` to be *sure* we get the mplace if there is one.
        let destination_mplace = self.place_to_op(destination)?.as_mplace_or_imm().left();

        // Push the "raw" frame -- this leaves locals uninitialized.
        self.push_stack_frame_raw(instance, body, destination, cont)?;

        // If an error is raised here, pop the frame again to get an accurate backtrace.
        // To this end, we wrap it all in a `try` block.
        let res: InterpResult<'tcx> = try {
            trace!(
                "caller ABI: {:#?}, args: {:#?}",
                caller_fn_abi,
                args.iter()
                    .map(|arg| (
                        arg.layout().ty,
                        match arg {
                            FnArg::Copy(op) => format!("copy({op:?})"),
                            FnArg::InPlace(mplace) => format!("in-place({mplace:?})"),
                        }
                    ))
                    .collect::<Vec<_>>()
            );
            trace!(
                "spread_arg: {:?}, locals: {:#?}",
                body.spread_arg,
                body.args_iter()
                    .map(|local| (
                        local,
                        self.layout_of_local(self.frame(), local, None).unwrap().ty,
                    ))
                    .collect::<Vec<_>>()
            );

            // In principle, we have two iterators: where the arguments come from, and where
            // they go to.

            // The "where they come from" part is easy, we expect the caller to do any special handling
            // that might be required here (e.g. for untupling).
            // If `with_caller_location` is set we pretend there is an extra argument (that
            // we will not pass; our `caller_location` intrinsic implementation walks the stack instead).
            assert_eq!(
                args.len() + if with_caller_location { 1 } else { 0 },
                caller_fn_abi.args.len(),
                "mismatch between caller ABI and caller arguments",
            );
            let mut caller_args = args
                .iter()
                .zip(caller_fn_abi.args.iter())
                .filter(|arg_and_abi| !matches!(arg_and_abi.1.mode, PassMode::Ignore));

            // Now we have to spread them out across the callee's locals,
            // taking into account the `spread_arg`. If we could write
            // this as a single iterator (that handles `spread_arg`), then
            // `pass_argument` would be the loop body. It takes care to
            // not advance `caller_args` for ignored arguments.
            let mut callee_args_abis = callee_fn_abi.args.iter().enumerate();
            for local in body.args_iter() {
                // Construct the destination place for this argument. At this point all
                // locals are still dead, so we cannot construct a `PlaceTy`.
                let dest = mir::Place::from(local);
                // `layout_of_local` does more than just the instantiation we need to get the
                // type, but the result gets cached so this avoids calling the instantiation
                // query *again* the next time this local is accessed.
                let ty = self.layout_of_local(self.frame(), local, None)?.ty;
                if Some(local) == body.spread_arg {
                    // Make the local live once, then fill in the value field by field.
                    self.storage_live(local)?;
                    // Must be a tuple.
                    let ty::Tuple(fields) = ty.kind() else {
                        span_bug!(self.cur_span(), "non-tuple type for `spread_arg`: {ty}")
                    };
                    for (i, field_ty) in fields.iter().enumerate() {
                        let dest = dest.project_deeper(
                            &[mir::ProjectionElem::Field(FieldIdx::from_usize(i), field_ty)],
                            *self.tcx,
                        );
                        let (idx, callee_abi) = callee_args_abis.next().unwrap();
                        self.pass_argument(
                            &mut caller_args,
                            callee_abi,
                            idx,
                            &dest,
                            field_ty,
                            /* already_live */ true,
                        )?;
                    }
                } else {
                    // Normal argument. Cannot mark it as live yet, it might be unsized!
                    let (idx, callee_abi) = callee_args_abis.next().unwrap();
                    self.pass_argument(
                        &mut caller_args,
                        callee_abi,
                        idx,
                        &dest,
                        ty,
                        /* already_live */ false,
                    )?;
                }
            }
            // If the callee needs a caller location, pretend we consume one more argument from the ABI.
            if instance.def.requires_caller_location(*self.tcx) {
                callee_args_abis.next().unwrap();
            }
            // Now we should have no more caller args or callee arg ABIs.
            assert!(
                callee_args_abis.next().is_none(),
                "mismatch between callee ABI and callee body arguments"
            );
            if caller_args.next().is_some() {
                throw_ub_custom!(fluent::const_eval_too_many_caller_args);
            }
            // Don't forget to check the return type!
            if !self.check_argument_compat(&caller_fn_abi.ret, &callee_fn_abi.ret)? {
                throw_ub!(AbiMismatchReturn {
                    caller_ty: caller_fn_abi.ret.layout.ty,
                    callee_ty: callee_fn_abi.ret.layout.ty
                });
            }

            // Protect the return place for in-place return value passing.
            // We only need to protect anything if this is actually an in-memory place.
            if let Some(mplace) = destination_mplace {
                M::protect_in_place_function_argument(self, &mplace)?;
            }

            // Don't forget to mark "initially live" locals as live.
            self.storage_live_for_always_live_locals()?;
        };
        res.inspect_err_kind(|_| {
            // Don't show the incomplete stack frame in the error stacktrace.
            self.stack_mut().pop();
        })
    }
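
    // Illustration (standalone Rust, not interpreter code): the kind of mismatch the
    // `AbiMismatchArgument`/`AbiMismatchReturn` errors above report. `u8` and `u32` are
    // not ABI-compatible, so an interpreter running this program flags the call as UB:
    //
    //     fn callee(x: u32) -> u32 { x }
    //
    //     fn main() {
    //         let f: fn(u8) -> u8 =
    //             unsafe { std::mem::transmute(callee as fn(u32) -> u32) };
    //         f(0); // UB: calling through an ABI-incompatible fn pointer
    //     }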

    /// Initiate a call to this function -- pushing the stack frame and initializing the arguments.
    ///
    /// `caller_fn_abi` is used to determine if all the arguments are passed the proper way.
    /// However, we also need `caller_abi` to determine if we need to do untupling of arguments.
    ///
    /// `with_caller_location` indicates whether the caller passed a caller location. Miri
    /// implements caller locations without argument passing, but to match `FnAbi` we need to know
    /// when those arguments are present.
    pub(super) fn init_fn_call(
        &mut self,
        fn_val: FnVal<'tcx, M::ExtraFnVal>,
        (caller_abi, caller_fn_abi): (ExternAbi, &FnAbi<'tcx, Ty<'tcx>>),
        args: &[FnArg<'tcx, M::Provenance>],
        with_caller_location: bool,
        destination: &PlaceTy<'tcx, M::Provenance>,
        target: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        let _trace = enter_trace_span!(M, step::init_fn_call, tracing_separate_thread = Empty, ?fn_val)
            .or_if_tracing_disabled(|| trace!("init_fn_call: {:#?}", fn_val));

        let instance = match fn_val {
            FnVal::Instance(instance) => instance,
            FnVal::Other(extra) => {
                return M::call_extra_fn(
                    self,
                    extra,
                    caller_fn_abi,
                    args,
                    destination,
                    target,
                    unwind,
                );
            }
        };

        match instance.def {
            ty::InstanceKind::Intrinsic(def_id) => {
                assert!(self.tcx.intrinsic(def_id).is_some());
                // FIXME: Should `InPlace` arguments be reset to uninit?
                if let Some(fallback) = M::call_intrinsic(
                    self,
                    instance,
                    &self.copy_fn_args(args),
                    destination,
                    target,
                    unwind,
                )? {
                    assert!(!self.tcx.intrinsic(fallback.def_id()).unwrap().must_be_overridden);
                    assert_matches!(fallback.def, ty::InstanceKind::Item(_));
                    return self.init_fn_call(
                        FnVal::Instance(fallback),
                        (caller_abi, caller_fn_abi),
                        args,
                        with_caller_location,
                        destination,
                        target,
                        unwind,
                    );
                } else {
                    interp_ok(())
                }
            }
            ty::InstanceKind::VTableShim(..)
            | ty::InstanceKind::ReifyShim(..)
            | ty::InstanceKind::ClosureOnceShim { .. }
            | ty::InstanceKind::ConstructCoroutineInClosureShim { .. }
            | ty::InstanceKind::FnPtrShim(..)
            | ty::InstanceKind::DropGlue(..)
            | ty::InstanceKind::CloneShim(..)
            | ty::InstanceKind::FnPtrAddrShim(..)
            | ty::InstanceKind::ThreadLocalShim(..)
            | ty::InstanceKind::AsyncDropGlueCtorShim(..)
            | ty::InstanceKind::AsyncDropGlue(..)
            | ty::InstanceKind::FutureDropPollShim(..)
            | ty::InstanceKind::Item(_) => {
                // We need MIR for this fn.
                // Note that this can be an intrinsic, if we are executing its fallback body.
                let Some((body, instance)) = M::find_mir_or_eval_fn(
                    self,
                    instance,
                    caller_fn_abi,
                    args,
                    destination,
                    target,
                    unwind,
                )?
                else {
                    return interp_ok(());
                };

                // Special handling for the closure ABI: untuple the last argument.
                let args: Cow<'_, [FnArg<'tcx, M::Provenance>]> =
                    if caller_abi == ExternAbi::RustCall && !args.is_empty() {
                        // Untuple
                        let (untuple_arg, args) = args.split_last().unwrap();
                        trace!("init_fn_call: Will pass last argument by untupling");
                        Cow::from(
                            args.iter()
                                .map(|a| interp_ok(a.clone()))
                                .chain((0..untuple_arg.layout().fields.count()).map(|i| {
                                    self.fn_arg_field(untuple_arg, FieldIdx::from_usize(i))
                                }))
                                .collect::<InterpResult<'_, Vec<_>>>()?,
                        )
                    } else {
                        // Plain arg passing
                        Cow::from(args)
                    };

                self.init_stack_frame(
                    instance,
                    body,
                    caller_fn_abi,
                    &args,
                    with_caller_location,
                    destination,
                    ReturnContinuation::Goto { ret: target, unwind },
                )
            }
            // `InstanceKind::Virtual` does not have callable MIR. Calls to `Virtual` instances must be
            // codegen'd / interpreted as virtual calls through the vtable.
            ty::InstanceKind::Virtual(def_id, idx) => {
                let mut args = args.to_vec();
                // We have to implement all "dyn-compatible receivers". So we have to go search for a
                // pointer or `dyn Trait` type, but it could be wrapped in newtypes. So recursively
                // unwrap those newtypes until we are there.
                // An `InPlace` does nothing here, we keep the original receiver intact. We can't
                // really pass the argument in-place anyway, and we are constructing a new
                // `Immediate` receiver.
                let mut receiver = self.copy_fn_arg(&args[0]);
                let receiver_place = loop {
                    match receiver.layout.ty.kind() {
                        ty::Ref(..) | ty::RawPtr(..) => {
                            // We do *not* use `deref_pointer` here: we don't want to conceptually
                            // create a place that must be dereferenceable, since the receiver might
                            // be a raw pointer and (for `*const dyn Trait`) we don't need to
                            // actually access memory to resolve this method.
                            // Also see <https://github.com/rust-lang/miri/issues/2786>.
                            let val = self.read_immediate(&receiver)?;
                            break self.ref_to_mplace(&val)?;
                        }
                        ty::Dynamic(..) => break receiver.assert_mem_place(), // no immediate unsized values
                        _ => {
                            // Not there yet, search for the only non-1-ZST field.
                            // (The rules for `DispatchFromDyn` ensure there's exactly one such field.)
                            let (idx, _) = receiver.layout.non_1zst_field(self).expect(
                                "not exactly one non-1-ZST field in a `DispatchFromDyn` type",
                            );
                            receiver = self.project_field(&receiver, idx)?;
                        }
                    }
                };

                // Obtain the underlying trait we are working on, and the adjusted receiver argument.
                // Doesn't have to be a `dyn Trait`, but the unsized tail must be `dyn Trait`.
                // (For that reason we also cannot use `unpack_dyn_trait`.)
                let receiver_tail =
                    self.tcx.struct_tail_for_codegen(receiver_place.layout.ty, self.typing_env);
                let ty::Dynamic(receiver_trait, _) = receiver_tail.kind() else {
                    span_bug!(self.cur_span(), "dynamic call on non-`dyn` type {}", receiver_tail)
                };
                assert!(receiver_place.layout.is_unsized());

                // Get the required information from the vtable.
                let vptr = receiver_place.meta().unwrap_meta().to_pointer(self)?;
                let dyn_ty = self.get_ptr_vtable_ty(vptr, Some(receiver_trait))?;
                let adjusted_recv = receiver_place.ptr();

                // Now determine the actual method to call. Usually we use the easy way of just
                // looking up the method at index `idx`.
                let vtable_entries = self.vtable_entries(receiver_trait.principal(), dyn_ty);
                let Some(ty::VtblEntry::Method(fn_inst)) = vtable_entries.get(idx).copied() else {
                    // FIXME(fee1-dead) these could be variants of the UB info enum instead of this
                    throw_ub_custom!(fluent::const_eval_dyn_call_not_a_method);
                };
                trace!("Virtual call dispatches to {fn_inst:#?}");
                // We can also do the lookup based on `def_id` and `dyn_ty`, and check that that
                // produces the same result.
                self.assert_virtual_instance_matches_concrete(dyn_ty, def_id, instance, fn_inst);

                // Adjust receiver argument. Layout can be any (thin) ptr.
                let receiver_ty = Ty::new_mut_ptr(self.tcx.tcx, dyn_ty);
                args[0] = FnArg::Copy(
                    ImmTy::from_immediate(
                        Scalar::from_maybe_pointer(adjusted_recv, self).into(),
                        self.layout_of(receiver_ty)?,
                    )
                    .into(),
                );
                trace!("Patched receiver operand to {:#?}", args[0]);
                // Need to also adjust the type in the ABI. Strangely, the layout there is actually
                // already fine! Just the type is bogus. This is due to what `force_thin_self_ptr`
                // does in `fn_abi_new_uncached`; supposedly, codegen relies on having the bogus
                // type, so we just patch this up locally.
                let mut caller_fn_abi = caller_fn_abi.clone();
                caller_fn_abi.args[0].layout.ty = receiver_ty;

                // recurse with concrete function
                self.init_fn_call(
                    FnVal::Instance(fn_inst),
                    (caller_abi, &caller_fn_abi),
                    &args,
                    with_caller_location,
                    destination,
                    target,
                    unwind,
                )
            }
        }
    }
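
    // Illustration (standalone Rust, not interpreter code): the source-level shape of
    // what the `Virtual` arm above interprets. A call on `&dyn Trait` loads the method
    // from the vtable (the wide pointer's metadata) and invokes it with the thin data
    // pointer as the adjusted receiver:
    //
    //     trait Speak { fn speak(&self) -> &'static str; }
    //     struct Dog;
    //     impl Speak for Dog { fn speak(&self) -> &'static str { "woof" } }
    //
    //     fn main() {
    //         let d: &dyn Speak = &Dog;
    //         assert_eq!(d.speak(), "woof"); // dispatched through the vtable
    //     }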

    fn assert_virtual_instance_matches_concrete(
        &self,
        dyn_ty: Ty<'tcx>,
        def_id: DefId,
        virtual_instance: ty::Instance<'tcx>,
        concrete_instance: ty::Instance<'tcx>,
    ) {
        let tcx = *self.tcx;

        let trait_def_id = tcx.parent(def_id);
        let virtual_trait_ref = ty::TraitRef::from_assoc(tcx, trait_def_id, virtual_instance.args);
        let existential_trait_ref = ty::ExistentialTraitRef::erase_self_ty(tcx, virtual_trait_ref);
        let concrete_trait_ref = existential_trait_ref.with_self_ty(tcx, dyn_ty);

        let concrete_method = {
            let _trace = enter_trace_span!(M, resolve::expect_resolve_for_vtable, ?def_id);
            Instance::expect_resolve_for_vtable(
                tcx,
                self.typing_env,
                def_id,
                virtual_instance.args.rebase_onto(tcx, trait_def_id, concrete_trait_ref.args),
                self.cur_span(),
            )
        };
        assert_eq!(concrete_instance, concrete_method);
    }

    /// Initiate a tail call to this function -- popping the current stack frame, pushing the new
    /// stack frame and initializing the arguments.
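    ///
    /// For illustration (a hypothetical example, assuming the unstable `explicit_tail_calls`
    /// feature and its `become` syntax), a guaranteed tail call like
    /// ```ignore (illustrative)
    /// fn countdown(n: u32) -> u32 {
    ///     if n == 0 { return 0; }
    ///     become countdown(n - 1)
    /// }
    /// ```
    /// pops the caller's frame before pushing the callee's, so the recursion runs in constant
    /// stack space; that pop-then-push sequence is what this function implements.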
    pub(super) fn init_fn_tail_call(
        &mut self,
        fn_val: FnVal<'tcx, M::ExtraFnVal>,
        (caller_abi, caller_fn_abi): (ExternAbi, &FnAbi<'tcx, Ty<'tcx>>),
        args: &[FnArg<'tcx, M::Provenance>],
        with_caller_location: bool,
    ) -> InterpResult<'tcx> {
        trace!("init_fn_tail_call: {:#?}", fn_val);

        // This is the "canonical" implementation of tail calls:
        // a pop of the current stack frame, followed by a normal call
        // which pushes a new stack frame, with the return address from
        // the popped stack frame.
        //
        // Note that we are using `pop_stack_frame_raw` and not `return_from_current_stack_frame`,
        // as the latter "executes" the goto to the return block, but we don't want that here;
        // only the tail-called function should return to the current return block.
        let StackPopInfo { return_action, return_cont, return_place } =
            self.pop_stack_frame_raw(false, |_this, _return_place| {
                // This function's return value is just discarded; the tail-callee will fill in the return place instead.
                interp_ok(())
            })?;

        assert_eq!(return_action, ReturnAction::Normal);

        // Take the "stack pop cleanup" info, and use that to initiate the next call.
        let ReturnContinuation::Goto { ret, unwind } = return_cont else {
            bug!("can't tailcall as root");
        };

        // FIXME(explicit_tail_calls):
        // we should check if both caller&callee can/n't unwind,
        // see <https://github.com/rust-lang/rust/pull/113128#issuecomment-1614979803>

        self.init_fn_call(
            fn_val,
            (caller_abi, caller_fn_abi),
            args,
            with_caller_location,
            &return_place,
            ret,
            unwind,
        )
    }
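
    /// Initiate a call to the drop glue (`drop_in_place`) for the value stored at `place`,
    /// continuing at `target` once the drop has run. Conceptually this mirrors
    /// `std::ptr::drop_in_place`: dropping e.g. a `Vec<String>` runs `Vec`'s drop glue, which
    /// in turn drops each `String`. For trait objects, the concrete drop method is looked up
    /// through the vtable first (see the comments in the body).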
    pub(super) fn init_drop_in_place_call(
        &mut self,
        place: &PlaceTy<'tcx, M::Provenance>,
        instance: ty::Instance<'tcx>,
        target: mir::BasicBlock,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        trace!("init_drop_in_place_call: {:?},\n instance={:?}", place, instance);
        // We take the address of the object. This may well be unaligned, which is fine
        // for us here. However, unaligned accesses will probably make the actual drop
        // implementation fail -- a problem shared by rustc.
        let place = self.force_allocation(place)?;

        // We behave a bit differently from codegen here.
        // Codegen creates an `InstanceKind::Virtual` with index 0 (the slot of the drop method) and
        // then dispatches that to the normal call machinery. However, our call machinery currently
        // only supports calling `VtblEntry::Method`; it would choke on a `MetadataDropInPlace`. So
        // instead we do the virtual call stuff ourselves. It's easier here than in `eval_fn_call`
        // since we can just get a place of the underlying type and use `mplace_to_ref`.
        let place = match place.layout.ty.kind() {
            ty::Dynamic(data, _) => {
                // Dropping a trait object. Need to find actual drop fn.
                self.unpack_dyn_trait(&place, data)?
            }
            _ => {
                debug_assert_eq!(
                    instance,
                    ty::Instance::resolve_drop_in_place(*self.tcx, place.layout.ty)
                );
                place
            }
        };
        let instance = {
            let _trace =
                enter_trace_span!(M, resolve::resolve_drop_in_place, ty = ?place.layout.ty);
            ty::Instance::resolve_drop_in_place(*self.tcx, place.layout.ty)
        };
        let fn_abi = self.fn_abi_of_instance(instance, ty::List::empty())?;

        let arg = self.mplace_to_ref(&place)?;
        let ret = MPlaceTy::fake_alloc_zst(self.layout_of(self.tcx.types.unit)?);

        self.init_fn_call(
            FnVal::Instance(instance),
            (ExternAbi::Rust, fn_abi),
            &[FnArg::Copy(arg.into())],
            false,
            &ret.into(),
            Some(target),
            unwind,
        )
    }

    /// Pops the current frame from the stack, copies the return value to the caller, deallocates
    /// the memory for allocated locals, and jumps to an appropriate place.
    ///
    /// If `unwinding` is `false`, then we are performing a normal return
    /// from a function. In this case, we jump back into the frame of the caller,
    /// and continue execution as normal.
    ///
    /// If `unwinding` is `true`, then we are in the middle of a panic,
    /// and need to unwind this frame. In this case, we jump to the
    /// `cleanup` block for the function, which is responsible for running
    /// `Drop` impls for any locals that have been initialized at this point.
    /// The cleanup block ends with a special `Resume` terminator, which will
    /// cause us to continue unwinding.
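    ///
    /// As a rough sketch (hypothetical MIR, for illustration only), the cleanup path of a
    /// function might look like:
    /// ```text
    /// bb3 (cleanup): {
    ///     drop(_1) -> [return: bb4, unwind terminate];  // run `Drop` for a live local
    /// }
    /// bb4 (cleanup): {
    ///     resume;  // hand unwinding off to the caller's frame
    /// }
    /// ```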
    #[instrument(skip(self), level = "trace")]
    pub(super) fn return_from_current_stack_frame(
        &mut self,
        unwinding: bool,
    ) -> InterpResult<'tcx> {
        info!(
            "popping stack frame ({})",
            if unwinding { "during unwinding" } else { "returning from function" }
        );

        // Check `unwinding`.
        assert_eq!(
            unwinding,
            match self.frame().loc {
                Left(loc) => self.body().basic_blocks[loc.block].is_cleanup,
                Right(_) => true,
            }
        );
        if unwinding && self.frame_idx() == 0 {
            throw_ub_custom!(fluent::const_eval_unwind_past_top);
        }

        // Get out the return value. Must happen *before* the frame is popped as we have to get the
        // local's value out.
        let return_op =
            self.local_to_op(mir::RETURN_PLACE, None).expect("return place should always be live");
        // Do the actual pop + copy.
        let stack_pop_info = self.pop_stack_frame_raw(unwinding, |this, return_place| {
            this.copy_op_allow_transmute(&return_op, return_place)?;
            trace!("return value: {:?}", this.dump_place(return_place));
            interp_ok(())
        })?;

        match stack_pop_info.return_action {
            ReturnAction::Normal => {}
            ReturnAction::NoJump => {
                // The hook already did everything.
                return interp_ok(());
            }
            ReturnAction::NoCleanup => {
                // If we are not doing cleanup, also skip everything else.
                assert!(self.stack().is_empty(), "only the topmost frame should ever be leaked");
                assert!(!unwinding, "tried to skip cleanup during unwinding");
                // Don't jump anywhere.
                return interp_ok(());
            }
        }

        // Normal return, figure out where to jump.
        if unwinding {
            // Follow the unwind edge.
            match stack_pop_info.return_cont {
                ReturnContinuation::Goto { unwind, .. } => {
                    // This must be the very last thing that happens, since it can in fact push a new stack frame.
                    self.unwind_to_block(unwind)
                }
                ReturnContinuation::Stop { .. } => {
                    panic!("encountered ReturnContinuation::Stop when unwinding!")
                }
            }
        } else {
            // Follow the normal return edge.
            match stack_pop_info.return_cont {
                ReturnContinuation::Goto { ret, .. } => self.return_to_block(ret),
                ReturnContinuation::Stop { .. } => {
                    assert!(
                        self.stack().is_empty(),
                        "only the bottommost frame can have ReturnContinuation::Stop"
                    );
                    interp_ok(())
                }
            }
        }
    }
}