//! Manages calling a concrete function (with known MIR body) with argument passing,
//! and returning the return value to the caller.
use std::assert_matches::assert_matches;
use std::borrow::Cow;

use either::{Left, Right};
use rustc_abi::{self as abi, ExternAbi, FieldIdx, Integer, VariantIdx};
use rustc_hir::def_id::DefId;
use rustc_hir::find_attr;
use rustc_middle::ty::layout::{IntegerExt, TyAndLayout};
use rustc_middle::ty::{self, AdtDef, Instance, Ty, Unnormalized, VariantDef};
use rustc_middle::{bug, mir, span_bug};
use rustc_target::callconv::{ArgAbi, FnAbi};
use tracing::field::Empty;
use tracing::{info, instrument, trace};

use super::{
    CtfeProvenance, FnVal, ImmTy, InterpCx, InterpResult, MPlaceTy, Machine, OpTy, PlaceTy,
    Projectable, Provenance, ReturnAction, ReturnContinuation, Scalar, interp_ok, throw_ub,
    throw_ub_format,
};
use crate::enter_trace_span;
use crate::interpret::EnteredTraceSpan;

/// An argument passed to a function.
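///
/// As an illustrative sketch of the two modes (not a doctest; `op` and `mplace`
/// stand for some previously computed operand and in-memory place):
///
/// ```ignore (illustrative)
/// // By-copy: the callee works on a copy; the caller's memory stays accessible.
/// let arg = FnArg::Copy(op);
/// // In-place: the callee takes over the in-memory place; the original place is
/// // protected (made inaccessible) for the duration of the call.
/// let arg = FnArg::InPlace(mplace);
/// ```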
#[derive(Clone, Debug)]
pub enum FnArg<'tcx, Prov: Provenance = CtfeProvenance> {
    /// Pass a copy of the given operand.
    Copy(OpTy<'tcx, Prov>),
    /// Allow for the argument to be passed in-place: destroy the value originally stored at that
    /// place and make the place inaccessible for the duration of the function call. This *must* be
    /// an in-memory place so that we can do the proper alias checks.
    InPlace(MPlaceTy<'tcx, Prov>),
}

impl<'tcx, Prov: Provenance> FnArg<'tcx, Prov> {
    pub fn layout(&self) -> &TyAndLayout<'tcx> {
        match self {
            FnArg::Copy(op) => &op.layout,
            FnArg::InPlace(mplace) => &mplace.layout,
        }
    }

    /// Make a copy of the given fn_arg. Any `InPlace` is degenerated to a copy; no protection of
    /// the original memory occurs.
    pub fn copy_fn_arg(&self) -> OpTy<'tcx, Prov> {
        match self {
            FnArg::Copy(op) => op.clone(),
            FnArg::InPlace(mplace) => mplace.clone().into(),
        }
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    /// Make a copy of the given fn_args. Any `InPlace` is degenerated to a copy; no protection of
    /// the original memory occurs.
    pub fn copy_fn_args(args: &[FnArg<'tcx, M::Provenance>]) -> Vec<OpTy<'tcx, M::Provenance>> {
        args.iter().map(|fn_arg| fn_arg.copy_fn_arg()).collect()
    }

    /// Helper function for argument untupling.
    fn fn_arg_project_field(
        &self,
        arg: &FnArg<'tcx, M::Provenance>,
        field: FieldIdx,
    ) -> InterpResult<'tcx, FnArg<'tcx, M::Provenance>> {
        interp_ok(match arg {
            FnArg::Copy(op) => FnArg::Copy(self.project_field(op, field)?),
            FnArg::InPlace(mplace) => FnArg::InPlace(self.project_field(mplace, field)?),
        })
    }

    /// Find the wrapped inner type of a transparent wrapper.
    /// Must not be called on 1-ZSTs (as they don't have a uniquely defined "wrapped field").
    ///
    /// We work with `TyAndLayout` here since that makes it much easier to iterate over all fields.
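    ///
    /// For example (an illustrative sketch, not a doctest):
    ///
    /// ```ignore (illustrative)
    /// #[repr(transparent)]
    /// struct Meters(f64);
    /// // Unfolding the layout of `Meters` yields the layout of `f64`,
    /// // its unique non-1-ZST field.
    /// ```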
    fn unfold_transparent(
        &self,
        layout: TyAndLayout<'tcx>,
        may_unfold: impl Fn(AdtDef<'tcx>) -> bool,
    ) -> TyAndLayout<'tcx> {
        match layout.ty.kind() {
            ty::Adt(adt_def, _) if adt_def.repr().transparent() && may_unfold(*adt_def) => {
                assert!(!adt_def.is_enum());
                // Find the non-1-ZST field, and recurse.
                let (_, field) = layout.non_1zst_field(self).unwrap();
                self.unfold_transparent(field, may_unfold)
            }
            ty::Pat(base, _) => self.layout_of(*base).expect(
                "if the layout of a pattern type could be computed, so can the layout of its base",
            ),
            // Not a transparent type, no further unfolding.
            _ => layout,
        }
    }

    /// Unwrap types that are guaranteed the null pointer optimization (NPO).
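    ///
    /// For instance, `Option<&T>` is guaranteed to use the all-zero bit pattern for
    /// `None`, so it has the same size and ABI as `&T`; this is the guarantee this
    /// function unwraps (shown as an illustrative sketch, not a doctest):
    ///
    /// ```ignore (illustrative)
    /// assert_eq!(
    ///     std::mem::size_of::<Option<&u8>>(),
    ///     std::mem::size_of::<&u8>(),
    /// );
    /// ```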
    fn unfold_npo(&self, layout: TyAndLayout<'tcx>) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
        // Check if this is an option-like type wrapping some type.
        let ty::Adt(def, args) = layout.ty.kind() else {
            // Not an ADT, so definitely no NPO.
            return interp_ok(layout);
        };
        if def.variants().len() != 2 {
            // Not a 2-variant enum, so no NPO.
            return interp_ok(layout);
        }
        assert!(def.is_enum());

        let all_fields_1zst = |variant: &VariantDef| -> InterpResult<'tcx, _> {
            for field in &variant.fields {
                let ty = field.ty(*self.tcx, args);
                let layout = self.layout_of(ty)?;
                if !layout.is_1zst() {
                    return interp_ok(false);
                }
            }
            interp_ok(true)
        };

        // If one variant consists entirely of 1-ZSTs, then the other variant
        // is the only "relevant" one for this check.
        let var0 = VariantIdx::from_u32(0);
        let var1 = VariantIdx::from_u32(1);
        let relevant_variant = if all_fields_1zst(def.variant(var0))? {
            def.variant(var1)
        } else if all_fields_1zst(def.variant(var1))? {
            def.variant(var0)
        } else {
            // No variant is all-1-ZST, so no NPO.
            return interp_ok(layout);
        };
        // The "relevant" variant must have exactly one field, and its type is the "inner" type.
        if relevant_variant.fields.len() != 1 {
            return interp_ok(layout);
        }
        let inner = relevant_variant.fields[FieldIdx::from_u32(0)].ty(*self.tcx, args);
        let inner = self.layout_of(inner)?;

        // Check if the inner type is one of the NPO-guaranteed ones.
        // For that we first unpeel transparent *structs* (but not unions).
        let is_npo =
            |def: AdtDef<'tcx>| find_attr!(self.tcx, def.did(), RustcNonnullOptimizationGuaranteed);
        let inner = self.unfold_transparent(inner, /* may_unfold */ |def| {
            // Stop at NPO types so that we don't miss that attribute in the check below!
            def.is_struct() && !is_npo(def)
        });
        interp_ok(match inner.ty.kind() {
            ty::Ref(..) | ty::FnPtr(..) => {
                // `Option<&T>` behaves like `&T`, and same for `fn()`.
                inner
            }
            ty::Adt(def, _) if is_npo(*def) => {
                // Once we found a `nonnull_optimization_guaranteed` type, further strip off
                // newtype structs from it to find the underlying ABI type.
                self.unfold_transparent(inner, /* may_unfold */ |def| def.is_struct())
            }
            _ => {
                // Everything else we do not unfold.
                layout
            }
        })
    }

    /// Check if these two layouts look like they are fn-ABI-compatible.
    /// (We also compare the `PassMode`, so this doesn't have to check everything. But it turns out
    /// that only checking the `PassMode` is insufficient.)
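    ///
    /// As a caller-facing example of what these rules govern (an illustrative
    /// sketch, not a doctest): on a 64-bit target, `usize` and `u64` are
    /// ABI-compatible, so a call through a correspondingly transmuted fn pointer
    /// is accepted:
    ///
    /// ```ignore (illustrative)
    /// fn takes_u64(_: u64) {}
    /// let f: fn(usize) = unsafe { std::mem::transmute(takes_u64 as fn(u64)) };
    /// f(0usize); // fine: `usize` and `u64` are same-size integers
    /// ```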
    fn layout_compat(
        &self,
        caller: TyAndLayout<'tcx>,
        callee: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, bool> {
        // Fast path: equal types are definitely compatible.
        if caller.ty == callee.ty {
            return interp_ok(true);
        }
        // 1-ZSTs are compatible with all 1-ZSTs (and with nothing else).
        if caller.is_1zst() || callee.is_1zst() {
            return interp_ok(caller.is_1zst() && callee.is_1zst());
        }
        // Unfold newtypes and NPO optimizations.
        let unfold = |layout: TyAndLayout<'tcx>| {
            self.unfold_npo(self.unfold_transparent(layout, /* may_unfold */ |_def| true))
        };
        let caller = unfold(caller)?;
        let callee = unfold(callee)?;
        // Now see if these inner types are compatible.

        // Compatible pointer types. For thin pointers, we have to accept even non-`repr(transparent)`
        // things as compatible due to `DispatchFromDyn`. For instance, `Rc<i32>` and `*mut i32`
        // must be compatible. So we just accept everything with Pointer ABI as compatible,
        // even if this will accept some code that is not stably guaranteed to work.
        // This also handles function pointers.
        let thin_pointer = |layout: TyAndLayout<'tcx>| match layout.backend_repr {
            abi::BackendRepr::Scalar(s) => match s.primitive() {
                abi::Primitive::Pointer(addr_space) => Some(addr_space),
                _ => None,
            },
            _ => None,
        };
        if let (Some(caller), Some(callee)) = (thin_pointer(caller), thin_pointer(callee)) {
            return interp_ok(caller == callee);
        }
        // For wide pointers we have to get the pointee type.
        let pointee_ty = |ty: Ty<'tcx>| -> InterpResult<'tcx, Option<Ty<'tcx>>> {
            // We cannot use `builtin_deref` here since we need to reject `Box<T, MyAlloc>`.
            interp_ok(Some(match ty.kind() {
                ty::Ref(_, ty, _) => *ty,
                ty::RawPtr(ty, _) => *ty,
                // We only accept `Box` with the default allocator.
                _ if ty.is_box_global(*self.tcx) => ty.expect_boxed_ty(),
                _ => return interp_ok(None),
            }))
        };
        if let (Some(caller), Some(callee)) = (pointee_ty(caller.ty)?, pointee_ty(callee.ty)?) {
            // This is okay if they have the same metadata type.
            let meta_ty = |ty: Ty<'tcx>| {
                // Even if `ty` is normalized, the search for the unsized tail will project
                // to fields, which can yield non-normalized types. So we need to provide a
                // normalization function.
                let normalize = |ty| {
                    self.tcx.normalize_erasing_regions(self.typing_env, Unnormalized::new_wip(ty))
                };
                ty.ptr_metadata_ty(*self.tcx, normalize)
            };
            return interp_ok(meta_ty(caller) == meta_ty(callee));
        }

        // Compatible integer types (in particular, usize vs ptr-sized-u32/u64).
        // `char` counts as `u32`.
        let int_ty = |ty: Ty<'tcx>| {
            Some(match ty.kind() {
                ty::Int(ity) => (Integer::from_int_ty(&self.tcx, *ity), /* signed */ true),
                ty::Uint(uty) => (Integer::from_uint_ty(&self.tcx, *uty), /* signed */ false),
                ty::Char => (Integer::I32, /* signed */ false),
                _ => return None,
            })
        };
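        // For example, on a 64-bit target both `usize` and `u64` map to
        // `(Integer::I64, /* signed */ false)` here, so the comparison below
        // treats them as ABI-compatible.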
        if let (Some(caller), Some(callee)) = (int_ty(caller.ty), int_ty(callee.ty)) {
            // This is okay if they are the same integer type.
            return interp_ok(caller == callee);
        }

        // Fall back to exact equality.
        interp_ok(caller == callee)
    }

    /// Returns a `bool` saying whether the two arguments are ABI-compatible.
    pub fn check_argument_compat(
        &self,
        caller_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        callee_abi: &ArgAbi<'tcx, Ty<'tcx>>,
    ) -> InterpResult<'tcx, bool> {
        // We do not want to accept things as ABI-compatible that just "happen to be" compatible on the current target,
        // so we implement a type-based check that reflects the guaranteed rules for ABI compatibility.
        if self.layout_compat(caller_abi.layout, callee_abi.layout)? {
            // Ensure that our checks imply actual ABI compatibility for this concrete call.
            // (This can fail e.g. if `#[rustc_nonnull_optimization_guaranteed]` is used incorrectly.)
            assert!(caller_abi.eq_abi(callee_abi));
            interp_ok(true)
        } else {
            trace!(
                "check_argument_compat: incompatible ABIs:\ncaller: {:?}\ncallee: {:?}",
                caller_abi, callee_abi
            );
            interp_ok(false)
        }
    }

    /// Initialize a single callee argument, checking the types for compatibility.
    fn pass_argument<'x, 'y>(
        &mut self,
        caller_args: &mut impl Iterator<
            Item = (&'x FnArg<'tcx, M::Provenance>, &'y ArgAbi<'tcx, Ty<'tcx>>),
        >,
        callee_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        callee_arg_idx: usize,
        callee_arg: &mir::Place<'tcx>,
        callee_ty: Ty<'tcx>,
        already_live: bool,
    ) -> InterpResult<'tcx>
    where
        'tcx: 'x,
        'tcx: 'y,
    {
        assert_eq!(callee_ty, callee_abi.layout.ty);
        if callee_abi.is_ignore() {
            // This one is skipped. Still must be made live though!
            if !already_live {
                self.storage_live(callee_arg.as_local().unwrap())?;
            }
            return interp_ok(());
        }
        // Find next caller arg.
        let Some((caller_arg, caller_abi)) = caller_args.next() else {
            throw_ub_format!("calling a function with fewer arguments than it requires");
        };
        assert_eq!(caller_arg.layout().layout, caller_abi.layout.layout);
        // Sadly we cannot assert that `caller_arg.layout().ty` and `caller_abi.layout.ty` are
        // equal; in closures the types sometimes differ. We just hope that `caller_abi` is the
        // right type to print to the user.

        // Check compatibility
        if !self.check_argument_compat(caller_abi, callee_abi)? {
            throw_ub!(AbiMismatchArgument {
                arg_idx: callee_arg_idx,
                caller_ty: caller_abi.layout.ty,
                callee_ty: callee_abi.layout.ty
            });
        }
        // We work with a copy of the argument for now; if this is in-place argument passing, we
        // will later protect the source it comes from. This means the callee cannot observe whether
        // we did in-place or by-copy argument passing, except for pointer equality tests.
        let caller_arg_copy = caller_arg.copy_fn_arg();
        if !already_live {
            let local = callee_arg.as_local().unwrap();
            let meta = caller_arg_copy.meta();
            // `check_argument_compat` ensures that if metadata is needed, both have the same type,
            // so we know they will use the metadata the same way.
            assert!(!meta.has_meta() || caller_arg_copy.layout.ty == callee_ty);

            self.storage_live_dyn(local, meta)?;
        }
326let callee_arg = self.eval_place(*callee_arg)?;
327// We allow some transmutes here.
328 // FIXME: Depending on the PassMode, this should reset some padding to uninitialized. (This
329 // is true for all `copy_op`, but there are a lot of special cases for argument passing
330 // specifically.)
331self.copy_op_allow_transmute(&caller_arg_copy, &callee_arg)?;
332// If this was an in-place pass, protect the place it comes from for the duration of the call.
333if let FnArg::InPlace(mplace) = caller_arg {
334 M::protect_in_place_function_argument(self, mplace)?;
335 }
336interp_ok(())
337 }

    /// The main entry point for creating a new stack frame: performs ABI checks and initializes
    /// arguments.
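    ///
    /// In rough pseudocode, the steps performed below (the free-standing helper
    /// names here are purely descriptive, not real methods):
    ///
    /// ```ignore (illustrative sketch)
    /// let callee_fn_abi = compute_callee_abi(instance)?;       // incl. C-variadic handling
    /// check_caller_callee_abi_match(caller_fn_abi, callee_fn_abi)?;
    /// self.push_stack_frame_raw(..)?;                          // locals start out dead
    /// for local in body.args_iter() {
    ///     self.pass_argument(..)?;                             // incl. spread_arg / VaList
    /// }
    /// check_return_type_compat(..)?;
    /// self.push_stack_frame_done()
    /// ```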
    #[instrument(skip(self), level = "trace")]
    pub fn init_stack_frame(
        &mut self,
        instance: Instance<'tcx>,
        body: &'tcx mir::Body<'tcx>,
        caller_fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx, M::Provenance>],
        with_caller_location: bool,
        destination: &PlaceTy<'tcx, M::Provenance>,
        mut cont: ReturnContinuation,
    ) -> InterpResult<'tcx> {
        let _trace =
            enter_trace_span!(M, step::init_stack_frame, %instance, tracing_separate_thread = Empty);

        // The first order of business is to figure out the callee signature.
        // However, that requires the list of variadic arguments.
        // We use the *caller* information to determine where to split the list of arguments,
        // and then later check that the callee indeed has the same number of fixed arguments.
        let extra_tys = if caller_fn_abi.c_variadic {
            let fixed_count = usize::try_from(caller_fn_abi.fixed_count).unwrap();
            let extra_tys = args[fixed_count..].iter().map(|arg| arg.layout().ty);
            self.tcx.mk_type_list_from_iter(extra_tys)
        } else {
            ty::List::empty()
        };
        let callee_fn_abi = self.fn_abi_of_instance_no_deduced_attrs(instance, extra_tys)?;

        if caller_fn_abi.conv != callee_fn_abi.conv {
            throw_ub_format!(
                "calling a function with calling convention \"{callee_conv}\" using calling convention \"{caller_conv}\"",
                callee_conv = callee_fn_abi.conv,
                caller_conv = caller_fn_abi.conv,
            )
        }

        if caller_fn_abi.c_variadic != callee_fn_abi.c_variadic {
            throw_ub!(CVariadicMismatch {
                caller_is_c_variadic: caller_fn_abi.c_variadic,
                callee_is_c_variadic: callee_fn_abi.c_variadic,
            });
        }
        if caller_fn_abi.c_variadic && caller_fn_abi.fixed_count != callee_fn_abi.fixed_count {
            throw_ub!(CVariadicFixedCountMismatch {
                caller: caller_fn_abi.fixed_count,
                callee: callee_fn_abi.fixed_count,
            });
        }

        // Check that all target features required by the callee (i.e., from
        // the attribute `#[target_feature(enable = ...)]`) are enabled at
        // compile time.
        M::check_fn_target_features(self, instance)?;

        if !callee_fn_abi.can_unwind {
            // The callee cannot unwind, so force the `Unreachable` unwind handling.
            match &mut cont {
                ReturnContinuation::Stop { .. } => {}
                ReturnContinuation::Goto { unwind, .. } => {
                    *unwind = mir::UnwindAction::Unreachable;
                }
            }
        }

        // *Before* pushing the new frame, determine whether the return destination is in memory.
        // Need to use `place_to_op` to be *sure* we get the mplace if there is one.
        let destination_mplace = self.place_to_op(destination)?.as_mplace_or_imm().left();

        // Push the "raw" frame -- this leaves locals uninitialized.
        self.push_stack_frame_raw(instance, body, destination, cont)?;
        let preamble_span = self.frame().loc.unwrap_right(); // the span used for preamble errors

        trace!(
            "caller ABI: {:#?}, args: {:#?}",
            caller_fn_abi,
            args.iter()
                .map(|arg| (
                    arg.layout().ty,
                    match arg {
                        FnArg::Copy(op) => format!("copy({op:?})"),
                        FnArg::InPlace(mplace) => format!("in-place({mplace:?})"),
                    }
                ))
                .collect::<Vec<_>>()
        );
        trace!(
            "spread_arg: {:?}, locals: {:#?}",
            body.spread_arg,
            body.args_iter()
                .map(|local| (local, self.layout_of_local(self.frame(), local, None).unwrap().ty))
                .collect::<Vec<_>>()
        );

        // In principle, we have two iterators: Where the arguments come from, and where
        // they go to.

        // The "where they come from" part is easy, we expect the caller to do any special handling
        // that might be required here (e.g. for untupling).
        // If `with_caller_location` is set we pretend there is an extra argument (that
        // we will not pass; our `caller_location` intrinsic implementation walks the stack instead).
        assert_eq!(
            args.len() + if with_caller_location { 1 } else { 0 },
            caller_fn_abi.args.len(),
            "mismatch between caller ABI and caller arguments",
        );
        let mut caller_args = args
            .iter()
            .zip(caller_fn_abi.args.iter())
            .filter(|arg_and_abi| !arg_and_abi.1.is_ignore());

        // Now we have to spread them out across the callee's locals,
        // taking into account the `spread_arg`. If we could write
        // this as a single iterator (that handles `spread_arg`), then
        // `pass_argument` would be the loop body. It takes care to
        // not advance `caller_iter` for ignored arguments.
        let mut callee_args_abis = callee_fn_abi.args.iter().enumerate();
        // Determine whether there is a special VaList argument. This is always the
        // last argument, and since arguments start at index 1 that's `arg_count`.
        let va_list_arg = callee_fn_abi.c_variadic.then(|| mir::Local::from_usize(body.arg_count));
        for local in body.args_iter() {
            // Update the span that we show in case of an error to point to this argument.
            self.frame_mut().loc = Right(body.local_decls[local].source_info.span);
            // Construct the destination place for this argument. At this point all
            // locals are still dead, so we cannot construct a `PlaceTy`.
            let dest = mir::Place::from(local);
            // `layout_of_local` does more than just the instantiation we need to get the
            // type, but the result gets cached so this avoids calling the instantiation
            // query *again* the next time this local is accessed.
            let ty = self.layout_of_local(self.frame(), local, None)?.ty;
            if Some(local) == va_list_arg {
                // This is the last callee-side argument of a variadic function.
                // This argument is a VaList holding the remaining caller-side arguments.
                self.storage_live(local)?;

                let place = self.eval_place(dest)?;
                let mplace = self.force_allocation(&place)?;

                // Consume the remaining arguments by putting them into the variable argument
                // list.
                let varargs = self.allocate_varargs(
                    &mut caller_args,
                    // "Ignored" arguments aren't actually passed, so the callee should also
                    // ignore them. (`pass_argument` does this for regular arguments.)
                    (&mut callee_args_abis).filter(|(_, abi)| !abi.is_ignore()),
                )?;
                // When the frame is dropped, these variable arguments are deallocated.
                self.frame_mut().va_list = varargs.clone();
                let key = self.va_list_ptr(varargs.into());

                // Zero the VaList, so it is fully initialized.
                self.write_bytes_ptr(mplace.ptr(), (0..mplace.layout.size.bytes()).map(|_| 0u8))?;

                // Store the "key" pointer in the right field.
                let key_mplace = self.va_list_key_field(&mplace)?;
                self.write_pointer(key, &key_mplace)?;
            } else if Some(local) == body.spread_arg {
                // Make the local live once, then fill in the value field by field.
                self.storage_live(local)?;
                // Must be a tuple.
                let ty::Tuple(fields) = ty.kind() else {
                    span_bug!(self.cur_span(), "non-tuple type for `spread_arg`: {ty}")
                };
                for (i, field_ty) in fields.iter().enumerate() {
                    let dest = dest.project_deeper(
                        &[mir::ProjectionElem::Field(FieldIdx::from_usize(i), field_ty)],
                        *self.tcx,
                    );
                    let (idx, callee_abi) = callee_args_abis.next().unwrap();
                    self.pass_argument(
                        &mut caller_args,
                        callee_abi,
                        idx,
                        &dest,
                        field_ty,
                        /* already_live */ true,
                    )?;
                }
            } else {
                // Normal argument. Cannot mark it as live yet, it might be unsized!
                let (idx, callee_abi) = callee_args_abis.next().unwrap();
                self.pass_argument(
                    &mut caller_args,
                    callee_abi,
                    idx,
                    &dest,
                    ty,
                    /* already_live */ false,
                )?;
            }
        }

        // Don't forget to check the return type!
        self.frame_mut().loc = Right(body.local_decls[mir::RETURN_PLACE].source_info.span);
        if !self.check_argument_compat(&caller_fn_abi.ret, &callee_fn_abi.ret)? {
            throw_ub!(AbiMismatchReturn {
                caller_ty: caller_fn_abi.ret.layout.ty,
                callee_ty: callee_fn_abi.ret.layout.ty
            });
        }
        // Protect the return place for in-place return value passing.
        // We only need to protect anything if this is actually an in-memory place.
        if let Some(mplace) = destination_mplace {
            M::protect_in_place_function_argument(self, &mplace)?;
        }

        // For the final checks, use the same span as the preamble since it is unclear what else to do.
        self.frame_mut().loc = Right(preamble_span);
        // If the callee needs a caller location, pretend we consume one more argument from the ABI.
        if instance.def.requires_caller_location(*self.tcx) {
            callee_args_abis.next().unwrap();
        }
        // Now we should have no more caller args or callee arg ABIs.
        assert!(
            callee_args_abis.next().is_none(),
            "mismatch between callee ABI and callee body arguments"
        );
        if caller_args.next().is_some() {
            throw_ub_format!("calling a function with more arguments than it expected");
        }

        // Done!
        self.push_stack_frame_done()
    }

    /// Initiate a call to this function -- pushing the stack frame and initializing the arguments.
    ///
    /// `caller_fn_abi` is used to determine if all the arguments are passed the proper way.
    /// However, we also need `caller_abi` to determine if we need to do untupling of arguments.
    ///
    /// `with_caller_location` indicates whether the caller passed a caller location. Miri
    /// implements caller locations without argument passing, but to match `FnAbi` we need to know
    /// when those arguments are present.
    pub(super) fn init_fn_call(
        &mut self,
        fn_val: FnVal<'tcx, M::ExtraFnVal>,
        (caller_abi, caller_fn_abi): (ExternAbi, &FnAbi<'tcx, Ty<'tcx>>),
        args: &[FnArg<'tcx, M::Provenance>],
        with_caller_location: bool,
        destination: &PlaceTy<'tcx, M::Provenance>,
        target: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        let _trace =
            enter_trace_span!(M, step::init_fn_call, tracing_separate_thread = Empty, ?fn_val)
                .or_if_tracing_disabled(|| trace!("init_fn_call: {:#?}", fn_val));

        let instance = match fn_val {
            FnVal::Instance(instance) => instance,
            FnVal::Other(extra) => {
                return M::call_extra_fn(
                    self,
                    extra,
                    caller_fn_abi,
                    args,
                    destination,
                    target,
                    unwind,
                );
            }
        };

        match instance.def {
            ty::InstanceKind::Intrinsic(def_id) => {
                assert!(self.tcx.intrinsic(def_id).is_some());
                // FIXME: Should `InPlace` arguments be reset to uninit?
                if let Some(fallback) = M::call_intrinsic(
                    self,
                    instance,
                    &Self::copy_fn_args(args),
                    destination,
                    target,
                    unwind,
                )? {
                    assert!(!self.tcx.intrinsic(fallback.def_id()).unwrap().must_be_overridden);
                    assert_matches!(fallback.def, ty::InstanceKind::Item(_));
                    return self.init_fn_call(
                        FnVal::Instance(fallback),
                        (caller_abi, caller_fn_abi),
                        args,
                        with_caller_location,
                        destination,
                        target,
                        unwind,
                    );
                } else {
                    interp_ok(())
                }
            }
            ty::InstanceKind::VTableShim(..)
            | ty::InstanceKind::ReifyShim(..)
            | ty::InstanceKind::ClosureOnceShim { .. }
            | ty::InstanceKind::ConstructCoroutineInClosureShim { .. }
            | ty::InstanceKind::FnPtrShim(..)
            | ty::InstanceKind::DropGlue(..)
            | ty::InstanceKind::CloneShim(..)
            | ty::InstanceKind::FnPtrAddrShim(..)
            | ty::InstanceKind::ThreadLocalShim(..)
            | ty::InstanceKind::AsyncDropGlueCtorShim(..)
            | ty::InstanceKind::AsyncDropGlue(..)
            | ty::InstanceKind::FutureDropPollShim(..)
            | ty::InstanceKind::Item(_) => {
                // We need MIR for this fn.
                // Note that this can be an intrinsic, if we are executing its fallback body.
                let Some((body, instance)) = M::find_mir_or_eval_fn(
                    self,
                    instance,
                    caller_fn_abi,
                    args,
                    destination,
                    target,
                    unwind,
                )?
                else {
                    return interp_ok(());
                };

                // Special handling for the closure ABI: untuple the last argument.
                let args: Cow<'_, [FnArg<'tcx, M::Provenance>]> =
                    if caller_abi == ExternAbi::RustCall && !args.is_empty() {
                        // Untuple
                        let (untuple_arg, args) = args.split_last().unwrap();
                        let ty::Tuple(untuple_fields) = untuple_arg.layout().ty.kind() else {
                            span_bug!(self.cur_span(), "untuple argument must be a tuple")
                        };
                        trace!("init_fn_call: Will pass last argument by untupling");
                        Cow::from(
                            args.iter()
                                // The regular arguments.
                                .map(|a| interp_ok(a.clone()))
                                // The fields of the untupled argument.
                                .chain((0..untuple_fields.len()).map(|i| {
                                    self.fn_arg_project_field(untuple_arg, FieldIdx::from_usize(i))
                                }))
                                .collect::<InterpResult<'_, Vec<_>>>()?,
                        )
                    } else {
                        // Plain arg passing
                        Cow::from(args)
                    };

                self.init_stack_frame(
                    instance,
                    body,
                    caller_fn_abi,
                    &args,
                    with_caller_location,
                    destination,
                    ReturnContinuation::Goto { ret: target, unwind },
                )
            }
            // `InstanceKind::Virtual` does not have callable MIR. Calls to `Virtual` instances must be
            // codegen'd / interpreted as virtual calls through the vtable.
            ty::InstanceKind::Virtual(def_id, idx) => {
                let mut args = args.to_vec();
                // We have to implement all "dyn-compatible receivers". So we have to go search for a
                // pointer or `dyn Trait` type, but it could be wrapped in newtypes. So recursively
                // unwrap those newtypes until we are there.
                // An `InPlace` does nothing here, we keep the original receiver intact. We can't
                // really pass the argument in-place anyway, and we are constructing a new
                // `Immediate` receiver.
                let mut receiver = args[0].copy_fn_arg();
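                // For example, a `Pin<Box<dyn Trait>>` receiver is unwrapped (via its
                // single non-1-ZST field) down to the `Box`, whose pointer then tells
                // us where the `dyn Trait` object lives.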
                let receiver_place = loop {
                    match receiver.layout.ty.kind() {
                        ty::Ref(..) | ty::RawPtr(..) => {
                            // We do *not* use `deref_pointer` here: we don't want to conceptually
                            // create a place that must be dereferenceable, since the receiver might
                            // be a raw pointer and (for `*const dyn Trait`) we don't need to
                            // actually access memory to resolve this method.
                            // Also see <https://github.com/rust-lang/miri/issues/2786>.
                            let val = self.read_immediate(&receiver)?;
                            break self.imm_ptr_to_mplace(&val)?;
                        }
                        ty::Dynamic(..) => break receiver.assert_mem_place(), // no immediate unsized values
                        _ => {
                            // Not there yet, search for the only non-ZST field.
                            // (The rules for `DispatchFromDyn` ensure there's exactly one such field.)
                            let (idx, _) = receiver.layout.non_1zst_field(self).expect(
                                "not exactly one non-1-ZST field in a `DispatchFromDyn` type",
                            );
                            receiver = self.project_field(&receiver, idx)?;
                        }
                    }
                };

                // Obtain the underlying trait we are working on, and the adjusted receiver argument.
                // Doesn't have to be a `dyn Trait`, but the unsized tail must be `dyn Trait`.
                // (For that reason we also cannot use `unpack_dyn_trait`.)
                let receiver_tail =
                    self.tcx.struct_tail_for_codegen(receiver_place.layout.ty, self.typing_env);
                let ty::Dynamic(receiver_trait, _) = receiver_tail.kind() else {
                    span_bug!(self.cur_span(), "dynamic call on non-`dyn` type {}", receiver_tail)
                };
                assert!(receiver_place.layout.is_unsized());

                // Get the required information from the vtable.
                let vptr = receiver_place.meta().unwrap_meta().to_pointer(self)?;
                let dyn_ty = self.get_ptr_vtable_ty(vptr, Some(receiver_trait))?;
                let adjusted_recv = receiver_place.ptr();

                // Now determine the actual method to call. Usually we use the easy way of just
                // looking up the method at index `idx`.
                let vtable_entries = self.vtable_entries(receiver_trait.principal(), dyn_ty);
                let Some(ty::VtblEntry::Method(fn_inst)) = vtable_entries.get(idx).copied() else {
                    // FIXME(fee1-dead) these could be variants of the UB info enum instead of this
                    throw_ub_format!("`dyn` call trying to call something that is not a method");
                };
                trace!("Virtual call dispatches to {fn_inst:#?}");
                // We can also do the lookup based on `def_id` and `dyn_ty`, and check that that
                // produces the same result.
                self.assert_virtual_instance_matches_concrete(dyn_ty, def_id, instance, fn_inst);

                // Adjust receiver argument. Layout can be any (thin) ptr.
                let receiver_ty = Ty::new_mut_ptr(self.tcx.tcx, dyn_ty);
                args[0] = FnArg::Copy(
                    ImmTy::from_immediate(
                        Scalar::from_maybe_pointer(adjusted_recv, self).into(),
                        self.layout_of(receiver_ty)?,
                    )
                    .into(),
                );
                trace!("Patched receiver operand to {:#?}", args[0]);
                // Need to also adjust the type in the ABI. Strangely, the layout there is actually
                // already fine! Just the type is bogus. This is due to what `force_thin_self_ptr`
                // does in `fn_abi_new_uncached`; supposedly, codegen relies on having the bogus
                // type, so we just patch this up locally.
                let mut caller_fn_abi = caller_fn_abi.clone();
                caller_fn_abi.args[0].layout.ty = receiver_ty;

                // Recurse with the concrete function.
                self.init_fn_call(
                    FnVal::Instance(fn_inst),
                    (caller_abi, &caller_fn_abi),
                    &args,
                    with_caller_location,
                    destination,
                    target,
                    unwind,
                )
            }
        }
    }

    fn assert_virtual_instance_matches_concrete(
        &self,
        dyn_ty: Ty<'tcx>,
        def_id: DefId,
        virtual_instance: ty::Instance<'tcx>,
        concrete_instance: ty::Instance<'tcx>,
    ) {
        let tcx = *self.tcx;

        let trait_def_id = tcx.parent(def_id);
        let virtual_trait_ref = ty::TraitRef::from_assoc(tcx, trait_def_id, virtual_instance.args);
        let existential_trait_ref = ty::ExistentialTraitRef::erase_self_ty(tcx, virtual_trait_ref);
        let concrete_trait_ref = existential_trait_ref.with_self_ty(tcx, dyn_ty);
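
        // Re-resolve the method on the concrete self type; the result must agree with the
        // instance that the vtable lookup produced.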
        let concrete_method = {
            let _trace = enter_trace_span!(M, resolve::expect_resolve_for_vtable, ?def_id);
            Instance::expect_resolve_for_vtable(
                tcx,
                self.typing_env,
                def_id,
                virtual_instance.args.rebase_onto(tcx, trait_def_id, concrete_trait_ref.args),
                self.cur_span(),
            )
        };
        assert_eq!(concrete_instance, concrete_method);
    }

    /// Initiate a tail call to this function -- popping the current stack frame, pushing the new
    /// stack frame and initializing the arguments.
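    ///
    /// For illustration: this is the path taken when the interpreted program performs a tail
    /// call via the unstable `become` syntax (`become f(x)` rather than `return f(x)`), where
    /// the caller's frame must be gone before `f` starts executing.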
    pub(super) fn init_fn_tail_call(
        &mut self,
        fn_val: FnVal<'tcx, M::ExtraFnVal>,
        (caller_abi, caller_fn_abi): (ExternAbi, &FnAbi<'tcx, Ty<'tcx>>),
        args: &[FnArg<'tcx, M::Provenance>],
        with_caller_location: bool,
    ) -> InterpResult<'tcx> {
        trace!("init_fn_tail_call: {:#?}", fn_val);
        // This is the "canonical" implementation of tail calls: pop the current stack frame,
        // then perform a normal call, which pushes a new stack frame that returns to wherever
        // the popped frame would have returned.
        //
        // Note that we cannot use `return_from_current_stack_frame`, as that "executes" the
        // goto to the return block, which we don't want here: only the tail-called function
        // should return to the current return block.

        // The arguments all need to be copied, since the current stack frame will be removed
        // before the callee even starts executing.
        // FIXME(explicit_tail_calls,#144855): does this match what codegen does?
        let args = args.iter().map(|fn_arg| FnArg::Copy(fn_arg.copy_fn_arg())).collect::<Vec<_>>();
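        // (E.g. an `InPlace` argument pointing into the caller's locals would dangle once that
        // frame is popped below, so it is degenerated to a by-value copy here.)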
        // Remove the frame from the stack.
        let frame = self.pop_stack_frame_raw()?;
        // Remember where this frame would have returned to.
        let ReturnContinuation::Goto { ret, unwind } = frame.return_cont() else {
            bug!("can't tailcall as root of the stack");
        };
        // There's no return value to deal with! Instead, we forward the old return place
        // to the new function.
        // FIXME(explicit_tail_calls):
        // we should check whether caller and callee agree on whether they can unwind,
        // see <https://github.com/rust-lang/rust/pull/113128#issuecomment-1614979803>

        // Now push the new stack frame.
        self.init_fn_call(
            fn_val,
            (caller_abi, caller_fn_abi),
            &*args,
            with_caller_location,
            frame.return_place(),
            ret,
            unwind,
        )?;

        // Finally, clear the local variables. This has to be done after pushing the new frame,
        // to support non-scalar arguments.
        // FIXME(explicit_tail_calls,#144855): revisit this once codegen supports indirect
        // arguments, to ensure the semantics are compatible.
        let return_action = self.cleanup_stack_frame(/* unwinding */ false, frame)?;
        assert_eq!(return_action, ReturnAction::Normal);
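        // (Popping the old frame must not require any further action from us; the newly pushed
        // frame is already wired up to return to the old caller.)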

        interp_ok(())
    }

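    /// Initiate a call to `drop_in_place` on `place`: roughly, what a MIR `Drop` terminator
    /// performs, e.g. when an interpreted value such as a `Box<dyn Trait>` goes out of scope.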
    pub(super) fn init_drop_in_place_call(
        &mut self,
        place: &PlaceTy<'tcx, M::Provenance>,
        instance: ty::Instance<'tcx>,
        target: mir::BasicBlock,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        trace!("init_drop_in_place_call: {:?},\n instance={:?}", place, instance);
        // We take the address of the object. This may well be unaligned, which is fine
        // for us here. However, unaligned accesses will probably make the actual drop
        // implementation fail -- a problem shared by rustc.
        let place = self.force_allocation(place)?;

        // We behave a bit differently from codegen here.
        // Codegen creates an `InstanceKind::Virtual` with index 0 (the slot of the drop method) and
        // then dispatches that to the normal call machinery. However, our call machinery currently
        // only supports calling `VtblEntry::Method`; it would choke on a `MetadataDropInPlace`. So
        // instead we do the virtual call stuff ourselves. It's easier here than in `eval_fn_call`
        // since we can just get a place of the underlying type and use `mplace_to_imm_ptr`.
        let place = match place.layout.ty.kind() {
            ty::Dynamic(data, _) => {
                // Dropping a trait object. Need to find the actual drop fn.
                self.unpack_dyn_trait(&place, data)?
            }
            _ => {
                debug_assert_eq!(
                    instance,
                    ty::Instance::resolve_drop_in_place(*self.tcx, place.layout.ty)
                );
                place
            }
        };
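        // At this point `place` has the fully concrete type, so resolving `drop_in_place` below
        // yields the monomorphic drop glue for the actual value being dropped.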
        let instance = {
            let _trace =
                enter_trace_span!(M, resolve::resolve_drop_in_place, ty = ?place.layout.ty);
            ty::Instance::resolve_drop_in_place(*self.tcx, place.layout.ty)
        };
        let fn_abi = self.fn_abi_of_instance_no_deduced_attrs(instance, ty::List::empty())?;

        let arg = self.mplace_to_imm_ptr(&place, None)?;
        let ret = MPlaceTy::fake_alloc_zst(self.layout_of(self.tcx.types.unit)?);

        self.init_fn_call(
            FnVal::Instance(instance),
            (ExternAbi::Rust, fn_abi),
            &[FnArg::Copy(arg.into())],
            false,
            &ret.into(),
            Some(target),
            unwind,
        )
    }

    /// Pops the current frame from the stack, copies the return value to the caller, deallocates
    /// the memory for allocated locals, and jumps to an appropriate place.
    ///
    /// If `unwinding` is `false`, then we are performing a normal return
    /// from a function. In this case, we jump back into the frame of the caller,
    /// and continue execution as normal.
    ///
    /// If `unwinding` is `true`, then we are in the middle of a panic,
    /// and need to unwind this frame. In this case, we jump to the
    /// `cleanup` block for the function, which is responsible for running
    /// `Drop` impls for any locals that have been initialized at this point.
    /// The cleanup block ends with a special `Resume` terminator, which will
    /// cause us to continue unwinding.
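    ///
    /// Returning from the bottommost frame (`ReturnContinuation::Stop`) is only legal when not
    /// unwinding; unwinding past the topmost frame of the stack is undefined behavior.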
    #[instrument(skip(self), level = "trace")]
    pub(super) fn return_from_current_stack_frame(
        &mut self,
        unwinding: bool,
    ) -> InterpResult<'tcx> {
        info!(
            "popping stack frame ({})",
            if unwinding { "during unwinding" } else { "returning from function" }
        );

        // Check that `unwinding` is consistent with where execution currently is.
        assert_eq!(
            unwinding,
            match self.frame().loc {
                Left(loc) => self.body().basic_blocks[loc.block].is_cleanup,
                Right(_) => true,
            }
        );
        if unwinding && self.frame_idx() == 0 {
            throw_ub_format!("unwinding past the topmost frame of the stack");
        }
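        // (The error above is reachable e.g. in Miri, when a panic propagates out of the
        // interpreted program's topmost frame.)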

        // Get out the return value. Must happen *before* the frame is popped, as we have to get
        // the local's value out.
        let return_op =
            self.local_to_op(mir::RETURN_PLACE, None).expect("return place should always be live");
        // Remove the frame from the stack.
        let frame = self.pop_stack_frame_raw()?;
        // Copy the return value and remember the return continuation.
        if !unwinding {
            self.copy_op_allow_transmute(&return_op, frame.return_place())?;
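            // (The callee's return type and the caller's destination only need to be
            // ABI-compatible, not identical, hence the transmuting copy.)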
            trace!("return value: {:?}", self.dump_place(frame.return_place()));
        }
        let return_cont = frame.return_cont();
        // Finish popping the stack frame.
        let return_action = self.cleanup_stack_frame(unwinding, frame)?;
        // Jump to the next block.
        match return_action {
            ReturnAction::Normal => {}
            ReturnAction::NoJump => {
                // The hook already did everything.
                return interp_ok(());
            }
            ReturnAction::NoCleanup => {
                // If we are not doing cleanup, also skip everything else.
                assert!(self.stack().is_empty(), "only the topmost frame should ever be leaked");
                assert!(!unwinding, "tried to skip cleanup during unwinding");
                // Don't jump anywhere.
                return interp_ok(());
            }
        }

        // Normal return, figure out where to jump.
        if unwinding {
            // Follow the unwind edge.
            match return_cont {
                ReturnContinuation::Goto { unwind, .. } => {
                    // This must be the very last thing that happens, since it can in fact push a
                    // new stack frame.
                    self.unwind_to_block(unwind)
                }
                ReturnContinuation::Stop { .. } => {
                    panic!("encountered ReturnContinuation::Stop when unwinding!")
                }
            }
        } else {
            // Follow the normal return edge.
            match return_cont {
                ReturnContinuation::Goto { ret, .. } => self.return_to_block(ret),
                ReturnContinuation::Stop { .. } => {
                    assert!(
                        self.stack().is_empty(),
                        "only the bottommost frame can have ReturnContinuation::Stop"
                    );
                    interp_ok(())
                }
            }
        }
    }
}