//! Manages the low-level pushing and popping of stack frames and the (de)allocation of local variables.
//! For handling of argument passing and return values, see the `call` module.
use std::cell::Cell;
use std::{fmt, mem};

use either::{Either, Left, Right};
use rustc_hir as hir;
use rustc_hir::definitions::DefPathData;
use rustc_index::IndexVec;
use rustc_middle::ty::layout::TyAndLayout;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_middle::{bug, mir};
use rustc_mir_dataflow::impls::always_storage_live_locals;
use rustc_span::Span;
use rustc_target::callconv::ArgAbi;
use tracing::field::Empty;
use tracing::{info_span, instrument, trace};

use super::{
    AllocId, CtfeProvenance, FnArg, Immediate, InterpCx, InterpResult, MPlaceTy, Machine, MemPlace,
    MemPlaceMeta, MemoryKind, Operand, PlaceTy, Pointer, Provenance, ReturnAction, Scalar,
    from_known_layout, interp_ok, throw_ub, throw_unsup,
};
use crate::{enter_trace_span, errors};
// The PhantomData exists to prevent this type from being `Send`. If it were sent across a thread
// boundary and dropped in the other thread, it would exit the span in the other thread.
struct SpanGuard(tracing::Span, std::marker::PhantomData<*const u8>);
impl SpanGuard {
    /// By default a `SpanGuard` does nothing: it holds the "none" span, so entering and
    /// exiting it are no-ops.
    fn new() -> Self {
        Self(tracing::Span::none(), std::marker::PhantomData)
    }

    /// If a span is entered, we exit the previous span (if any, normally none) and enter the
    /// new span. This is mainly so we don't have to use `Option` for the `tracing_span` field of
    /// `Frame` by creating a dummy span to begin with and then entering it once the frame has
    /// been pushed.
    fn enter(&mut self, span: tracing::Span) {
        // This executes the destructor on the previous instance of `SpanGuard`, ensuring that
        // we never enter or exit more spans than vice versa. Unless you `mem::leak`, then we
        // can't protect the tracing stack, but that'll just lead to weird logging, no actual
        // problems.
        *self = Self(span, std::marker::PhantomData);
        // Manually enter the new span on the current subscriber; the matching `exit` happens
        // in `Drop`.
        self.0.with_subscriber(|(id, dispatch)| {
            dispatch.enter(id);
        });
    }
}
impl Drop for SpanGuard {
    fn drop(&mut self) {
        // Exit the span we entered in `SpanGuard::enter` (a no-op for the "none" span),
        // keeping enter/exit calls balanced on the subscriber.
        self.0.with_subscriber(|(id, dispatch)| {
            dispatch.exit(id);
        });
    }
}
/// A stack frame.
pub struct Frame<'tcx, Prov: Provenance = CtfeProvenance, Extra = ()> {
    ////////////////////////////////////////////////////////////////////////////////
    // Function and callsite information
    ////////////////////////////////////////////////////////////////////////////////
    /// The MIR for the function called on this frame.
    pub(super) body: &'tcx mir::Body<'tcx>,

    /// The def_id and args of the current function.
    pub(super) instance: ty::Instance<'tcx>,

    /// Extra data for the machine.
    pub extra: Extra,

    ////////////////////////////////////////////////////////////////////////////////
    // Return place and locals
    ////////////////////////////////////////////////////////////////////////////////
    /// Where to continue when returning from this function.
    return_cont: ReturnContinuation,

    /// The location where the result of the current stack frame should be written to,
    /// and its layout in the caller. This place is to be interpreted relative to the
    /// *caller's* stack frame. We use a `PlaceTy` instead of an `MPlaceTy` since this
    /// avoids having to move *all* return places into Miri's memory.
    return_place: PlaceTy<'tcx, Prov>,

    /// The list of locals for this stack frame, stored in order as
    /// `[return_ptr, arguments..., variables..., temporaries...]`.
    /// The locals are stored as `Option<Value>`s.
    /// `None` represents a local that is currently dead, while a live local
    /// can either directly contain `Scalar` or refer to some part of an `Allocation`.
    ///
    /// Do *not* access this directly; always go through the machine hook!
    pub locals: IndexVec<mir::Local, LocalState<'tcx, Prov>>,

    /// The complete variable argument list of this frame. Its elements must be dropped when the
    /// frame is popped.
    pub(super) va_list: Vec<MPlaceTy<'tcx, Prov>>,

    /// The span of the `tracing` crate is stored here.
    /// When the guard is dropped, the span is exited. This gives us
    /// a full stack trace on all tracing statements.
    tracing_span: SpanGuard,

    ////////////////////////////////////////////////////////////////////////////////
    // Current position within the function
    ////////////////////////////////////////////////////////////////////////////////
    /// If this is `Right`, we are not currently executing any particular statement in
    /// this frame (can happen e.g. during frame initialization, and during unwinding on
    /// frames without cleanup code).
    ///
    /// Needs to be public because ConstProp does unspeakable things to it.
    pub(super) loc: Either<mir::Location, Span>,
}
114115/// Where and how to continue when returning/unwinding from the current function.
116#[derive(#[automatically_derived]
impl ::core::clone::Clone for ReturnContinuation {
#[inline]
fn clone(&self) -> ReturnContinuation {
let _: ::core::clone::AssertParamIsClone<Option<mir::BasicBlock>>;
let _: ::core::clone::AssertParamIsClone<mir::UnwindAction>;
let _: ::core::clone::AssertParamIsClone<bool>;
*self
}
}Clone, #[automatically_derived]
impl ::core::marker::Copy for ReturnContinuation { }Copy, #[automatically_derived]
impl ::core::cmp::Eq for ReturnContinuation {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_fields_are_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<Option<mir::BasicBlock>>;
let _: ::core::cmp::AssertParamIsEq<mir::UnwindAction>;
let _: ::core::cmp::AssertParamIsEq<bool>;
}
}Eq, #[automatically_derived]
impl ::core::cmp::PartialEq for ReturnContinuation {
#[inline]
fn eq(&self, other: &ReturnContinuation) -> bool {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
__self_discr == __arg1_discr &&
match (self, other) {
(ReturnContinuation::Goto { ret: __self_0, unwind: __self_1 },
ReturnContinuation::Goto { ret: __arg1_0, unwind: __arg1_1
}) => __self_0 == __arg1_0 && __self_1 == __arg1_1,
(ReturnContinuation::Stop { cleanup: __self_0 },
ReturnContinuation::Stop { cleanup: __arg1_0 }) =>
__self_0 == __arg1_0,
_ => unsafe { ::core::intrinsics::unreachable() }
}
}
}PartialEq, #[automatically_derived]
impl ::core::fmt::Debug for ReturnContinuation {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match self {
ReturnContinuation::Goto { ret: __self_0, unwind: __self_1 } =>
::core::fmt::Formatter::debug_struct_field2_finish(f, "Goto",
"ret", __self_0, "unwind", &__self_1),
ReturnContinuation::Stop { cleanup: __self_0 } =>
::core::fmt::Formatter::debug_struct_field1_finish(f, "Stop",
"cleanup", &__self_0),
}
}
}Debug)] // Miri debug-prints these
117pub enum ReturnContinuation {
118/// Jump to the next block in the caller, or cause UB if None (that's a function
119 /// that may never return).
120 /// `ret` stores the block we jump to on a normal return, while `unwind`
121 /// stores the block used for cleanup during unwinding.
122Goto { ret: Option<mir::BasicBlock>, unwind: mir::UnwindAction },
123/// The root frame of the stack: nowhere else to jump to, so we stop.
124 /// `cleanup` says whether locals are deallocated. Static computation
125 /// wants them leaked to intern what they need (and just throw away
126 /// the entire `ecx` when it is done).
127Stop { cleanup: bool },
128}
129130/// State of a local variable including a memoized layout
131#[derive(#[automatically_derived]
impl<'tcx, Prov: ::core::clone::Clone + Provenance> ::core::clone::Clone for
LocalState<'tcx, Prov> {
#[inline]
fn clone(&self) -> LocalState<'tcx, Prov> {
LocalState {
value: ::core::clone::Clone::clone(&self.value),
layout: ::core::clone::Clone::clone(&self.layout),
}
}
}Clone)]
132pub struct LocalState<'tcx, Prov: Provenance = CtfeProvenance> {
133 value: LocalValue<Prov>,
134/// Don't modify if `Some`, this is only used to prevent computing the layout twice.
135 /// Avoids computing the layout of locals that are never actually initialized.
136layout: Cell<Option<TyAndLayout<'tcx>>>,
137}
impl<Prov: Provenance> std::fmt::Debug for LocalState<'_, Prov> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Print only the type from the memoized layout (if any) -- the full layout
        // would be far too verbose for debug output.
        f.debug_struct("LocalState")
            .field("value", &self.value)
            .field("ty", &self.layout.get().map(|l| l.ty))
            .finish()
    }
}
147148/// Current value of a local variable
149///
150/// This does not store the type of the local; the type is given by `body.local_decls` and can never
151/// change, so by not storing here we avoid having to maintain that as an invariant.
152#[derive(#[automatically_derived]
impl<Prov: ::core::marker::Copy + Provenance> ::core::marker::Copy for
LocalValue<Prov> {
}Copy, #[automatically_derived]
impl<Prov: ::core::clone::Clone + Provenance> ::core::clone::Clone for
LocalValue<Prov> {
#[inline]
fn clone(&self) -> LocalValue<Prov> {
match self {
LocalValue::Dead => LocalValue::Dead,
LocalValue::Live(__self_0) =>
LocalValue::Live(::core::clone::Clone::clone(__self_0)),
}
}
}Clone, #[automatically_derived]
impl<Prov: ::core::fmt::Debug + Provenance> ::core::fmt::Debug for
LocalValue<Prov> {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match self {
LocalValue::Dead => ::core::fmt::Formatter::write_str(f, "Dead"),
LocalValue::Live(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Live",
&__self_0),
}
}
}Debug)] // Miri debug-prints these
153pub(super) enum LocalValue<Prov: Provenance = CtfeProvenance> {
154/// This local is not currently alive, and cannot be used at all.
155Dead,
156/// A normal, live local.
157 /// Mostly for convenience, we re-use the `Operand` type here.
158 /// This is an optimization over just always having a pointer here;
159 /// we can thus avoid doing an allocation when the local just stores
160 /// immediate values *and* never has its address taken.
161Live(Operand<Prov>),
162}
163164impl<'tcx, Prov: Provenance> LocalState<'tcx, Prov> {
165pub fn make_live_uninit(&mut self) {
166self.value = LocalValue::Live(Operand::Immediate(Immediate::Uninit));
167 }
168169/// This is a hack because Miri needs a way to visit all the provenance in a `LocalState`
170 /// without having a layout or `TyCtxt` available, and we want to keep the `Operand` type
171 /// private.
172pub fn as_mplace_or_imm(
173&self,
174 ) -> Option<Either<(Pointer<Option<Prov>>, MemPlaceMeta<Prov>), Immediate<Prov>>> {
175match self.value {
176 LocalValue::Dead => None,
177 LocalValue::Live(Operand::Indirect(mplace)) => Some(Left((mplace.ptr, mplace.meta))),
178 LocalValue::Live(Operand::Immediate(imm)) => Some(Right(imm)),
179 }
180 }
181182/// Read the local's value or error if the local is not yet live or not live anymore.
183#[inline(always)]
184pub(super) fn access(&self) -> InterpResult<'tcx, &Operand<Prov>> {
185match &self.value {
186 LocalValue::Dead => do yeet ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::DeadLocal)throw_ub!(DeadLocal), // could even be "invalid program"?
187LocalValue::Live(val) => interp_ok(val),
188 }
189 }
190191/// Overwrite the local. If the local can be overwritten in place, return a reference
192 /// to do so; otherwise return the `MemPlace` to consult instead.
193#[inline(always)]
194pub(super) fn access_mut(&mut self) -> InterpResult<'tcx, &mut Operand<Prov>> {
195match &mut self.value {
196 LocalValue::Dead => do yeet ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::DeadLocal)throw_ub!(DeadLocal), // could even be "invalid program"?
197LocalValue::Live(val) => interp_ok(val),
198 }
199 }
200}
201202/// What we store about a frame in an interpreter backtrace.
203#[derive(#[automatically_derived]
impl<'tcx> ::core::clone::Clone for FrameInfo<'tcx> {
#[inline]
fn clone(&self) -> FrameInfo<'tcx> {
FrameInfo {
instance: ::core::clone::Clone::clone(&self.instance),
span: ::core::clone::Clone::clone(&self.span),
}
}
}Clone, #[automatically_derived]
impl<'tcx> ::core::fmt::Debug for FrameInfo<'tcx> {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field2_finish(f, "FrameInfo",
"instance", &self.instance, "span", &&self.span)
}
}Debug)]
204pub struct FrameInfo<'tcx> {
205pub instance: ty::Instance<'tcx>,
206pub span: Span,
207}
208209// FIXME: only used by miri, should be removed once translatable.
210impl<'tcx> fmt::Displayfor FrameInfo<'tcx> {
211fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
212 ty::tls::with(|tcx| {
213if tcx.def_key(self.instance.def_id()).disambiguated_data.data == DefPathData::Closure {
214f.write_fmt(format_args!("inside closure"))write!(f, "inside closure")215 } else {
216// Note: this triggers a `must_produce_diag` state, which means that if we ever
217 // get here we must emit a diagnostic. We should never display a `FrameInfo` unless
218 // we actually want to emit a warning or error to the user.
219f.write_fmt(format_args!("inside `{0}`", self.instance))write!(f, "inside `{}`", self.instance)220 }
221 })
222 }
223}
224225impl<'tcx> FrameInfo<'tcx> {
226pub fn as_note(&self, tcx: TyCtxt<'tcx>) -> errors::FrameNote {
227let span = self.span;
228if tcx.def_key(self.instance.def_id()).disambiguated_data.data == DefPathData::Closure {
229 errors::FrameNote {
230 where_: "closure",
231span,
232 instance: String::new(),
233 times: 0,
234 has_label: false,
235 }
236 } else {
237let instance = ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0}", self.instance))
})format!("{}", self.instance);
238// Note: this triggers a `must_produce_diag` state, which means that if we ever get
239 // here we must emit a diagnostic. We should never display a `FrameInfo` unless we
240 // actually want to emit a warning or error to the user.
241errors::FrameNote { where_: "instance", span, instance, times: 0, has_label: false }
242 }
243 }
244}
impl<'tcx, Prov: Provenance> Frame<'tcx, Prov> {
    /// Attach machine-specific `extra` data to this frame, converting it from
    /// `Frame<'tcx, Prov, ()>` into `Frame<'tcx, Prov, Extra>`. All other fields
    /// are moved over unchanged.
    pub fn with_extra<Extra>(self, extra: Extra) -> Frame<'tcx, Prov, Extra> {
        Frame {
            body: self.body,
            instance: self.instance,
            return_cont: self.return_cont,
            return_place: self.return_place,
            locals: self.locals,
            va_list: self.va_list,
            loc: self.loc,
            extra,
            tracing_span: self.tracing_span,
        }
    }
}
261262impl<'tcx, Prov: Provenance, Extra> Frame<'tcx, Prov, Extra> {
263/// Get the current location within the Frame.
264 ///
265 /// If this is `Right`, we are not currently executing any particular statement in
266 /// this frame (can happen e.g. during frame initialization, and during unwinding on
267 /// frames without cleanup code).
268 ///
269 /// Used by [priroda](https://github.com/oli-obk/priroda).
270pub fn current_loc(&self) -> Either<mir::Location, Span> {
271self.loc
272 }
273274pub fn body(&self) -> &'tcx mir::Body<'tcx> {
275self.body
276 }
277278pub fn instance(&self) -> ty::Instance<'tcx> {
279self.instance
280 }
281282pub fn return_place(&self) -> &PlaceTy<'tcx, Prov> {
283&self.return_place
284 }
285286pub fn return_cont(&self) -> ReturnContinuation {
287self.return_cont
288 }
289290/// Return the `SourceInfo` of the current instruction.
291pub fn current_source_info(&self) -> Option<&mir::SourceInfo> {
292self.loc.left().map(|loc| self.body.source_info(loc))
293 }
294295pub fn current_span(&self) -> Span {
296match self.loc {
297Left(loc) => self.body.source_info(loc).span,
298Right(span) => span,
299 }
300 }
301302pub fn lint_root(&self, tcx: TyCtxt<'tcx>) -> Option<hir::HirId> {
303// We first try to get a HirId via the current source scope,
304 // and fall back to `body.source`.
305self.current_source_info()
306 .and_then(|source_info| match &self.body.source_scopes[source_info.scope].local_data {
307 mir::ClearCrossCrate::Set(data) => Some(data.lint_root),
308 mir::ClearCrossCrate::Clear => None,
309 })
310 .or_else(|| {
311let def_id = self.body.source.def_id().as_local();
312def_id.map(|def_id| tcx.local_def_id_to_hir_id(def_id))
313 })
314 }
315316/// Returns the address of the buffer where the locals are stored. This is used by `Place` as a
317 /// sanity check to detect bugs where we mix up which stack frame a place refers to.
318#[inline(always)]
319pub(super) fn locals_addr(&self) -> usize {
320self.locals.raw.as_ptr().addr()
321 }
322323#[must_use]
324pub fn generate_stacktrace_from_stack(
325 stack: &[Self],
326 tcx: TyCtxt<'tcx>,
327 ) -> Vec<FrameInfo<'tcx>> {
328let mut frames = Vec::new();
329// This deliberately does *not* honor `requires_caller_location` since it is used for much
330 // more than just panics.
331for frame in stack.iter().rev() {
332let mut span = match frame.loc {
333 Left(loc) => {
334// If the stacktrace passes through MIR-inlined source scopes, add them.
335let mir::SourceInfo { mut span, scope } = *frame.body.source_info(loc);
336let mut scope_data = &frame.body.source_scopes[scope];
337while let Some((instance, call_span)) = scope_data.inlined {
338 frames.push(FrameInfo { span, instance });
339 span = call_span;
340 scope_data = &frame.body.source_scopes[scope_data.parent_scope.unwrap()];
341 }
342 span
343 }
344 Right(span) => span,
345 };
346if span.is_dummy() {
347// Some statements lack a proper span; point at the function instead.
348span = tcx.def_span(frame.instance.def_id());
349 }
350 frames.push(FrameInfo { span, instance: frame.instance });
351 }
352{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/stack.rs:352",
"rustc_const_eval::interpret::stack",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/stack.rs"),
::tracing_core::__macro_support::Option::Some(352u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::stack"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("generate stacktrace: {0:#?}",
frames) as &dyn Value))])
});
} else { ; }
};trace!("generate stacktrace: {:#?}", frames);
353frames354 }
355}
356357impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
358/// Very low-level helper that pushes a stack frame without initializing
359 /// the arguments or local variables.
360 ///
361 /// The high-level version of this is `init_stack_frame`.
362#[allow(clippy :: suspicious_else_formatting)]
{
let __tracing_attr_span;
let __tracing_attr_guard;
if ::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() ||
{ false } {
__tracing_attr_span =
{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("push_stack_frame_raw",
"rustc_const_eval::interpret::stack",
::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/stack.rs"),
::tracing_core::__macro_support::Option::Some(362u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::stack"),
::tracing_core::field::FieldSet::new(&["instance"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::SPAN)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let mut interest = ::tracing::subscriber::Interest::never();
if ::tracing::Level::DEBUG <=
::tracing::level_filters::STATIC_MAX_LEVEL &&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{ interest = __CALLSITE.interest(); !interest.is_never() }
&&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest) {
let meta = __CALLSITE.metadata();
::tracing::Span::new(meta,
&{
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = meta.fields().iter();
meta.fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&::tracing::field::debug(&instance)
as &dyn Value))])
})
} else {
let span =
::tracing::__macro_support::__disabled_span(__CALLSITE.metadata());
{};
span
}
};
__tracing_attr_guard = __tracing_attr_span.enter();
}
#[warn(clippy :: suspicious_else_formatting)]
{
#[allow(unknown_lints, unreachable_code, clippy ::
diverging_sub_expression, clippy :: empty_loop, clippy ::
let_unit_value, clippy :: let_with_type_underscore, clippy ::
needless_return, clippy :: unreachable)]
if false {
let __tracing_attr_fake_return: InterpResult<'tcx> = loop {};
return __tracing_attr_fake_return;
}
{
{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/stack.rs:370",
"rustc_const_eval::interpret::stack",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/stack.rs"),
::tracing_core::__macro_support::Option::Some(370u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::stack"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::TRACE <=
::tracing::level_filters::STATIC_MAX_LEVEL &&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("body: {0:#?}",
body) as &dyn Value))])
});
} else { ; }
};
if true {
match (&self.stack().is_empty(),
&#[allow(non_exhaustive_omitted_patterns)] match return_cont
{
ReturnContinuation::Stop { .. } => true,
_ => false,
}) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val,
&*right_val, ::core::option::Option::None);
}
}
};
};
let dead_local =
LocalState {
value: LocalValue::Dead,
layout: Cell::new(None),
};
let locals = IndexVec::from_elem(dead_local, &body.local_decls);
let pre_frame =
Frame {
body,
loc: Right(body.span),
return_cont,
return_place: return_place.clone(),
locals,
va_list: ::alloc::vec::Vec::new(),
instance,
tracing_span: SpanGuard::new(),
extra: (),
};
let frame = M::init_frame(self, pre_frame)?;
self.stack_mut().push(frame);
for &const_ in body.required_consts() {
let _trace =
<M as
crate::interpret::Machine>::enter_trace_span(||
{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("const_eval",
"rustc_const_eval::interpret::stack",
::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/stack.rs"),
::tracing_core::__macro_support::Option::Some(400u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::stack"),
::tracing_core::field::FieldSet::new(&["const_eval",
"const_.const_"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::SPAN)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let mut interest = ::tracing::subscriber::Interest::never();
if ::tracing::Level::INFO <=
::tracing::level_filters::STATIC_MAX_LEVEL &&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{ interest = __CALLSITE.interest(); !interest.is_never() }
&&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest) {
let meta = __CALLSITE.metadata();
::tracing::Span::new(meta,
&{
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = meta.fields().iter();
meta.fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&display(&"required_consts")
as &dyn Value)),
(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&debug(&const_.const_)
as &dyn Value))])
})
} else {
let span =
::tracing::__macro_support::__disabled_span(__CALLSITE.metadata());
{};
span
}
});
let c =
self.instantiate_from_current_frame_and_normalize_erasing_regions(const_.const_)?;
c.eval(*self.tcx, self.typing_env,
const_.span).map_err(|err|
{ err.emit_note(*self.tcx); err })?;
}
M::after_stack_push(self)?;
self.frame_mut().loc = Left(mir::Location::START);
let span =
{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("frame",
"rustc_const_eval::interpret::stack",
::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/stack.rs"),
::tracing_core::__macro_support::Option::Some(416u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::stack"),
::tracing_core::field::FieldSet::new(&["tracing_separate_thread",
"frame"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::SPAN)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let mut interest = ::tracing::subscriber::Interest::never();
if ::tracing::Level::INFO <=
::tracing::level_filters::STATIC_MAX_LEVEL &&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{ interest = __CALLSITE.interest(); !interest.is_never() }
&&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest) {
let meta = __CALLSITE.metadata();
::tracing::Span::new(meta,
&{
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = meta.fields().iter();
meta.fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&Empty as
&dyn Value)),
(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&display(&instance)
as &dyn Value))])
})
} else {
let span =
::tracing::__macro_support::__disabled_span(__CALLSITE.metadata());
{};
span
}
};
self.frame_mut().tracing_span.enter(span);
interp_ok(())
}
}
}#[instrument(skip(self, body, return_place, return_cont), level = "debug")]363pub(crate) fn push_stack_frame_raw(
364&mut self,
365 instance: ty::Instance<'tcx>,
366 body: &'tcx mir::Body<'tcx>,
367 return_place: &PlaceTy<'tcx, M::Provenance>,
368 return_cont: ReturnContinuation,
369 ) -> InterpResult<'tcx> {
370trace!("body: {:#?}", body);
371372// We can push a `Root` frame if and only if the stack is empty.
373debug_assert_eq!(
374self.stack().is_empty(),
375matches!(return_cont, ReturnContinuation::Stop { .. })
376 );
377378// First push a stack frame so we have access to `instantiate_from_current_frame` and other
379 // `self.frame()`-based functions.
380let dead_local = LocalState { value: LocalValue::Dead, layout: Cell::new(None) };
381let locals = IndexVec::from_elem(dead_local, &body.local_decls);
382let pre_frame = Frame {
383 body,
384 loc: Right(body.span), // Span used for errors caused during preamble.
385return_cont,
386 return_place: return_place.clone(),
387 locals,
388 va_list: vec![],
389 instance,
390 tracing_span: SpanGuard::new(),
391 extra: (),
392 };
393let frame = M::init_frame(self, pre_frame)?;
394self.stack_mut().push(frame);
395396// Make sure all the constants required by this frame evaluate successfully (post-monomorphization check).
397for &const_ in body.required_consts() {
398// We can't use `eval_mir_constant` here as that assumes that all required consts have
399 // already been checked, so we need a separate tracing call.
400let _trace = enter_trace_span!(M, const_eval::required_consts, ?const_.const_);
401let c =
402self.instantiate_from_current_frame_and_normalize_erasing_regions(const_.const_)?;
403 c.eval(*self.tcx, self.typing_env, const_.span).map_err(|err| {
404 err.emit_note(*self.tcx);
405 err
406 })?;
407 }
408409// Finish things up.
410M::after_stack_push(self)?;
411self.frame_mut().loc = Left(mir::Location::START);
412// `tracing_separate_thread` is used to instruct the tracing_chrome [tracing::Layer] in Miri
413 // to put the "frame" span on a separate trace thread/line than other spans, to make the
414 // visualization in <https://ui.perfetto.dev> easier to interpret. It is set to a value of
415 // [tracing::field::Empty] so that other tracing layers (e.g. the logger) will ignore it.
416let span = info_span!("frame", tracing_separate_thread = Empty, frame = %instance);
417self.frame_mut().tracing_span.enter(span);
418419 interp_ok(())
420 }
421422/// Low-level helper that pops a stack frame from the stack without any cleanup.
423 /// This invokes `before_stack_pop`.
424 /// After calling this function, you need to deal with the return value, and then
425 /// invoke `cleanup_stack_frame`.
426pub(super) fn pop_stack_frame_raw(
427&mut self,
428 ) -> InterpResult<'tcx, Frame<'tcx, M::Provenance, M::FrameExtra>> {
429 M::before_stack_pop(self)?;
430let frame =
431self.stack_mut().pop().expect("tried to pop a stack frame, but there were none");
432interp_ok(frame)
433 }
434435/// Deallocate local variables in the stack frame, and invoke `after_stack_pop`.
436pub(super) fn cleanup_stack_frame(
437&mut self,
438 unwinding: bool,
439 frame: Frame<'tcx, M::Provenance, M::FrameExtra>,
440 ) -> InterpResult<'tcx, ReturnAction> {
441let return_cont = frame.return_cont;
442443// Cleanup: deallocate locals.
444 // Usually we want to clean up (deallocate locals), but in a few rare cases we don't.
445 // We do this while the frame is still on the stack, so errors point to the callee.
446let cleanup = match return_cont {
447 ReturnContinuation::Goto { .. } => true,
448 ReturnContinuation::Stop { cleanup, .. } => cleanup,
449 };
450451if cleanup {
452for local in &frame.locals {
453self.deallocate_local(local.value)?;
454 }
455456// Deallocate any c-variadic arguments.
457self.deallocate_varargs(&frame.va_list)?;
458459// Call the machine hook, which determines the next steps.
460let return_action = M::after_stack_pop(self, frame, unwinding)?;
461match (&return_action, &ReturnAction::NoCleanup) {
(left_val, right_val) => {
if *left_val == *right_val {
let kind = ::core::panicking::AssertKind::Ne;
::core::panicking::assert_failed(kind, &*left_val, &*right_val,
::core::option::Option::None);
}
}
};assert_ne!(return_action, ReturnAction::NoCleanup);
462interp_ok(return_action)
463 } else {
464// We also skip the machine hook when there's no cleanup. This not a real "pop" anyway.
465interp_ok(ReturnAction::NoCleanup)
466 }
467 }
468469/// In the current stack frame, mark all locals as live that are not arguments and don't have
470 /// `Storage*` annotations (this includes the return place).
471pub(crate) fn storage_live_for_always_live_locals(&mut self) -> InterpResult<'tcx> {
472self.storage_live(mir::RETURN_PLACE)?;
473474let body = self.body();
475let always_live = always_storage_live_locals(body);
476for local in body.vars_and_temps_iter() {
477if always_live.contains(local) {
478self.storage_live(local)?;
479 }
480 }
481interp_ok(())
482 }
483484pub fn storage_live_dyn(
485&mut self,
486 local: mir::Local,
487 meta: MemPlaceMeta<M::Provenance>,
488 ) -> InterpResult<'tcx> {
489{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/stack.rs:489",
"rustc_const_eval::interpret::stack",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/stack.rs"),
::tracing_core::__macro_support::Option::Some(489u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::stack"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("{0:?} is now live",
local) as &dyn Value))])
});
} else { ; }
};trace!("{:?} is now live", local);
490491// We avoid `ty.is_trivially_sized` since that does something expensive for ADTs.
492fn is_very_trivially_sized(ty: Ty<'_>) -> bool {
493match ty.kind() {
494 ty::Infer(ty::IntVar(_) | ty::FloatVar(_))
495 | ty::Uint(_)
496 | ty::Int(_)
497 | ty::Bool498 | ty::Float(_)
499 | ty::FnDef(..)
500 | ty::FnPtr(..)
501 | ty::RawPtr(..)
502 | ty::Char503 | ty::Ref(..)
504 | ty::Coroutine(..)
505 | ty::CoroutineWitness(..)
506 | ty::Array(..)
507 | ty::Closure(..)
508 | ty::CoroutineClosure(..)
509 | ty::Never510 | ty::Error(_) => true,
511512 ty::Str | ty::Slice(_) | ty::Dynamic(_, _) | ty::Foreign(..) => false,
513514 ty::Tuple(tys) => tys.last().is_none_or(|ty| is_very_trivially_sized(*ty)),
515516 ty::Pat(ty, ..) => is_very_trivially_sized(*ty),
517518// We don't want to do any queries, so there is not much we can do with ADTs.
519ty::Adt(..) => false,
520521 ty::UnsafeBinder(ty) => is_very_trivially_sized(ty.skip_binder()),
522523 ty::Alias(..) | ty::Param(_) | ty::Placeholder(..) => false,
524525 ty::Infer(ty::TyVar(_)) => false,
526527 ty::Bound(..)
528 | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => {
529::rustc_middle::util::bug::bug_fmt(format_args!("`is_very_trivially_sized` applied to unexpected type: {0}",
ty))bug!("`is_very_trivially_sized` applied to unexpected type: {}", ty)530 }
531 }
532 }
533534// This is a hot function, we avoid computing the layout when possible.
535 // `unsized_` will be `None` for sized types and `Some(layout)` for unsized types.
536let unsized_ = if is_very_trivially_sized(self.body().local_decls[local].ty) {
537None538 } else {
539// We need the layout.
540let layout = self.layout_of_local(self.frame(), local, None)?;
541if layout.is_sized() { None } else { Some(layout) }
542 };
543544let local_val = LocalValue::Live(if let Some(layout) = unsized_ {
545if !meta.has_meta() {
546do yeet ::rustc_middle::mir::interpret::InterpErrorKind::Unsupported(::rustc_middle::mir::interpret::UnsupportedOpInfo::UnsizedLocal);throw_unsup!(UnsizedLocal);
547 }
548// Need to allocate some memory, since `Immediate::Uninit` cannot be unsized.
549let dest_place = self.allocate_dyn(layout, MemoryKind::Stack, meta)?;
550 Operand::Indirect(*dest_place.mplace())
551 } else {
552// Just make this an efficient immediate.
553if !!meta.has_meta() {
::core::panicking::panic("assertion failed: !meta.has_meta()")
};assert!(!meta.has_meta()); // we're dropping the metadata
554 // Make sure the machine knows this "write" is happening. (This is important so that
555 // races involving local variable allocation can be detected by Miri.)
556M::after_local_write(self, local, /*storage_live*/ true)?;
557// Note that not calling `layout_of` here does have one real consequence:
558 // if the type is too big, we'll only notice this when the local is actually initialized,
559 // which is a bit too late -- we should ideally notice this already here, when the memory
560 // is conceptually allocated. But given how rare that error is and that this is a hot function,
561 // we accept this downside for now.
562Operand::Immediate(Immediate::Uninit)
563 });
564565// If the local is already live, deallocate its old memory.
566let old = mem::replace(&mut self.frame_mut().locals[local].value, local_val);
567self.deallocate_local(old)?;
568interp_ok(())
569 }
570571/// Mark a storage as live, killing the previous content.
572#[inline(always)]
573pub fn storage_live(&mut self, local: mir::Local) -> InterpResult<'tcx> {
574self.storage_live_dyn(local, MemPlaceMeta::None)
575 }
576577pub fn storage_dead(&mut self, local: mir::Local) -> InterpResult<'tcx> {
578if !(local != mir::RETURN_PLACE) {
{
::core::panicking::panic_fmt(format_args!("Cannot make return place dead"));
}
};assert!(local != mir::RETURN_PLACE, "Cannot make return place dead");
579{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/stack.rs:579",
"rustc_const_eval::interpret::stack",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/stack.rs"),
::tracing_core::__macro_support::Option::Some(579u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::stack"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("{0:?} is now dead",
local) as &dyn Value))])
});
} else { ; }
};trace!("{:?} is now dead", local);
580581// If the local is already dead, this is a NOP.
582let old = mem::replace(&mut self.frame_mut().locals[local].value, LocalValue::Dead);
583self.deallocate_local(old)?;
584interp_ok(())
585 }
586587fn deallocate_local(&mut self, local: LocalValue<M::Provenance>) -> InterpResult<'tcx> {
588if let LocalValue::Live(Operand::Indirect(MemPlace { ptr, .. })) = local {
589// All locals have a backing allocation, even if the allocation is empty
590 // due to the local having ZST type. Hence we can `unwrap`.
591{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/stack.rs:591",
"rustc_const_eval::interpret::stack",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/stack.rs"),
::tracing_core::__macro_support::Option::Some(591u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::stack"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("deallocating local {0:?}: {1:?}",
local,
self.dump_alloc(ptr.provenance.unwrap().get_alloc_id().unwrap()))
as &dyn Value))])
});
} else { ; }
};trace!(
592"deallocating local {:?}: {:?}",
593 local,
594// Locals always have a `alloc_id` (they are never the result of a int2ptr).
595self.dump_alloc(ptr.provenance.unwrap().get_alloc_id().unwrap())
596 );
597self.deallocate_ptr(ptr, None, MemoryKind::Stack)?;
598 };
599interp_ok(())
600 }
601602/// This is public because it is used by [Aquascope](https://github.com/cognitive-engineering-lab/aquascope/)
603 /// to analyze all the locals in a stack frame.
604#[inline(always)]
605pub fn layout_of_local(
606&self,
607 frame: &Frame<'tcx, M::Provenance, M::FrameExtra>,
608 local: mir::Local,
609 layout: Option<TyAndLayout<'tcx>>,
610 ) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
611let state = &frame.locals[local];
612if let Some(layout) = state.layout.get() {
613return interp_ok(layout);
614 }
615616let layout = from_known_layout(self.tcx, self.typing_env, layout, || {
617let local_ty = frame.body.local_decls[local].ty;
618let local_ty =
619self.instantiate_from_frame_and_normalize_erasing_regions(frame, local_ty)?;
620self.layout_of(local_ty).into()
621 })?;
622623// Layouts of locals are requested a lot, so we cache them.
624state.layout.set(Some(layout));
625interp_ok(layout)
626 }
627}
628629impl<'a, 'tcx: 'a, M: Machine<'tcx>> InterpCx<'tcx, M> {
630/// Consume the arguments provided by the iterator and store them as a list
631 /// of variadic arguments. Return a list of the places that hold those arguments.
632pub(crate) fn allocate_varargs<I, J>(
633&mut self,
634 caller_args: &mut I,
635 callee_abis: &mut J,
636 ) -> InterpResult<'tcx, Vec<MPlaceTy<'tcx, M::Provenance>>>
637where
638I: Iterator<Item = (&'a FnArg<'tcx, M::Provenance>, &'a ArgAbi<'tcx, Ty<'tcx>>)>,
639 J: Iterator<Item = (usize, &'a ArgAbi<'tcx, Ty<'tcx>>)>,
640 {
641// Consume the remaining arguments and store them in fresh allocations.
642let mut varargs = Vec::new();
643for (fn_arg, caller_abi) in caller_args {
644// The callee ABI is entirely computed based on which arguments the caller has
645 // provided so it should not be possible to get a mismatch here.
646let (_idx, callee_abi) = callee_abis.next().unwrap();
647if !self.check_argument_compat(caller_abi, callee_abi)? {
::core::panicking::panic("assertion failed: self.check_argument_compat(caller_abi, callee_abi)?")
};assert!(self.check_argument_compat(caller_abi, callee_abi)?);
648// FIXME: do we have to worry about in-place argument passing?
649let op = fn_arg.copy_fn_arg();
650let mplace = self.allocate(op.layout, MemoryKind::Stack)?;
651self.copy_op(&op, &mplace)?;
652653 varargs.push(mplace);
654 }
655if !callee_abis.next().is_none() {
::core::panicking::panic("assertion failed: callee_abis.next().is_none()")
};assert!(callee_abis.next().is_none());
656657interp_ok(varargs)
658 }
659660/// Deallocate the variadic arguments in the list (that must have been created with `allocate_varargs`).
661fn deallocate_varargs(
662&mut self,
663 varargs: &[MPlaceTy<'tcx, M::Provenance>],
664 ) -> InterpResult<'tcx> {
665for vararg in varargs {
666let ptr = vararg.ptr();
667668{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/stack.rs:668",
"rustc_const_eval::interpret::stack",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/stack.rs"),
::tracing_core::__macro_support::Option::Some(668u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::stack"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("deallocating vararg {0:?}: {1:?}",
vararg,
self.dump_alloc(ptr.provenance.unwrap().get_alloc_id().unwrap()))
as &dyn Value))])
});
} else { ; }
};trace!(
669"deallocating vararg {:?}: {:?}",
670 vararg,
671// Locals always have a `alloc_id` (they are never the result of a int2ptr).
672self.dump_alloc(ptr.provenance.unwrap().get_alloc_id().unwrap())
673 );
674self.deallocate_ptr(ptr, None, MemoryKind::Stack)?;
675 }
676677interp_ok(())
678 }
679}
680681impl<'tcx, Prov: Provenance> LocalState<'tcx, Prov> {
682pub(super) fn print(
683&self,
684 allocs: &mut Vec<Option<AllocId>>,
685 fmt: &mut std::fmt::Formatter<'_>,
686 ) -> std::fmt::Result {
687match self.value {
688 LocalValue::Dead => fmt.write_fmt(format_args!(" is dead"))write!(fmt, " is dead")?,
689 LocalValue::Live(Operand::Immediate(Immediate::Uninit)) => {
690fmt.write_fmt(format_args!(" is uninitialized"))write!(fmt, " is uninitialized")?
691}
692 LocalValue::Live(Operand::Indirect(mplace)) => {
693fmt.write_fmt(format_args!(" by {0} ref {1:?}:",
match mplace.meta {
MemPlaceMeta::Meta(meta) =>
::alloc::__export::must_use({
::alloc::fmt::format(format_args!(" meta({0:?})", meta))
}),
MemPlaceMeta::None => String::new(),
}, mplace.ptr))write!(
694fmt,
695" by {} ref {:?}:",
696match mplace.meta {
697 MemPlaceMeta::Meta(meta) => format!(" meta({meta:?})"),
698 MemPlaceMeta::None => String::new(),
699 },
700 mplace.ptr,
701 )?;
702allocs.extend(mplace.ptr.provenance.map(Provenance::get_alloc_id));
703 }
704 LocalValue::Live(Operand::Immediate(Immediate::Scalar(val))) => {
705fmt.write_fmt(format_args!(" {0:?}", val))write!(fmt, " {val:?}")?;
706if let Scalar::Ptr(ptr, _size) = val {
707allocs.push(ptr.provenance.get_alloc_id());
708 }
709 }
710 LocalValue::Live(Operand::Immediate(Immediate::ScalarPair(val1, val2))) => {
711fmt.write_fmt(format_args!(" ({0:?}, {1:?})", val1, val2))write!(fmt, " ({val1:?}, {val2:?})")?;
712if let Scalar::Ptr(ptr, _size) = val1 {
713allocs.push(ptr.provenance.get_alloc_id());
714 }
715if let Scalar::Ptr(ptr, _size) = val2 {
716allocs.push(ptr.provenance.get_alloc_id());
717 }
718 }
719 }
720721Ok(())
722 }
723}