rustc_const_eval/interpret/
stack.rs

1//! Manages the low-level pushing and popping of stack frames and the (de)allocation of local variables.
2//! For handling of argument passing and return values, see the `call` module.
3use std::cell::Cell;
4use std::{fmt, mem};
5
6use either::{Either, Left, Right};
7use rustc_hir as hir;
8use rustc_hir::definitions::DefPathData;
9use rustc_index::IndexVec;
10use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
11use rustc_middle::ty::{self, Ty, TyCtxt};
12use rustc_middle::{bug, mir};
13use rustc_mir_dataflow::impls::always_storage_live_locals;
14use rustc_span::Span;
15use tracing::{info_span, instrument, trace};
16
17use super::{
18    AllocId, CtfeProvenance, Immediate, InterpCx, InterpResult, MPlaceTy, Machine, MemPlace,
19    MemPlaceMeta, MemoryKind, Operand, Pointer, Provenance, ReturnAction, Scalar,
20    from_known_layout, interp_ok, throw_ub, throw_unsup,
21};
22use crate::errors;
23
// The PhantomData exists to prevent this type from being `Send`. If it were sent across a thread
// boundary and dropped in the other thread, it would exit the span in the other thread.
// (`*const u8` is `!Send`, which makes the whole struct `!Send` at zero runtime cost.)
struct SpanGuard(tracing::Span, std::marker::PhantomData<*const u8>);
27
impl SpanGuard {
    /// By default a `SpanGuard` does nothing (it wraps the "none" span, so entering or
    /// exiting it is a no-op).
    fn new() -> Self {
        Self(tracing::Span::none(), std::marker::PhantomData)
    }

    /// If a span is entered, we exit the previous span (if any, normally none) and enter the
    /// new span. This is mainly so we don't have to use `Option` for the `tracing_span` field of
    /// `Frame` by creating a dummy span to begin with and then entering it once the frame has
    /// been pushed.
    fn enter(&mut self, span: tracing::Span) {
        // This executes the destructor on the previous instance of `SpanGuard`, ensuring that
        // we never enter or exit more spans than vice versa. Unless you `mem::leak`, then we
        // can't protect the tracing stack, but that'll just lead to weird logging, no actual
        // problems.
        *self = Self(span, std::marker::PhantomData);
        self.0.with_subscriber(|(id, dispatch)| {
            dispatch.enter(id);
        });
    }
}
49
impl Drop for SpanGuard {
    fn drop(&mut self) {
        // Exit the span when the frame is popped (or otherwise dropped), keeping the
        // tracing span stack balanced with the interpreter's frame stack.
        self.0.with_subscriber(|(id, dispatch)| {
            dispatch.exit(id);
        });
    }
}
57
/// A stack frame.
pub struct Frame<'tcx, Prov: Provenance = CtfeProvenance, Extra = ()> {
    ////////////////////////////////////////////////////////////////////////////////
    // Function and callsite information
    ////////////////////////////////////////////////////////////////////////////////
    /// The MIR for the function called on this frame.
    pub(super) body: &'tcx mir::Body<'tcx>,

    /// The def_id and args of the current function.
    pub(super) instance: ty::Instance<'tcx>,

    /// Extra data for the machine.
    pub extra: Extra,

    ////////////////////////////////////////////////////////////////////////////////
    // Return place and locals
    ////////////////////////////////////////////////////////////////////////////////
    /// Work to perform when returning from this function.
    return_to_block: StackPopCleanup,

    /// The location where the result of the current stack frame should be written to,
    /// and its layout in the caller.
    pub return_place: MPlaceTy<'tcx, Prov>,

    /// The list of locals for this stack frame, stored in order as
    /// `[return_ptr, arguments..., variables..., temporaries...]`.
    /// The locals are stored as `Option<Value>`s.
    /// `None` represents a local that is currently dead, while a live local
    /// can either directly contain `Scalar` or refer to some part of an `Allocation`.
    ///
    /// Do *not* access this directly; always go through the machine hook!
    pub locals: IndexVec<mir::Local, LocalState<'tcx, Prov>>,

    /// The span of the `tracing` crate is stored here.
    /// When the guard is dropped, the span is exited. This gives us
    /// a full stack trace on all tracing statements.
    tracing_span: SpanGuard,

    ////////////////////////////////////////////////////////////////////////////////
    // Current position within the function
    ////////////////////////////////////////////////////////////////////////////////
    /// `Left(loc)` means we are executing the statement/terminator at `loc` in `body`.
    /// If this is `Right`, we are not currently executing any particular statement in
    /// this frame (can happen e.g. during frame initialization, and during unwinding on
    /// frames without cleanup code); the span is then used for error reporting.
    ///
    /// Needs to be public because ConstProp does unspeakable things to it.
    pub(super) loc: Either<mir::Location, Span>,
}
106
/// What to do when a frame is popped (set when the frame is pushed).
#[derive(Clone, Copy, Eq, PartialEq, Debug)] // Miri debug-prints these
pub enum StackPopCleanup {
    /// Jump to the next block in the caller, or cause UB if None (that's a function
    /// that may never return). Also store layout of return place so
    /// we can validate it at that layout.
    /// `ret` stores the block we jump to on a normal return, while `unwind`
    /// stores the block used for cleanup during unwinding.
    Goto { ret: Option<mir::BasicBlock>, unwind: mir::UnwindAction },
    /// The root frame of the stack: nowhere else to jump to.
    /// `cleanup` says whether locals are deallocated. Static computation
    /// wants them leaked to intern what they need (and just throw away
    /// the entire `ecx` when it is done).
    Root { cleanup: bool },
}
121
/// Return type of [`InterpCx::pop_stack_frame_raw`].
pub struct StackPopInfo<'tcx, Prov: Provenance> {
    /// Additional information about the action to be performed when returning from the popped
    /// stack frame.
    pub return_action: ReturnAction,

    /// [`return_to_block`](Frame::return_to_block) of the popped stack frame.
    pub return_to_block: StackPopCleanup,

    /// [`return_place`](Frame::return_place) of the popped stack frame.
    pub return_place: MPlaceTy<'tcx, Prov>,
}
134
/// State of a local variable including a memoized layout
#[derive(Clone)]
pub struct LocalState<'tcx, Prov: Provenance = CtfeProvenance> {
    // The current liveness state (and, if live, value) of this local.
    value: LocalValue<Prov>,
    /// Don't modify if `Some`, this is only used to prevent computing the layout twice.
    /// Avoids computing the layout of locals that are never actually initialized.
    layout: Cell<Option<TyAndLayout<'tcx>>>,
}
143
144impl<Prov: Provenance> std::fmt::Debug for LocalState<'_, Prov> {
145    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
146        f.debug_struct("LocalState")
147            .field("value", &self.value)
148            .field("ty", &self.layout.get().map(|l| l.ty))
149            .finish()
150    }
151}
152
/// Current value of a local variable
///
/// This does not store the type of the local; the type is given by `body.local_decls` and can never
/// change, so by not storing here we avoid having to maintain that as an invariant.
#[derive(Copy, Clone, Debug)] // Miri debug-prints these
pub(super) enum LocalValue<Prov: Provenance = CtfeProvenance> {
    /// This local is not currently alive, and cannot be used at all.
    Dead,
    /// A normal, live local.
    /// Mostly for convenience, we re-use the `Operand` type here.
    /// This is an optimization over just always having a pointer here;
    /// we can thus avoid doing an allocation when the local just stores
    /// immediate values *and* never has its address taken.
    Live(Operand<Prov>),
}
168
169impl<'tcx, Prov: Provenance> LocalState<'tcx, Prov> {
170    pub fn make_live_uninit(&mut self) {
171        self.value = LocalValue::Live(Operand::Immediate(Immediate::Uninit));
172    }
173
174    /// This is a hack because Miri needs a way to visit all the provenance in a `LocalState`
175    /// without having a layout or `TyCtxt` available, and we want to keep the `Operand` type
176    /// private.
177    pub fn as_mplace_or_imm(
178        &self,
179    ) -> Option<Either<(Pointer<Option<Prov>>, MemPlaceMeta<Prov>), Immediate<Prov>>> {
180        match self.value {
181            LocalValue::Dead => None,
182            LocalValue::Live(Operand::Indirect(mplace)) => Some(Left((mplace.ptr, mplace.meta))),
183            LocalValue::Live(Operand::Immediate(imm)) => Some(Right(imm)),
184        }
185    }
186
187    /// Read the local's value or error if the local is not yet live or not live anymore.
188    #[inline(always)]
189    pub(super) fn access(&self) -> InterpResult<'tcx, &Operand<Prov>> {
190        match &self.value {
191            LocalValue::Dead => throw_ub!(DeadLocal), // could even be "invalid program"?
192            LocalValue::Live(val) => interp_ok(val),
193        }
194    }
195
196    /// Overwrite the local. If the local can be overwritten in place, return a reference
197    /// to do so; otherwise return the `MemPlace` to consult instead.
198    #[inline(always)]
199    pub(super) fn access_mut(&mut self) -> InterpResult<'tcx, &mut Operand<Prov>> {
200        match &mut self.value {
201            LocalValue::Dead => throw_ub!(DeadLocal), // could even be "invalid program"?
202            LocalValue::Live(val) => interp_ok(val),
203        }
204    }
205}
206
/// What we store about a frame in an interpreter backtrace.
#[derive(Clone, Debug)]
pub struct FrameInfo<'tcx> {
    pub instance: ty::Instance<'tcx>,
    pub span: Span,
}
213
214// FIXME: only used by miri, should be removed once translatable.
215impl<'tcx> fmt::Display for FrameInfo<'tcx> {
216    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
217        ty::tls::with(|tcx| {
218            if tcx.def_key(self.instance.def_id()).disambiguated_data.data == DefPathData::Closure {
219                write!(f, "inside closure")
220            } else {
221                // Note: this triggers a `must_produce_diag` state, which means that if we ever
222                // get here we must emit a diagnostic. We should never display a `FrameInfo` unless
223                // we actually want to emit a warning or error to the user.
224                write!(f, "inside `{}`", self.instance)
225            }
226        })
227    }
228}
229
230impl<'tcx> FrameInfo<'tcx> {
231    pub fn as_note(&self, tcx: TyCtxt<'tcx>) -> errors::FrameNote {
232        let span = self.span;
233        if tcx.def_key(self.instance.def_id()).disambiguated_data.data == DefPathData::Closure {
234            errors::FrameNote {
235                where_: "closure",
236                span,
237                instance: String::new(),
238                times: 0,
239                has_label: false,
240            }
241        } else {
242            let instance = format!("{}", self.instance);
243            // Note: this triggers a `must_produce_diag` state, which means that if we ever get
244            // here we must emit a diagnostic. We should never display a `FrameInfo` unless we
245            // actually want to emit a warning or error to the user.
246            errors::FrameNote { where_: "instance", span, instance, times: 0, has_label: false }
247        }
248    }
249}
250
251impl<'tcx, Prov: Provenance> Frame<'tcx, Prov> {
252    pub fn with_extra<Extra>(self, extra: Extra) -> Frame<'tcx, Prov, Extra> {
253        Frame {
254            body: self.body,
255            instance: self.instance,
256            return_to_block: self.return_to_block,
257            return_place: self.return_place,
258            locals: self.locals,
259            loc: self.loc,
260            extra,
261            tracing_span: self.tracing_span,
262        }
263    }
264}
265
impl<'tcx, Prov: Provenance, Extra> Frame<'tcx, Prov, Extra> {
    /// Get the current location within the Frame.
    ///
    /// If this is `Right`, we are not currently executing any particular statement in
    /// this frame (can happen e.g. during frame initialization, and during unwinding on
    /// frames without cleanup code).
    ///
    /// Used by [priroda](https://github.com/oli-obk/priroda).
    pub fn current_loc(&self) -> Either<mir::Location, Span> {
        self.loc
    }

    /// The MIR body of the function executing in this frame.
    pub fn body(&self) -> &'tcx mir::Body<'tcx> {
        self.body
    }

    /// The monomorphized instance of the function executing in this frame.
    pub fn instance(&self) -> ty::Instance<'tcx> {
        self.instance
    }

    /// Return the `SourceInfo` of the current instruction.
    /// Returns `None` if we are not currently at a particular statement (see `current_loc`).
    pub fn current_source_info(&self) -> Option<&mir::SourceInfo> {
        self.loc.left().map(|loc| self.body.source_info(loc))
    }

    /// The span to use for error reporting: the current statement's span, or the
    /// frame-level span if we are not at a particular statement.
    pub fn current_span(&self) -> Span {
        match self.loc {
            Left(loc) => self.body.source_info(loc).span,
            Right(span) => span,
        }
    }

    /// The `HirId` to attach lints to, if one can be determined.
    pub fn lint_root(&self, tcx: TyCtxt<'tcx>) -> Option<hir::HirId> {
        // We first try to get a HirId via the current source scope,
        // and fall back to `body.source`.
        self.current_source_info()
            .and_then(|source_info| match &self.body.source_scopes[source_info.scope].local_data {
                mir::ClearCrossCrate::Set(data) => Some(data.lint_root),
                mir::ClearCrossCrate::Clear => None,
            })
            .or_else(|| {
                let def_id = self.body.source.def_id().as_local();
                def_id.map(|def_id| tcx.local_def_id_to_hir_id(def_id))
            })
    }

    /// Returns the address of the buffer where the locals are stored. This is used by `Place` as a
    /// sanity check to detect bugs where we mix up which stack frame a place refers to.
    #[inline(always)]
    pub(super) fn locals_addr(&self) -> usize {
        self.locals.raw.as_ptr().addr()
    }

    /// Build a backtrace (innermost frame first) from the given stack, expanding
    /// MIR-inlined frames into separate entries.
    #[must_use]
    pub fn generate_stacktrace_from_stack(stack: &[Self]) -> Vec<FrameInfo<'tcx>> {
        let mut frames = Vec::new();
        // This deliberately does *not* honor `requires_caller_location` since it is used for much
        // more than just panics.
        for frame in stack.iter().rev() {
            let span = match frame.loc {
                Left(loc) => {
                    // If the stacktrace passes through MIR-inlined source scopes, add them.
                    // Walk up the scope chain, emitting one `FrameInfo` per inlined callee
                    // and continuing with the caller's span.
                    let mir::SourceInfo { mut span, scope } = *frame.body.source_info(loc);
                    let mut scope_data = &frame.body.source_scopes[scope];
                    while let Some((instance, call_span)) = scope_data.inlined {
                        frames.push(FrameInfo { span, instance });
                        span = call_span;
                        scope_data = &frame.body.source_scopes[scope_data.parent_scope.unwrap()];
                    }
                    span
                }
                Right(span) => span,
            };
            frames.push(FrameInfo { span, instance: frame.instance });
        }
        trace!("generate stacktrace: {:#?}", frames);
        frames
    }
}
345
impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    /// Very low-level helper that pushes a stack frame without initializing
    /// the arguments or local variables.
    ///
    /// The high-level version of this is `init_stack_frame`.
    #[instrument(skip(self, body, return_place, return_to_block), level = "debug")]
    pub(crate) fn push_stack_frame_raw(
        &mut self,
        instance: ty::Instance<'tcx>,
        body: &'tcx mir::Body<'tcx>,
        return_place: &MPlaceTy<'tcx, M::Provenance>,
        return_to_block: StackPopCleanup,
    ) -> InterpResult<'tcx> {
        trace!("body: {:#?}", body);

        // We can push a `Root` frame if and only if the stack is empty.
        debug_assert_eq!(
            self.stack().is_empty(),
            matches!(return_to_block, StackPopCleanup::Root { .. })
        );

        // First push a stack frame so we have access to `instantiate_from_current_frame` and other
        // `self.frame()`-based functions.
        // All locals start out dead; they get made live via `storage_live` later.
        let dead_local = LocalState { value: LocalValue::Dead, layout: Cell::new(None) };
        let locals = IndexVec::from_elem(dead_local, &body.local_decls);
        let pre_frame = Frame {
            body,
            loc: Right(body.span), // Span used for errors caused during preamble.
            return_to_block,
            return_place: return_place.clone(),
            locals,
            instance,
            tracing_span: SpanGuard::new(),
            extra: (),
        };
        // Let the machine attach its `FrameExtra` before the frame goes onto the stack.
        let frame = M::init_frame(self, pre_frame)?;
        self.stack_mut().push(frame);

        // Make sure all the constants required by this frame evaluate successfully (post-monomorphization check).
        for &const_ in body.required_consts() {
            let c =
                self.instantiate_from_current_frame_and_normalize_erasing_regions(const_.const_)?;
            c.eval(*self.tcx, self.typing_env, const_.span).map_err(|err| {
                err.emit_note(*self.tcx);
                err
            })?;
        }

        // Finish things up.
        M::after_stack_push(self)?;
        // The preamble is done; point at the first statement of the body.
        self.frame_mut().loc = Left(mir::Location::START);
        let span = info_span!("frame", "{}", instance);
        self.frame_mut().tracing_span.enter(span);

        interp_ok(())
    }

    /// Low-level helper that pops a stack frame from the stack and returns some information about
    /// it.
    ///
    /// This also deallocates locals, if necessary.
    ///
    /// [`M::before_stack_pop`] should be called before calling this function.
    /// [`M::after_stack_pop`] is called by this function automatically.
    ///
    /// The high-level version of this is `return_from_current_stack_frame`.
    ///
    /// [`M::before_stack_pop`]: Machine::before_stack_pop
    /// [`M::after_stack_pop`]: Machine::after_stack_pop
    pub(super) fn pop_stack_frame_raw(
        &mut self,
        unwinding: bool,
    ) -> InterpResult<'tcx, StackPopInfo<'tcx, M::Provenance>> {
        // Deallocate locals first (while the frame is still on the stack, so errors
        // point at the callee).
        let cleanup = self.cleanup_current_frame_locals()?;

        let frame =
            self.stack_mut().pop().expect("tried to pop a stack frame, but there were none");

        let return_to_block = frame.return_to_block;
        let return_place = frame.return_place.clone();

        // The machine hook only runs when we actually cleaned up; a `Root { cleanup: false }`
        // pop skips it and reports `NoCleanup`.
        let return_action;
        if cleanup {
            return_action = M::after_stack_pop(self, frame, unwinding)?;
            assert_ne!(return_action, ReturnAction::NoCleanup);
        } else {
            return_action = ReturnAction::NoCleanup;
        };

        interp_ok(StackPopInfo { return_action, return_to_block, return_place })
    }

    /// A private helper for [`pop_stack_frame_raw`](InterpCx::pop_stack_frame_raw).
    /// Returns `true` if cleanup has been done, `false` otherwise.
    fn cleanup_current_frame_locals(&mut self) -> InterpResult<'tcx, bool> {
        // Cleanup: deallocate locals.
        // Usually we want to clean up (deallocate locals), but in a few rare cases we don't.
        // We do this while the frame is still on the stack, so errors point to the callee.
        let return_to_block = self.frame().return_to_block;
        let cleanup = match return_to_block {
            StackPopCleanup::Goto { .. } => true,
            StackPopCleanup::Root { cleanup, .. } => cleanup,
        };

        if cleanup {
            // We need to take the locals out, since we need to mutate while iterating.
            let locals = mem::take(&mut self.frame_mut().locals);
            for local in &locals {
                self.deallocate_local(local.value)?;
            }
        }

        interp_ok(cleanup)
    }

    /// In the current stack frame, mark all locals as live that are not arguments and don't have
    /// `Storage*` annotations (this includes the return place).
    pub(crate) fn storage_live_for_always_live_locals(&mut self) -> InterpResult<'tcx> {
        self.storage_live(mir::RETURN_PLACE)?;

        let body = self.body();
        let always_live = always_storage_live_locals(body);
        for local in body.vars_and_temps_iter() {
            if always_live.contains(local) {
                self.storage_live(local)?;
            }
        }
        interp_ok(())
    }

    /// Mark `local` as live, using `meta` as the metadata in case the local is unsized.
    /// Unsized locals get backing memory allocated immediately; sized locals start out
    /// as an uninitialized immediate. Any previous contents of the local are discarded
    /// (and deallocated if they were in memory).
    pub fn storage_live_dyn(
        &mut self,
        local: mir::Local,
        meta: MemPlaceMeta<M::Provenance>,
    ) -> InterpResult<'tcx> {
        trace!("{:?} is now live", local);

        // We avoid `ty.is_trivially_sized` since that does something expensive for ADTs.
        // Returns `true` only for types that are certainly `Sized` without any queries;
        // `false` is the conservative "don't know, compute the layout" answer.
        fn is_very_trivially_sized(ty: Ty<'_>) -> bool {
            match ty.kind() {
                ty::Infer(ty::IntVar(_) | ty::FloatVar(_))
                | ty::Uint(_)
                | ty::Int(_)
                | ty::Bool
                | ty::Float(_)
                | ty::FnDef(..)
                | ty::FnPtr(..)
                | ty::RawPtr(..)
                | ty::Char
                | ty::Ref(..)
                | ty::Coroutine(..)
                | ty::CoroutineWitness(..)
                | ty::Array(..)
                | ty::Closure(..)
                | ty::CoroutineClosure(..)
                | ty::Never
                | ty::Error(_)
                | ty::Dynamic(_, _, ty::DynStar) => true,

                ty::Str | ty::Slice(_) | ty::Dynamic(_, _, ty::Dyn) | ty::Foreign(..) => false,

                // A tuple is sized iff its last field is (earlier fields must be sized).
                ty::Tuple(tys) => tys.last().is_none_or(|ty| is_very_trivially_sized(*ty)),

                ty::Pat(ty, ..) => is_very_trivially_sized(*ty),

                // We don't want to do any queries, so there is not much we can do with ADTs.
                ty::Adt(..) => false,

                ty::UnsafeBinder(ty) => is_very_trivially_sized(ty.skip_binder()),

                ty::Alias(..) | ty::Param(_) | ty::Placeholder(..) => false,

                ty::Infer(ty::TyVar(_)) => false,

                ty::Bound(..)
                | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => {
                    bug!("`is_very_trivially_sized` applied to unexpected type: {}", ty)
                }
            }
        }

        // This is a hot function, we avoid computing the layout when possible.
        // `unsized_` will be `None` for sized types and `Some(layout)` for unsized types.
        let unsized_ = if is_very_trivially_sized(self.body().local_decls[local].ty) {
            None
        } else {
            // We need the layout.
            let layout = self.layout_of_local(self.frame(), local, None)?;
            if layout.is_sized() { None } else { Some(layout) }
        };

        let local_val = LocalValue::Live(if let Some(layout) = unsized_ {
            if !meta.has_meta() {
                throw_unsup!(UnsizedLocal);
            }
            // Need to allocate some memory, since `Immediate::Uninit` cannot be unsized.
            let dest_place = self.allocate_dyn(layout, MemoryKind::Stack, meta)?;
            Operand::Indirect(*dest_place.mplace())
        } else {
            // Just make this an efficient immediate.
            assert!(!meta.has_meta()); // we're dropping the metadata
            // Make sure the machine knows this "write" is happening. (This is important so that
            // races involving local variable allocation can be detected by Miri.)
            M::after_local_write(self, local, /*storage_live*/ true)?;
            // Note that not calling `layout_of` here does have one real consequence:
            // if the type is too big, we'll only notice this when the local is actually initialized,
            // which is a bit too late -- we should ideally notice this already here, when the memory
            // is conceptually allocated. But given how rare that error is and that this is a hot function,
            // we accept this downside for now.
            Operand::Immediate(Immediate::Uninit)
        });

        // If the local is already live, deallocate its old memory.
        let old = mem::replace(&mut self.frame_mut().locals[local].value, local_val);
        self.deallocate_local(old)?;
        interp_ok(())
    }

    /// Mark a storage as live, killing the previous content.
    #[inline(always)]
    pub fn storage_live(&mut self, local: mir::Local) -> InterpResult<'tcx> {
        self.storage_live_dyn(local, MemPlaceMeta::None)
    }

    /// Mark a storage as dead, deallocating its backing memory (if any).
    pub fn storage_dead(&mut self, local: mir::Local) -> InterpResult<'tcx> {
        assert!(local != mir::RETURN_PLACE, "Cannot make return place dead");
        trace!("{:?} is now dead", local);

        // If the local is already dead, this is a NOP.
        let old = mem::replace(&mut self.frame_mut().locals[local].value, LocalValue::Dead);
        self.deallocate_local(old)?;
        interp_ok(())
    }

    /// Deallocate the backing memory of a (former) local value, if it had any.
    /// Immediate and dead locals have no memory and are a NOP here.
    fn deallocate_local(&mut self, local: LocalValue<M::Provenance>) -> InterpResult<'tcx> {
        if let LocalValue::Live(Operand::Indirect(MemPlace { ptr, .. })) = local {
            // All locals have a backing allocation, even if the allocation is empty
            // due to the local having ZST type. Hence we can `unwrap`.
            trace!(
                "deallocating local {:?}: {:?}",
                local,
                // Locals always have a `alloc_id` (they are never the result of a int2ptr).
                self.dump_alloc(ptr.provenance.unwrap().get_alloc_id().unwrap())
            );
            self.deallocate_ptr(ptr, None, MemoryKind::Stack)?;
        };
        interp_ok(())
    }

    /// Compute (or fetch the cached) layout of the given local in the given frame.
    /// `layout` can be passed in when the caller already knows it, to skip the computation.
    ///
    /// This is public because it is used by [Aquascope](https://github.com/cognitive-engineering-lab/aquascope/)
    /// to analyze all the locals in a stack frame.
    #[inline(always)]
    pub fn layout_of_local(
        &self,
        frame: &Frame<'tcx, M::Provenance, M::FrameExtra>,
        local: mir::Local,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
        let state = &frame.locals[local];
        if let Some(layout) = state.layout.get() {
            return interp_ok(layout);
        }

        let layout = from_known_layout(self.tcx, self.typing_env, layout, || {
            let local_ty = frame.body.local_decls[local].ty;
            let local_ty =
                self.instantiate_from_frame_and_normalize_erasing_regions(frame, local_ty)?;
            self.layout_of(local_ty).into()
        })?;

        // Layouts of locals are requested a lot, so we cache them.
        state.layout.set(Some(layout));
        interp_ok(layout)
    }
}
621
622impl<'tcx, Prov: Provenance> LocalState<'tcx, Prov> {
623    pub(super) fn print(
624        &self,
625        allocs: &mut Vec<Option<AllocId>>,
626        fmt: &mut std::fmt::Formatter<'_>,
627    ) -> std::fmt::Result {
628        match self.value {
629            LocalValue::Dead => write!(fmt, " is dead")?,
630            LocalValue::Live(Operand::Immediate(Immediate::Uninit)) => {
631                write!(fmt, " is uninitialized")?
632            }
633            LocalValue::Live(Operand::Indirect(mplace)) => {
634                write!(
635                    fmt,
636                    " by {} ref {:?}:",
637                    match mplace.meta {
638                        MemPlaceMeta::Meta(meta) => format!(" meta({meta:?})"),
639                        MemPlaceMeta::None => String::new(),
640                    },
641                    mplace.ptr,
642                )?;
643                allocs.extend(mplace.ptr.provenance.map(Provenance::get_alloc_id));
644            }
645            LocalValue::Live(Operand::Immediate(Immediate::Scalar(val))) => {
646                write!(fmt, " {val:?}")?;
647                if let Scalar::Ptr(ptr, _size) = val {
648                    allocs.push(ptr.provenance.get_alloc_id());
649                }
650            }
651            LocalValue::Live(Operand::Immediate(Immediate::ScalarPair(val1, val2))) => {
652                write!(fmt, " ({val1:?}, {val2:?})")?;
653                if let Scalar::Ptr(ptr, _size) = val1 {
654                    allocs.push(ptr.provenance.get_alloc_id());
655                }
656                if let Scalar::Ptr(ptr, _size) = val2 {
657                    allocs.push(ptr.provenance.get_alloc_id());
658                }
659            }
660        }
661
662        Ok(())
663    }
664}