rustc_const_eval/interpret/step.rs

//! This module contains the `InterpCx` methods for executing a single step of the interpreter.
//!
//! The main entry point is the `step` method.
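//!
//! A typical driver just pumps `step` in a loop. A minimal sketch (illustrative; it
//! assumes `ecx` is an already set-up `InterpCx` for some machine):
//!
//! ```ignore (illustrative)
//! while ecx.step()? {
//!     // each call executed one MIR statement or terminator
//! }
//! // the stack is now empty (or an interpreter error was propagated via `?`)
//! ```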

use either::Either;
use rustc_abi::{FIRST_VARIANT, FieldIdx};
use rustc_index::IndexSlice;
use rustc_middle::ty::layout::FnAbiOf;
use rustc_middle::ty::{self, Instance, Ty};
use rustc_middle::{bug, mir, span_bug};
use rustc_span::source_map::Spanned;
use rustc_target::callconv::FnAbi;
use tracing::{info, instrument, trace};

use super::{
    FnArg, FnVal, ImmTy, Immediate, InterpCx, InterpResult, Machine, MemPlaceMeta, PlaceTy,
    Projectable, Scalar, interp_ok, throw_ub,
};
use crate::util;

struct EvaluatedCalleeAndArgs<'tcx, M: Machine<'tcx>> {
    callee: FnVal<'tcx, M::ExtraFnVal>,
    args: Vec<FnArg<'tcx, M::Provenance>>,
    fn_sig: ty::FnSig<'tcx>,
    fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>,
    /// True if the function is marked as `#[track_caller]` ([`ty::InstanceKind::requires_caller_location`])
    with_caller_location: bool,
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    /// Returns `true` as long as there are more things to do.
    ///
    /// This is used by [priroda](https://github.com/oli-obk/priroda)
    ///
    /// This is marked `#[inline(always)]` to work around adversarial codegen when `opt-level = 3`
    #[inline(always)]
    pub fn step(&mut self) -> InterpResult<'tcx, bool> {
        if self.stack().is_empty() {
            return interp_ok(false);
        }

        let Either::Left(loc) = self.frame().loc else {
            // We are unwinding and this fn has no cleanup code.
            // Just go on unwinding.
            trace!("unwinding: skipping frame");
            self.return_from_current_stack_frame(/* unwinding */ true)?;
            return interp_ok(true);
        };
        let basic_block = &self.body().basic_blocks[loc.block];

        if let Some(stmt) = basic_block.statements.get(loc.statement_index) {
            let old_frames = self.frame_idx();
            self.eval_statement(stmt)?;
            // Make sure we are not updating `statement_index` of the wrong frame.
            assert_eq!(old_frames, self.frame_idx());
            // Advance the program counter.
            self.frame_mut().loc.as_mut().left().unwrap().statement_index += 1;
            return interp_ok(true);
        }

        M::before_terminator(self)?;

        let terminator = basic_block.terminator();
        self.eval_terminator(terminator)?;
        if !self.stack().is_empty() {
            if let Either::Left(loc) = self.frame().loc {
                info!("// executing {:?}", loc.block);
            }
        }
        interp_ok(true)
    }

    /// Runs the interpretation logic for the given `mir::Statement` at the current frame and
    /// statement counter.
    ///
    /// This does NOT move the statement counter forward, the caller has to do that!
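    ///
    /// A minimal sketch of the calling convention (illustrative; this is essentially
    /// what `step` above does):
    ///
    /// ```ignore (illustrative)
    /// ecx.eval_statement(stmt)?;
    /// // the caller is responsible for advancing the counter afterwards:
    /// ecx.frame_mut().loc.as_mut().left().unwrap().statement_index += 1;
    /// ```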
    pub fn eval_statement(&mut self, stmt: &mir::Statement<'tcx>) -> InterpResult<'tcx> {
        info!("{:?}", stmt);

        use rustc_middle::mir::StatementKind::*;

        match &stmt.kind {
            Assign(box (place, rvalue)) => self.eval_rvalue_into_place(rvalue, *place)?,

            SetDiscriminant { place, variant_index } => {
                let dest = self.eval_place(**place)?;
                self.write_discriminant(*variant_index, &dest)?;
            }

            Deinit(place) => {
                let dest = self.eval_place(**place)?;
                self.write_uninit(&dest)?;
            }

            // Mark locals as alive
            StorageLive(local) => {
                self.storage_live(*local)?;
            }

            // Mark locals as dead
            StorageDead(local) => {
                self.storage_dead(*local)?;
            }

            // No dynamic semantics attached to `FakeRead`; MIR
            // interpreter is solely intended for borrowck'ed code.
            FakeRead(..) => {}

            // Stacked Borrows.
            Retag(kind, place) => {
                let dest = self.eval_place(**place)?;
                M::retag_place_contents(self, *kind, &dest)?;
            }

            Intrinsic(box intrinsic) => self.eval_nondiverging_intrinsic(intrinsic)?,

            // Evaluate the place expression, without reading from it.
            PlaceMention(box place) => {
                let _ = self.eval_place(*place)?;
            }

            // This exists purely to guide borrowck lifetime inference, and does not have
            // an operational effect.
            AscribeUserType(..) => {}

            // Currently, Miri discards Coverage statements. Coverage statements are only injected
            // via an optional compile time MIR pass and have no side effects. Since Coverage
            // statements don't exist at the source level, it is safe for Miri to ignore them, even
            // for undefined behavior (UB) checks.
            //
            // A coverage counter inside a const expression (for example, a counter injected in a
            // const function) is discarded when the const is evaluated at compile time. Whether
            // this should change, and/or how to implement a const eval counter, is a subject of the
            // following issue:
            //
            // FIXME(#73156): Handle source code coverage in const eval
            Coverage(..) => {}

            ConstEvalCounter => {
                M::increment_const_eval_counter(self)?;
            }

            // Defined to do nothing. These are added by optimization passes, to avoid changing the
            // size of MIR constantly.
            Nop => {}

            // Only used for temporary lifetime lints
            BackwardIncompatibleDropHint { .. } => {}
        }

        interp_ok(())
    }

    /// Evaluate an assignment statement.
    ///
    /// There is no separate `eval_rvalue` function. Instead, the code for handling each rvalue
    /// type writes its results directly into the memory specified by the place.
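    ///
    /// For example (illustrative MIR), for an assignment like
    ///
    /// ```text
    /// _0 = Add(copy _1, copy _2)
    /// ```
    ///
    /// the `BinaryOp` arm below reads both operands, computes the sum, and writes the
    /// result immediate straight into the place evaluated for `_0`.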
    pub fn eval_rvalue_into_place(
        &mut self,
        rvalue: &mir::Rvalue<'tcx>,
        place: mir::Place<'tcx>,
    ) -> InterpResult<'tcx> {
        let dest = self.eval_place(place)?;
        // FIXME: ensure some kind of non-aliasing between LHS and RHS?
        // Also see https://github.com/rust-lang/rust/issues/68364.

        use rustc_middle::mir::Rvalue::*;
        match *rvalue {
            ThreadLocalRef(did) => {
                let ptr = M::thread_local_static_pointer(self, did)?;
                self.write_pointer(ptr, &dest)?;
            }

            Use(ref operand) => {
                // Avoid recomputing the layout
                let op = self.eval_operand(operand, Some(dest.layout))?;
                self.copy_op(&op, &dest)?;
            }

            CopyForDeref(place) => {
                let op = self.eval_place_to_op(place, Some(dest.layout))?;
                self.copy_op(&op, &dest)?;
            }

            BinaryOp(bin_op, box (ref left, ref right)) => {
                let layout = util::binop_left_homogeneous(bin_op).then_some(dest.layout);
                let left = self.read_immediate(&self.eval_operand(left, layout)?)?;
                let layout = util::binop_right_homogeneous(bin_op).then_some(left.layout);
                let right = self.read_immediate(&self.eval_operand(right, layout)?)?;
                let result = self.binary_op(bin_op, &left, &right)?;
                assert_eq!(result.layout, dest.layout, "layout mismatch for result of {bin_op:?}");
                self.write_immediate(*result, &dest)?;
            }

            UnaryOp(un_op, ref operand) => {
                // The operand always has the same type as the result.
                let val = self.read_immediate(&self.eval_operand(operand, Some(dest.layout))?)?;
                let result = self.unary_op(un_op, &val)?;
                assert_eq!(result.layout, dest.layout, "layout mismatch for result of {un_op:?}");
                self.write_immediate(*result, &dest)?;
            }

            NullaryOp(null_op, ty) => {
                let ty = self.instantiate_from_current_frame_and_normalize_erasing_regions(ty)?;
                let val = self.nullary_op(null_op, ty)?;
                self.write_immediate(*val, &dest)?;
            }

            Aggregate(box ref kind, ref operands) => {
                self.write_aggregate(kind, operands, &dest)?;
            }

            Repeat(ref operand, _) => {
                self.write_repeat(operand, &dest)?;
            }

            Len(place) => {
                let src = self.eval_place(place)?;
                let len = src.len(self)?;
                self.write_scalar(Scalar::from_target_usize(len, self), &dest)?;
            }

            Ref(_, borrow_kind, place) => {
                let src = self.eval_place(place)?;
                let place = self.force_allocation(&src)?;
                let val = ImmTy::from_immediate(place.to_ref(self), dest.layout);
                // A fresh reference was created, make sure it gets retagged.
                let val = M::retag_ptr_value(
                    self,
                    if borrow_kind.allows_two_phase_borrow() {
                        mir::RetagKind::TwoPhase
                    } else {
                        mir::RetagKind::Default
                    },
                    &val,
                )?;
                self.write_immediate(*val, &dest)?;
            }

            RawPtr(kind, place) => {
                // Figure out whether this is an addr_of of an already raw place.
                let place_base_raw = if place.is_indirect_first_projection() {
                    let ty = self.frame().body.local_decls[place.local].ty;
                    ty.is_raw_ptr()
                } else {
                    // Not a deref, and thus not raw.
                    false
                };

                let src = self.eval_place(place)?;
                let place = self.force_allocation(&src)?;
                let mut val = ImmTy::from_immediate(place.to_ref(self), dest.layout);
                if !place_base_raw && !kind.is_fake() {
                    // If this was not already raw, it needs retagging -- except for "fake"
                    // raw borrows whose defining property is that they do not get retagged.
                    val = M::retag_ptr_value(self, mir::RetagKind::Raw, &val)?;
                }
                self.write_immediate(*val, &dest)?;
            }

            ShallowInitBox(ref operand, _) => {
                let src = self.eval_operand(operand, None)?;
                let v = self.read_immediate(&src)?;
                self.write_immediate(*v, &dest)?;
            }

            Cast(cast_kind, ref operand, cast_ty) => {
                let src = self.eval_operand(operand, None)?;
                let cast_ty =
                    self.instantiate_from_current_frame_and_normalize_erasing_regions(cast_ty)?;
                self.cast(&src, cast_kind, cast_ty, &dest)?;
            }

            Discriminant(place) => {
                let op = self.eval_place_to_op(place, None)?;
                let variant = self.read_discriminant(&op)?;
                let discr = self.discriminant_for_variant(op.layout.ty, variant)?;
                self.write_immediate(*discr, &dest)?;
            }

            WrapUnsafeBinder(ref op, _ty) => {
                // Constructing an unsafe binder acts like a transmute
                // since the operand's layout does not change.
                let op = self.eval_operand(op, None)?;
                self.copy_op_allow_transmute(&op, &dest)?;
            }
        }

        trace!("{:?}", self.dump_place(&dest));

        interp_ok(())
    }

    /// Writes the aggregate to the destination.
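    ///
    /// For example (illustrative MIR), an enum aggregate like
    ///
    /// ```text
    /// _0 = Option::<i32>::Some(move _1)
    /// ```
    ///
    /// has its field written into the downcast variant place first, and the variant's
    /// discriminant written last (see the end of the body below).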
    #[instrument(skip(self), level = "trace")]
    fn write_aggregate(
        &mut self,
        kind: &mir::AggregateKind<'tcx>,
        operands: &IndexSlice<FieldIdx, mir::Operand<'tcx>>,
        dest: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        self.write_uninit(dest)?; // make sure all the padding ends up as uninit
        let (variant_index, variant_dest, active_field_index) = match *kind {
            mir::AggregateKind::Adt(_, variant_index, _, _, active_field_index) => {
                let variant_dest = self.project_downcast(dest, variant_index)?;
                (variant_index, variant_dest, active_field_index)
            }
            mir::AggregateKind::RawPtr(..) => {
                // Pointers don't have "fields" in the normal sense, so the
                // projection-based code below would either fail in projection
                // or in type mismatches. Instead, build an `Immediate` from
                // the parts and write that to the destination.
                let [data, meta] = &operands.raw else {
                    bug!("{kind:?} should have 2 operands, had {operands:?}");
                };
                let data = self.eval_operand(data, None)?;
                let data = self.read_pointer(&data)?;
                let meta = self.eval_operand(meta, None)?;
                let meta = if meta.layout.is_zst() {
                    MemPlaceMeta::None
                } else {
                    MemPlaceMeta::Meta(self.read_scalar(&meta)?)
                };
                let ptr_imm = Immediate::new_pointer_with_meta(data, meta, self);
                let ptr = ImmTy::from_immediate(ptr_imm, dest.layout);
                self.copy_op(&ptr, dest)?;
                return interp_ok(());
            }
            _ => (FIRST_VARIANT, dest.clone(), None),
        };
        if active_field_index.is_some() {
            assert_eq!(operands.len(), 1);
        }
        for (field_index, operand) in operands.iter_enumerated() {
            let field_index = active_field_index.unwrap_or(field_index);
            let field_dest = self.project_field(&variant_dest, field_index.as_usize())?;
            let op = self.eval_operand(operand, Some(field_dest.layout))?;
            self.copy_op(&op, &field_dest)?;
        }
        self.write_discriminant(variant_index, dest)
    }

    /// Repeats `operand` into the destination. `dest` must have array type, and that type
    /// determines how often `operand` is repeated.
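    ///
    /// For example (illustrative MIR), for
    ///
    /// ```text
    /// _0 = [move _1; 1024]
    /// ```
    ///
    /// the operand is copied into element 0 once, and the remaining 1023 elements are
    /// then filled by repeatedly copying the bytes of that first element, rather than
    /// writing each element individually.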
    fn write_repeat(
        &mut self,
        operand: &mir::Operand<'tcx>,
        dest: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        let src = self.eval_operand(operand, None)?;
        assert!(src.layout.is_sized());
        let dest = self.force_allocation(&dest)?;
        let length = dest.len(self)?;

        if length == 0 {
            // Nothing to copy... but let's still make sure that `dest` as a place is valid.
            self.get_place_alloc_mut(&dest)?;
        } else {
            // Write the src to the first element.
            let first = self.project_index(&dest, 0)?;
            self.copy_op(&src, &first)?;

            // This is performance-sensitive code for big static/const arrays! So we
            // avoid writing each operand individually and instead just make many copies
            // of the first element.
            let elem_size = first.layout.size;
            let first_ptr = first.ptr();
            let rest_ptr = first_ptr.wrapping_offset(elem_size, self);
            // No alignment requirement since `copy_op` above already checked it.
            self.mem_copy_repeatedly(
                first_ptr,
                rest_ptr,
                elem_size,
                length - 1,
                /*nonoverlapping:*/ true,
            )?;
        }

        interp_ok(())
    }

    /// Evaluate the arguments of a function call
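    ///
    /// Roughly (illustrative summary of the match below):
    ///
    /// ```text
    /// Operand::Copy(..) | Operand::Constant(..)      => FnArg::Copy
    /// Operand::Move(p) if p lives in memory          => FnArg::InPlace
    /// Operand::Move(p) otherwise (immediate operand) => FnArg::Copy
    /// ```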
    fn eval_fn_call_argument(
        &self,
        op: &mir::Operand<'tcx>,
    ) -> InterpResult<'tcx, FnArg<'tcx, M::Provenance>> {
        interp_ok(match op {
            mir::Operand::Copy(_) | mir::Operand::Constant(_) => {
                // Make a regular copy.
                let op = self.eval_operand(op, None)?;
                FnArg::Copy(op)
            }
            mir::Operand::Move(place) => {
                // If this place lives in memory, preserve its location.
                // We call `place_to_op` which will be an `MPlaceTy` whenever there exists
                // an mplace for this place. (This is in contrast to `PlaceTy::as_mplace_or_local`
                // which can return a local even if that has an mplace.)
                let place = self.eval_place(*place)?;
                let op = self.place_to_op(&place)?;

                match op.as_mplace_or_imm() {
                    Either::Left(mplace) => FnArg::InPlace(mplace),
                    Either::Right(_imm) => {
                        // This argument doesn't live in memory, so there's no place
                        // to make inaccessible during the call.
                        // We rely on there not being any stray `PlaceTy` that would let the
                        // caller directly access this local!
                        // This is also crucial for tail calls, where we want the `FnArg` to
                        // stay valid when the old stack frame gets popped.
                        FnArg::Copy(op)
                    }
                }
            }
        })
    }

    /// Shared part of `Call` and `TailCall` implementation — finding and evaluating all the
    /// necessary information about callee and arguments to make a call.
    fn eval_callee_and_args(
        &self,
        terminator: &mir::Terminator<'tcx>,
        func: &mir::Operand<'tcx>,
        args: &[Spanned<mir::Operand<'tcx>>],
    ) -> InterpResult<'tcx, EvaluatedCalleeAndArgs<'tcx, M>> {
        let func = self.eval_operand(func, None)?;
        let args = args
            .iter()
            .map(|arg| self.eval_fn_call_argument(&arg.node))
            .collect::<InterpResult<'tcx, Vec<_>>>()?;

        let fn_sig_binder = func.layout.ty.fn_sig(*self.tcx);
        let fn_sig = self.tcx.normalize_erasing_late_bound_regions(self.typing_env, fn_sig_binder);
        let extra_args = &args[fn_sig.inputs().len()..];
        let extra_args =
            self.tcx.mk_type_list_from_iter(extra_args.iter().map(|arg| arg.layout().ty));

        let (callee, fn_abi, with_caller_location) = match *func.layout.ty.kind() {
            ty::FnPtr(..) => {
                let fn_ptr = self.read_pointer(&func)?;
                let fn_val = self.get_ptr_fn(fn_ptr)?;
                (fn_val, self.fn_abi_of_fn_ptr(fn_sig_binder, extra_args)?, false)
            }
            ty::FnDef(def_id, args) => {
                let instance = self.resolve(def_id, args)?;
                (
                    FnVal::Instance(instance),
                    self.fn_abi_of_instance(instance, extra_args)?,
                    instance.def.requires_caller_location(*self.tcx),
                )
            }
            _ => {
                span_bug!(terminator.source_info.span, "invalid callee of type {}", func.layout.ty)
            }
        };

        interp_ok(EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location })
    }

    fn eval_terminator(&mut self, terminator: &mir::Terminator<'tcx>) -> InterpResult<'tcx> {
        info!("{:?}", terminator.kind);

        use rustc_middle::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.return_from_current_stack_frame(/* unwinding */ false)?
            }

            Goto { target } => self.go_to_block(target),

            SwitchInt { ref discr, ref targets } => {
                let discr = self.read_immediate(&self.eval_operand(discr, None)?)?;
                trace!("SwitchInt({:?})", *discr);

                // Branch to the `otherwise` case by default, if no match is found.
                let mut target_block = targets.otherwise();

                for (const_int, target) in targets.iter() {
                    // Compare using MIR BinOp::Eq, to also support pointer values.
                    // (Avoiding `self.binary_op` as that does some redundant layout computation.)
                    let res = self.binary_op(
                        mir::BinOp::Eq,
                        &discr,
                        &ImmTy::from_uint(const_int, discr.layout),
                    )?;
                    if res.to_scalar().to_bool()? {
                        target_block = target;
                        break;
                    }
                }

                self.go_to_block(target_block);
            }

            Call {
                ref func,
                ref args,
                destination,
                target,
                unwind,
                call_source: _,
                fn_span: _,
            } => {
                let old_stack = self.frame_idx();
                let old_loc = self.frame().loc;

                let EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location } =
                    self.eval_callee_and_args(terminator, func, args)?;

                let destination = self.force_allocation(&self.eval_place(destination)?)?;
                self.init_fn_call(
                    callee,
                    (fn_sig.abi, fn_abi),
                    &args,
                    with_caller_location,
                    &destination,
                    target,
                    if fn_abi.can_unwind { unwind } else { mir::UnwindAction::Unreachable },
                )?;
                // Sanity-check that `init_fn_call` either pushed a new frame or
                // did a jump to another block.
                if self.frame_idx() == old_stack && self.frame().loc == old_loc {
                    span_bug!(terminator.source_info.span, "evaluating this call made no progress");
                }
            }

            TailCall { ref func, ref args, fn_span: _ } => {
                let old_frame_idx = self.frame_idx();

                let EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location } =
                    self.eval_callee_and_args(terminator, func, args)?;

                self.init_fn_tail_call(callee, (fn_sig.abi, fn_abi), &args, with_caller_location)?;

                if self.frame_idx() != old_frame_idx {
                    span_bug!(
                        terminator.source_info.span,
                        "evaluating this tail call pushed a new stack frame"
                    );
                }
            }

            Drop { place, target, unwind, replace: _ } => {
                let place = self.eval_place(place)?;
                let instance = Instance::resolve_drop_in_place(*self.tcx, place.layout.ty);
                if let ty::InstanceKind::DropGlue(_, None) = instance.def {
                    // This is the branch we enter if and only if the dropped type has no drop glue
                    // whatsoever. This can happen as a result of monomorphizing a drop of a
                    // generic. In order to make sure that generic and non-generic code behaves
                    // roughly the same (and in keeping with Mir semantics) we do nothing here.
                    self.go_to_block(target);
                    return interp_ok(());
                }
                trace!("TerminatorKind::drop: {:?}, type {}", place, place.layout.ty);
                self.init_drop_in_place_call(&place, instance, target, unwind)?;
            }

            Assert { ref cond, expected, ref msg, target, unwind } => {
                let ignored =
                    M::ignore_optional_overflow_checks(self) && msg.is_optional_overflow_check();
                let cond_val = self.read_scalar(&self.eval_operand(cond, None)?)?.to_bool()?;
                if ignored || expected == cond_val {
                    self.go_to_block(target);
                } else {
                    M::assert_panic(self, msg, unwind)?;
                }
            }

            UnwindTerminate(reason) => {
                M::unwind_terminate(self, reason)?;
            }

            // When we encounter Resume, we've finished unwinding
            // cleanup for the current stack frame. We pop it in order
            // to continue unwinding the next frame
            UnwindResume => {
                trace!("unwinding: resuming from cleanup");
                // By definition, a Resume terminator means
                // that we're unwinding
                self.return_from_current_stack_frame(/* unwinding */ true)?;
                return interp_ok(());
            }

            // It is UB to ever encounter this.
            Unreachable => throw_ub!(Unreachable),

            // These should never occur for MIR we actually run.
            FalseEdge { .. } | FalseUnwind { .. } | Yield { .. } | CoroutineDrop => span_bug!(
                terminator.source_info.span,
                "{:#?} should have been eliminated by MIR pass",
                terminator.kind
            ),

            InlineAsm { template, ref operands, options, ref targets, .. } => {
                M::eval_inline_asm(self, template, operands, options, targets)?;
            }
        }

        interp_ok(())
    }
}