//! This module contains the `InterpCx` methods for executing a single step of the interpreter.
//!
//! The main entry point is the `step` method.

use std::iter;

use either::Either;
use rustc_abi::{FIRST_VARIANT, FieldIdx};
use rustc_data_structures::fx::FxHashSet;
use rustc_index::IndexSlice;
use rustc_middle::ty::{self, Instance, Ty};
use rustc_middle::{bug, mir, span_bug};
use rustc_span::source_map::Spanned;
use rustc_target::callconv::FnAbi;
use tracing::field::Empty;
use tracing::{info, instrument, trace};

use super::{
    FnArg, FnVal, ImmTy, Immediate, InterpCx, InterpResult, Machine, MemPlaceMeta, PlaceTy,
    Projectable, interp_ok, throw_ub, throw_unsup_format,
};
use crate::interpret::EnteredTraceSpan;
use crate::{enter_trace_span, util};

struct EvaluatedCalleeAndArgs<'tcx, M: Machine<'tcx>> {
    callee: FnVal<'tcx, M::ExtraFnVal>,
    args: Vec<FnArg<'tcx, M::Provenance>>,
    fn_sig: ty::FnSig<'tcx>,
    fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>,
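    /// Whether the callee requires an implicit caller-location argument, i.e. whether it is a
    /// `#[track_caller]` function.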
    with_caller_location: bool,
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    #[inline(always)]
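    /// Do a single unit of work: either exactly one statement, or one terminator if the basic
    /// block's statements are exhausted. Returns `true` as long as there are more things to do.
    ///
    /// This is used by [priroda](https://github.com/oli-obk/priroda).
    ///
    /// This is marked `#[inline(always)]` to work around adversarial codegen when `opt-level = 3`.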
    pub fn step(&mut self) -> InterpResult<'tcx, bool> {
        if self.stack().is_empty() {
            return interp_ok(false);
        }

        let Either::Left(loc) = self.frame().loc else {
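            // `loc` is `Right`, which means we are currently unwinding and this frame has no
            // cleanup blocks to execute: pop it and continue unwinding in the caller.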
            trace!("unwinding: skipping frame");
            self.return_from_current_stack_frame(/* unwinding */ true)?;
            return interp_ok(true);
        };
        let basic_block = &self.body().basic_blocks[loc.block];

        if let Some(stmt) = basic_block.statements.get(loc.statement_index) {
            let old_frames = self.frame_idx();
            self.eval_statement(stmt)?;
            assert_eq!(old_frames, self.frame_idx());
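            // The statement succeeded; advance to the next statement of this frame.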
            self.frame_mut().loc.as_mut().left().unwrap().statement_index += 1;
            return interp_ok(true);
        }

        M::before_terminator(self)?;

        let terminator = basic_block.terminator();
        self.eval_terminator(terminator)?;
        if !self.stack().is_empty() {
            if let Either::Left(loc) = self.frame().loc {
                info!("// executing {:?}", loc.block);
            }
        }
        interp_ok(true)
    }

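    /// Runs the interpretation logic for the given `mir::Statement` at the current frame and
    /// statement counter.
    ///
    /// This does NOT move the statement counter forward, the caller has to do that!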
    pub fn eval_statement(&mut self, stmt: &mir::Statement<'tcx>) -> InterpResult<'tcx> {
        let _trace = enter_trace_span!(
            M,
            step::eval_statement,
            stmt = ?stmt.kind,
            span = ?stmt.source_info.span,
            tracing_separate_thread = Empty,
        )
        .or_if_tracing_disabled(|| info!(stmt = ?stmt.kind));

        use rustc_middle::mir::StatementKind::*;

        match &stmt.kind {
            Assign(box (place, rvalue)) => self.eval_rvalue_into_place(rvalue, *place)?,

            SetDiscriminant { place, variant_index } => {
                let dest = self.eval_place(**place)?;
                self.write_discriminant(*variant_index, &dest)?;
            }

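            // Mark the local as live.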
            StorageLive(local) => {
                self.storage_live(*local)?;
            }

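            // Mark the local as dead.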
            StorageDead(local) => {
                self.storage_dead(*local)?;
            }

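            // Fake reads only exist to help static analyses such as borrowck; they have no
            // effect at runtime.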
            FakeRead(..) => {}

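            // Retagging has no operational semantics by itself; the machine hook decides what
            // to do with it (e.g. Miri's aliasing models).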
            Retag(kind, place) => {
                let dest = self.eval_place(**place)?;
                M::retag_place_contents(self, *kind, &dest)?;
            }

            Intrinsic(box intrinsic) => self.eval_nondiverging_intrinsic(intrinsic)?,

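            // Evaluate the place expression, without reading from it.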
            PlaceMention(box place) => {
                let _ = self.eval_place(*place)?;
            }

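            // User type ascriptions only matter for the type checker; there is nothing to do
            // at runtime.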
            AscribeUserType(..) => {}

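            // Coverage statements are only meaningful to the coverage instrumentation during
            // codegen; the interpreter can simply discard them.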
            Coverage(..) => {}

            ConstEvalCounter => {
                M::increment_const_eval_counter(self)?;
            }

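            // `Nop`s are inserted by optimization passes in place of removed statements so
            // that statement indices stay stable; they do nothing.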
            Nop => {}

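            // These hints only feed drop-order diagnostics; they have no runtime semantics.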
            BackwardIncompatibleDropHint { .. } => {}
        }

        interp_ok(())
    }

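    /// Evaluate an assignment statement.
    ///
    /// There is no separate `eval_rvalue` function. Instead, the code for handling each rvalue
    /// type writes its results directly into the memory specified by the place.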
    pub fn eval_rvalue_into_place(
        &mut self,
        rvalue: &mir::Rvalue<'tcx>,
        place: mir::Place<'tcx>,
    ) -> InterpResult<'tcx> {
        let dest = self.eval_place(place)?;
        use rustc_middle::mir::Rvalue::*;
        match *rvalue {
            ThreadLocalRef(did) => {
                let ptr = M::thread_local_static_pointer(self, did)?;
                self.write_pointer(ptr, &dest)?;
            }

            Use(ref operand) => {
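                // Avoid recomputing the layout by passing the destination layout as a hint.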
                let op = self.eval_operand(operand, Some(dest.layout))?;
                self.copy_op(&op, &dest)?;
            }

            CopyForDeref(_) => bug!("`CopyForDeref` in runtime MIR"),

            BinaryOp(bin_op, box (ref left, ref right)) => {
                let layout = util::binop_left_homogeneous(bin_op).then_some(dest.layout);
                let left = self.read_immediate(&self.eval_operand(left, layout)?)?;
                let layout = util::binop_right_homogeneous(bin_op).then_some(left.layout);
                let right = self.read_immediate(&self.eval_operand(right, layout)?)?;
                let result = self.binary_op(bin_op, &left, &right)?;
                assert_eq!(result.layout, dest.layout, "layout mismatch for result of {bin_op:?}");
                self.write_immediate(*result, &dest)?;
            }

            UnaryOp(un_op, ref operand) => {
                let val = self.read_immediate(&self.eval_operand(operand, Some(dest.layout))?)?;
                let result = self.unary_op(un_op, &val)?;
                assert_eq!(result.layout, dest.layout, "layout mismatch for result of {un_op:?}");
                self.write_immediate(*result, &dest)?;
            }

            NullaryOp(null_op, ty) => {
                let ty = self.instantiate_from_current_frame_and_normalize_erasing_regions(ty)?;
                let val = self.nullary_op(null_op, ty)?;
                self.write_immediate(*val, &dest)?;
            }

            Aggregate(box ref kind, ref operands) => {
                self.write_aggregate(kind, operands, &dest)?;
            }

            Repeat(ref operand, _) => {
                self.write_repeat(operand, &dest)?;
            }

            Ref(_, borrow_kind, place) => {
                let src = self.eval_place(place)?;
                let place = self.force_allocation(&src)?;
                let val = ImmTy::from_immediate(place.to_ref(self), dest.layout);
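                // A fresh reference was created, make sure it gets retagged.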
                let val = M::retag_ptr_value(
                    self,
                    if borrow_kind.allows_two_phase_borrow() {
                        mir::RetagKind::TwoPhase
                    } else {
                        mir::RetagKind::Default
                    },
                    &val,
                )?;
                self.write_immediate(*val, &dest)?;
            }

            RawPtr(kind, place) => {
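                // Figure out whether this is a raw-address-of of an already raw place.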
                let place_base_raw = if place.is_indirect_first_projection() {
                    let ty = self.frame().body.local_decls[place.local].ty;
                    ty.is_raw_ptr()
                } else {
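                    // Not a deref, and thus not raw.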
                    false
                };

                let src = self.eval_place(place)?;
                let place = self.force_allocation(&src)?;
                let mut val = ImmTy::from_immediate(place.to_ref(self), dest.layout);
                if !place_base_raw && !kind.is_fake() {
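                    // If this was not already raw, it needs retagging.
                    // As for `Ref`, fake pointers are not retagged.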
                    val = M::retag_ptr_value(self, mir::RetagKind::Raw, &val)?;
                }
                self.write_immediate(*val, &dest)?;
            }

            ShallowInitBox(ref operand, _) => {
                let src = self.eval_operand(operand, None)?;
                let v = self.read_immediate(&src)?;
                self.write_immediate(*v, &dest)?;
            }

            Cast(cast_kind, ref operand, cast_ty) => {
                let src = self.eval_operand(operand, None)?;
                let cast_ty =
                    self.instantiate_from_current_frame_and_normalize_erasing_regions(cast_ty)?;
                self.cast(&src, cast_kind, cast_ty, &dest)?;
            }

            Discriminant(place) => {
                let op = self.eval_place_to_op(place, None)?;
                let variant = self.read_discriminant(&op)?;
                let discr = self.discriminant_for_variant(op.layout.ty, variant)?;
                self.write_immediate(*discr, &dest)?;
            }

            WrapUnsafeBinder(ref op, _ty) => {
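                // Wrapping the value into an unsafe binder changes its type but not its
                // representation, so this is a transmuting copy.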
                let op = self.eval_operand(op, None)?;
                self.copy_op_allow_transmute(&op, &dest)?;
            }
        }

        trace!("{:?}", self.dump_place(&dest));

        interp_ok(())
    }

    #[instrument(skip(self), level = "trace")]
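    /// Writes the aggregate to the destination.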
    fn write_aggregate(
        &mut self,
        kind: &mir::AggregateKind<'tcx>,
        operands: &IndexSlice<FieldIdx, mir::Operand<'tcx>>,
        dest: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        let (variant_index, variant_dest, active_field_index) = match *kind {
            mir::AggregateKind::Adt(_, variant_index, _, _, active_field_index) => {
                let variant_dest = self.project_downcast(dest, variant_index)?;
                (variant_index, variant_dest, active_field_index)
            }
            mir::AggregateKind::RawPtr(..) => {
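                // Pointers don't have "fields" in the normal sense, so the projection-based
                // code below would either fail in projection or in type mismatches. Instead,
                // build an `Immediate` from the parts and write that to the destination.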
                let [data, meta] = &operands.raw else {
                    bug!("{kind:?} should have 2 operands, had {operands:?}");
                };
                let data = self.eval_operand(data, None)?;
                let data = self.read_pointer(&data)?;
                let meta = self.eval_operand(meta, None)?;
                let meta = if meta.layout.is_zst() {
                    MemPlaceMeta::None
                } else {
                    MemPlaceMeta::Meta(self.read_scalar(&meta)?)
                };
                let ptr_imm = Immediate::new_pointer_with_meta(data, meta, self);
                let ptr = ImmTy::from_immediate(ptr_imm, dest.layout);
                self.copy_op(&ptr, dest)?;
                return interp_ok(());
            }
            _ => (FIRST_VARIANT, dest.clone(), None),
        };
        if active_field_index.is_some() {
            assert_eq!(operands.len(), 1);
        }
        for (field_index, operand) in operands.iter_enumerated() {
            let field_index = active_field_index.unwrap_or(field_index);
            let field_dest = self.project_field(&variant_dest, field_index)?;
            let op = self.eval_operand(operand, Some(field_dest.layout))?;
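            // Field-by-field validation is skipped here; the aggregate is validated as a
            // whole below, once everything (including the discriminant) has been written.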
            self.copy_op_no_validate(&op, &field_dest, /*allow_transmute*/ false)?;
        }
        self.write_discriminant(variant_index, dest)?;
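        // If the machine requires it, validate the freshly written aggregate in one go.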
        if M::enforce_validity(self, dest.layout()) {
            self.validate_operand(
                dest,
                M::enforce_validity_recursively(self, dest.layout()),
                /*reset_provenance_and_padding*/ true,
            )?;
        }
        interp_ok(())
    }

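    /// Repeats `operand` into the destination. `dest` must have array type, and that type
    /// determines how often the operand is repeated.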
    fn write_repeat(
        &mut self,
        operand: &mir::Operand<'tcx>,
        dest: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        let src = self.eval_operand(operand, None)?;
        assert!(src.layout.is_sized());
        let dest = self.force_allocation(&dest)?;
        let length = dest.len(self)?;

        if length == 0 {
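            // Nothing to copy... but let's still make sure that `dest` as a place is live.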
            self.get_place_alloc_mut(&dest)?;
        } else {
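            // Write the first element.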
            let first = self.project_index(&dest, 0)?;
            self.copy_op(&src, &first)?;

            let elem_size = first.layout.size;
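            // Copy the rest of the elements from the first one, `length - 1` times in total.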
            let first_ptr = first.ptr();
            let rest_ptr = first_ptr.wrapping_offset(elem_size, self);
            self.mem_copy_repeatedly(
                first_ptr,
                rest_ptr,
                elem_size,
                length - 1,
                /*nonoverlapping:*/ true,
            )?;
        }

        interp_ok(())
    }

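    /// Evaluate one argument of a function call.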
    fn eval_fn_call_argument(
        &mut self,
        op: &mir::Operand<'tcx>,
        move_definitely_disjoint: bool,
    ) -> InterpResult<'tcx, FnArg<'tcx, M::Provenance>> {
        interp_ok(match op {
            mir::Operand::Copy(_) | mir::Operand::Constant(_) => {
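                // Make a regular copy.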
                let op = self.eval_operand(op, None)?;
                FnArg::Copy(op)
            }
            mir::Operand::Move(place) => {
                let place = self.eval_place(*place)?;
                if move_definitely_disjoint {
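                    // The move places are known to be mutually disjoint, so there is no need
                    // to force the value into memory: pass it in-place if it already has a
                    // backing place, and fall back to a copy for immediates.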
                    let op = self.place_to_op(&place)?;
                    match op.as_mplace_or_imm() {
                        Either::Left(mplace) => FnArg::InPlace(mplace),
                        Either::Right(_imm) => FnArg::Copy(op),
                    }
                } else {
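                    // Without the disjointness guarantee, always pass a real memory place;
                    // `force_allocation` moves the value into memory if needed.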
                    FnArg::InPlace(self.force_allocation(&place)?)
                }
            }
        })
    }

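    /// Shared part of `Call` and `TailCall`: evaluate the callee, evaluate all arguments, and
    /// determine the signature and ABI information needed to actually initiate the call.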
    fn eval_callee_and_args(
        &mut self,
        terminator: &mir::Terminator<'tcx>,
        func: &mir::Operand<'tcx>,
        args: &[Spanned<mir::Operand<'tcx>>],
        dest: &mir::Place<'tcx>,
    ) -> InterpResult<'tcx, EvaluatedCalleeAndArgs<'tcx, M>> {
        let func = self.eval_operand(func, None)?;

        let move_definitely_disjoint = 'move_definitely_disjoint: {
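            // Determine whether the `Move` arguments and the return destination are all
            // definitely mutually disjoint; this decides how `Move` arguments get passed.
            // The conservative check: every `Move` place and the destination must be a plain
            // local without indirection, and no local may occur twice. A place whose first
            // projection is a `Deref` could point anywhere, so it immediately disqualifies.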
            let mut previous_locals = FxHashSet::<mir::Local>::default();
            for place in args
                .iter()
                .filter_map(|a| {
                    if let mir::Operand::Move(place) = &a.node { Some(place) } else { None }
                })
                .chain(iter::once(dest))
            {
                if place.is_indirect_first_projection() {
                    break 'move_definitely_disjoint false;
                }
                if !previous_locals.insert(place.local) {
                    break 'move_definitely_disjoint false;
                }
            }
            true
        };
        let args = args
            .iter()
            .map(|arg| self.eval_fn_call_argument(&arg.node, move_definitely_disjoint))
            .collect::<InterpResult<'tcx, Vec<_>>>()?;

        let fn_sig_binder = {
            let _trace = enter_trace_span!(M, "fn_sig", ty = ?func.layout.ty.kind());
            func.layout.ty.fn_sig(*self.tcx)
        };
        let fn_sig = self.tcx.normalize_erasing_late_bound_regions(self.typing_env, fn_sig_binder);
        let extra_args = &args[fn_sig.inputs().len()..];
        let extra_args =
            self.tcx.mk_type_list_from_iter(extra_args.iter().map(|arg| arg.layout().ty));

        let (callee, fn_abi, with_caller_location) = match *func.layout.ty.kind() {
            ty::FnPtr(..) => {
                let fn_ptr = self.read_pointer(&func)?;
                let fn_val = self.get_ptr_fn(fn_ptr)?;
                (fn_val, self.fn_abi_of_fn_ptr(fn_sig_binder, extra_args)?, false)
            }
            ty::FnDef(def_id, args) => {
                let instance = self.resolve(def_id, args)?;
                (
                    FnVal::Instance(instance),
                    self.fn_abi_of_instance(instance, extra_args)?,
                    instance.def.requires_caller_location(*self.tcx),
                )
            }
            _ => {
                span_bug!(terminator.source_info.span, "invalid callee of type {}", func.layout.ty)
            }
        };

        interp_ok(EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location })
    }

    fn eval_terminator(&mut self, terminator: &mir::Terminator<'tcx>) -> InterpResult<'tcx> {
        let _trace = enter_trace_span!(
            M,
            step::eval_terminator,
            terminator = ?terminator.kind,
            span = ?terminator.source_info.span,
            tracing_separate_thread = Empty,
        )
        .or_if_tracing_disabled(|| info!(terminator = ?terminator.kind));

        use rustc_middle::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.return_from_current_stack_frame(/* unwinding */ false)?
            }

            Goto { target } => self.go_to_block(target),

            SwitchInt { ref discr, ref targets } => {
                let discr = self.read_immediate(&self.eval_operand(discr, None)?)?;
                trace!("SwitchInt({:?})", *discr);

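                // Find the branch we are supposed to jump to; if no value matches, fall back
                // to the `otherwise` target.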
                let mut target_block = targets.otherwise();

                for (const_int, target) in targets.iter() {
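                    // Compare using MIR `BinOp::Eq`, to also support pointer values.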
                    let res = self.binary_op(
                        mir::BinOp::Eq,
                        &discr,
                        &ImmTy::from_uint(const_int, discr.layout),
                    )?;
                    if res.to_scalar().to_bool()? {
                        target_block = target;
                        break;
                    }
                }

                self.go_to_block(target_block);
            }

            Call {
                ref func,
                ref args,
                destination,
                target,
                unwind,
                call_source: _,
                fn_span: _,
            } => {
                let old_stack = self.frame_idx();
                let old_loc = self.frame().loc;

                let EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location } =
                    self.eval_callee_and_args(terminator, func, args, &destination)?;

                let destination = self.eval_place(destination)?;
                self.init_fn_call(
                    callee,
                    (fn_sig.abi, fn_abi),
                    &args,
                    with_caller_location,
                    &destination,
                    target,
                    if fn_abi.can_unwind { unwind } else { mir::UnwindAction::Unreachable },
                )?;
                if self.frame_idx() == old_stack && self.frame().loc == old_loc {
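                    // Sanity check: evaluating the call must make *some* progress, i.e. either
                    // push a new stack frame or jump to another block.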
                    span_bug!(terminator.source_info.span, "evaluating this call made no progress");
                }
            }

            TailCall { ref func, ref args, fn_span: _ } => {
                let old_frame_idx = self.frame_idx();

                let EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location } =
                    self.eval_callee_and_args(terminator, func, args, &mir::Place::return_place())?;

                self.init_fn_tail_call(callee, (fn_sig.abi, fn_abi), &args, with_caller_location)?;

                if self.frame_idx() != old_frame_idx {
                    span_bug!(
                        terminator.source_info.span,
                        "evaluating this tail call pushed a new stack frame"
                    );
                }
            }

            Drop { place, target, unwind, replace: _, drop, async_fut } => {
                assert!(
                    async_fut.is_none() && drop.is_none(),
                    "Async Drop must be expanded or reset to sync in runtime MIR"
                );
                let place = self.eval_place(place)?;
                let instance = {
                    let _trace =
                        enter_trace_span!(M, resolve::resolve_drop_in_place, ty = ?place.layout.ty);
                    Instance::resolve_drop_in_place(*self.tcx, place.layout.ty)
                };
                if let ty::InstanceKind::DropGlue(_, None) = instance.def {
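                    // We enter this branch if and only if the dropped type has no drop glue
                    // at all. This can happen when monomorphizing a drop of a generic type.
                    // To keep generic and non-generic code behaving the same, dropping such
                    // a value does nothing: just continue at the target block.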
                    self.go_to_block(target);
                    return interp_ok(());
                }
                trace!("TerminatorKind::drop: {:?}, type {}", place, place.layout.ty);
                self.init_drop_in_place_call(&place, instance, target, unwind)?;
            }

            Assert { ref cond, expected, ref msg, target, unwind } => {
                let ignored =
                    M::ignore_optional_overflow_checks(self) && msg.is_optional_overflow_check();
                let cond_val = self.read_scalar(&self.eval_operand(cond, None)?)?.to_bool()?;
                if ignored || expected == cond_val {
                    self.go_to_block(target);
                } else {
                    M::assert_panic(self, msg, unwind)?;
                }
            }

            UnwindTerminate(reason) => {
                M::unwind_terminate(self, reason)?;
            }

            UnwindResume => {
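                // This frame is done running its cleanup blocks; by definition, `UnwindResume`
                // means we continue unwinding in the caller.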
                trace!("unwinding: resuming from cleanup");
                self.return_from_current_stack_frame(/* unwinding */ true)?;
                return interp_ok(());
            }

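            // Reaching an `Unreachable` terminator is immediate UB.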
            Unreachable => throw_ub!(Unreachable),

            FalseEdge { .. } | FalseUnwind { .. } | Yield { .. } | CoroutineDrop => span_bug!(
                terminator.source_info.span,
                "{:#?} should have been eliminated by MIR pass",
                terminator.kind
            ),

            InlineAsm { .. } => {
                throw_unsup_format!("inline assembly is not supported");
            }
        }

        interp_ok(())
    }
}