use either::Either;
use rustc_abi::{FIRST_VARIANT, FieldIdx};
use rustc_index::IndexSlice;
use rustc_middle::ty::layout::FnAbiOf;
use rustc_middle::ty::{self, Instance, Ty};
use rustc_middle::{bug, mir, span_bug};
use rustc_span::source_map::Spanned;
use rustc_target::callconv::FnAbi;
use tracing::{info, instrument, trace};

use super::{
    FnArg, FnVal, ImmTy, Immediate, InterpCx, InterpResult, Machine, MemPlaceMeta, PlaceTy,
    Projectable, Scalar, interp_ok, throw_ub,
};
use crate::util;

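/// Everything `eval_callee_and_args` computes about the callee and arguments that is
/// needed to actually perform a `Call` or `TailCall`.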
struct EvaluatedCalleeAndArgs<'tcx, M: Machine<'tcx>> {
    callee: FnVal<'tcx, M::ExtraFnVal>,
    args: Vec<FnArg<'tcx, M::Provenance>>,
    fn_sig: ty::FnSig<'tcx>,
    fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>,
    /// Whether the callee is `#[track_caller]` and thus expects an extra,
    /// implicit caller-location argument.
    with_caller_location: bool,
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    #[inline(always)]
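    /// Performs a single step of the interpreter: either executes the next statement
    /// of the current basic block, or evaluates its terminator.
    ///
    /// Returns `true` as long as there are more things to do.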
    pub fn step(&mut self) -> InterpResult<'tcx, bool> {
        if self.stack().is_empty() {
            return interp_ok(false);
        }

        let Either::Left(loc) = self.frame().loc else {
            // We are unwinding and this fell off the end of the unwind path. That should
            // mean we are in a cleanup block, so continue unwinding by popping this frame.
            trace!("unwinding: skipping frame");
            self.return_from_current_stack_frame(/* unwinding */ true)?;
            return interp_ok(true);
        };
        let basic_block = &self.body().basic_blocks[loc.block];

        if let Some(stmt) = basic_block.statements.get(loc.statement_index) {
            let old_frames = self.frame_idx();
            self.eval_statement(stmt)?;
            // Statements never change the call stack.
            assert_eq!(old_frames, self.frame_idx());
            // Advance the program counter.
            self.frame_mut().loc.as_mut().left().unwrap().statement_index += 1;
            return interp_ok(true);
        }

        M::before_terminator(self)?;

        let terminator = basic_block.terminator();
        self.eval_terminator(terminator)?;
        if !self.stack().is_empty() {
            if let Either::Left(loc) = self.frame().loc {
                info!("// executing {:?}", loc.block);
            }
        }
        interp_ok(true)
    }

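    /// Runs the interpretation logic for the given `mir::Statement` at the current frame and
    /// statement counter.
    ///
    /// This does NOT move the statement counter forward; the caller has to do that!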
    pub fn eval_statement(&mut self, stmt: &mir::Statement<'tcx>) -> InterpResult<'tcx> {
        info!("{:?}", stmt);

        use rustc_middle::mir::StatementKind::*;

        match &stmt.kind {
            Assign(box (place, rvalue)) => self.eval_rvalue_into_place(rvalue, *place)?,

            SetDiscriminant { place, variant_index } => {
                let dest = self.eval_place(**place)?;
                self.write_discriminant(*variant_index, &dest)?;
            }

            Deinit(place) => {
                let dest = self.eval_place(**place)?;
                self.write_uninit(&dest)?;
            }

            // Mark locals as alive.
            StorageLive(local) => {
                self.storage_live(*local)?;
            }

            // Mark locals as dead.
            StorageDead(local) => {
                self.storage_dead(*local)?;
            }

            // Only exists for analyses and lints; no interpreter semantics.
            FakeRead(..) => {}

            // Hook for Stacked Borrows and similar machine-level pointer tracking.
            Retag(kind, place) => {
                let dest = self.eval_place(**place)?;
                M::retag_place_contents(self, *kind, &dest)?;
            }

            Intrinsic(box intrinsic) => self.eval_nondiverging_intrinsic(intrinsic)?,

            // Evaluate the place expression, without reading from it.
            PlaceMention(box place) => {
                let _ = self.eval_place(*place)?;
            }

            // Type ascriptions only matter for type-checking; no interpreter semantics.
            AscribeUserType(..) => {}

            // Coverage instrumentation is ignored by the interpreter.
            Coverage(..) => {}

            ConstEvalCounter => {
                M::increment_const_eval_counter(self)?;
            }

            Nop => {}

            // Only exists to inform drop-order lints; no interpreter semantics.
            BackwardIncompatibleDropHint { .. } => {}
        }

        interp_ok(())
    }

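    /// Evaluate an assignment statement.
    ///
    /// There is no separate `eval_rvalue` function. Instead, the code for handling each rvalue
    /// type writes its results directly into the memory specified by the place.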
    pub fn eval_rvalue_into_place(
        &mut self,
        rvalue: &mir::Rvalue<'tcx>,
        place: mir::Place<'tcx>,
    ) -> InterpResult<'tcx> {
        let dest = self.eval_place(place)?;
        use rustc_middle::mir::Rvalue::*;
        match *rvalue {
            ThreadLocalRef(did) => {
                let ptr = M::thread_local_static_pointer(self, did)?;
                self.write_pointer(ptr, &dest)?;
            }

            Use(ref operand) => {
                // The operand has the same type as the destination, so pass the layout
                // along as a hint to avoid recomputing it.
                let op = self.eval_operand(operand, Some(dest.layout))?;
                self.copy_op(&op, &dest)?;
            }

            CopyForDeref(place) => {
                let op = self.eval_place_to_op(place, Some(dest.layout))?;
                self.copy_op(&op, &dest)?;
            }

            BinaryOp(bin_op, box (ref left, ref right)) => {
                // Reuse known layouts as operand layout hints, but only where the operator
                // is known to be homogeneous in that operand.
                let layout = util::binop_left_homogeneous(bin_op).then_some(dest.layout);
                let left = self.read_immediate(&self.eval_operand(left, layout)?)?;
                let layout = util::binop_right_homogeneous(bin_op).then_some(left.layout);
                let right = self.read_immediate(&self.eval_operand(right, layout)?)?;
                let result = self.binary_op(bin_op, &left, &right)?;
                assert_eq!(result.layout, dest.layout, "layout mismatch for result of {bin_op:?}");
                self.write_immediate(*result, &dest)?;
            }

            UnaryOp(un_op, ref operand) => {
                let val = self.read_immediate(&self.eval_operand(operand, Some(dest.layout))?)?;
                let result = self.unary_op(un_op, &val)?;
                assert_eq!(result.layout, dest.layout, "layout mismatch for result of {un_op:?}");
                self.write_immediate(*result, &dest)?;
            }

            NullaryOp(null_op, ty) => {
                let ty = self.instantiate_from_current_frame_and_normalize_erasing_regions(ty)?;
                let val = self.nullary_op(null_op, ty)?;
                self.write_immediate(*val, &dest)?;
            }

            Aggregate(box ref kind, ref operands) => {
                self.write_aggregate(kind, operands, &dest)?;
            }

            Repeat(ref operand, _) => {
                self.write_repeat(operand, &dest)?;
            }

            Len(place) => {
                let src = self.eval_place(place)?;
                let len = src.len(self)?;
                self.write_scalar(Scalar::from_target_usize(len, self), &dest)?;
            }

            Ref(_, borrow_kind, place) => {
                let src = self.eval_place(place)?;
                let place = self.force_allocation(&src)?;
                let val = ImmTy::from_immediate(place.to_ref(self), dest.layout);
                // A fresh reference was created, make sure it gets retagged.
                let val = M::retag_ptr_value(
                    self,
                    if borrow_kind.allows_two_phase_borrow() {
                        mir::RetagKind::TwoPhase
                    } else {
                        mir::RetagKind::Default
                    },
                    &val,
                )?;
                self.write_immediate(*val, &dest)?;
            }

            RawPtr(kind, place) => {
                // Figure out whether this is a raw-pointer operation on a place whose base
                // is itself a raw pointer.
                let place_base_raw = if place.is_indirect_first_projection() {
                    let ty = self.frame().body.local_decls[place.local].ty;
                    ty.is_raw_ptr()
                } else {
                    // Not a deref, and thus not raw.
                    false
                };

                let src = self.eval_place(place)?;
                let place = self.force_allocation(&src)?;
                let mut val = ImmTy::from_immediate(place.to_ref(self), dest.layout);
                if !place_base_raw && !kind.is_fake() {
                    // If this was already raw, it doesn't get retagged.
                    // "Fake" raw borrows are never retagged either.
                    val = M::retag_ptr_value(self, mir::RetagKind::Raw, &val)?;
                }
                self.write_immediate(*val, &dest)?;
            }

            ShallowInitBox(ref operand, _) => {
                let src = self.eval_operand(operand, None)?;
                let v = self.read_immediate(&src)?;
                self.write_immediate(*v, &dest)?;
            }

            Cast(cast_kind, ref operand, cast_ty) => {
                let src = self.eval_operand(operand, None)?;
                let cast_ty =
                    self.instantiate_from_current_frame_and_normalize_erasing_regions(cast_ty)?;
                self.cast(&src, cast_kind, cast_ty, &dest)?;
            }

            Discriminant(place) => {
                let op = self.eval_place_to_op(place, None)?;
                let variant = self.read_discriminant(&op)?;
                let discr = self.discriminant_for_variant(op.layout.ty, variant)?;
                self.write_immediate(*discr, &dest)?;
            }

            WrapUnsafeBinder(ref op, _ty) => {
                // The inner value and the binder-wrapped destination are layout-compatible,
                // so this is just a (possibly transmuting) copy.
                let op = self.eval_operand(op, None)?;
                self.copy_op_allow_transmute(&op, &dest)?;
            }
        }

        trace!("{:?}", self.dump_place(&dest));

        interp_ok(())
    }

    #[instrument(skip(self), level = "trace")]
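    /// Writes the aggregate to the destination.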
    fn write_aggregate(
        &mut self,
        kind: &mir::AggregateKind<'tcx>,
        operands: &IndexSlice<FieldIdx, mir::Operand<'tcx>>,
        dest: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        self.write_uninit(dest)?; // make sure all the padding ends up as uninit
        let (variant_index, variant_dest, active_field_index) = match *kind {
            mir::AggregateKind::Adt(_, variant_index, _, _, active_field_index) => {
                let variant_dest = self.project_downcast(dest, variant_index)?;
                (variant_index, variant_dest, active_field_index)
            }
            mir::AggregateKind::RawPtr(..) => {
                // Pointers don't have "fields" in the normal sense, so the projection-based
                // code below would fail. Instead, build an `Immediate` from the data pointer
                // and metadata and write that to the destination.
                let [data, meta] = &operands.raw else {
                    bug!("{kind:?} should have 2 operands, had {operands:?}");
                };
                let data = self.eval_operand(data, None)?;
                let data = self.read_pointer(&data)?;
                let meta = self.eval_operand(meta, None)?;
                let meta = if meta.layout.is_zst() {
                    MemPlaceMeta::None
                } else {
                    MemPlaceMeta::Meta(self.read_scalar(&meta)?)
                };
                let ptr_imm = Immediate::new_pointer_with_meta(data, meta, self);
                let ptr = ImmTy::from_immediate(ptr_imm, dest.layout);
                self.copy_op(&ptr, dest)?;
                return interp_ok(());
            }
            _ => (FIRST_VARIANT, dest.clone(), None),
        };
        if active_field_index.is_some() {
            assert_eq!(operands.len(), 1);
        }
        for (field_index, operand) in operands.iter_enumerated() {
            let field_index = active_field_index.unwrap_or(field_index);
            let field_dest = self.project_field(&variant_dest, field_index.as_usize())?;
            let op = self.eval_operand(operand, Some(field_dest.layout))?;
            self.copy_op(&op, &field_dest)?;
        }
        self.write_discriminant(variant_index, dest)
    }

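    /// Repeats `operand` into the destination. `dest` must have array type, and that type
    /// determines how often `operand` is repeated.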
    fn write_repeat(
        &mut self,
        operand: &mir::Operand<'tcx>,
        dest: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        let src = self.eval_operand(operand, None)?;
        assert!(src.layout.is_sized());
        let dest = self.force_allocation(&dest)?;
        let length = dest.len(self)?;

        if length == 0 {
            // Nothing to copy... but let's still make sure that `dest` as a place is valid.
            self.get_place_alloc_mut(&dest)?;
        } else {
            // Write the first element.
            let first = self.project_index(&dest, 0)?;
            self.copy_op(&src, &first)?;

            // Copy the rest by repeatedly duplicating the first element. This is
            // performance-sensitive code for big static/const arrays!
            let elem_size = first.layout.size;
            let first_ptr = first.ptr();
            let rest_ptr = first_ptr.wrapping_offset(elem_size, self);
            self.mem_copy_repeatedly(
                first_ptr,
                rest_ptr,
                elem_size,
                length - 1,
                /* nonoverlapping */ true,
            )?;
        }

        interp_ok(())
    }

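    /// Evaluate a single argument of a function call, deciding whether to pass it
    /// by copy or in-place.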
    fn eval_fn_call_argument(
        &self,
        op: &mir::Operand<'tcx>,
    ) -> InterpResult<'tcx, FnArg<'tcx, M::Provenance>> {
        interp_ok(match op {
            mir::Operand::Copy(_) | mir::Operand::Constant(_) => {
                // Make a regular copy.
                let op = self.eval_operand(op, None)?;
                FnArg::Copy(op)
            }
            mir::Operand::Move(place) => {
                // If this place lives in memory, preserve its location and pass it in-place.
                let place = self.eval_place(*place)?;
                let op = self.place_to_op(&place)?;

                match op.as_mplace_or_imm() {
                    Either::Left(mplace) => FnArg::InPlace(mplace),
                    Either::Right(_imm) => {
                        // This argument doesn't live in memory, so there's no place to make
                        // inaccessible for the duration of the call. Just pass a copy.
                        FnArg::Copy(op)
                    }
                }
            }
        })
    }

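    /// Shared part of the `Call` and `TailCall` implementations: evaluating the callee and
    /// arguments, and gathering everything that is needed to actually perform the call.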
    fn eval_callee_and_args(
        &self,
        terminator: &mir::Terminator<'tcx>,
        func: &mir::Operand<'tcx>,
        args: &[Spanned<mir::Operand<'tcx>>],
    ) -> InterpResult<'tcx, EvaluatedCalleeAndArgs<'tcx, M>> {
        let func = self.eval_operand(func, None)?;
        let args = args
            .iter()
            .map(|arg| self.eval_fn_call_argument(&arg.node))
            .collect::<InterpResult<'tcx, Vec<_>>>()?;

        let fn_sig_binder = func.layout.ty.fn_sig(*self.tcx);
        let fn_sig = self.tcx.normalize_erasing_late_bound_regions(self.typing_env, fn_sig_binder);
        // Arguments beyond the signature's declared inputs are the extra arguments
        // of a C-variadic call; the ABI computation needs to know their types.
        let extra_args = &args[fn_sig.inputs().len()..];
        let extra_args =
            self.tcx.mk_type_list_from_iter(extra_args.iter().map(|arg| arg.layout().ty));

        let (callee, fn_abi, with_caller_location) = match *func.layout.ty.kind() {
            ty::FnPtr(..) => {
                let fn_ptr = self.read_pointer(&func)?;
                let fn_val = self.get_ptr_fn(fn_ptr)?;
                (fn_val, self.fn_abi_of_fn_ptr(fn_sig_binder, extra_args)?, false)
            }
            ty::FnDef(def_id, args) => {
                let instance = self.resolve(def_id, args)?;
                (
                    FnVal::Instance(instance),
                    self.fn_abi_of_instance(instance, extra_args)?,
                    instance.def.requires_caller_location(*self.tcx),
                )
            }
            _ => {
                span_bug!(terminator.source_info.span, "invalid callee of type {}", func.layout.ty)
            }
        };

        interp_ok(EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location })
    }

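    /// Runs the interpretation logic for the given terminator. This is where calls, drops,
    /// branches, and the other control-flow constructs are handled.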
    fn eval_terminator(&mut self, terminator: &mir::Terminator<'tcx>) -> InterpResult<'tcx> {
        info!("{:?}", terminator.kind);

        use rustc_middle::mir::TerminatorKind::*;
        match terminator.kind {
            Return => self.return_from_current_stack_frame(/* unwinding */ false)?,

            Goto { target } => self.go_to_block(target),

            SwitchInt { ref discr, ref targets } => {
                let discr = self.read_immediate(&self.eval_operand(discr, None)?)?;
                trace!("SwitchInt({:?})", *discr);

                // Branch to the `otherwise` case by default, if no target matches.
                let mut target_block = targets.otherwise();

                for (const_int, target) in targets.iter() {
                    // Compare using MIR `BinOp::Eq`, to also support pointer values.
                    let res = self.binary_op(
                        mir::BinOp::Eq,
                        &discr,
                        &ImmTy::from_uint(const_int, discr.layout),
                    )?;
                    if res.to_scalar().to_bool()? {
                        target_block = target;
                        break;
                    }
                }

                self.go_to_block(target_block);
            }

            Call {
                ref func,
                ref args,
                destination,
                target,
                unwind,
                call_source: _,
                fn_span: _,
            } => {
                let old_stack = self.frame_idx();
                let old_loc = self.frame().loc;

                let EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location } =
                    self.eval_callee_and_args(terminator, func, args)?;

                let destination = self.force_allocation(&self.eval_place(destination)?)?;
                self.init_fn_call(
                    callee,
                    (fn_sig.abi, fn_abi),
                    &args,
                    with_caller_location,
                    &destination,
                    target,
                    if fn_abi.can_unwind { unwind } else { mir::UnwindAction::Unreachable },
                )?;
                // Sanity-check that `init_fn_call` either pushed a new frame or did a jump
                // to another block.
                if self.frame_idx() == old_stack && self.frame().loc == old_loc {
                    span_bug!(terminator.source_info.span, "evaluating this call made no progress");
                }
            }

            TailCall { ref func, ref args, fn_span: _ } => {
                let old_frame_idx = self.frame_idx();

                let EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location } =
                    self.eval_callee_and_args(terminator, func, args)?;

                self.init_fn_tail_call(callee, (fn_sig.abi, fn_abi), &args, with_caller_location)?;

                if self.frame_idx() != old_frame_idx {
                    span_bug!(
                        terminator.source_info.span,
                        "evaluating this tail call pushed a new stack frame"
                    );
                }
            }

            Drop { place, target, unwind, replace: _ } => {
                let place = self.eval_place(place)?;
                let instance = Instance::resolve_drop_in_place(*self.tcx, place.layout.ty);
                if let ty::InstanceKind::DropGlue(_, None) = instance.def {
                    // The dropped type has no drop glue whatsoever, e.g. as the result of
                    // monomorphizing a drop of a generic parameter. We can skip the call
                    // entirely and just jump to the target block.
                    self.go_to_block(target);
                    return interp_ok(());
                }
                trace!("TerminatorKind::drop: {:?}, type {}", place, place.layout.ty);
                self.init_drop_in_place_call(&place, instance, target, unwind)?;
            }

            Assert { ref cond, expected, ref msg, target, unwind } => {
                let ignored =
                    M::ignore_optional_overflow_checks(self) && msg.is_optional_overflow_check();
                let cond_val = self.read_scalar(&self.eval_operand(cond, None)?)?.to_bool()?;
                if ignored || expected == cond_val {
                    self.go_to_block(target);
                } else {
                    M::assert_panic(self, msg, unwind)?;
                }
            }

            UnwindTerminate(reason) => {
                M::unwind_terminate(self, reason)?;
            }

            // Encountering `UnwindResume` means we finished unwinding the cleanup blocks of
            // the current frame, so we pop it in order to continue unwinding the caller.
            UnwindResume => {
                trace!("unwinding: resuming from cleanup");
                self.return_from_current_stack_frame(/* unwinding */ true)?;
                return interp_ok(());
            }

            // It is UB to ever encounter this.
            Unreachable => throw_ub!(Unreachable),

            // These should never occur for MIR we actually run.
            FalseEdge { .. } | FalseUnwind { .. } | Yield { .. } | CoroutineDrop => span_bug!(
                terminator.source_info.span,
                "{:#?} should have been eliminated by MIR pass",
                terminator.kind
            ),

            InlineAsm { template, ref operands, options, ref targets, .. } => {
                M::eval_inline_asm(self, template, operands, options, targets)?;
            }
        }

        interp_ok(())
    }
}