//! This module contains everything needed to instantiate an interpreter.
//! This separation exists to ensure that no fancy miri features like
//! interpreting common C functions leak into CTFE.
use std::borrow::{Borrow, Cow};
use std::fmt::Debug;
use std::hash::Hash;

use rustc_abi::{Align, Size};
use rustc_apfloat::{Float, FloatConvert};
use rustc_middle::query::TyCtxtAt;
use rustc_middle::ty::Ty;
use rustc_middle::ty::layout::TyAndLayout;
use rustc_middle::{mir, ty};
use rustc_span::def_id::DefId;
use rustc_target::callconv::FnAbi;

use super::{
    AllocBytes, AllocId, AllocKind, AllocRange, Allocation, CTFE_ALLOC_SALT, ConstAllocation,
    CtfeProvenance, EnteredTraceSpan, FnArg, Frame, ImmTy, InterpCx, InterpResult, MPlaceTy,
    MemoryKind, Misalignment, OpTy, PlaceTy, Pointer, Provenance, RangeSet, interp_ok, throw_unsup,
};
/// Data returned by [`Machine::after_stack_pop`], and consumed by
/// [`InterpCx::return_from_current_stack_frame`] to determine what actions should be done when
/// returning from a stack frame.
// NOTE: the extracted text had the expanded derive impls inlined into the attribute;
// restored to the plain `#[derive(...)]` form, which generates exactly those impls.
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
pub enum ReturnAction {
    /// Indicates that no special handling should be
    /// done - we'll either return normally or unwind
    /// based on the terminator for the function
    /// we're leaving.
    Normal,

    /// Indicates that we should *not* jump to the return/unwind address, as the callback already
    /// took care of everything.
    NoJump,

    /// Returned by [`InterpCx::pop_stack_frame_raw`] when no cleanup should be done.
    NoCleanup,
}
/// Whether this kind of memory is allowed to leak
pub trait MayLeak: Copy {
    /// Returns `true` if it is acceptable for an allocation of this memory kind
    /// to still exist when the interpreter shuts down (i.e., leaking it is not an error).
    fn may_leak(self) -> bool;
}
/// The functionality needed by memory to manage its allocations
pub trait AllocMap<K: Hash + Eq, V> {
    /// Tests if the map contains the given key.
    /// Deliberately takes `&mut` because that is sufficient, and some implementations
    /// can be more efficient then (using `RefCell::get_mut`).
    fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
    where
        K: Borrow<Q>;

    /// Callers should prefer [`AllocMap::contains_key`] when it is possible to call because it may
    /// be more efficient. This function exists for callers that only have a shared reference
    /// (which might make it slightly less efficient than `contains_key`, e.g. if
    /// the data is stored inside a `RefCell`).
    fn contains_key_ref<Q: ?Sized + Hash + Eq>(&self, k: &Q) -> bool
    where
        K: Borrow<Q>;

    /// Inserts a new entry into the map.
    fn insert(&mut self, k: K, v: V) -> Option<V>;

    /// Removes an entry from the map.
    fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>;

    /// Returns data based on the keys and values in the map.
    fn filter_map_collect<T>(&self, f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T>;

    /// Returns a reference to entry `k`. If no such entry exists, call
    /// `vacant` and either forward its error, or add its result to the map
    /// and return a reference to *that*.
    fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E>;

    /// Returns a mutable reference to entry `k`. If no such entry exists, call
    /// `vacant` and either forward its error, or add its result to the map
    /// and return a reference to *that*.
    fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E>;

    /// Read-only lookup.
    fn get(&self, k: K) -> Option<&V> {
        self.get_or(k, || Err(())).ok()
    }

    /// Mutable lookup.
    fn get_mut(&mut self, k: K) -> Option<&mut V> {
        self.get_mut_or(k, || Err(())).ok()
    }
}
9697/// Methods of this trait signifies a point where CTFE evaluation would fail
98/// and some use case dependent behaviour can instead be applied.
99pub trait Machine<'tcx>: Sized {
100/// Additional memory kinds a machine wishes to distinguish from the builtin ones
101type MemoryKind: Debug + std::fmt::Display + MayLeak + Eq + 'static;
102103/// Pointers are "tagged" with provenance information; typically the `AllocId` they belong to.
104type Provenance: Provenance + Eq + Hash + 'static;
105106/// When getting the AllocId of a pointer, some extra data is also obtained from the provenance
107 /// that is passed to memory access hooks so they can do things with it.
108type ProvenanceExtra: Copy + 'static;
109110/// Machines can define extra (non-instance) things that represent values of function pointers.
111 /// For example, Miri uses this to return a function pointer from `dlsym`
112 /// that can later be called to execute the right thing.
113type ExtraFnVal: Debug + Copy;
114115/// Extra data stored in every call frame.
116type FrameExtra;
117118/// Extra data stored in every allocation.
119type AllocExtra: Debug + Clone + 'tcx;
120121/// Type for the bytes of the allocation.
122type Bytes: AllocBytes + 'static;
123124/// Memory's allocation map
125type MemoryMap: AllocMap<
126AllocId,
127 (
128MemoryKind<Self::MemoryKind>,
129Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>,
130 ),
131 > + Default132 + Clone;
133134/// The memory kind to use for copied global memory (held in `tcx`) --
135 /// or None if such memory should not be mutated and thus any such attempt will cause
136 /// a `ModifiedStatic` error to be raised.
137 /// Statics are copied under two circumstances: When they are mutated, and when
138 /// `adjust_allocation` (see below) returns an owned allocation
139 /// that is added to the memory so that the work is not done twice.
140const GLOBAL_KIND: Option<Self::MemoryKind>;
141142/// Should the machine panic on allocation failures?
143const PANIC_ON_ALLOC_FAIL: bool;
144145/// Determines whether `eval_mir_constant` can never fail because all required consts have
146 /// already been checked before.
147const ALL_CONSTS_ARE_PRECHECKED: bool = true;
148149/// Whether memory accesses should be alignment-checked.
150fn enforce_alignment(ecx: &InterpCx<'tcx, Self>) -> bool;
151152/// Gives the machine a chance to detect more misalignment than the built-in checks would catch.
153#[inline(always)]
154fn alignment_check(
155 _ecx: &InterpCx<'tcx, Self>,
156 _alloc_id: AllocId,
157 _alloc_align: Align,
158 _alloc_kind: AllocKind,
159 _offset: Size,
160 _align: Align,
161 ) -> Option<Misalignment> {
162None163 }
164165/// Whether to enforce the validity invariant for a specific layout.
166fn enforce_validity(ecx: &InterpCx<'tcx, Self>, layout: TyAndLayout<'tcx>) -> bool;
167/// Whether to enforce the validity invariant *recursively*.
168fn enforce_validity_recursively(
169 _ecx: &InterpCx<'tcx, Self>,
170 _layout: TyAndLayout<'tcx>,
171 ) -> bool {
172false
173}
174175/// Whether Assert(OverflowNeg) and Assert(Overflow) MIR terminators should actually
176 /// check for overflow.
177fn ignore_optional_overflow_checks(_ecx: &InterpCx<'tcx, Self>) -> bool;
178179/// Entry point for obtaining the MIR of anything that should get evaluated.
180 /// So not just functions and shims, but also const/static initializers, anonymous
181 /// constants, ...
182fn load_mir(
183 ecx: &InterpCx<'tcx, Self>,
184 instance: ty::InstanceKind<'tcx>,
185 ) -> &'tcx mir::Body<'tcx> {
186ecx.tcx.instance_mir(instance)
187 }
188189/// Entry point to all function calls.
190 ///
191 /// Returns either the mir to use for the call, or `None` if execution should
192 /// just proceed (which usually means this hook did all the work that the
193 /// called function should usually have done). In the latter case, it is
194 /// this hook's responsibility to advance the instruction pointer!
195 /// (This is to support functions like `__rust_maybe_catch_panic` that neither find a MIR
196 /// nor just jump to `ret`, but instead push their own stack frame.)
197 /// Passing `dest`and `ret` in the same `Option` proved very annoying when only one of them
198 /// was used.
199fn find_mir_or_eval_fn(
200 ecx: &mut InterpCx<'tcx, Self>,
201 instance: ty::Instance<'tcx>,
202 abi: &FnAbi<'tcx, Ty<'tcx>>,
203 args: &[FnArg<'tcx, Self::Provenance>],
204 destination: &PlaceTy<'tcx, Self::Provenance>,
205 target: Option<mir::BasicBlock>,
206 unwind: mir::UnwindAction,
207 ) -> InterpResult<'tcx, Option<(&'tcx mir::Body<'tcx>, ty::Instance<'tcx>)>>;
208209/// Execute `fn_val`. It is the hook's responsibility to advance the instruction
210 /// pointer as appropriate.
211fn call_extra_fn(
212 ecx: &mut InterpCx<'tcx, Self>,
213 fn_val: Self::ExtraFnVal,
214 abi: &FnAbi<'tcx, Ty<'tcx>>,
215 args: &[FnArg<'tcx, Self::Provenance>],
216 destination: &PlaceTy<'tcx, Self::Provenance>,
217 target: Option<mir::BasicBlock>,
218 unwind: mir::UnwindAction,
219 ) -> InterpResult<'tcx>;
220221/// Directly process an intrinsic without pushing a stack frame. It is the hook's
222 /// responsibility to advance the instruction pointer as appropriate.
223 ///
224 /// Returns `None` if the intrinsic was fully handled.
225 /// Otherwise, returns an `Instance` of the function that implements the intrinsic.
226fn call_intrinsic(
227 ecx: &mut InterpCx<'tcx, Self>,
228 instance: ty::Instance<'tcx>,
229 args: &[OpTy<'tcx, Self::Provenance>],
230 destination: &PlaceTy<'tcx, Self::Provenance>,
231 target: Option<mir::BasicBlock>,
232 unwind: mir::UnwindAction,
233 ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>>;
234235/// Check whether the given function may be executed on the current machine, in terms of the
236 /// target features is requires.
237fn check_fn_target_features(
238 _ecx: &InterpCx<'tcx, Self>,
239 _instance: ty::Instance<'tcx>,
240 ) -> InterpResult<'tcx>;
241242/// Called to evaluate `Assert` MIR terminators that trigger a panic.
243fn assert_panic(
244 ecx: &mut InterpCx<'tcx, Self>,
245 msg: &mir::AssertMessage<'tcx>,
246 unwind: mir::UnwindAction,
247 ) -> InterpResult<'tcx>;
248249/// Called to trigger a non-unwinding panic.
250fn panic_nounwind(_ecx: &mut InterpCx<'tcx, Self>, msg: &str) -> InterpResult<'tcx>;
251252/// Called when unwinding reached a state where execution should be terminated.
253fn unwind_terminate(
254 ecx: &mut InterpCx<'tcx, Self>,
255 reason: mir::UnwindTerminateReason,
256 ) -> InterpResult<'tcx>;
257258/// Called for all binary operations where the LHS has pointer type.
259 ///
260 /// Returns a (value, overflowed) pair if the operation succeeded
261fn binary_ptr_op(
262 ecx: &InterpCx<'tcx, Self>,
263 bin_op: mir::BinOp,
264 left: &ImmTy<'tcx, Self::Provenance>,
265 right: &ImmTy<'tcx, Self::Provenance>,
266 ) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>>;
267268/// Generate the NaN returned by a float operation, given the list of inputs.
269 /// (This is all inputs, not just NaN inputs!)
270fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(
271 _ecx: &InterpCx<'tcx, Self>,
272 _inputs: &[F1],
273 ) -> F2 {
274// By default we always return the preferred NaN.
275F2::NAN276 }
277278/// Apply non-determinism to float operations that do not return a precise result.
279fn apply_float_nondet(
280 _ecx: &mut InterpCx<'tcx, Self>,
281 val: ImmTy<'tcx, Self::Provenance>,
282 ) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>> {
283interp_ok(val)
284 }
285286/// Determines the result of `min`/`max` on floats when the arguments are equal.
287fn equal_float_min_max<F: Float>(_ecx: &InterpCx<'tcx, Self>, a: F, _b: F) -> F {
288// By default, we pick the left argument.
289a290 }
291292/// Determines whether the `fmuladd` intrinsics fuse the multiply-add or use separate operations.
293fn float_fuse_mul_add(_ecx: &InterpCx<'tcx, Self>) -> bool;
294295/// Called before a basic block terminator is executed.
296#[inline]
297fn before_terminator(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
298interp_ok(())
299 }
300301/// Determines the result of a `Operand::RuntimeChecks` invocation.
302fn runtime_checks(
303 _ecx: &InterpCx<'tcx, Self>,
304 r: mir::RuntimeChecks,
305 ) -> InterpResult<'tcx, bool>;
306307/// Called when the interpreter encounters a `StatementKind::ConstEvalCounter` instruction.
308 /// You can use this to detect long or endlessly running programs.
309#[inline]
310fn increment_const_eval_counter(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
311interp_ok(())
312 }
313314/// Called before a global allocation is accessed.
315 /// `def_id` is `Some` if this is the "lazy" allocation of a static.
316#[inline]
317fn before_access_global(
318 _tcx: TyCtxtAt<'tcx>,
319 _machine: &Self,
320 _alloc_id: AllocId,
321 _allocation: ConstAllocation<'tcx>,
322 _static_def_id: Option<DefId>,
323 _is_write: bool,
324 ) -> InterpResult<'tcx> {
325interp_ok(())
326 }
327328/// Return the `AllocId` for the given thread-local static in the current thread.
329fn thread_local_static_pointer(
330 _ecx: &mut InterpCx<'tcx, Self>,
331 def_id: DefId,
332 ) -> InterpResult<'tcx, Pointer<Self::Provenance>> {
333do yeet ::rustc_middle::mir::interpret::InterpErrorKind::Unsupported(::rustc_middle::mir::interpret::UnsupportedOpInfo::ThreadLocalStatic(def_id))throw_unsup!(ThreadLocalStatic(def_id))334 }
335336/// Return the `AllocId` for the given `extern static`.
337fn extern_static_pointer(
338 ecx: &InterpCx<'tcx, Self>,
339 def_id: DefId,
340 ) -> InterpResult<'tcx, Pointer<Self::Provenance>>;
341342/// "Int-to-pointer cast"
343fn ptr_from_addr_cast(
344 ecx: &InterpCx<'tcx, Self>,
345 addr: u64,
346 ) -> InterpResult<'tcx, Pointer<Option<Self::Provenance>>>;
347348/// Marks a pointer as exposed, allowing its provenance
349 /// to be recovered. "Pointer-to-int cast"
350fn expose_provenance(
351 ecx: &InterpCx<'tcx, Self>,
352 provenance: Self::Provenance,
353 ) -> InterpResult<'tcx>;
354355/// Convert a pointer with provenance into an allocation-offset pair and extra provenance info.
356 /// `size` says how many bytes of memory are expected at that pointer. The *sign* of `size` can
357 /// be used to disambiguate situations where a wildcard pointer sits right in between two
358 /// allocations.
359 ///
360 /// If `ptr.provenance.get_alloc_id()` is `Some(p)`, the returned `AllocId` must be `p`.
361 /// The resulting `AllocId` will just be used for that one step and the forgotten again
362 /// (i.e., we'll never turn the data returned here back into a `Pointer` that might be
363 /// stored in machine state).
364 ///
365 /// When this fails, that means the pointer does not point to a live allocation.
366fn ptr_get_alloc(
367 ecx: &InterpCx<'tcx, Self>,
368 ptr: Pointer<Self::Provenance>,
369 size: i64,
370 ) -> Option<(AllocId, Size, Self::ProvenanceExtra)>;
371372/// Return a "root" pointer for the given allocation: the one that is used for direct
373 /// accesses to this static/const/fn allocation, or the one returned from the heap allocator.
374 ///
375 /// Not called on `extern` or thread-local statics (those use the methods above).
376 ///
377 /// `kind` is the kind of the allocation the pointer points to; it can be `None` when
378 /// it's a global and `GLOBAL_KIND` is `None`.
379fn adjust_alloc_root_pointer(
380 ecx: &InterpCx<'tcx, Self>,
381 ptr: Pointer,
382 kind: Option<MemoryKind<Self::MemoryKind>>,
383 ) -> InterpResult<'tcx, Pointer<Self::Provenance>>;
384385/// Called to adjust global allocations to the Provenance and AllocExtra of this machine.
386 ///
387 /// If `alloc` contains pointers, then they are all pointing to globals.
388 ///
389 /// This should avoid copying if no work has to be done! If this returns an owned
390 /// allocation (because a copy had to be done to adjust things), machine memory will
391 /// cache the result. (This relies on `AllocMap::get_or` being able to add the
392 /// owned allocation to the map even when the map is shared.)
393fn adjust_global_allocation<'b>(
394 ecx: &InterpCx<'tcx, Self>,
395 id: AllocId,
396 alloc: &'b Allocation,
397 ) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>;
398399/// Initialize the extra state of an allocation local to this machine.
400 ///
401 /// This is guaranteed to be called exactly once on all allocations local to this machine.
402 /// It will not be called automatically for global allocations; `adjust_global_allocation`
403 /// has to do that itself if that is desired.
404fn init_local_allocation(
405 ecx: &InterpCx<'tcx, Self>,
406 id: AllocId,
407 kind: MemoryKind<Self::MemoryKind>,
408 size: Size,
409 align: Align,
410 ) -> InterpResult<'tcx, Self::AllocExtra>;
411412/// Hook for performing extra checks on a memory read access.
413 /// `ptr` will always be a pointer with the provenance in `prov` pointing to the beginning of
414 /// `range`.
415 ///
416 /// This will *not* be called during validation!
417 ///
418 /// Takes read-only access to the allocation so we can keep all the memory read
419 /// operations take `&self`. Use a `RefCell` in `AllocExtra` if you
420 /// need to mutate.
421 ///
422 /// This is not invoked for ZST accesses, as no read actually happens.
423#[inline(always)]
424fn before_memory_read(
425 _tcx: TyCtxtAt<'tcx>,
426 _machine: &Self,
427 _alloc_extra: &Self::AllocExtra,
428 _ptr: Pointer<Option<Self::Provenance>>,
429 _prov: (AllocId, Self::ProvenanceExtra),
430 _range: AllocRange,
431 ) -> InterpResult<'tcx> {
432interp_ok(())
433 }
434435/// Hook for performing extra checks on any memory read access,
436 /// that involves an allocation, even ZST reads.
437 ///
438 /// This will *not* be called during validation!
439 ///
440 /// Used to prevent statics from self-initializing by reading from their own memory
441 /// as it is being initialized.
442fn before_alloc_access(
443 _tcx: TyCtxtAt<'tcx>,
444 _machine: &Self,
445 _alloc_id: AllocId,
446 ) -> InterpResult<'tcx> {
447interp_ok(())
448 }
449450/// Hook for performing extra checks on a memory write access.
451 /// This is not invoked for ZST accesses, as no write actually happens.
452 /// `ptr` will always be a pointer with the provenance in `prov` pointing to the beginning of
453 /// `range`.
454#[inline(always)]
455fn before_memory_write(
456 _tcx: TyCtxtAt<'tcx>,
457 _machine: &mut Self,
458 _alloc_extra: &mut Self::AllocExtra,
459 _ptr: Pointer<Option<Self::Provenance>>,
460 _prov: (AllocId, Self::ProvenanceExtra),
461 _range: AllocRange,
462 ) -> InterpResult<'tcx> {
463interp_ok(())
464 }
465466/// Hook for performing extra operations on a memory deallocation.
467 /// `ptr` will always be a pointer with the provenance in `prov` pointing to the beginning of
468 /// the allocation.
469#[inline(always)]
470fn before_memory_deallocation(
471 _tcx: TyCtxtAt<'tcx>,
472 _machine: &mut Self,
473 _alloc_extra: &mut Self::AllocExtra,
474 _ptr: Pointer<Option<Self::Provenance>>,
475 _prov: (AllocId, Self::ProvenanceExtra),
476 _size: Size,
477 _align: Align,
478 _kind: MemoryKind<Self::MemoryKind>,
479 ) -> InterpResult<'tcx> {
480interp_ok(())
481 }
482483/// Executes a retagging operation for a single pointer.
484 /// Returns the possibly adjusted pointer.
485#[inline]
486fn retag_ptr_value(
487 _ecx: &mut InterpCx<'tcx, Self>,
488 _kind: mir::RetagKind,
489 val: &ImmTy<'tcx, Self::Provenance>,
490 ) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>> {
491interp_ok(val.clone())
492 }
493494/// Executes a retagging operation on a compound value.
495 /// Replaces all pointers stored in the given place.
496#[inline]
497fn retag_place_contents(
498 _ecx: &mut InterpCx<'tcx, Self>,
499 _kind: mir::RetagKind,
500 _place: &PlaceTy<'tcx, Self::Provenance>,
501 ) -> InterpResult<'tcx> {
502interp_ok(())
503 }
504505/// Called on places used for in-place function argument and return value handling.
506 ///
507 /// These places need to be protected to make sure the program cannot tell whether the
508 /// argument/return value was actually copied or passed in-place..
509fn protect_in_place_function_argument(
510 ecx: &mut InterpCx<'tcx, Self>,
511 mplace: &MPlaceTy<'tcx, Self::Provenance>,
512 ) -> InterpResult<'tcx> {
513// Without an aliasing model, all we can do is put `Uninit` into the place.
514 // Conveniently this also ensures that the place actually points to suitable memory.
515ecx.write_uninit(mplace)
516 }
517518/// Called immediately before a new stack frame gets pushed.
519fn init_frame(
520 ecx: &mut InterpCx<'tcx, Self>,
521 frame: Frame<'tcx, Self::Provenance>,
522 ) -> InterpResult<'tcx, Frame<'tcx, Self::Provenance, Self::FrameExtra>>;
523524/// Borrow the current thread's stack.
525fn stack<'a>(
526 ecx: &'a InterpCx<'tcx, Self>,
527 ) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>];
528529/// Mutably borrow the current thread's stack.
530fn stack_mut<'a>(
531 ecx: &'a mut InterpCx<'tcx, Self>,
532 ) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>>;
533534/// Called immediately after a stack frame got pushed and its locals got initialized.
535fn after_stack_push(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
536interp_ok(())
537 }
538539/// Called just before the frame is removed from the stack (followed by return value copy and
540 /// local cleanup).
541fn before_stack_pop(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
542interp_ok(())
543 }
544545/// Called immediately after a stack frame got popped, but before jumping back to the caller.
546 /// The `locals` have already been destroyed!
547#[inline(always)]
548fn after_stack_pop(
549 _ecx: &mut InterpCx<'tcx, Self>,
550 _frame: Frame<'tcx, Self::Provenance, Self::FrameExtra>,
551 unwinding: bool,
552 ) -> InterpResult<'tcx, ReturnAction> {
553// By default, we do not support unwinding from panics
554if !!unwinding { ::core::panicking::panic("assertion failed: !unwinding") };assert!(!unwinding);
555interp_ok(ReturnAction::Normal)
556 }
557558/// Called immediately after an "immediate" local variable is read in a given frame
559 /// (i.e., this is called for reads that do not end up accessing addressable memory).
560#[inline(always)]
561fn after_local_read(
562 _ecx: &InterpCx<'tcx, Self>,
563 _frame: &Frame<'tcx, Self::Provenance, Self::FrameExtra>,
564 _local: mir::Local,
565 ) -> InterpResult<'tcx> {
566interp_ok(())
567 }
568569/// Called immediately after an "immediate" local variable is assigned a new value
570 /// (i.e., this is called for writes that do not end up in memory).
571 /// `storage_live` indicates whether this is the initial write upon `StorageLive`.
572#[inline(always)]
573fn after_local_write(
574 _ecx: &mut InterpCx<'tcx, Self>,
575 _local: mir::Local,
576 _storage_live: bool,
577 ) -> InterpResult<'tcx> {
578interp_ok(())
579 }
580581/// Called immediately after actual memory was allocated for a local
582 /// but before the local's stack frame is updated to point to that memory.
583#[inline(always)]
584fn after_local_moved_to_memory(
585 _ecx: &mut InterpCx<'tcx, Self>,
586 _local: mir::Local,
587 _mplace: &MPlaceTy<'tcx, Self::Provenance>,
588 ) -> InterpResult<'tcx> {
589interp_ok(())
590 }
591592/// Returns the salt to be used for a deduplicated global alloation.
593 /// If the allocation is for a function, the instance is provided as well
594 /// (this lets Miri ensure unique addresses for some functions).
595fn get_global_alloc_salt(
596 ecx: &InterpCx<'tcx, Self>,
597 instance: Option<ty::Instance<'tcx>>,
598 ) -> usize;
599600fn cached_union_data_range<'e>(
601 _ecx: &'e mut InterpCx<'tcx, Self>,
602 _ty: Ty<'tcx>,
603 compute_range: impl FnOnce() -> RangeSet,
604 ) -> Cow<'e, RangeSet> {
605// Default to no caching.
606Cow::Owned(compute_range())
607 }
608609/// Compute the value passed to the constructors of the `AllocBytes` type for
610 /// abstract machine allocations.
611fn get_default_alloc_params(&self) -> <Self::Bytes as AllocBytes>::AllocParams;
612613/// Allows enabling/disabling tracing calls from within `rustc_const_eval` at compile time, by
614 /// delegating the entering of [tracing::Span]s to implementors of the [Machine] trait. The
615 /// default implementation corresponds to tracing being disabled, meaning the tracing calls will
616 /// supposedly be optimized out completely. To enable tracing, override this trait method and
617 /// return `span.entered()`. Also see [crate::enter_trace_span].
618#[must_use]
619 #[inline(always)]
620fn enter_trace_span(_span: impl FnOnce() -> tracing::Span) -> impl EnteredTraceSpan {
621 ()
622 }
623}
624625/// A lot of the flexibility above is just needed for `Miri`, but all "compile-time" machines
626/// (CTFE and ConstProp) use the same instance. Here, we share that code.
627pub macro compile_time_machine(<$tcx: lifetime>) {
628type Provenance = CtfeProvenance;
629type ProvenanceExtra = bool; // the "immutable" flag
630631type ExtraFnVal = !;
632633type MemoryKind = $crate::const_eval::MemoryKind;
634type MemoryMap =
635 rustc_data_structures::fx::FxIndexMap<AllocId, (MemoryKind<Self::MemoryKind>, Allocation)>;
636const GLOBAL_KIND: Option<Self::MemoryKind> = None; // no copying of globals from `tcx` to machine memory
637638type AllocExtra = ();
639type FrameExtra = ();
640type Bytes = Box<[u8]>;
641642#[inline(always)]
643fn ignore_optional_overflow_checks(_ecx: &InterpCx<$tcx, Self>) -> bool {
644false
645}
646647#[inline(always)]
648fn unwind_terminate(
649 _ecx: &mut InterpCx<$tcx, Self>,
650 _reason: mir::UnwindTerminateReason,
651 ) -> InterpResult<$tcx> {
652unreachable!("unwinding cannot happen during compile-time evaluation")
653 }
654655#[inline(always)]
656fn check_fn_target_features(
657 _ecx: &InterpCx<$tcx, Self>,
658 _instance: ty::Instance<$tcx>,
659 ) -> InterpResult<$tcx> {
660// For now we don't do any checking here. We can't use `tcx.sess` because that can differ
661 // between crates, and we need to ensure that const-eval always behaves the same.
662interp_ok(())
663 }
664665#[inline(always)]
666fn call_extra_fn(
667 _ecx: &mut InterpCx<$tcx, Self>,
668 fn_val: !,
669 _abi: &FnAbi<$tcx, Ty<$tcx>>,
670 _args: &[FnArg<$tcx>],
671 _destination: &PlaceTy<$tcx, Self::Provenance>,
672 _target: Option<mir::BasicBlock>,
673 _unwind: mir::UnwindAction,
674 ) -> InterpResult<$tcx> {
675match fn_val {}
676 }
677678#[inline(always)]
679fn float_fuse_mul_add(_ecx: &InterpCx<$tcx, Self>) -> bool {
680true
681}
682683#[inline(always)]
684fn adjust_global_allocation<'b>(
685 _ecx: &InterpCx<$tcx, Self>,
686 _id: AllocId,
687 alloc: &'b Allocation,
688 ) -> InterpResult<$tcx, Cow<'b, Allocation<Self::Provenance>>> {
689// Overwrite default implementation: no need to adjust anything.
690interp_ok(Cow::Borrowed(alloc))
691 }
692693fn init_local_allocation(
694 _ecx: &InterpCx<$tcx, Self>,
695 _id: AllocId,
696 _kind: MemoryKind<Self::MemoryKind>,
697 _size: Size,
698 _align: Align,
699 ) -> InterpResult<$tcx, Self::AllocExtra> {
700 interp_ok(())
701 }
702703fn extern_static_pointer(
704 ecx: &InterpCx<$tcx, Self>,
705 def_id: DefId,
706 ) -> InterpResult<$tcx, Pointer> {
707// Use the `AllocId` associated with the `DefId`. Any actual *access* will fail.
708interp_ok(Pointer::new(ecx.tcx.reserve_and_set_static_alloc(def_id).into(), Size::ZERO))
709 }
710711#[inline(always)]
712fn adjust_alloc_root_pointer(
713 _ecx: &InterpCx<$tcx, Self>,
714 ptr: Pointer<CtfeProvenance>,
715 _kind: Option<MemoryKind<Self::MemoryKind>>,
716 ) -> InterpResult<$tcx, Pointer<CtfeProvenance>> {
717 interp_ok(ptr)
718 }
719720#[inline(always)]
721fn ptr_from_addr_cast(
722 _ecx: &InterpCx<$tcx, Self>,
723 addr: u64,
724 ) -> InterpResult<$tcx, Pointer<Option<CtfeProvenance>>> {
725// Allow these casts, but make the pointer not dereferenceable.
726 // (I.e., they behave like transmutation.)
727 // This is correct because no pointers can ever be exposed in compile-time evaluation.
728interp_ok(Pointer::without_provenance(addr))
729 }
730731#[inline(always)]
732fn ptr_get_alloc(
733 _ecx: &InterpCx<$tcx, Self>,
734 ptr: Pointer<CtfeProvenance>,
735 _size: i64,
736 ) -> Option<(AllocId, Size, Self::ProvenanceExtra)> {
737let (prov, offset) = ptr.prov_and_relative_offset();
738Some((prov.alloc_id(), offset, prov.immutable()))
739 }
740741#[inline(always)]
742fn get_global_alloc_salt(
743 _ecx: &InterpCx<$tcx, Self>,
744 _instance: Option<ty::Instance<$tcx>>,
745 ) -> usize {
746 CTFE_ALLOC_SALT
747 }
748}