pub trait Machine<'tcx>: Sized {
type MemoryKind: Debug + Display + MayLeak + Eq + 'static;
type Provenance: Provenance + Eq + Hash + 'static;
type ProvenanceExtra: Copy + 'static;
type ExtraFnVal: Debug + Copy;
type FrameExtra;
type AllocExtra: Debug + Clone + 'tcx;
type Bytes: AllocBytes + 'static;
type MemoryMap: AllocMap<AllocId, (MemoryKind<Self::MemoryKind>, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>)> + Default + Clone;
const GLOBAL_KIND: Option<Self::MemoryKind>;
const PANIC_ON_ALLOC_FAIL: bool;
const ALL_CONSTS_ARE_PRECHECKED: bool = true;
// Required methods
fn enforce_alignment(ecx: &InterpCx<'tcx, Self>) -> bool;
fn enforce_validity(
ecx: &InterpCx<'tcx, Self>,
layout: TyAndLayout<'tcx>,
) -> bool;
fn ignore_optional_overflow_checks(_ecx: &InterpCx<'tcx, Self>) -> bool;
fn find_mir_or_eval_fn(
ecx: &mut InterpCx<'tcx, Self>,
instance: Instance<'tcx>,
abi: ExternAbi,
args: &[FnArg<'tcx, Self::Provenance>],
destination: &MPlaceTy<'tcx, Self::Provenance>,
target: Option<BasicBlock>,
unwind: UnwindAction,
) -> InterpResult<'tcx, Option<(&'tcx Body<'tcx>, Instance<'tcx>)>>;
fn call_extra_fn(
ecx: &mut InterpCx<'tcx, Self>,
fn_val: Self::ExtraFnVal,
abi: ExternAbi,
args: &[FnArg<'tcx, Self::Provenance>],
destination: &MPlaceTy<'tcx, Self::Provenance>,
target: Option<BasicBlock>,
unwind: UnwindAction,
) -> InterpResult<'tcx>;
fn call_intrinsic(
ecx: &mut InterpCx<'tcx, Self>,
instance: Instance<'tcx>,
args: &[OpTy<'tcx, Self::Provenance>],
destination: &MPlaceTy<'tcx, Self::Provenance>,
target: Option<BasicBlock>,
unwind: UnwindAction,
) -> InterpResult<'tcx, Option<Instance<'tcx>>>;
fn check_fn_target_features(
_ecx: &InterpCx<'tcx, Self>,
_instance: Instance<'tcx>,
) -> InterpResult<'tcx>;
fn assert_panic(
ecx: &mut InterpCx<'tcx, Self>,
msg: &AssertMessage<'tcx>,
unwind: UnwindAction,
) -> InterpResult<'tcx>;
fn panic_nounwind(
_ecx: &mut InterpCx<'tcx, Self>,
msg: &str,
) -> InterpResult<'tcx>;
fn unwind_terminate(
ecx: &mut InterpCx<'tcx, Self>,
reason: UnwindTerminateReason,
) -> InterpResult<'tcx>;
fn binary_ptr_op(
ecx: &InterpCx<'tcx, Self>,
bin_op: BinOp,
left: &ImmTy<'tcx, Self::Provenance>,
right: &ImmTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>>;
fn ub_checks(_ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool>;
fn extern_static_pointer(
ecx: &InterpCx<'tcx, Self>,
def_id: DefId,
) -> InterpResult<'tcx, Pointer<Self::Provenance>>;
fn ptr_from_addr_cast(
ecx: &InterpCx<'tcx, Self>,
addr: u64,
) -> InterpResult<'tcx, Pointer<Option<Self::Provenance>>>;
fn expose_ptr(
ecx: &mut InterpCx<'tcx, Self>,
ptr: Pointer<Self::Provenance>,
) -> InterpResult<'tcx>;
fn ptr_get_alloc(
ecx: &InterpCx<'tcx, Self>,
ptr: Pointer<Self::Provenance>,
size: i64,
) -> Option<(AllocId, Size, Self::ProvenanceExtra)>;
fn adjust_global_allocation<'b>(
ecx: &InterpCx<'tcx, Self>,
id: AllocId,
alloc: &'b Allocation,
) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>;
fn init_alloc_extra(
ecx: &InterpCx<'tcx, Self>,
id: AllocId,
kind: MemoryKind<Self::MemoryKind>,
size: Size,
align: Align,
) -> InterpResult<'tcx, Self::AllocExtra>;
fn adjust_alloc_root_pointer(
ecx: &InterpCx<'tcx, Self>,
ptr: Pointer,
kind: Option<MemoryKind<Self::MemoryKind>>,
) -> InterpResult<'tcx, Pointer<Self::Provenance>>;
fn init_frame(
ecx: &mut InterpCx<'tcx, Self>,
frame: Frame<'tcx, Self::Provenance>,
) -> InterpResult<'tcx, Frame<'tcx, Self::Provenance, Self::FrameExtra>>;
fn stack<'a>(
ecx: &'a InterpCx<'tcx, Self>,
) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>];
fn stack_mut<'a>(
ecx: &'a mut InterpCx<'tcx, Self>,
) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>>;
fn get_global_alloc_salt(
ecx: &InterpCx<'tcx, Self>,
instance: Option<Instance<'tcx>>,
) -> usize;
// Provided methods
fn alignment_check(
_ecx: &InterpCx<'tcx, Self>,
_alloc_id: AllocId,
_alloc_align: Align,
_alloc_kind: AllocKind,
_offset: Size,
_align: Align,
) -> Option<Misalignment> { ... }
fn enforce_validity_recursively(
_ecx: &InterpCx<'tcx, Self>,
_layout: TyAndLayout<'tcx>,
) -> bool { ... }
fn load_mir(
ecx: &InterpCx<'tcx, Self>,
instance: InstanceKind<'tcx>,
) -> InterpResult<'tcx, &'tcx Body<'tcx>> { ... }
fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(
_ecx: &InterpCx<'tcx, Self>,
_inputs: &[F1],
) -> F2 { ... }
fn before_terminator(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> { ... }
fn increment_const_eval_counter(
_ecx: &mut InterpCx<'tcx, Self>,
) -> InterpResult<'tcx> { ... }
fn before_access_global(
_tcx: TyCtxtAt<'tcx>,
_machine: &Self,
_alloc_id: AllocId,
_allocation: ConstAllocation<'tcx>,
_static_def_id: Option<DefId>,
_is_write: bool,
) -> InterpResult<'tcx> { ... }
fn thread_local_static_pointer(
_ecx: &mut InterpCx<'tcx, Self>,
def_id: DefId,
) -> InterpResult<'tcx, Pointer<Self::Provenance>> { ... }
fn eval_inline_asm(
_ecx: &mut InterpCx<'tcx, Self>,
_template: &'tcx [InlineAsmTemplatePiece],
_operands: &[InlineAsmOperand<'tcx>],
_options: InlineAsmOptions,
_targets: &[BasicBlock],
) -> InterpResult<'tcx> { ... }
fn before_memory_read(
_tcx: TyCtxtAt<'tcx>,
_machine: &Self,
_alloc_extra: &Self::AllocExtra,
_prov: (AllocId, Self::ProvenanceExtra),
_range: AllocRange,
) -> InterpResult<'tcx> { ... }
fn before_alloc_read(
_ecx: &InterpCx<'tcx, Self>,
_alloc_id: AllocId,
) -> InterpResult<'tcx> { ... }
fn before_memory_write(
_tcx: TyCtxtAt<'tcx>,
_machine: &mut Self,
_alloc_extra: &mut Self::AllocExtra,
_prov: (AllocId, Self::ProvenanceExtra),
_range: AllocRange,
) -> InterpResult<'tcx> { ... }
fn before_memory_deallocation(
_tcx: TyCtxtAt<'tcx>,
_machine: &mut Self,
_alloc_extra: &mut Self::AllocExtra,
_prov: (AllocId, Self::ProvenanceExtra),
_size: Size,
_align: Align,
_kind: MemoryKind<Self::MemoryKind>,
) -> InterpResult<'tcx> { ... }
fn retag_ptr_value(
_ecx: &mut InterpCx<'tcx, Self>,
_kind: RetagKind,
val: &ImmTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>> { ... }
fn retag_place_contents(
_ecx: &mut InterpCx<'tcx, Self>,
_kind: RetagKind,
_place: &PlaceTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx> { ... }
fn protect_in_place_function_argument(
ecx: &mut InterpCx<'tcx, Self>,
mplace: &MPlaceTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx> { ... }
fn after_stack_push(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> { ... }
fn before_stack_pop(
_ecx: &InterpCx<'tcx, Self>,
_frame: &Frame<'tcx, Self::Provenance, Self::FrameExtra>,
) -> InterpResult<'tcx> { ... }
fn after_stack_pop(
_ecx: &mut InterpCx<'tcx, Self>,
_frame: Frame<'tcx, Self::Provenance, Self::FrameExtra>,
unwinding: bool,
) -> InterpResult<'tcx, ReturnAction> { ... }
fn after_local_read(
_ecx: &InterpCx<'tcx, Self>,
_local: Local,
) -> InterpResult<'tcx> { ... }
fn after_local_write(
_ecx: &mut InterpCx<'tcx, Self>,
_local: Local,
_storage_live: bool,
) -> InterpResult<'tcx> { ... }
fn after_local_moved_to_memory(
_ecx: &mut InterpCx<'tcx, Self>,
_local: Local,
_mplace: &MPlaceTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx> { ... }
fn eval_mir_constant<F>(
ecx: &InterpCx<'tcx, Self>,
val: Const<'tcx>,
span: Span,
layout: Option<TyAndLayout<'tcx>>,
eval: F,
) -> InterpResult<'tcx, OpTy<'tcx, Self::Provenance>>
where F: Fn(&InterpCx<'tcx, Self>, Const<'tcx>, Span, Option<TyAndLayout<'tcx>>) -> InterpResult<'tcx, OpTy<'tcx, Self::Provenance>> { ... }
fn cached_union_data_range<'e>(
_ecx: &'e mut InterpCx<'tcx, Self>,
_ty: Ty<'tcx>,
compute_range: impl FnOnce() -> RangeSet,
) -> Cow<'e, RangeSet> { ... }
}
Methods of this trait signify a point where CTFE evaluation would fail, and some use-case-dependent behaviour can instead be applied.
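For orientation, a machine implementation picks concrete types for the associated items below and then fills in the hooks. A heavily hedged sketch with hypothetical names (the real implementations are the compile-time machine in rustc_const_eval and Miri's machine; the const-eval defaults shown here — CtfeProvenance, Box<[u8]> bytes, an FxIndexMap-based memory map — are assumptions):
// Sketch only: shape of a machine impl, not a compilable implementation.
// `MyMachine` and `MyMemoryKind` are made up.
struct MyMachine;

impl<'tcx> Machine<'tcx> for MyMachine {
    type MemoryKind = MyMemoryKind;   // must be Debug + Display + MayLeak + Eq + 'static
    type Provenance = CtfeProvenance; // the default AllocId-based provenance
    type ProvenanceExtra = ();        // no extra per-pointer data for the access hooks
    type ExtraFnVal = !;              // no function values beyond ordinary `Instance`s
    type FrameExtra = ();
    type AllocExtra = ();
    type Bytes = Box<[u8]>;           // plain heap-allocated bytes
    type MemoryMap = FxIndexMap<AllocId, (MemoryKind<MyMemoryKind>, Allocation)>;

    const GLOBAL_KIND: Option<MyMemoryKind> = None; // globals may not be mutated
    const PANIC_ON_ALLOC_FAIL: bool = true;

    // ... the required methods listed above would follow here ...
}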
Required Associated Constants
const GLOBAL_KIND: Option<Self::MemoryKind>
The memory kind to use for copied global memory (held in tcx) – or None if such memory should not be mutated and thus any such attempt will cause a ModifiedStatic error to be raised.
Statics are copied under two circumstances: when they are mutated, and when adjust_global_allocation (see below) returns an owned allocation that is added to the memory so that the work is not done twice.
const PANIC_ON_ALLOC_FAIL: bool
Should the machine panic on allocation failures?
Provided Associated Constants
const ALL_CONSTS_ARE_PRECHECKED: bool = true
Determines whether eval_mir_constant can never fail because all required consts have already been checked before.
Required Associated Types
type MemoryKind: Debug + Display + MayLeak + Eq + 'static
Additional memory kinds a machine wishes to distinguish from the builtin ones.
type Provenance: Provenance + Eq + Hash + 'static
Pointers are “tagged” with provenance information; typically the AllocId they belong to.
type ProvenanceExtra: Copy + 'static
When getting the AllocId of a pointer, some extra data is also obtained from the provenance that is passed to memory access hooks so they can do things with it.
type ExtraFnVal: Debug + Copy
Machines can define extra (non-instance) things that represent values of function pointers. For example, Miri uses this to return a function pointer from dlsym that can later be called to execute the right thing.
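As a rough illustration (hypothetical type and contents; Miri's real choice is its own dynamic-symbol value), such a type only needs to be Debug + Copy and carry enough data for call_extra_fn to dispatch on:
// Sketch only: a machine-specific "function value" for functions that have no MIR body.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum MyExtraFnVal {
    /// A host-implemented shim, identified by the symbol name it was resolved from.
    HostShim(&'static str),
}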
type FrameExtra
Extra data stored in every call frame.
type AllocExtra: Debug + Clone + 'tcx
Extra data stored in every allocation.
type Bytes: AllocBytes + 'static
Type for the bytes of the allocation.
type MemoryMap: AllocMap<AllocId, (MemoryKind<Self::MemoryKind>, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>)> + Default + Clone
Memory’s allocation map.
Required Methods
fn enforce_alignment(ecx: &InterpCx<'tcx, Self>) -> bool
Whether memory accesses should be alignment-checked.
fn enforce_validity(
    ecx: &InterpCx<'tcx, Self>,
    layout: TyAndLayout<'tcx>,
) -> bool
Whether to enforce the validity invariant for a specific layout.
fn ignore_optional_overflow_checks(_ecx: &InterpCx<'tcx, Self>) -> bool
Whether Assert(OverflowNeg) and Assert(Overflow) MIR terminators should actually check for overflow.
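These three policy hooks typically just consult configuration stored on the machine. A minimal sketch (InterpCx::machine is the machine instance; the check_alignment and validation_enabled fields are assumptions):
// Sketch only: flag-driven policy hooks. Field names on the machine are assumptions.
fn enforce_alignment(ecx: &InterpCx<'tcx, Self>) -> bool {
    ecx.machine.check_alignment // e.g. disabled under Miri's -Zmiri-disable-alignment-check
}

fn enforce_validity(ecx: &InterpCx<'tcx, Self>, _layout: TyAndLayout<'tcx>) -> bool {
    ecx.machine.validation_enabled
}

fn ignore_optional_overflow_checks(_ecx: &InterpCx<'tcx, Self>) -> bool {
    // Never skip the MIR-level overflow assertions in this sketch.
    false
}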
fn find_mir_or_eval_fn(
    ecx: &mut InterpCx<'tcx, Self>,
    instance: Instance<'tcx>,
    abi: ExternAbi,
    args: &[FnArg<'tcx, Self::Provenance>],
    destination: &MPlaceTy<'tcx, Self::Provenance>,
    target: Option<BasicBlock>,
    unwind: UnwindAction,
) -> InterpResult<'tcx, Option<(&'tcx Body<'tcx>, Instance<'tcx>)>>
Entry point to all function calls.
Returns either the MIR to use for the call, or None if execution should just proceed (which usually means this hook did all the work that the called function should usually have done). In the latter case, it is this hook’s responsibility to advance the instruction pointer! (This is to support functions like __rust_maybe_catch_panic that neither find a MIR nor just jump to ret, but instead push their own stack frame.)
Passing dest and ret in the same Option proved very annoying when only one of them was used.
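A hedged sketch of the simplest possible implementation, which never short-circuits a call and always interprets the callee's MIR (it reuses this trait's own load_mir hook; interp_ok is assumed to be the current InterpResult constructor, older toolchains wrapped the value in a plain Ok):
// Sketch only: always hand the callee's MIR back to the interpreter core.
// A real machine would intercept foreign functions, panic shims, etc. here.
fn find_mir_or_eval_fn(
    ecx: &mut InterpCx<'tcx, Self>,
    instance: Instance<'tcx>,
    _abi: ExternAbi,
    _args: &[FnArg<'tcx, Self::Provenance>],
    _destination: &MPlaceTy<'tcx, Self::Provenance>,
    _target: Option<BasicBlock>,
    _unwind: UnwindAction,
) -> InterpResult<'tcx, Option<(&'tcx Body<'tcx>, Instance<'tcx>)>> {
    // Defer to the `load_mir` hook so machine-specific MIR selection still applies.
    let body = Self::load_mir(ecx, instance.def)?;
    interp_ok(Some((body, instance)))
}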
fn call_extra_fn(
    ecx: &mut InterpCx<'tcx, Self>,
    fn_val: Self::ExtraFnVal,
    abi: ExternAbi,
    args: &[FnArg<'tcx, Self::Provenance>],
    destination: &MPlaceTy<'tcx, Self::Provenance>,
    target: Option<BasicBlock>,
    unwind: UnwindAction,
) -> InterpResult<'tcx>
Execute fn_val. It is the hook’s responsibility to advance the instruction pointer as appropriate.
fn call_intrinsic(
    ecx: &mut InterpCx<'tcx, Self>,
    instance: Instance<'tcx>,
    args: &[OpTy<'tcx, Self::Provenance>],
    destination: &MPlaceTy<'tcx, Self::Provenance>,
    target: Option<BasicBlock>,
    unwind: UnwindAction,
) -> InterpResult<'tcx, Option<Instance<'tcx>>>
Directly process an intrinsic without pushing a stack frame. It is the hook’s responsibility to advance the instruction pointer as appropriate.
Returns None if the intrinsic was fully handled. Otherwise, returns an Instance of the function that implements the intrinsic.
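To make the return contract concrete, here is a hedged sketch that handles nothing itself and always asks the interpreter to call the intrinsic's own MIR body; this only works for intrinsics that actually have a body (interp_ok is again assumed to be the InterpResult constructor):
// Sketch only: no machine-specific intrinsics. Returning `Some(instance)` tells the
// interpreter to fall back to calling the intrinsic's own body, if it has one.
fn call_intrinsic(
    _ecx: &mut InterpCx<'tcx, Self>,
    instance: Instance<'tcx>,
    _args: &[OpTy<'tcx, Self::Provenance>],
    _destination: &MPlaceTy<'tcx, Self::Provenance>,
    _target: Option<BasicBlock>,
    _unwind: UnwindAction,
) -> InterpResult<'tcx, Option<Instance<'tcx>>> {
    interp_ok(Some(instance))
}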
fn check_fn_target_features(
    _ecx: &InterpCx<'tcx, Self>,
    _instance: Instance<'tcx>,
) -> InterpResult<'tcx>
Check whether the given function may be executed on the current machine, in terms of the target features it requires.
fn assert_panic(
    ecx: &mut InterpCx<'tcx, Self>,
    msg: &AssertMessage<'tcx>,
    unwind: UnwindAction,
) -> InterpResult<'tcx>
Called to evaluate Assert MIR terminators that trigger a panic.
fn panic_nounwind(
    _ecx: &mut InterpCx<'tcx, Self>,
    msg: &str,
) -> InterpResult<'tcx>
Called to trigger a non-unwinding panic.
fn unwind_terminate(
    ecx: &mut InterpCx<'tcx, Self>,
    reason: UnwindTerminateReason,
) -> InterpResult<'tcx>
Called when unwinding reached a state where execution should be terminated.
fn binary_ptr_op(
    ecx: &InterpCx<'tcx, Self>,
    bin_op: BinOp,
    left: &ImmTy<'tcx, Self::Provenance>,
    right: &ImmTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>>
Called for all binary operations where the LHS has pointer type.
Returns the resulting value if the operation succeeded.
fn ub_checks(_ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool>
Determines the result of a NullaryOp::UbChecks invocation.
fn extern_static_pointer(
    ecx: &InterpCx<'tcx, Self>,
    def_id: DefId,
) -> InterpResult<'tcx, Pointer<Self::Provenance>>
Return the AllocId for the given extern static.
fn ptr_from_addr_cast(
    ecx: &InterpCx<'tcx, Self>,
    addr: u64,
) -> InterpResult<'tcx, Pointer<Option<Self::Provenance>>>
“Int-to-pointer cast”
fn expose_ptr(
    ecx: &mut InterpCx<'tcx, Self>,
    ptr: Pointer<Self::Provenance>,
) -> InterpResult<'tcx>
Marks a pointer as exposed, allowing its provenance to be recovered. “Pointer-to-int cast”
fn ptr_get_alloc(
    ecx: &InterpCx<'tcx, Self>,
    ptr: Pointer<Self::Provenance>,
    size: i64,
) -> Option<(AllocId, Size, Self::ProvenanceExtra)>
Convert a pointer with provenance into an allocation-offset pair and extra provenance info.
size says how many bytes of memory are expected at that pointer. The sign of size can be used to disambiguate situations where a wildcard pointer sits right in between two allocations.
If ptr.provenance.get_alloc_id() is Some(p), the returned AllocId must be p.
The resulting AllocId will just be used for that one step and then forgotten again (i.e., we’ll never turn the data returned here back into a Pointer that might be stored in machine state).
When this fails, that means the pointer does not point to a live allocation.
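As a rough sketch for a machine whose Provenance is the default CtfeProvenance (the into_parts and alloc_id accessors are treated as assumptions about the current internal API):
// Sketch only: split the pointer into provenance and offset, recover the AllocId,
// and attach no extra provenance data (ProvenanceExtra = ()).
fn ptr_get_alloc(
    _ecx: &InterpCx<'tcx, Self>,
    ptr: Pointer<Self::Provenance>, // Self::Provenance = CtfeProvenance in this sketch
    _size: i64,
) -> Option<(AllocId, Size, Self::ProvenanceExtra)> {
    let (prov, offset) = ptr.into_parts();
    Some((prov.alloc_id(), offset, ()))
}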
fn adjust_global_allocation<'b>(
    ecx: &InterpCx<'tcx, Self>,
    id: AllocId,
    alloc: &'b Allocation,
) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>
Called to adjust global allocations to the Provenance and AllocExtra of this machine.
If alloc contains pointers, then they are all pointing to globals.
This should avoid copying if no work has to be done! If this returns an owned allocation (because a copy had to be done to adjust things), machine memory will cache the result. (This relies on AllocMap::get_or being able to add the owned allocation to the map even when the map is shared.)
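When the machine keeps the default Provenance, AllocExtra, and Bytes types, there is nothing to adjust and the borrow can be returned unchanged. A hedged sketch (assumes the associated types match the defaults so the Cow type-checks, and that interp_ok is the current InterpResult constructor):
// Sketch only: nothing to adjust, so avoid the copy entirely.
fn adjust_global_allocation<'b>(
    _ecx: &InterpCx<'tcx, Self>,
    _id: AllocId,
    alloc: &'b Allocation,
) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>> {
    interp_ok(Cow::Borrowed(alloc))
}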
fn init_alloc_extra(
    ecx: &InterpCx<'tcx, Self>,
    id: AllocId,
    kind: MemoryKind<Self::MemoryKind>,
    size: Size,
    align: Align,
) -> InterpResult<'tcx, Self::AllocExtra>
Initialize the extra state of an allocation.
This is guaranteed to be called exactly once on all allocations that are accessed by the program.
fn adjust_alloc_root_pointer(
    ecx: &InterpCx<'tcx, Self>,
    ptr: Pointer,
    kind: Option<MemoryKind<Self::MemoryKind>>,
) -> InterpResult<'tcx, Pointer<Self::Provenance>>
Return a “root” pointer for the given allocation: the one that is used for direct accesses to this static/const/fn allocation, or the one returned from the heap allocator.
Not called on extern or thread-local statics (those use the methods above).
kind is the kind of the allocation the pointer points to; it can be None when it’s a global and GLOBAL_KIND is None.
fn init_frame(
    ecx: &mut InterpCx<'tcx, Self>,
    frame: Frame<'tcx, Self::Provenance>,
) -> InterpResult<'tcx, Frame<'tcx, Self::Provenance, Self::FrameExtra>>
Called immediately before a new stack frame gets pushed.
fn stack<'a>(
    ecx: &'a InterpCx<'tcx, Self>,
) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>]
Borrow the current thread’s stack.
fn stack_mut<'a>(
    ecx: &'a mut InterpCx<'tcx, Self>,
) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>>
Mutably borrow the current thread’s stack.
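For a single-threaded machine, both hooks can simply expose a stack stored in the machine state; a minimal sketch (the stack field on the machine is an assumption):
// Sketch only: the call stack lives directly in the machine, so borrowing it is trivial.
fn stack<'a>(
    ecx: &'a InterpCx<'tcx, Self>,
) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>] {
    &ecx.machine.stack
}

fn stack_mut<'a>(
    ecx: &'a mut InterpCx<'tcx, Self>,
) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>> {
    &mut ecx.machine.stack
}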
Provided Methods
fn alignment_check(
    _ecx: &InterpCx<'tcx, Self>,
    _alloc_id: AllocId,
    _alloc_align: Align,
    _alloc_kind: AllocKind,
    _offset: Size,
    _align: Align,
) -> Option<Misalignment>
Gives the machine a chance to detect more misalignment than the built-in checks would catch.
fn enforce_validity_recursively(
    _ecx: &InterpCx<'tcx, Self>,
    _layout: TyAndLayout<'tcx>,
) -> bool
Whether to enforce the validity invariant recursively.
fn load_mir(
    ecx: &InterpCx<'tcx, Self>,
    instance: InstanceKind<'tcx>,
) -> InterpResult<'tcx, &'tcx Body<'tcx>>
Entry point for obtaining the MIR of anything that should get evaluated. So not just functions and shims, but also const/static initializers, anonymous constants, …
fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(
    _ecx: &InterpCx<'tcx, Self>,
    _inputs: &[F1],
) -> F2
Generate the NaN returned by a float operation, given the list of inputs. (This is all inputs, not just NaN inputs!)
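For instance, a deterministic machine can ignore the inputs entirely and always produce the preferred quiet NaN; a sketch (assuming the NAN constant on rustc_apfloat's Float trait):
// Sketch only: always return the preferred (quiet, positive) NaN bit pattern.
// A machine modelling hardware non-determinism, like Miri, picks among all allowed NaNs instead.
fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(
    _ecx: &InterpCx<'tcx, Self>,
    _inputs: &[F1],
) -> F2 {
    F2::NAN
}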
fn before_terminator(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx>
Called before a basic block terminator is executed.
fn increment_const_eval_counter(
    _ecx: &mut InterpCx<'tcx, Self>,
) -> InterpResult<'tcx>
Called when the interpreter encounters a StatementKind::ConstEvalCounter instruction. You can use this to detect long or endlessly running programs.
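A hedged sketch of how a machine might use this hook: keep a counter in the machine state and react once it crosses a threshold (the steps field and the exact reaction are assumptions; interp_ok is assumed to be the InterpResult constructor):
// Sketch only: count ConstEvalCounter statements. A real machine would emit a
// "long running constant evaluation" warning or abort once `steps` grows too large.
fn increment_const_eval_counter(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
    ecx.machine.steps += 1; // hypothetical u64 field on the machine
    interp_ok(())
}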
fn before_access_global(
    _tcx: TyCtxtAt<'tcx>,
    _machine: &Self,
    _alloc_id: AllocId,
    _allocation: ConstAllocation<'tcx>,
    _static_def_id: Option<DefId>,
    _is_write: bool,
) -> InterpResult<'tcx>
Called before a global allocation is accessed.
def_id is Some if this is the “lazy” allocation of a static.
fn thread_local_static_pointer(
    _ecx: &mut InterpCx<'tcx, Self>,
    def_id: DefId,
) -> InterpResult<'tcx, Pointer<Self::Provenance>>
Return the AllocId for the given thread-local static in the current thread.
fn eval_inline_asm(
    _ecx: &mut InterpCx<'tcx, Self>,
    _template: &'tcx [InlineAsmTemplatePiece],
    _operands: &[InlineAsmOperand<'tcx>],
    _options: InlineAsmOptions,
    _targets: &[BasicBlock],
) -> InterpResult<'tcx>
Evaluate the inline assembly.
This should take care of jumping to the next block (one of targets) when asm goto is triggered, to targets[0] when the assembly falls through, or of diverging in case of naked_asm! or InlineAsmOptions::NORETURN being set.
fn before_memory_read(
    _tcx: TyCtxtAt<'tcx>,
    _machine: &Self,
    _alloc_extra: &Self::AllocExtra,
    _prov: (AllocId, Self::ProvenanceExtra),
    _range: AllocRange,
) -> InterpResult<'tcx>
Hook for performing extra checks on a memory read access.
This will not be called during validation!
Takes read-only access to the allocation so that all memory read operations can keep taking &self. Use a RefCell in AllocExtra if you need to mutate.
This is not invoked for ZST accesses, as no read actually happens.
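One way to follow the RefCell hint is to record read ranges through interior mutability in the allocation extra data. A hedged sketch with made-up type and field names (interp_ok assumed as the InterpResult constructor):
// Sketch only: `Self::AllocExtra = MyAllocExtra` in this example.
#[derive(Debug, Clone)]
struct MyAllocExtra {
    /// Ranges that have been read so far, mutated through a shared reference.
    reads: std::cell::RefCell<Vec<AllocRange>>,
}

fn before_memory_read(
    _tcx: TyCtxtAt<'tcx>,
    _machine: &Self,
    alloc_extra: &Self::AllocExtra,
    _prov: (AllocId, Self::ProvenanceExtra),
    range: AllocRange,
) -> InterpResult<'tcx> {
    alloc_extra.reads.borrow_mut().push(range);
    interp_ok(())
}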
fn before_alloc_read(
    _ecx: &InterpCx<'tcx, Self>,
    _alloc_id: AllocId,
) -> InterpResult<'tcx>
Hook for performing extra checks on any memory read access that involves an allocation, even ZST reads.
This will not be called during validation!
Used to prevent statics from self-initializing by reading from their own memory as it is being initialized.
fn before_memory_write(
    _tcx: TyCtxtAt<'tcx>,
    _machine: &mut Self,
    _alloc_extra: &mut Self::AllocExtra,
    _prov: (AllocId, Self::ProvenanceExtra),
    _range: AllocRange,
) -> InterpResult<'tcx>
Hook for performing extra checks on a memory write access. This is not invoked for ZST accesses, as no write actually happens.
fn before_memory_deallocation(
    _tcx: TyCtxtAt<'tcx>,
    _machine: &mut Self,
    _alloc_extra: &mut Self::AllocExtra,
    _prov: (AllocId, Self::ProvenanceExtra),
    _size: Size,
    _align: Align,
    _kind: MemoryKind<Self::MemoryKind>,
) -> InterpResult<'tcx>
Hook for performing extra operations on a memory deallocation.
fn retag_ptr_value(
    _ecx: &mut InterpCx<'tcx, Self>,
    _kind: RetagKind,
    val: &ImmTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>>
Executes a retagging operation for a single pointer. Returns the possibly adjusted pointer.
fn retag_place_contents(
    _ecx: &mut InterpCx<'tcx, Self>,
    _kind: RetagKind,
    _place: &PlaceTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx>
Executes a retagging operation on a compound value. Replaces all pointers stored in the given place.
fn protect_in_place_function_argument(
    ecx: &mut InterpCx<'tcx, Self>,
    mplace: &MPlaceTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx>
Called on places used for in-place function argument and return value handling.
These places need to be protected to make sure the program cannot tell whether the argument/return value was actually copied or passed in-place.
fn after_stack_push(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx>
Called immediately after a stack frame got pushed and its locals got initialized.
fn before_stack_pop(
    _ecx: &InterpCx<'tcx, Self>,
    _frame: &Frame<'tcx, Self::Provenance, Self::FrameExtra>,
) -> InterpResult<'tcx>
Called just before the return value is copied to the caller-provided return place.
fn after_stack_pop(
    _ecx: &mut InterpCx<'tcx, Self>,
    _frame: Frame<'tcx, Self::Provenance, Self::FrameExtra>,
    unwinding: bool,
) -> InterpResult<'tcx, ReturnAction>
Called immediately after a stack frame got popped, but before jumping back to the caller.
The locals have already been destroyed!
fn after_local_read(
    _ecx: &InterpCx<'tcx, Self>,
    _local: Local,
) -> InterpResult<'tcx>
Called immediately after an “immediate” local variable is read (i.e., this is called for reads that do not end up accessing addressable memory).
fn after_local_write(
    _ecx: &mut InterpCx<'tcx, Self>,
    _local: Local,
    _storage_live: bool,
) -> InterpResult<'tcx>
Called immediately after an “immediate” local variable is assigned a new value (i.e., this is called for writes that do not end up in memory). storage_live indicates whether this is the initial write upon StorageLive.
fn after_local_moved_to_memory(
    _ecx: &mut InterpCx<'tcx, Self>,
    _local: Local,
    _mplace: &MPlaceTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx>
Called immediately after actual memory was allocated for a local but before the local’s stack frame is updated to point to that memory.
fn eval_mir_constant<F>(
    ecx: &InterpCx<'tcx, Self>,
    val: Const<'tcx>,
    span: Span,
    layout: Option<TyAndLayout<'tcx>>,
    eval: F,
) -> InterpResult<'tcx, OpTy<'tcx, Self::Provenance>>
where
    F: Fn(&InterpCx<'tcx, Self>, Const<'tcx>, Span, Option<TyAndLayout<'tcx>>) -> InterpResult<'tcx, OpTy<'tcx, Self::Provenance>>,
Evaluate the given constant. The eval function will do all the required evaluation, but this hook has the chance to do some pre/postprocessing.
fn cached_union_data_range<'e>(
    _ecx: &'e mut InterpCx<'tcx, Self>,
    _ty: Ty<'tcx>,
    compute_range: impl FnOnce() -> RangeSet,
) -> Cow<'e, RangeSet>
Dyn Compatibility
This trait is not dyn compatible.
In older versions of Rust, dyn compatibility was called "object safety", so this trait is not object safe.