pub struct MiriMachine<'tcx> {
pub tcx: TyCtxt<'tcx>,
pub borrow_tracker: Option<RefCell<GlobalStateInner>>,
pub data_race: Option<GlobalState>,
pub alloc_addresses: RefCell<GlobalStateInner>,
pub(crate) env_vars: EnvVars<'tcx>,
pub(crate) main_fn_ret_place: Option<MPlaceTy<'tcx>>,
pub(crate) argc: Option<Pointer>,
pub(crate) argv: Option<Pointer>,
pub(crate) cmd_line: Option<Pointer>,
pub(crate) tls: TlsData<'tcx>,
pub(crate) isolated_op: IsolatedOp,
pub(crate) validation: ValidationMode,
pub(crate) fds: FdTable,
pub(crate) dirs: DirTable,
pub(crate) epoll_interests: EpollInterestTable,
pub(crate) clock: Clock,
pub(crate) threads: ThreadManager<'tcx>,
pub(crate) thread_cpu_affinity: FxHashMap<ThreadId, CpuAffinityMask>,
pub(crate) sync: SynchronizationObjects,
pub(crate) layouts: PrimitiveLayouts<'tcx>,
pub(crate) static_roots: Vec<AllocId>,
profiler: Option<Profiler>,
string_cache: FxHashMap<String, StringId>,
pub(crate) exported_symbols_cache: FxHashMap<Symbol, Option<Instance<'tcx>>>,
pub(crate) backtrace_style: BacktraceStyle,
pub(crate) local_crates: Vec<CrateNum>,
extern_statics: FxHashMap<Symbol, StrictPointer>,
pub(crate) rng: RefCell<StdRng>,
tracked_alloc_ids: FxHashSet<AllocId>,
track_alloc_accesses: bool,
pub(crate) check_alignment: AlignmentCheck,
pub(crate) cmpxchg_weak_failure_rate: f64,
pub(crate) mute_stdout_stderr: bool,
pub(crate) weak_memory: bool,
pub(crate) preemption_rate: f64,
pub(crate) report_progress: Option<u32>,
pub(crate) basic_block_count: u64,
pub native_lib: Option<(Library, PathBuf)>,
pub(crate) gc_interval: u32,
pub(crate) since_gc: u32,
pub(crate) num_cpus: u32,
pub(crate) page_size: u64,
pub(crate) stack_addr: u64,
pub(crate) stack_size: u64,
pub(crate) collect_leak_backtraces: bool,
pub(crate) allocation_spans: RefCell<FxHashMap<AllocId, (Span, Option<Span>)>>,
const_cache: RefCell<FxHashMap<(Const<'tcx>, usize), OpTy<'tcx>>>,
pub(crate) symbolic_alignment: RefCell<FxHashMap<AllocId, (Size, Align)>>,
union_data_ranges: FxHashMap<Ty<'tcx>, RangeSet>,
}
The machine itself.
If you add anything here that stores machine values, remember to update visit_all_machine_values!
Fields
tcx: TyCtxt<'tcx>
borrow_tracker: Option<RefCell<GlobalStateInner>>
Global data for borrow tracking.
data_race: Option<GlobalState>
Data race detector global data.
alloc_addresses: RefCell<GlobalStateInner>
Ptr-int-cast module global data.
env_vars: EnvVars<'tcx>
Environment variables.
main_fn_ret_place: Option<MPlaceTy<'tcx>>
Return place of the main function.
argc: Option<Pointer>
argv: Option<Pointer>
cmd_line: Option<Pointer>
Program arguments (Option because we can only initialize them after creating the ecx).
These are pointers to argc/argv because of macOS; we also need the full command line as one string because of Windows.
tls: TlsData<'tcx>
TLS state.
isolated_op: IsolatedOp
What should Miri do when an op requires communicating with the host, such as accessing host env vars, random number generation, and file system access.
validation: ValidationMode
Whether to enforce the validity invariant.
fds: FdTable
The table of file descriptors.
dirs: DirTable
The table of directory descriptors.
epoll_interests: EpollInterestTable
The list of all EpollEventInterest.
clock: Clock
This machine’s monotone clock.
threads: ThreadManager<'tcx>
The set of threads.
thread_cpu_affinity: FxHashMap<ThreadId, CpuAffinityMask>
Stores which thread is eligible to run on which CPUs.
This has no effect on execution; it is only tracked so that sched_getaffinity returns the correct result.
sync: SynchronizationObjects
The state of the primitive synchronization objects.
layouts: PrimitiveLayouts<'tcx>
Precomputed TyLayouts for primitive data types that are commonly used inside Miri.
static_roots: Vec<AllocId>
Allocations that are considered roots of static memory (that may leak).
profiler: Option<Profiler>
The measureme profiler used to record timing information about the emulated program.
string_cache: FxHashMap<String, StringId>
Used with profiler to cache the StringIds for event names used with measureme.
exported_symbols_cache: FxHashMap<Symbol, Option<Instance<'tcx>>>
Cache of the Instance exported under the given Symbol name. None means that no Instance exported under that name was found.
backtrace_style: BacktraceStyle
The equivalent of the RUST_BACKTRACE setting, used when an error is encountered.
local_crates: Vec<CrateNum>
Crates which are considered local for the purposes of error reporting.
extern_statics: FxHashMap<Symbol, StrictPointer>
Mapping extern static names to their pointer.
rng: RefCell<StdRng>
The random number generator used for resolving non-determinism. Needs to be queried by ptr_to_int, hence needs interior mutability.
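A minimal sketch of why the RNG sits behind a RefCell, using illustrative types (not Miri's actual code) and assuming the rand crate's 0.8-style API: hooks that only receive a shared reference to the machine still need to advance the RNG state, so the mutation has to go through interior mutability.

```rust
// Sketch only: hypothetical machine type, rand 0.8-style API assumed.
use std::cell::RefCell;

use rand::rngs::StdRng;
use rand::{Rng, SeedableRng};

struct MachineSketch {
    rng: RefCell<StdRng>,
}

impl MachineSketch {
    // Note `&self`, not `&mut self`: mirrors hooks that only get shared access
    // to the machine but still need fresh randomness.
    fn pick_offset(&self, max: u64) -> u64 {
        self.rng.borrow_mut().gen_range(0..max)
    }
}

fn main() {
    let m = MachineSketch { rng: RefCell::new(StdRng::seed_from_u64(0)) };
    println!("{}", m.pick_offset(4096));
}
```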
tracked_alloc_ids: FxHashSet<AllocId>
The allocation IDs to report when they are being allocated (helps for debugging memory leaks and use-after-free bugs).
track_alloc_accesses: bool
For the tracked alloc ids, also report read/write accesses.
check_alignment: AlignmentCheck
Controls whether alignment of memory accesses is being checked.
cmpxchg_weak_failure_rate: f64
Failure rate of compare_exchange_weak, between 0.0 and 1.0.
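The effect of this field shows up in ordinary user code: when Miri's -Zmiri-compare-exchange-weak-failure-rate flag is raised, compare_exchange_weak fails spuriously more often, so correct code must be written as a retry loop. A small self-contained example (standard library only):

```rust
use std::sync::atomic::{AtomicU32, Ordering};

// Under Miri, compare_exchange_weak may fail spuriously with the configured
// probability, so the increment is written as a retry loop.
fn increment(counter: &AtomicU32) {
    let mut current = counter.load(Ordering::Relaxed);
    loop {
        match counter.compare_exchange_weak(
            current,
            current + 1,
            Ordering::AcqRel,
            Ordering::Relaxed,
        ) {
            Ok(_) => break,
            // On (possibly spurious) failure we get the current value back; retry.
            Err(actual) => current = actual,
        }
    }
}

fn main() {
    let c = AtomicU32::new(0);
    increment(&c);
    assert_eq!(c.load(Ordering::Relaxed), 1);
}
```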
mute_stdout_stderr: bool
Corresponds to -Zmiri-mute-stdout-stderr: output is not written, but writes are reported as having succeeded.
weak_memory: bool
Whether weak memory emulation is enabled.
preemption_rate: f64
The probability of the active thread being preempted at the end of each basic block.
report_progress: Option<u32>
If Some, we will report the current stack every N basic blocks.
basic_block_count: u64
native_lib: Option<(Library, PathBuf)>
Handle of the optional shared object file for native functions.
gc_interval: u32
Run a garbage collector for BorTags every N basic blocks.
since_gc: u32
The number of blocks that passed since the last BorTag GC pass.
num_cpus: u32
The number of CPUs to be reported by Miri.
page_size: u64
Determines Miri's page size and associated values.
stack_addr: u64
stack_size: u64
collect_leak_backtraces: bool
Whether to collect a backtrace when each allocation is created, just in case it leaks.
allocation_spans: RefCell<FxHashMap<AllocId, (Span, Option<Span>)>>
The spans we will use to report where an allocation was created and deallocated in diagnostics.
const_cache: RefCell<FxHashMap<(Const<'tcx>, usize), OpTy<'tcx>>>
Maps MIR consts to their evaluated result. We combine the const with a "salt" (usize) that is fixed per stack frame; this lets us have sometimes different results for the same const while ensuring consistent results within a single call.
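An illustrative sketch of the salting idea with stand-in types (not Miri's actual Const/OpTy machinery): the cache key pairs the constant with a per-frame salt, so two frames may see different evaluations of the same constant, while repeated lookups within one frame always return the same cached result.

```rust
use std::collections::HashMap;

type Salt = usize;

#[derive(Default)]
struct ConstCache {
    // (const, salt) -> evaluated result; stand-in types for illustration.
    cache: HashMap<(String, Salt), u64>,
}

impl ConstCache {
    fn eval_with_salt(&mut self, konst: &str, salt: Salt, eval: impl FnOnce() -> u64) -> u64 {
        *self.cache.entry((konst.to_owned(), salt)).or_insert_with(eval)
    }
}

fn main() {
    let mut cache = ConstCache::default();
    // Same const, same salt (same frame): the second lookup hits the cache.
    let a = cache.eval_with_salt("FOO", 1, || 42);
    let b = cache.eval_with_salt("FOO", 1, || unreachable!("cached"));
    assert_eq!(a, b);
    // Same const, different salt (different frame): may evaluate again.
    let _c = cache.eval_with_salt("FOO", 2, || 43);
}
```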
symbolic_alignment: RefCell<FxHashMap<AllocId, (Size, Align)>>
For each allocation, an offset inside that allocation that was deemed aligned even for symbolic alignment checks. This cannot be stored in AllocExtra since it needs to be tracked for vtables and function allocations as well as regular allocations.
Invariant: the promised alignment will never be less than the native alignment of the allocation.
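A sketch of how such a table could uphold that invariant, using hypothetical stand-in types rather than Miri's real AllocId/Size/Align:

```rust
// Illustrative only: records a "promised" alignment per allocation while
// ensuring the stored value never drops below the native alignment.
use std::cell::RefCell;
use std::collections::HashMap;

type AllocId = u64;

#[derive(Default)]
struct SymbolicAlignment {
    // alloc -> (offset deemed aligned, promised alignment in bytes)
    promises: RefCell<HashMap<AllocId, (u64, u64)>>,
}

impl SymbolicAlignment {
    fn promise(&self, alloc: AllocId, offset: u64, promised: u64, native_align: u64) {
        // Uphold the invariant: never record less than the native alignment.
        let align = promised.max(native_align);
        self.promises.borrow_mut().insert(alloc, (offset, align));
    }
}

fn main() {
    let table = SymbolicAlignment::default();
    table.promise(1, 32, 4, 8); // a promise of 4 is raised to the native 8
    assert_eq!(table.promises.borrow()[&1], (32, 8));
}
```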
union_data_ranges: FxHashMap<Ty<'tcx>, RangeSet>
A cache of “data range” computations for unions (i.e., the offsets of non-padding bytes).
Implementations
impl<'tcx> MiriMachine<'tcx>
pub fn free_alloc_id(&mut self, dead_id: AllocId, size: Size, align: Align, kind: MemoryKind)
impl<'tcx> MiriMachine<'tcx>
pub fn emit_diagnostic(&self, e: NonHaltingDiagnostic)
impl<'tcx> MiriMachine<'tcx>
pub fn current_span(&self) -> Span
Get the current span in the topmost function which is workspace-local and not #[track_caller].
This function is backed by a cache, and can be assumed to be very fast.
It will work even when the stack is empty.
pub fn caller_span(&self) -> Span
Returns the span of the caller of the current operation. If that caller is not in a local crate, this again walks down the stack to find the closest frame in a local crate. This is useful when we are processing something that occurs on function entry and we want to point at the call to the function, not the function definition itself.
fn stack(&self) -> &[Frame<'tcx, Provenance, FrameExtra<'tcx>>]
fn top_user_relevant_frame(&self) -> Option<usize>
pub fn is_user_relevant(&self, frame: &Frame<'tcx, Provenance>) -> bool
This is the source of truth for the is_user_relevant flag in our FrameExtra.
impl<'tcx> MiriMachine<'tcx>
pub(crate) fn new(config: &MiriConfig, layout_cx: LayoutCx<'tcx>) -> Self
pub(crate) fn late_init(this: &mut MiriInterpCx<'tcx>, config: &MiriConfig, on_main_stack_empty: StackEmptyCallback<'tcx>) -> InterpResult<'tcx>
pub(crate) fn add_extern_static(this: &mut MiriInterpCx<'tcx>, name: &str, ptr: Pointer)
pub(crate) fn communicate(&self) -> bool
pub(crate) fn is_local(&self, frame: &FrameInfo<'_>) -> bool
Check whether the stack frame that this FrameInfo refers to is part of a local crate.
pub(crate) fn handle_abnormal_termination(&mut self)
Called when the interpreter is going to shut down abnormally, such as due to a Ctrl-C.
pub(crate) fn page_align(&self) -> Align
pub(crate) fn allocated_span(&self, alloc_id: AllocId) -> Option<SpanData>
pub(crate) fn deallocated_span(&self, alloc_id: AllocId) -> Option<SpanData>
impl<'tcx> MiriMachine<'tcx>
fn alloc_extern_static(this: &mut MiriInterpCx<'tcx>, name: &str, val: ImmTy<'tcx>) -> InterpResult<'tcx>
fn null_ptr_extern_statics(this: &mut MiriInterpCx<'tcx>, names: &[&str]) -> InterpResult<'tcx>
Zero-initialized pointer-sized extern statics are pretty common. Most of them are for weak symbols, which we all set to null (indicating that the symbol is not supported, and triggering fallback code which ends up calling some other shim that we do support).
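For illustration, this is the user-level pattern those null statics are meant to trigger; the names below are hypothetical stand-ins (standard library only), not Miri's actual shims: code checks a pointer-sized weak-symbol slot, finds it null, and takes a fallback path that is supported.

```rust
use std::sync::atomic::{AtomicPtr, Ordering};

type GetRandomFn = unsafe extern "C" fn(*mut u8, usize, u32) -> isize;

// Stand-in for a pointer-sized weak-symbol slot; Miri initializes the real
// extern static to null to signal "this symbol is not supported".
static WEAK_GETRANDOM: AtomicPtr<()> = AtomicPtr::new(std::ptr::null_mut());

fn fill_random(buf: &mut [u8]) {
    let sym = WEAK_GETRANDOM.load(Ordering::Relaxed);
    if sym.is_null() {
        // Fallback path, e.g. reading /dev/urandom (elided); this is the code
        // path that ends up in a shim Miri does support.
        buf.fill(0);
    } else {
        // Weak symbol is present: call through the function pointer.
        let f: GetRandomFn = unsafe { std::mem::transmute(sym) };
        unsafe { f(buf.as_mut_ptr(), buf.len(), 0) };
    }
}

fn main() {
    let mut buf = [0u8; 16];
    fill_random(&mut buf);
}
```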
fn weak_symbol_extern_statics(this: &mut MiriInterpCx<'tcx>, names: &[&str]) -> InterpResult<'tcx>
Extern statics that are initialized with function pointers to the symbols of the same name.
pub fn init_extern_statics(this: &mut MiriInterpCx<'tcx>) -> InterpResult<'tcx>
Sets up the “extern statics” for this machine.
Trait Implementations
impl<'tcx> Machine<'tcx> for MiriMachine<'tcx>
Machine hook implementations.
fn ptr_from_addr_cast(ecx: &MiriInterpCx<'tcx>, addr: u64) -> InterpResult<'tcx, Pointer>
Called on usize as ptr casts.
fn expose_ptr(ecx: &mut InterpCx<'tcx, Self>, ptr: StrictPointer) -> InterpResult<'tcx>
Called on ptr as usize casts.
(Actually computing the resulting usize doesn't need machine help; that's just Scalar::try_to_int.)
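A small user-level example that exercises both hooks; Miri's provenance flags (-Zmiri-permissive-provenance, -Zmiri-strict-provenance) control how strictly the int-to-ptr cast is treated.

```rust
fn main() {
    let x = 42u32;
    let ptr = &x as *const u32;

    // `ptr as usize`: the expose_ptr hook runs and records the exposed provenance.
    let addr = ptr as usize;

    // `usize as ptr`: the ptr_from_addr_cast hook runs to build a pointer
    // from a bare address.
    let ptr2 = addr as *const u32;

    // Reading through ptr2 works because the original provenance was exposed.
    assert_eq!(unsafe { *ptr2 }, 42);
}
```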
fn ptr_get_alloc(ecx: &MiriInterpCx<'tcx>, ptr: StrictPointer, size: i64) -> Option<(AllocId, Size, Self::ProvenanceExtra)>
Convert a pointer with provenance into an allocation-offset pair and extra provenance info.
size says how many bytes of memory are expected at that pointer. The sign of size can be used to disambiguate situations where a wildcard pointer sits right in between two allocations.
If ptr.provenance.get_alloc_id() is Some(p), the returned AllocId must be p.
The resulting AllocId will just be used for that one step and then forgotten again (i.e., we'll never turn the data returned here back into a Pointer that might be stored in machine state).
When this fails, that means the pointer does not point to a live allocation.
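To make the wildcard-pointer ambiguity concrete, here is user-level code in which one integer address can belong to either of two allocations; the sign of the expected access size is what lets Miri pick the intended neighbor.

```rust
fn main() {
    let a = [1u8; 8];
    let b = [2u8; 8];

    // One-past-the-end of `a` and the start of `b` can be the same address.
    let end_of_a = a.as_ptr().wrapping_add(a.len()) as usize;
    let start_of_b = b.as_ptr() as usize;

    if end_of_a == start_of_b {
        // A wildcard pointer carrying only this address is ambiguous: an access
        // covering bytes on one side of it means `a`, on the other side means `b`,
        // which is exactly what the sign of `size` encodes for ptr_get_alloc.
        println!("adjacent allocations share boundary address {end_of_a:#x}");
    }
}
```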
fn adjust_global_allocation<'b>(ecx: &InterpCx<'tcx, Self>, id: AllocId, alloc: &'b Allocation) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>
Called to adjust global allocations to the Provenance and AllocExtra of this machine.
If alloc contains pointers, then they are all pointing to globals.
This should avoid copying if no work has to be done! If this returns an owned allocation (because a copy had to be done to adjust things), machine memory will cache the result. (This relies on AllocMap::get_or being able to add the owned allocation to the map even when the map is shared.)
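A generic sketch of the borrow-or-own Cow pattern the hook is asked to follow, using stand-in data rather than the real Allocation type: return a borrow when no adjustment is needed, and pay for a copy only when something actually has to change.

```rust
use std::borrow::Cow;

fn adjust<'a>(alloc: &'a [u8], needs_adjustment: bool) -> Cow<'a, [u8]> {
    if !needs_adjustment {
        // No work to do: hand back the original, no copy is made.
        Cow::Borrowed(alloc)
    } else {
        // A copy had to be made; the caller can cache this owned result.
        let mut owned = alloc.to_vec();
        owned[0] ^= 0xff; // stand-in for rewriting provenance/extra data
        Cow::Owned(owned)
    }
}

fn main() {
    let global = [0u8, 1, 2, 3];
    assert!(matches!(adjust(&global, false), Cow::Borrowed(_)));
    assert!(matches!(adjust(&global, true), Cow::Owned(_)));
}
```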
const GLOBAL_KIND: Option<MiriMemoryKind> = _
The memory kind to use for copied global memory (held in tcx), or None if such memory should not be mutated and thus any such attempt will cause a ModifiedStatic error to be raised.
Statics are copied under two circumstances: When they are mutated, and when adjust_allocation (see below) returns an owned allocation that is added to the memory so that the work is not done twice.
const PANIC_ON_ALLOC_FAIL: bool = false
type MemoryKind = MiriMemoryKind
type ExtraFnVal = DynSym
Machines can define extra (non-instance) things that represent values of function pointers. For example, Miri uses this to return a function pointer from dlsym that can later be called to execute the right thing.
type FrameExtra = FrameExtra<'tcx>
type AllocExtra = AllocExtra<'tcx>
type Provenance = Provenance
Pointers are "tagged" with provenance information; typically the AllocId they belong to.
type ProvenanceExtra = ProvenanceExtra
type Bytes = MiriAllocBytes
type MemoryMap = MonoHashMap<AllocId, (MemoryKind<MiriMemoryKind>, Allocation<Provenance, <MiriMachine<'tcx> as Machine<'tcx>>::AllocExtra, <MiriMachine<'tcx> as Machine<'tcx>>::Bytes>)>
fn enforce_alignment(ecx: &MiriInterpCx<'tcx>) -> bool
fn alignment_check(ecx: &MiriInterpCx<'tcx>, alloc_id: AllocId, alloc_align: Align, alloc_kind: AllocKind, offset: Size, align: Align) -> Option<Misalignment>
fn enforce_validity(ecx: &MiriInterpCx<'tcx>, _layout: TyAndLayout<'tcx>) -> bool
fn enforce_validity_recursively(ecx: &InterpCx<'tcx, Self>, _layout: TyAndLayout<'tcx>) -> bool
fn ignore_optional_overflow_checks(ecx: &MiriInterpCx<'tcx>) -> bool
fn check_fn_target_features(ecx: &MiriInterpCx<'tcx>, instance: Instance<'tcx>) -> InterpResult<'tcx>
fn find_mir_or_eval_fn(ecx: &mut MiriInterpCx<'tcx>, instance: Instance<'tcx>, abi: ExternAbi, args: &[FnArg<'tcx, Provenance>], dest: &MPlaceTy<'tcx>, ret: Option<BasicBlock>, unwind: UnwindAction) -> InterpResult<'tcx, Option<(&'tcx Body<'tcx>, Instance<'tcx>)>>
fn call_extra_fn(ecx: &mut MiriInterpCx<'tcx>, fn_val: DynSym, abi: ExternAbi, args: &[FnArg<'tcx, Provenance>], dest: &MPlaceTy<'tcx>, ret: Option<BasicBlock>, unwind: UnwindAction) -> InterpResult<'tcx>
Execute fn_val. It is the hook's responsibility to advance the instruction pointer as appropriate.
fn call_intrinsic(ecx: &mut MiriInterpCx<'tcx>, instance: Instance<'tcx>, args: &[OpTy<'tcx>], dest: &MPlaceTy<'tcx>, ret: Option<BasicBlock>, unwind: UnwindAction) -> InterpResult<'tcx, Option<Instance<'tcx>>>
fn assert_panic(ecx: &mut MiriInterpCx<'tcx>, msg: &AssertMessage<'tcx>, unwind: UnwindAction) -> InterpResult<'tcx>
Called to handle Assert MIR terminators that trigger a panic.
fn panic_nounwind(ecx: &mut InterpCx<'tcx, Self>, msg: &str) -> InterpResult<'tcx>
fn unwind_terminate(ecx: &mut InterpCx<'tcx, Self>, reason: UnwindTerminateReason) -> InterpResult<'tcx>
fn binary_ptr_op(ecx: &MiriInterpCx<'tcx>, bin_op: BinOp, left: &ImmTy<'tcx>, right: &ImmTy<'tcx>) -> InterpResult<'tcx, ImmTy<'tcx>>
fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(ecx: &InterpCx<'tcx, Self>, inputs: &[F1]) -> F2
fn ub_checks(ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool>
Determines the result of a NullaryOp::UbChecks invocation.
fn thread_local_static_pointer(ecx: &mut MiriInterpCx<'tcx>, def_id: DefId) -> InterpResult<'tcx, StrictPointer>
Return the AllocId for the given thread-local static in the current thread.
fn extern_static_pointer(ecx: &MiriInterpCx<'tcx>, def_id: DefId) -> InterpResult<'tcx, StrictPointer>
Return the AllocId for the given extern static.
fn init_alloc_extra(ecx: &MiriInterpCx<'tcx>, id: AllocId, kind: MemoryKind, size: Size, align: Align) -> InterpResult<'tcx, Self::AllocExtra>
fn adjust_alloc_root_pointer(ecx: &MiriInterpCx<'tcx>, ptr: Pointer<CtfeProvenance>, kind: Option<MemoryKind>) -> InterpResult<'tcx, Pointer<Provenance>>
fn before_memory_read(_tcx: TyCtxtAt<'tcx>, machine: &Self, alloc_extra: &AllocExtra<'tcx>, (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra), range: AllocRange) -> InterpResult<'tcx>
fn before_memory_write(_tcx: TyCtxtAt<'tcx>, machine: &mut Self, alloc_extra: &mut AllocExtra<'tcx>, (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra), range: AllocRange) -> InterpResult<'tcx>
fn before_memory_deallocation(_tcx: TyCtxtAt<'tcx>, machine: &mut Self, alloc_extra: &mut AllocExtra<'tcx>, (alloc_id, prove_extra): (AllocId, Self::ProvenanceExtra), size: Size, align: Align, kind: MemoryKind) -> InterpResult<'tcx>
fn retag_ptr_value(ecx: &mut InterpCx<'tcx, Self>, kind: RetagKind, val: &ImmTy<'tcx>) -> InterpResult<'tcx, ImmTy<'tcx>>
fn retag_place_contents(ecx: &mut InterpCx<'tcx, Self>, kind: RetagKind, place: &PlaceTy<'tcx>) -> InterpResult<'tcx>
fn protect_in_place_function_argument(ecx: &mut InterpCx<'tcx, Self>, place: &MPlaceTy<'tcx>) -> InterpResult<'tcx>
fn init_frame(ecx: &mut InterpCx<'tcx, Self>, frame: Frame<'tcx, Provenance>) -> InterpResult<'tcx, Frame<'tcx, Provenance, FrameExtra<'tcx>>>
fn stack<'a>(ecx: &'a InterpCx<'tcx, Self>) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>]
fn stack_mut<'a>(ecx: &'a mut InterpCx<'tcx, Self>) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>>
fn before_terminator(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx>
fn after_stack_push(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx>
fn before_stack_pop(ecx: &InterpCx<'tcx, Self>, frame: &Frame<'tcx, Self::Provenance, Self::FrameExtra>) -> InterpResult<'tcx>
fn after_stack_pop(ecx: &mut InterpCx<'tcx, Self>, frame: Frame<'tcx, Provenance, FrameExtra<'tcx>>, unwinding: bool) -> InterpResult<'tcx, ReturnAction>
Called immediately after a stack frame got popped, but before jumping back to the caller. The locals have already been destroyed!
fn after_local_read(ecx: &InterpCx<'tcx, Self>, local: Local) -> InterpResult<'tcx>
fn after_local_write(ecx: &mut InterpCx<'tcx, Self>, local: Local, storage_live: bool) -> InterpResult<'tcx>
storage_live indicates whether this is the initial write upon StorageLive.
fn after_local_moved_to_memory(ecx: &mut InterpCx<'tcx, Self>, local: Local, mplace: &MPlaceTy<'tcx>) -> InterpResult<'tcx>
fn eval_mir_constant<F>(ecx: &InterpCx<'tcx, Self>, val: Const<'tcx>, span: Span, layout: Option<TyAndLayout<'tcx>>, eval: F) -> InterpResult<'tcx, OpTy<'tcx>> where F: Fn(&InterpCx<'tcx, Self>, Const<'tcx>, Span, Option<TyAndLayout<'tcx>>) -> InterpResult<'tcx, OpTy<'tcx>>
Evaluate the given constant. The eval function will do all the required evaluation, but this hook has the chance to do some pre/postprocessing.
fn get_global_alloc_salt(ecx: &InterpCx<'tcx, Self>, instance: Option<Instance<'tcx>>) -> usize
fn cached_union_data_range<'e>(ecx: &'e mut InterpCx<'tcx, Self>, ty: Ty<'tcx>, compute_range: impl FnOnce() -> RangeSet) -> Cow<'e, RangeSet>
const ALL_CONSTS_ARE_PRECHECKED: bool = true
Whether eval_mir_constant can never fail because all required consts have already been checked before.
fn load_mir(ecx: &InterpCx<'tcx, Self>, instance: InstanceKind<'tcx>) -> InterpResult_<'tcx, &'tcx Body<'tcx>>
fn increment_const_eval_counter(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult_<'tcx, ()>
Called when the interpreter encounters a StatementKind::ConstEvalCounter instruction. You can use this to detect long or endlessly running programs.
fn before_access_global(_tcx: TyCtxtAt<'tcx>, _machine: &Self, _alloc_id: AllocId, _allocation: ConstAllocation<'tcx>, _static_def_id: Option<DefId>, _is_write: bool) -> InterpResult_<'tcx, ()>
def_id is Some if this is the "lazy" allocation of a static.
fn eval_inline_asm(_ecx: &mut InterpCx<'tcx, Self>, _template: &'tcx [InlineAsmTemplatePiece], _operands: &[InlineAsmOperand<'tcx>], _options: InlineAsmOptions, _targets: &[BasicBlock]) -> InterpResult_<'tcx, ()>
fn before_alloc_read(_ecx: &InterpCx<'tcx, Self>, _alloc_id: AllocId) -> InterpResult_<'tcx, ()>
impl VisitProvenance for MiriMachine<'_>
fn visit_provenance(&self, visit: &mut VisitWith<'_>)
Auto Trait Implementations
impl<'tcx> !Freeze for MiriMachine<'tcx>
impl<'tcx> !RefUnwindSafe for MiriMachine<'tcx>
impl<'tcx> !Send for MiriMachine<'tcx>
impl<'tcx> !Sync for MiriMachine<'tcx>
impl<'tcx> Unpin for MiriMachine<'tcx>
impl<'tcx> !UnwindSafe for MiriMachine<'tcx>
Blanket Implementations
impl<T> BorrowMut<T> for T where T: ?Sized
fn borrow_mut(&mut self) -> &mut T
Layout
Note: Most layout information is completely unstable and may even differ between compilations. The only exception is types with certain repr(...) attributes. Please see the Rust Reference's "Type Layout" chapter for details on type layout guarantees.
Size: 2416 bytes