pub struct CompileTimeMachine<'tcx> {
pub(super) num_evaluated_steps: usize,
pub(super) stack: Vec<Frame<'tcx>>,
pub(super) can_access_mut_global: CanAccessMutGlobal,
pub(super) check_alignment: CheckAlignment,
pub(crate) static_root_ids: Option<(AllocId, LocalDefId)>,
union_data_ranges: FxHashMap<Ty<'tcx>, RangeSet>,
}

Expand description
Extra machine state for CTFE, and the Machine instance.
Fields§
§num_evaluated_steps: usizeThe number of terminators that have been evaluated.
This is used to produce lints informing the user that the compiler is not stuck.
Set to usize::MAX to never report anything.
stack: Vec<Frame<'tcx>>The virtual call stack.
can_access_mut_global: CanAccessMutGlobalPattern matching on consts with references would be unsound if those references could point to anything mutable. Therefore, when evaluating consts and when constructing valtrees, we ensure that only immutable global memory can be accessed.
check_alignment: CheckAlignmentWhether to check alignment during evaluation.
static_root_ids: Option<(AllocId, LocalDefId)>If Some, we are evaluating the initializer of the static with the given LocalDefId,
storing the result in the given AllocId.
Used to prevent accesses to a static’s base allocation, as that may allow for self-initialization loops.
union_data_ranges: FxHashMap<Ty<'tcx>, RangeSet>A cache of “data range” computations for unions (i.e., the offsets of non-padding bytes).
Implementations§
Source§impl<'tcx> CompileTimeMachine<'tcx>
impl<'tcx> CompileTimeMachine<'tcx>
pub(crate) fn new( can_access_mut_global: CanAccessMutGlobal, check_alignment: CheckAlignment, ) -> Self
Source§impl<'tcx> CompileTimeMachine<'tcx>
impl<'tcx> CompileTimeMachine<'tcx>
Sourcepub fn best_lint_scope(&self, tcx: TyCtxt<'tcx>) -> HirId
pub fn best_lint_scope(&self, tcx: TyCtxt<'tcx>) -> HirId
Find the first stack frame that is within the current crate, if any. Otherwise, return the crate’s HirId
Trait Implementations§
Source§impl HasStaticRootDefId for CompileTimeMachine<'_>
impl HasStaticRootDefId for CompileTimeMachine<'_>
Source§fn static_def_id(&self) -> Option<LocalDefId>
fn static_def_id(&self) -> Option<LocalDefId>
DefId of the static item that is currently being evaluated.
Used for interning to be able to handle nested allocations.

Source§impl<'tcx> Machine<'tcx> for CompileTimeMachine<'tcx>
impl<'tcx> Machine<'tcx> for CompileTimeMachine<'tcx>
Source§const GLOBAL_KIND: Option<Self::MemoryKind> = None
const GLOBAL_KIND: Option<Self::MemoryKind> = None
The memory kind to use for copied global memory (held in tcx) –
or None if such memory should not be mutated and thus any such attempt will cause
a ModifiedStatic error to be raised.
Statics are copied under two circumstances: When they are mutated, and when
adjust_allocation (see below) returns an owned allocation
that is added to the memory so that the work is not done twice.

Source§const PANIC_ON_ALLOC_FAIL: bool = false
const PANIC_ON_ALLOC_FAIL: bool = false
Source§type Provenance = CtfeProvenance
type Provenance = CtfeProvenance
AllocId they belong to.

Source§type ProvenanceExtra = bool
type ProvenanceExtra = bool
Source§type ExtraFnVal = !
type ExtraFnVal = !
dlsym
that can later be called to execute the right thing.

Source§type MemoryKind = MemoryKind
type MemoryKind = MemoryKind
Source§type MemoryMap = IndexMap<AllocId, (MemoryKind<MemoryKind>, Allocation), BuildHasherDefault<FxHasher>>
type MemoryMap = IndexMap<AllocId, (MemoryKind<MemoryKind>, Allocation), BuildHasherDefault<FxHasher>>
Source§type AllocExtra = ()
type AllocExtra = ()
Source§type FrameExtra = ()
type FrameExtra = ()
Source§fn ignore_optional_overflow_checks(_ecx: &InterpCx<'tcx, Self>) -> bool
fn ignore_optional_overflow_checks(_ecx: &InterpCx<'tcx, Self>) -> bool
Source§fn unwind_terminate(
_ecx: &mut InterpCx<'tcx, Self>,
_reason: UnwindTerminateReason,
) -> InterpResult<'tcx>
fn unwind_terminate( _ecx: &mut InterpCx<'tcx, Self>, _reason: UnwindTerminateReason, ) -> InterpResult<'tcx>
Source§fn check_fn_target_features(
_ecx: &InterpCx<'tcx, Self>,
_instance: Instance<'tcx>,
) -> InterpResult<'tcx>
fn check_fn_target_features( _ecx: &InterpCx<'tcx, Self>, _instance: Instance<'tcx>, ) -> InterpResult<'tcx>
Source§fn call_extra_fn(
_ecx: &mut InterpCx<'tcx, Self>,
fn_val: !,
_abi: &FnAbi<'tcx, Ty<'tcx>>,
_args: &[FnArg<'tcx>],
_destination: &PlaceTy<'tcx, Self::Provenance>,
_target: Option<BasicBlock>,
_unwind: UnwindAction,
) -> InterpResult<'tcx>
fn call_extra_fn( _ecx: &mut InterpCx<'tcx, Self>, fn_val: !, _abi: &FnAbi<'tcx, Ty<'tcx>>, _args: &[FnArg<'tcx>], _destination: &PlaceTy<'tcx, Self::Provenance>, _target: Option<BasicBlock>, _unwind: UnwindAction, ) -> InterpResult<'tcx>
fn_val. It is the hook’s responsibility to advance the instruction
pointer as appropriate.Source§fn float_fuse_mul_add(_ecx: &mut InterpCx<'tcx, Self>) -> bool
fn float_fuse_mul_add(_ecx: &mut InterpCx<'tcx, Self>) -> bool
fmuladd intrinsics fuse the multiply-add or use separate operations.Source§fn ub_checks(_ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool>
fn ub_checks(_ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool>
NullaryOp::UbChecks invocation.Source§fn contract_checks(_ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool>
fn contract_checks(_ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool>
NullaryOp::ContractChecks invocation.Source§fn adjust_global_allocation<'b>(
_ecx: &InterpCx<'tcx, Self>,
_id: AllocId,
alloc: &'b Allocation,
) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance>>>
fn adjust_global_allocation<'b>( _ecx: &InterpCx<'tcx, Self>, _id: AllocId, alloc: &'b Allocation, ) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance>>>
Source§fn init_local_allocation(
_ecx: &InterpCx<'tcx, Self>,
_id: AllocId,
_kind: MemoryKind<Self::MemoryKind>,
_size: Size,
_align: Align,
) -> InterpResult<'tcx, Self::AllocExtra>
fn init_local_allocation( _ecx: &InterpCx<'tcx, Self>, _id: AllocId, _kind: MemoryKind<Self::MemoryKind>, _size: Size, _align: Align, ) -> InterpResult<'tcx, Self::AllocExtra>
Source§fn extern_static_pointer(
ecx: &InterpCx<'tcx, Self>,
def_id: DefId,
) -> InterpResult<'tcx, Pointer>
fn extern_static_pointer( ecx: &InterpCx<'tcx, Self>, def_id: DefId, ) -> InterpResult<'tcx, Pointer>
AllocId for the given extern static.

Source§fn adjust_alloc_root_pointer(
_ecx: &InterpCx<'tcx, Self>,
ptr: Pointer<CtfeProvenance>,
_kind: Option<MemoryKind<Self::MemoryKind>>,
) -> InterpResult<'tcx, Pointer<CtfeProvenance>>
fn adjust_alloc_root_pointer( _ecx: &InterpCx<'tcx, Self>, ptr: Pointer<CtfeProvenance>, _kind: Option<MemoryKind<Self::MemoryKind>>, ) -> InterpResult<'tcx, Pointer<CtfeProvenance>>
Source§fn ptr_from_addr_cast(
_ecx: &InterpCx<'tcx, Self>,
addr: u64,
) -> InterpResult<'tcx, Pointer<Option<CtfeProvenance>>>
fn ptr_from_addr_cast( _ecx: &InterpCx<'tcx, Self>, addr: u64, ) -> InterpResult<'tcx, Pointer<Option<CtfeProvenance>>>
Source§fn ptr_get_alloc(
_ecx: &InterpCx<'tcx, Self>,
ptr: Pointer<CtfeProvenance>,
_size: i64,
) -> Option<(AllocId, Size, Self::ProvenanceExtra)>
fn ptr_get_alloc( _ecx: &InterpCx<'tcx, Self>, ptr: Pointer<CtfeProvenance>, _size: i64, ) -> Option<(AllocId, Size, Self::ProvenanceExtra)>
size says how many bytes of memory are expected at that pointer. The sign of size can
be used to disambiguate situations where a wildcard pointer sits right in between two
allocations. Read more

Source§fn get_global_alloc_salt(
_ecx: &InterpCx<'tcx, Self>,
_instance: Option<Instance<'tcx>>,
) -> usize
fn get_global_alloc_salt( _ecx: &InterpCx<'tcx, Self>, _instance: Option<Instance<'tcx>>, ) -> usize
Source§fn enforce_alignment(ecx: &InterpCx<'tcx, Self>) -> bool
fn enforce_alignment(ecx: &InterpCx<'tcx, Self>) -> bool
Source§fn enforce_validity(
ecx: &InterpCx<'tcx, Self>,
layout: TyAndLayout<'tcx>,
) -> bool
fn enforce_validity( ecx: &InterpCx<'tcx, Self>, layout: TyAndLayout<'tcx>, ) -> bool
Source§fn load_mir(
ecx: &InterpCx<'tcx, Self>,
instance: InstanceKind<'tcx>,
) -> &'tcx Body<'tcx>
fn load_mir( ecx: &InterpCx<'tcx, Self>, instance: InstanceKind<'tcx>, ) -> &'tcx Body<'tcx>
Source§fn find_mir_or_eval_fn(
ecx: &mut InterpCx<'tcx, Self>,
orig_instance: Instance<'tcx>,
_abi: &FnAbi<'tcx, Ty<'tcx>>,
args: &[FnArg<'tcx>],
dest: &PlaceTy<'tcx>,
ret: Option<BasicBlock>,
_unwind: UnwindAction,
) -> InterpResult<'tcx, Option<(&'tcx Body<'tcx>, Instance<'tcx>)>>
fn find_mir_or_eval_fn( ecx: &mut InterpCx<'tcx, Self>, orig_instance: Instance<'tcx>, _abi: &FnAbi<'tcx, Ty<'tcx>>, args: &[FnArg<'tcx>], dest: &PlaceTy<'tcx>, ret: Option<BasicBlock>, _unwind: UnwindAction, ) -> InterpResult<'tcx, Option<(&'tcx Body<'tcx>, Instance<'tcx>)>>
Source§fn panic_nounwind(
ecx: &mut InterpCx<'tcx, Self>,
msg: &str,
) -> InterpResult<'tcx>
fn panic_nounwind( ecx: &mut InterpCx<'tcx, Self>, msg: &str, ) -> InterpResult<'tcx>
Source§fn call_intrinsic(
ecx: &mut InterpCx<'tcx, Self>,
instance: Instance<'tcx>,
args: &[OpTy<'tcx>],
dest: &PlaceTy<'tcx, Self::Provenance>,
target: Option<BasicBlock>,
_unwind: UnwindAction,
) -> InterpResult<'tcx, Option<Instance<'tcx>>>
fn call_intrinsic( ecx: &mut InterpCx<'tcx, Self>, instance: Instance<'tcx>, args: &[OpTy<'tcx>], dest: &PlaceTy<'tcx, Self::Provenance>, target: Option<BasicBlock>, _unwind: UnwindAction, ) -> InterpResult<'tcx, Option<Instance<'tcx>>>
Source§fn assert_panic(
ecx: &mut InterpCx<'tcx, Self>,
msg: &AssertMessage<'tcx>,
_unwind: UnwindAction,
) -> InterpResult<'tcx>
fn assert_panic( ecx: &mut InterpCx<'tcx, Self>, msg: &AssertMessage<'tcx>, _unwind: UnwindAction, ) -> InterpResult<'tcx>
Assert MIR terminators that trigger a panic.Source§fn binary_ptr_op(
_ecx: &InterpCx<'tcx, Self>,
_bin_op: BinOp,
_left: &ImmTy<'tcx>,
_right: &ImmTy<'tcx>,
) -> InterpResult<'tcx, ImmTy<'tcx>>
fn binary_ptr_op( _ecx: &InterpCx<'tcx, Self>, _bin_op: BinOp, _left: &ImmTy<'tcx>, _right: &ImmTy<'tcx>, ) -> InterpResult<'tcx, ImmTy<'tcx>>
Source§fn increment_const_eval_counter(
ecx: &mut InterpCx<'tcx, Self>,
) -> InterpResult<'tcx>
fn increment_const_eval_counter( ecx: &mut InterpCx<'tcx, Self>, ) -> InterpResult<'tcx>
StatementKind::ConstEvalCounter instruction.
You can use this to detect long or endlessly running programs.Source§fn expose_provenance(
_ecx: &InterpCx<'tcx, Self>,
_provenance: Self::Provenance,
) -> InterpResult<'tcx>
fn expose_provenance( _ecx: &InterpCx<'tcx, Self>, _provenance: Self::Provenance, ) -> InterpResult<'tcx>
Source§fn init_frame(
ecx: &mut InterpCx<'tcx, Self>,
frame: Frame<'tcx>,
) -> InterpResult<'tcx, Frame<'tcx>>
fn init_frame( ecx: &mut InterpCx<'tcx, Self>, frame: Frame<'tcx>, ) -> InterpResult<'tcx, Frame<'tcx>>
Source§fn stack<'a>(
ecx: &'a InterpCx<'tcx, Self>,
) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>]
fn stack<'a>( ecx: &'a InterpCx<'tcx, Self>, ) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>]
Source§fn stack_mut<'a>(
ecx: &'a mut InterpCx<'tcx, Self>,
) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>>
fn stack_mut<'a>( ecx: &'a mut InterpCx<'tcx, Self>, ) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>>
Source§fn before_access_global(
_tcx: TyCtxtAt<'tcx>,
machine: &Self,
alloc_id: AllocId,
alloc: ConstAllocation<'tcx>,
_static_def_id: Option<DefId>,
is_write: bool,
) -> InterpResult<'tcx>
fn before_access_global( _tcx: TyCtxtAt<'tcx>, machine: &Self, alloc_id: AllocId, alloc: ConstAllocation<'tcx>, _static_def_id: Option<DefId>, is_write: bool, ) -> InterpResult<'tcx>
def_id is Some if this is the “lazy” allocation of a static.Source§fn retag_ptr_value(
ecx: &mut InterpCx<'tcx, Self>,
_kind: RetagKind,
val: &ImmTy<'tcx, CtfeProvenance>,
) -> InterpResult<'tcx, ImmTy<'tcx, CtfeProvenance>>
fn retag_ptr_value( ecx: &mut InterpCx<'tcx, Self>, _kind: RetagKind, val: &ImmTy<'tcx, CtfeProvenance>, ) -> InterpResult<'tcx, ImmTy<'tcx, CtfeProvenance>>
Source§fn before_memory_write(
_tcx: TyCtxtAt<'tcx>,
_machine: &mut Self,
_alloc_extra: &mut Self::AllocExtra,
_ptr: Pointer<Option<Self::Provenance>>,
(_alloc_id, immutable): (AllocId, bool),
range: AllocRange,
) -> InterpResult<'tcx>
fn before_memory_write( _tcx: TyCtxtAt<'tcx>, _machine: &mut Self, _alloc_extra: &mut Self::AllocExtra, _ptr: Pointer<Option<Self::Provenance>>, (_alloc_id, immutable): (AllocId, bool), range: AllocRange, ) -> InterpResult<'tcx>
ptr will always be a pointer with the provenance in prov pointing to the beginning of
range.Source§fn before_alloc_access(
tcx: TyCtxtAt<'tcx>,
machine: &Self,
alloc_id: AllocId,
) -> InterpResult<'tcx>
fn before_alloc_access( tcx: TyCtxtAt<'tcx>, machine: &Self, alloc_id: AllocId, ) -> InterpResult<'tcx>
fn cached_union_data_range<'e>( ecx: &'e mut InterpCx<'tcx, Self>, ty: Ty<'tcx>, compute_range: impl FnOnce() -> RangeSet, ) -> Cow<'e, RangeSet>
Source§fn get_default_alloc_params(&self)
fn get_default_alloc_params(&self)
AllocBytes type for
abstract machine allocations.Source§const ALL_CONSTS_ARE_PRECHECKED: bool = true
const ALL_CONSTS_ARE_PRECHECKED: bool = true
eval_mir_constant can never fail because all required consts have
already been checked before.Source§fn alignment_check(
_ecx: &InterpCx<'tcx, Self>,
_alloc_id: AllocId,
_alloc_align: Align,
_alloc_kind: AllocKind,
_offset: Size,
_align: Align,
) -> Option<Misalignment>
fn alignment_check( _ecx: &InterpCx<'tcx, Self>, _alloc_id: AllocId, _alloc_align: Align, _alloc_kind: AllocKind, _offset: Size, _align: Align, ) -> Option<Misalignment>
Source§fn enforce_validity_recursively(
_ecx: &InterpCx<'tcx, Self>,
_layout: TyAndLayout<'tcx>,
) -> bool
fn enforce_validity_recursively( _ecx: &InterpCx<'tcx, Self>, _layout: TyAndLayout<'tcx>, ) -> bool
Source§fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(
_ecx: &InterpCx<'tcx, Self>,
_inputs: &[F1],
) -> F2
fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>( _ecx: &InterpCx<'tcx, Self>, _inputs: &[F1], ) -> F2
Source§fn apply_float_nondet(
_ecx: &mut InterpCx<'tcx, Self>,
val: ImmTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>>
fn apply_float_nondet( _ecx: &mut InterpCx<'tcx, Self>, val: ImmTy<'tcx, Self::Provenance>, ) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>>
Source§fn equal_float_min_max<F: Float>(_ecx: &InterpCx<'tcx, Self>, a: F, _b: F) -> F
fn equal_float_min_max<F: Float>(_ecx: &InterpCx<'tcx, Self>, a: F, _b: F) -> F
min/max on floats when the arguments are equal.Source§fn before_terminator(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx>
fn before_terminator(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx>
Source§fn thread_local_static_pointer(
_ecx: &mut InterpCx<'tcx, Self>,
def_id: DefId,
) -> InterpResult<'tcx, Pointer<Self::Provenance>>
fn thread_local_static_pointer( _ecx: &mut InterpCx<'tcx, Self>, def_id: DefId, ) -> InterpResult<'tcx, Pointer<Self::Provenance>>
AllocId for the given thread-local static in the current thread.Source§fn before_memory_read(
_tcx: TyCtxtAt<'tcx>,
_machine: &Self,
_alloc_extra: &Self::AllocExtra,
_ptr: Pointer<Option<Self::Provenance>>,
_prov: (AllocId, Self::ProvenanceExtra),
_range: AllocRange,
) -> InterpResult<'tcx>
fn before_memory_read( _tcx: TyCtxtAt<'tcx>, _machine: &Self, _alloc_extra: &Self::AllocExtra, _ptr: Pointer<Option<Self::Provenance>>, _prov: (AllocId, Self::ProvenanceExtra), _range: AllocRange, ) -> InterpResult<'tcx>
ptr will always be a pointer with the provenance in prov pointing to the beginning of
range. Read moreSource§fn before_memory_deallocation(
_tcx: TyCtxtAt<'tcx>,
_machine: &mut Self,
_alloc_extra: &mut Self::AllocExtra,
_ptr: Pointer<Option<Self::Provenance>>,
_prov: (AllocId, Self::ProvenanceExtra),
_size: Size,
_align: Align,
_kind: MemoryKind<Self::MemoryKind>,
) -> InterpResult<'tcx>
fn before_memory_deallocation( _tcx: TyCtxtAt<'tcx>, _machine: &mut Self, _alloc_extra: &mut Self::AllocExtra, _ptr: Pointer<Option<Self::Provenance>>, _prov: (AllocId, Self::ProvenanceExtra), _size: Size, _align: Align, _kind: MemoryKind<Self::MemoryKind>, ) -> InterpResult<'tcx>
ptr will always be a pointer with the provenance in prov pointing to the beginning of
the allocation.Source§fn retag_place_contents(
_ecx: &mut InterpCx<'tcx, Self>,
_kind: RetagKind,
_place: &PlaceTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx>
fn retag_place_contents( _ecx: &mut InterpCx<'tcx, Self>, _kind: RetagKind, _place: &PlaceTy<'tcx, Self::Provenance>, ) -> InterpResult<'tcx>
Source§fn protect_in_place_function_argument(
ecx: &mut InterpCx<'tcx, Self>,
mplace: &MPlaceTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx>
fn protect_in_place_function_argument( ecx: &mut InterpCx<'tcx, Self>, mplace: &MPlaceTy<'tcx, Self::Provenance>, ) -> InterpResult<'tcx>
Source§fn after_stack_push(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx>
fn after_stack_push(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx>
Source§fn before_stack_pop(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx>
fn before_stack_pop(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx>
Source§fn after_stack_pop(
_ecx: &mut InterpCx<'tcx, Self>,
_frame: Frame<'tcx, Self::Provenance, Self::FrameExtra>,
unwinding: bool,
) -> InterpResult<'tcx, ReturnAction>
fn after_stack_pop( _ecx: &mut InterpCx<'tcx, Self>, _frame: Frame<'tcx, Self::Provenance, Self::FrameExtra>, unwinding: bool, ) -> InterpResult<'tcx, ReturnAction>
locals have already been destroyed!Source§fn after_local_read(
_ecx: &InterpCx<'tcx, Self>,
_frame: &Frame<'tcx, Self::Provenance, Self::FrameExtra>,
_local: Local,
) -> InterpResult<'tcx>
fn after_local_read( _ecx: &InterpCx<'tcx, Self>, _frame: &Frame<'tcx, Self::Provenance, Self::FrameExtra>, _local: Local, ) -> InterpResult<'tcx>
Source§fn after_local_write(
_ecx: &mut InterpCx<'tcx, Self>,
_local: Local,
_storage_live: bool,
) -> InterpResult<'tcx>
fn after_local_write( _ecx: &mut InterpCx<'tcx, Self>, _local: Local, _storage_live: bool, ) -> InterpResult<'tcx>
storage_live indicates whether this is the initial write upon StorageLive.Source§fn after_local_moved_to_memory(
_ecx: &mut InterpCx<'tcx, Self>,
_local: Local,
_mplace: &MPlaceTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx>
fn after_local_moved_to_memory( _ecx: &mut InterpCx<'tcx, Self>, _local: Local, _mplace: &MPlaceTy<'tcx, Self::Provenance>, ) -> InterpResult<'tcx>
Source§fn enter_trace_span(_span: impl FnOnce() -> Span) -> impl EnteredTraceSpan
fn enter_trace_span(_span: impl FnOnce() -> Span) -> impl EnteredTraceSpan
rustc_const_eval at compile time, by
delegating the entering of tracing::Spans to implementors of the Machine trait. The
default implementation corresponds to tracing being disabled, meaning the tracing calls will
supposedly be optimized out completely. To enable tracing, override this trait method and
return span.entered(). Also see crate::enter_trace_span.

Auto Trait Implementations§
impl<'tcx> !DynSend for CompileTimeMachine<'tcx>
impl<'tcx> !DynSync for CompileTimeMachine<'tcx>
impl<'tcx> Freeze for CompileTimeMachine<'tcx>
impl<'tcx> !RefUnwindSafe for CompileTimeMachine<'tcx>
impl<'tcx> !Send for CompileTimeMachine<'tcx>
impl<'tcx> !Sync for CompileTimeMachine<'tcx>
impl<'tcx> Unpin for CompileTimeMachine<'tcx>
impl<'tcx> !UnwindSafe for CompileTimeMachine<'tcx>
Blanket Implementations§
Source§impl<T> BorrowMut<T> for Twhere
T: ?Sized,
impl<T> BorrowMut<T> for Twhere
T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Source§impl<T, R> CollectAndApply<T, R> for T
impl<T, R> CollectAndApply<T, R> for T
Source§impl<T> Instrument for T
impl<T> Instrument for T
Source§fn instrument(self, span: Span) -> Instrumented<Self>
fn instrument(self, span: Span) -> Instrumented<Self>
Source§fn in_current_span(self) -> Instrumented<Self>
fn in_current_span(self) -> Instrumented<Self>
Source§impl<T> IntoEither for T
impl<T> IntoEither for T
Source§fn into_either(self, into_left: bool) -> Either<Self, Self>
fn into_either(self, into_left: bool) -> Either<Self, Self>
self into a Left variant of Either<Self, Self>
if into_left is true.
Converts self into a Right variant of Either<Self, Self>
otherwise. Read moreSource§fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
self into a Left variant of Either<Self, Self>
if into_left(&self) returns true.
Converts self into a Right variant of Either<Self, Self>
otherwise. Read moreSource§impl<P> IntoQueryParam<P> for P
impl<P> IntoQueryParam<P> for P
fn into_query_param(self) -> P
Source§impl<T> MaybeResult<T> for T
impl<T> MaybeResult<T> for T
Source§impl<T> Pointable for T
impl<T> Pointable for T
Source§impl<I, T, U> Upcast<I, U> for Twhere
U: UpcastFrom<I, T>,
impl<I, T, U> Upcast<I, U> for Twhere
U: UpcastFrom<I, T>,
Source§impl<I, T> UpcastFrom<I, T> for T
impl<I, T> UpcastFrom<I, T> for T
fn upcast_from(from: T, _tcx: I) -> T
Source§impl<Tcx, T> Value<Tcx> for Twhere
Tcx: DepContext,
impl<Tcx, T> Value<Tcx> for Twhere
Tcx: DepContext,
default fn from_cycle_error( tcx: Tcx, cycle_error: &CycleError, _guar: ErrorGuaranteed, ) -> T
Source§impl<T> WithSubscriber for T
impl<T> WithSubscriber for T
Source§fn with_subscriber<S>(self, subscriber: S) -> WithDispatch<Self>
fn with_subscriber<S>(self, subscriber: S) -> WithDispatch<Self>
Source§fn with_current_subscriber(self) -> WithDispatch<Self>
fn with_current_subscriber(self) -> WithDispatch<Self>
impl<T> ErasedDestructor for Twhere
T: 'static,
Layout§
Note: Most layout information is completely unstable and may even differ between compilations. The only exception is types with certain repr(...) attributes. Please see the Rust Reference's “Type Layout” chapter for details on type layout guarantees.
Size: 88 bytes