use std::borrow::{Borrow, Cow};
use std::fmt;
use std::hash::Hash;

use rustc_abi::{Align, FIRST_VARIANT, Size};
use rustc_ast::Mutability;
use rustc_data_structures::fx::{FxHashMap, FxIndexMap, IndexEntry};
use rustc_errors::msg;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::{self as hir, CRATE_HIR_ID, LangItem, find_attr};
use rustc_middle::mir::AssertMessage;
use rustc_middle::mir::interpret::ReportedErrorInfo;
use rustc_middle::query::TyCtxtAt;
use rustc_middle::ty::layout::{HasTypingEnv, TyAndLayout, ValidityRequirement};
use rustc_middle::ty::{self, FieldInfo, Ty, TyCtxt};
use rustc_middle::{bug, mir, span_bug};
use rustc_span::{Span, Symbol, sym};
use rustc_target::callconv::FnAbi;
use tracing::debug;

use super::error::*;
use crate::errors::{LongRunning, LongRunningWarn};
use crate::interpret::{
    self, AllocId, AllocInit, AllocRange, ConstAllocation, CtfeProvenance, FnArg, Frame,
    GlobalAlloc, ImmTy, InterpCx, InterpResult, OpTy, PlaceTy, Pointer, RangeSet, Scalar,
    compile_time_machine, ensure_monomorphic_enough, err_inval, interp_ok, throw_exhaust,
    throw_inval, throw_ub, throw_ub_custom, throw_unsup, throw_unsup_format,
    type_implements_dyn_trait,
};

/// When hitting this many interpreted terminators we emit a deny-by-default lint
/// that notifies the user that their constant takes a long time to evaluate. If that's
/// what they intended, they can just allow the lint.
const LINT_TERMINATOR_LIMIT: usize = 2_000_000;
/// The limit used by `-Z tiny-const-eval-limit`. This smaller limit is useful for internal
/// tests not needing to run 30s or more to show some behaviour.
const TINY_LINT_TERMINATOR_LIMIT: usize = 20;
/// After this many interpreted terminators, we start emitting progress indicators at every
/// power of two of interpreted terminators.
const PROGRESS_INDICATOR_START: usize = 4_000_000;
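
// A small numeric sketch of how these three limits interact (illustration only;
// the authoritative logic is in `increment_const_eval_counter` below). The lint
// fires exactly once, when the step counter reaches the limit; afterwards,
// progress warnings fire at every power of two strictly above the start value:
//
//     fn would_warn(new_steps: usize) -> bool {
//         // true at 8_388_608 (2^23), 16_777_216 (2^24), ... but never at
//         // PROGRESS_INDICATOR_START itself: 4_000_000 is not a power of two
//         // and the comparison is strict.
//         new_steps > PROGRESS_INDICATOR_START && new_steps.is_power_of_two()
//     }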

/// Extra machine state for CTFE, and the Machine instance.
//
// Should be public because out-of-tree rustc consumers need this
// if they want to interact with constant values.
pub struct CompileTimeMachine<'tcx> {
    /// The number of terminators that have been evaluated.
    ///
    /// This is used to produce lints informing the user that the compiler is not stuck.
    /// Set to `usize::MAX` to never report anything.
    pub(super) num_evaluated_steps: usize,

    /// The virtual call stack.
    pub(super) stack: Vec<Frame<'tcx>>,

    /// Pattern matching on consts with references would be unsound if those references
    /// could point to anything mutable. Therefore, when evaluating consts and when constructing valtrees,
    /// we ensure that only immutable global memory can be accessed.
    pub(super) can_access_mut_global: CanAccessMutGlobal,

    /// Whether to check alignment during evaluation.
    pub(super) check_alignment: CheckAlignment,

    /// If `Some`, we are evaluating the initializer of the static with the given `LocalDefId`,
    /// storing the result in the given `AllocId`.
    /// Used to prevent accesses to a static's base allocation, as that may allow for self-initialization loops.
    pub(crate) static_root_ids: Option<(AllocId, LocalDefId)>,

    /// A cache of "data range" computations for unions (i.e., the offsets of non-padding bytes).
    union_data_ranges: FxHashMap<Ty<'tcx>, RangeSet>,
}

#[derive(Copy, Clone)]
pub enum CheckAlignment {
    /// Ignore all alignment requirements.
    /// This is mainly used in interning.
    No,
    /// Hard error when dereferencing a misaligned pointer.
    Error,
}

#[derive(Copy, Clone, PartialEq)]
pub(crate) enum CanAccessMutGlobal {
    No,
    Yes,
}

impl From<bool> for CanAccessMutGlobal {
    fn from(value: bool) -> Self {
        if value { Self::Yes } else { Self::No }
    }
}
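
// Illustration (not part of the machine): the `From<bool>` impl lets callers pass
// a plain flag for "may this evaluation read mutable globals?", e.g. when kicking
// off a `static` initializer. Only `PartialEq` is derived, so compare directly:
//
//     assert!(CanAccessMutGlobal::from(true) == CanAccessMutGlobal::Yes);
//     assert!(CanAccessMutGlobal::from(false) == CanAccessMutGlobal::No);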

impl<'tcx> CompileTimeMachine<'tcx> {
    pub(crate) fn new(
        can_access_mut_global: CanAccessMutGlobal,
        check_alignment: CheckAlignment,
    ) -> Self {
        CompileTimeMachine {
            num_evaluated_steps: 0,
            stack: Vec::new(),
            can_access_mut_global,
            check_alignment,
            static_root_ids: None,
            union_data_ranges: FxHashMap::default(),
        }
    }
}

impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxIndexMap<K, V> {
    #[inline(always)]
    fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
    where
        K: Borrow<Q>,
    {
        FxIndexMap::contains_key(self, k)
    }

    #[inline(always)]
    fn contains_key_ref<Q: ?Sized + Hash + Eq>(&self, k: &Q) -> bool
    where
        K: Borrow<Q>,
    {
        FxIndexMap::contains_key(self, k)
    }

    #[inline(always)]
    fn insert(&mut self, k: K, v: V) -> Option<V> {
        FxIndexMap::insert(self, k, v)
    }

    #[inline(always)]
    fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>,
    {
        // FIXME(#120456) - is `swap_remove` correct?
        FxIndexMap::swap_remove(self, k)
    }

    #[inline(always)]
    fn filter_map_collect<T>(&self, mut f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T> {
        self.iter().filter_map(move |(k, v)| f(k, v)).collect()
    }

    #[inline(always)]
    fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E> {
        match self.get(&k) {
            Some(v) => Ok(v),
            None => {
                vacant()?;
                bug!("The CTFE machine shouldn't ever need to extend the alloc_map when reading")
            }
        }
    }

    #[inline(always)]
    fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E> {
        match self.entry(k) {
            IndexEntry::Occupied(e) => Ok(e.into_mut()),
            IndexEntry::Vacant(e) => {
                let v = vacant()?;
                Ok(e.insert(v))
            }
        }
    }
}

pub type CompileTimeInterpCx<'tcx> = InterpCx<'tcx, CompileTimeMachine<'tcx>>;

#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum MemoryKind {
    Heap {
        /// Indicates whether `make_global` was called on this allocation.
        /// If this is `true`, the allocation must be immutable.
        was_made_global: bool,
    },
}

impl fmt::Display for MemoryKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            MemoryKind::Heap { was_made_global } => {
                write!(f, "heap allocation{}", if *was_made_global { " (made global)" } else { "" })
            }
        }
    }
}

impl interpret::MayLeak for MemoryKind {
    #[inline(always)]
    fn may_leak(self) -> bool {
        match self {
            MemoryKind::Heap { was_made_global } => was_made_global,
        }
    }
}

impl interpret::MayLeak for ! {
    #[inline(always)]
    fn may_leak(self) -> bool {
        // `self` is uninhabited
        self
    }
}

impl<'tcx> CompileTimeInterpCx<'tcx> {
    fn location_triple_for_span(&self, span: Span) -> (Symbol, u32, u32) {
        let topmost = span.ctxt().outer_expn().expansion_cause().unwrap_or(span);
        let caller = self.tcx.sess.source_map().lookup_char_pos(topmost.lo());

        use rustc_span::RemapPathScopeComponents;
        (
            Symbol::intern(
                &caller.file.name.display(RemapPathScopeComponents::DIAGNOSTICS).to_string_lossy(),
            ),
            u32::try_from(caller.line).unwrap(),
            u32::try_from(caller.col_display).unwrap().checked_add(1).unwrap(),
        )
    }

    /// "Intercept" a function call, because we have something special to do for it.
    /// All `#[rustc_do_not_const_check]` functions MUST be hooked here.
    /// If this returns `Some` function, which may be `instance` or a different function with
    /// compatible arguments, then evaluation should continue with that function.
    /// If this returns `None`, the function call has been handled and the function has returned.
    fn hook_special_const_fn(
        &mut self,
        instance: ty::Instance<'tcx>,
        args: &[FnArg<'tcx>],
        _dest: &PlaceTy<'tcx>,
        _ret: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
        let def_id = instance.def_id();

        if self.tcx.is_lang_item(def_id, LangItem::PanicDisplay)
            || self.tcx.is_lang_item(def_id, LangItem::BeginPanic)
        {
            let args = Self::copy_fn_args(args);
            // &str or &&str
            assert!(args.len() == 1);

            let mut msg_place = self.deref_pointer(&args[0])?;
            while msg_place.layout.ty.is_ref() {
                msg_place = self.deref_pointer(&msg_place)?;
            }

            let msg = Symbol::intern(self.read_str(&msg_place)?);
            let span = self.find_closest_untracked_caller_location();
            let (file, line, col) = self.location_triple_for_span(span);
            return Err(ConstEvalErrKind::Panic { msg, file, line, col }).into();
        } else if self.tcx.is_lang_item(def_id, LangItem::PanicFmt) {
            // For panic_fmt, call const_panic_fmt instead.
            let const_def_id = self.tcx.require_lang_item(LangItem::ConstPanicFmt, self.tcx.span);
            let new_instance = ty::Instance::expect_resolve(
                *self.tcx,
                self.typing_env(),
                const_def_id,
                instance.args,
                self.cur_span(),
            );

            return interp_ok(Some(new_instance));
        }
        interp_ok(Some(instance))
    }

    /// See documentation on the `ptr_guaranteed_cmp` intrinsic.
    /// Returns `2` if the result is unknown.
    /// Returns `1` if the pointers are guaranteed equal.
    /// Returns `0` if the pointers are guaranteed inequal.
    ///
    /// Note that this intrinsic is exposed on stable for comparison with null. In other words, any
    /// change to this function that affects comparison with null is insta-stable!
    fn guaranteed_cmp(&mut self, a: Scalar, b: Scalar) -> InterpResult<'tcx, u8> {
        interp_ok(match (a, b) {
            // Comparisons between integers are always known.
            (Scalar::Int(a), Scalar::Int(b)) => (a == b) as u8,
            // Comparing a pointer `ptr` with an integer `int` is equivalent to comparing
            // `ptr-int` with null, so we can reduce this case to a `scalar_may_be_null` test.
            (Scalar::Int(int), Scalar::Ptr(ptr, _)) | (Scalar::Ptr(ptr, _), Scalar::Int(int)) => {
                let int = int.to_target_usize(*self.tcx);
                // The `wrapping_neg` here may produce a value that is not
                // a valid target usize any more... but `wrapping_offset` handles that correctly.
                let offset_ptr = ptr.wrapping_offset(Size::from_bytes(int.wrapping_neg()), self);
                if !self.scalar_may_be_null(Scalar::from_pointer(offset_ptr, self))? {
                    // `ptr.wrapping_sub(int)` is definitely not equal to `0`, so `ptr != int`
                    0
                } else {
                    // `ptr.wrapping_sub(int)` could be equal to `0`, but might not be,
                    // so we cannot know for sure if `ptr == int` or not
                    2
                }
            }
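
            // Worked example for the pointer/integer case above (illustration only):
            // comparing a pointer `ptr` at offset 0 of some allocation against the
            // integer 0 proceeds as
            //
            //     int = 0, ptr = (alloc, offset 0)
            //     offset_ptr = ptr.wrapping_offset(0) = ptr  // still inside alloc
            //     scalar_may_be_null(offset_ptr) == false    // allocations never sit at address 0
            //     => 0, i.e. `ptr != 0` is guaranteed
            //
            // which is exactly the stable "compare with null" use case mentioned in
            // the doc comment on `guaranteed_cmp`.
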
            (Scalar::Ptr(a, _), Scalar::Ptr(b, _)) => {
                let (a_prov, a_offset) = a.prov_and_relative_offset();
                let (b_prov, b_offset) = b.prov_and_relative_offset();
                let a_allocid = a_prov.alloc_id();
                let b_allocid = b_prov.alloc_id();
                let a_info = self.get_alloc_info(a_allocid);
                let b_info = self.get_alloc_info(b_allocid);

                // Check if the pointers cannot be equal due to alignment
                if a_info.align > Align::ONE && b_info.align > Align::ONE {
                    let min_align = Ord::min(a_info.align.bytes(), b_info.align.bytes());
                    let a_residue = a_offset.bytes() % min_align;
                    let b_residue = b_offset.bytes() % min_align;
                    if a_residue != b_residue {
                        // If the two pointers have a different residue modulo their
                        // common alignment, they cannot be equal.
                        return interp_ok(0);
                    }
                    // The pointers have the same residue modulo their common alignment,
                    // so they could be equal. Try the other checks.
                }
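
                // Worked example for the residue check above (illustration only):
                // say `a_info.align` is 4 and `b_info.align` is 8, so `min_align`
                // is 4. With `a_offset = 1` and `b_offset = 3`, the residues are 1
                // and 3. Both allocation bases are multiples of 4 at runtime, so the
                // two addresses are congruent to 1 resp. 3 modulo 4 and can never be
                // equal; we return 0 without knowing the actual base addresses.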

                if let (Some(GlobalAlloc::Static(a_did)), Some(GlobalAlloc::Static(b_did))) = (
                    self.tcx.try_get_global_alloc(a_allocid),
                    self.tcx.try_get_global_alloc(b_allocid),
                ) {
                    if a_allocid == b_allocid {
                        debug_assert_eq!(
                            a_did, b_did,
                            "different static item DefIds had same AllocId? {a_allocid:?} == {b_allocid:?}, {a_did:?} != {b_did:?}"
                        );
                        // Comparing two pointers into the same static. As per
                        // https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.intro
                        // a static cannot be duplicated, so if two pointers are into the same
                        // static, they are equal if and only if their offsets are equal.
                        (a_offset == b_offset) as u8
                    } else {
                        debug_assert_ne!(
                            a_did, b_did,
                            "same static item DefId had two different AllocIds? {a_allocid:?} != {b_allocid:?}, {a_did:?} == {b_did:?}"
                        );
                        // Comparing two pointers into different statics.
                        // We can never determine for sure that two pointers into different statics
                        // are *equal*, but we can know that they are *inequal* if they are both
                        // strictly in-bounds (i.e. in-bounds and not one-past-the-end) of
                        // their respective static, as different non-zero-sized statics cannot
                        // overlap or be deduplicated as per
                        // https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.intro
                        // (non-deduplication), and
                        // https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.storage-disjointness
                        // (non-overlapping).
                        if a_offset < a_info.size && b_offset < b_info.size {
                            0
                        } else {
                            // Otherwise, conservatively say we don't know.
                            // There are some cases we could still return `0` for, e.g.
                            // if the pointers being equal would require their statics to overlap
                            // one or more bytes, but for simplicity we currently only check
                            // strictly in-bounds pointers.
                            2
                        }
                    }
                } else {
                    // All other cases we conservatively say we don't know.
                    //
                    // For comparing statics to non-statics, as per https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.storage-disjointness
                    // immutable statics can overlap with other kinds of allocations sometimes.
                    //
                    // FIXME: We could be more decisive for (non-zero-sized) mutable statics,
                    // which cannot overlap with other kinds of allocations.
                    //
                    // Functions and vtables can be duplicated and deduplicated, so we
                    // cannot be sure of runtime equality of pointers to the same one, or the
                    // runtime inequality of pointers to different ones (see e.g. #73722),
                    // so comparing those should return 2, whether they are the same allocation
                    // or not.
                    //
                    // `GlobalAlloc::TypeId` exists mostly to prevent consteval from comparing
                    // `TypeId`s, so comparing those should always return 2, whether they are the
                    // same allocation or not.
                    //
                    // FIXME: We could revisit comparing pointers into the same
                    // `GlobalAlloc::Memory` once https://github.com/rust-lang/rust/issues/128775
                    // is fixed (but they can be deduplicated, so comparing pointers into different
                    // ones should return 2).
                    2
                }
            }
        })
    }
}

impl<'tcx> CompileTimeMachine<'tcx> {
    #[inline(always)]
    /// Find the first stack frame that is within the current crate, if any.
    /// Otherwise, return the crate's `HirId`.
    pub fn best_lint_scope(&self, tcx: TyCtxt<'tcx>) -> hir::HirId {
        self.stack.iter().find_map(|frame| frame.lint_root(tcx)).unwrap_or(CRATE_HIR_ID)
    }
}

impl<'tcx> interpret::Machine<'tcx> for CompileTimeMachine<'tcx> {
    compile_time_machine!(<'tcx>);

    const PANIC_ON_ALLOC_FAIL: bool = false; // will be raised as a proper error

    #[inline(always)]
    fn enforce_alignment(ecx: &InterpCx<'tcx, Self>) -> bool {
        matches!(ecx.machine.check_alignment, CheckAlignment::Error)
    }

    #[inline(always)]
    fn enforce_validity(ecx: &InterpCx<'tcx, Self>, layout: TyAndLayout<'tcx>) -> bool {
        ecx.tcx.sess.opts.unstable_opts.extra_const_ub_checks || layout.is_uninhabited()
    }

    fn load_mir(
        ecx: &InterpCx<'tcx, Self>,
        instance: ty::InstanceKind<'tcx>,
    ) -> &'tcx mir::Body<'tcx> {
        match instance {
            ty::InstanceKind::Item(def) => ecx.tcx.mir_for_ctfe(def),
            _ => ecx.tcx.instance_mir(instance),
        }
    }

    fn find_mir_or_eval_fn(
        ecx: &mut InterpCx<'tcx, Self>,
        orig_instance: ty::Instance<'tcx>,
        _abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx>],
        dest: &PlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        _unwind: mir::UnwindAction, // unwinding is not supported in consts
    ) -> InterpResult<'tcx, Option<(&'tcx mir::Body<'tcx>, ty::Instance<'tcx>)>> {
        debug!("find_mir_or_eval_fn: {:?}", orig_instance);

        // Replace some functions.
        let Some(instance) = ecx.hook_special_const_fn(orig_instance, args, dest, ret)? else {
            // Call has already been handled.
            return interp_ok(None);
        };

        // Only check non-glue functions
        if let ty::InstanceKind::Item(def) = instance.def {
            // Execution might have wandered off into other crates, so we cannot do a stability-
            // sensitive check here. But we can at least rule out functions that are not const at
            // all. That said, we have to allow calling functions inside a `const trait`. These
            // *are* const-checked!
            if !ecx.tcx.is_const_fn(def) || find_attr!(ecx.tcx, def, RustcDoNotConstCheck) {
                // We certainly do *not* want to actually call the fn
                // though, so be sure we return here.
                throw_unsup_format!("calling non-const function `{}`", instance)
            }
        }

        // This is a const fn. Call it.
        // In case of replacement, we return the *original* instance to make backtraces work out
        // (and we hope this does not confuse the FnAbi checks too much).
        interp_ok(Some((ecx.load_mir(instance.def, None)?, orig_instance)))
    }

    fn panic_nounwind(ecx: &mut InterpCx<'tcx, Self>, msg: &str) -> InterpResult<'tcx> {
        let msg = Symbol::intern(msg);
        let span = ecx.find_closest_untracked_caller_location();
        let (file, line, col) = ecx.location_triple_for_span(span);
        Err(ConstEvalErrKind::Panic { msg, file, line, col }).into()
    }

    fn call_intrinsic(
        ecx: &mut InterpCx<'tcx, Self>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
        dest: &PlaceTy<'tcx, Self::Provenance>,
        target: Option<mir::BasicBlock>,
        _unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
        // Shared intrinsics.
        if ecx.eval_intrinsic(instance, args, dest, target)? {
            return interp_ok(None);
        }
        let intrinsic_name = ecx.tcx.item_name(instance.def_id());

        // CTFE-specific intrinsics.
        match intrinsic_name {
            sym::ptr_guaranteed_cmp => {
                let a = ecx.read_scalar(&args[0])?;
                let b = ecx.read_scalar(&args[1])?;
                let cmp = ecx.guaranteed_cmp(a, b)?;
                ecx.write_scalar(Scalar::from_u8(cmp), dest)?;
            }
            sym::const_allocate => {
                let size = ecx.read_scalar(&args[0])?.to_target_usize(ecx)?;
                let align = ecx.read_scalar(&args[1])?.to_target_usize(ecx)?;

                let align = match Align::from_bytes(align) {
                    Ok(a) => a,
                    Err(err) => throw_ub_custom!(
                        msg!(
                            "invalid align passed to `{$name}`: {$align} is {$err_kind ->
                                [not_power_of_two] not a power of 2
                                [too_large] too large
                                *[other] {\"\"}
                            }"
                        ),
                        name = "const_allocate",
                        err_kind = err.diag_ident(),
                        align = err.align()
                    ),
                };

                let ptr = ecx.allocate_ptr(
                    Size::from_bytes(size),
                    align,
                    interpret::MemoryKind::Machine(MemoryKind::Heap { was_made_global: false }),
                    AllocInit::Uninit,
                )?;
                ecx.write_pointer(ptr, dest)?;
            }
            sym::const_deallocate => {
                let ptr = ecx.read_pointer(&args[0])?;
                let size = ecx.read_scalar(&args[1])?.to_target_usize(ecx)?;
                let align = ecx.read_scalar(&args[2])?.to_target_usize(ecx)?;

                let size = Size::from_bytes(size);
                let align = match Align::from_bytes(align) {
                    Ok(a) => a,
                    Err(err) => throw_ub_custom!(
                        msg!(
                            "invalid align passed to `{$name}`: {$align} is {$err_kind ->
                                [not_power_of_two] not a power of 2
                                [too_large] too large
                                *[other] {\"\"}
                            }"
                        ),
                        name = "const_deallocate",
                        err_kind = err.diag_ident(),
                        align = err.align()
                    ),
                };

                // If an allocation was created in another const,
                // we don't deallocate it.
                let (alloc_id, _, _) = ecx.ptr_get_alloc_id(ptr, 0)?;
                let is_allocated_in_another_const = matches!(
                    ecx.tcx.try_get_global_alloc(alloc_id),
                    Some(interpret::GlobalAlloc::Memory(_))
                );

                if !is_allocated_in_another_const {
                    ecx.deallocate_ptr(
                        ptr,
                        Some((size, align)),
                        interpret::MemoryKind::Machine(MemoryKind::Heap { was_made_global: false }),
                    )?;
                }
            }
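
            // A hedged user-side sketch of the two heap intrinsics handled above
            // (nightly-only; assumes the `core_intrinsics`/`const_heap` feature
            // gates and the `core::intrinsics` signatures, which may shift):
            //
            //     const C: u8 = unsafe {
            //         let p = core::intrinsics::const_allocate(1, 1);
            //         *p = 42;
            //         let v = *p;
            //         core::intrinsics::const_deallocate(p, 1, 1);
            //         v
            //     };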

            sym::const_make_global => {
                let ptr = ecx.read_pointer(&args[0])?;
                ecx.make_const_heap_ptr_global(ptr)?;
                ecx.write_pointer(ptr, dest)?;
            }

            // The intrinsic represents whether the value is known to the optimizer (LLVM).
            // We're not doing any optimizations here, so there is no optimizer that could know the value.
            // (We know the value here in the machine of course, but this is the runtime of that code,
            // not the optimization stage.)
            sym::is_val_statically_known => ecx.write_scalar(Scalar::from_bool(false), dest)?,

            // We handle these here since Miri does not want to have them.
            sym::assert_inhabited
            | sym::assert_zero_valid
            | sym::assert_mem_uninitialized_valid => {
                let ty = instance.args.type_at(0);
                let requirement = ValidityRequirement::from_intrinsic(intrinsic_name).unwrap();

                let should_panic = !ecx
                    .tcx
                    .check_validity_requirement((requirement, ecx.typing_env().as_query_input(ty)))
                    .map_err(|_| err_inval!(TooGeneric))?;

                if should_panic {
                    let layout = ecx.layout_of(ty)?;

                    let msg = match requirement {
                        // For *all* intrinsics we first check `is_uninhabited` to give a more specific
                        // error message.
                        _ if layout.is_uninhabited() => format!(
                            "aborted execution: attempted to instantiate uninhabited type `{ty}`"
                        ),
                        ValidityRequirement::Inhabited => bug!("handled earlier"),
                        ValidityRequirement::Zero => format!(
                            "aborted execution: attempted to zero-initialize type `{ty}`, which is invalid"
                        ),
                        ValidityRequirement::UninitMitigated0x01Fill => format!(
                            "aborted execution: attempted to leave type `{ty}` uninitialized, which is invalid"
                        ),
                        ValidityRequirement::Uninit => bug!("assert_uninit_valid doesn't exist"),
                    };

                    Self::panic_nounwind(ecx, &msg)?;
                    // Skip the `return_to_block` at the end (we panicked, we do not return).
                    return interp_ok(None);
                }
            }

            sym::type_id_vtable => {
                let tp_ty = ecx.read_type_id(&args[0])?;
                let result_ty = ecx.read_type_id(&args[1])?;

                let (implements_trait, preds) = type_implements_dyn_trait(ecx, tp_ty, result_ty)?;

                if implements_trait {
                    let vtable_ptr = ecx.get_vtable_ptr(tp_ty, preds)?;
                    // Writing a non-null pointer into an `Option<NonNull>` will automatically make it `Some`.
                    ecx.write_pointer(vtable_ptr, dest)?;
                } else {
                    // Write `None`
                    ecx.write_discriminant(FIRST_VARIANT, dest)?;
                }
            }

            sym::type_of => {
                let ty = ecx.read_type_id(&args[0])?;
                ecx.write_type_info(ty, dest)?;
            }

            sym::field_offset => {
                let frt_ty = instance.args.type_at(0);
                ensure_monomorphic_enough(ecx.tcx.tcx, frt_ty)?;

                let (ty, variant, field) = if let ty::Adt(def, args) = frt_ty.kind()
                    && let Some(FieldInfo { base, variant_idx, field_idx, .. }) =
                        def.field_representing_type_info(ecx.tcx.tcx, args)
                {
                    (base, variant_idx, field_idx)
                } else {
                    span_bug!(ecx.cur_span(), "expected field representing type, got {frt_ty}")
                };
                let layout = ecx.layout_of(ty)?;
                let cx = ty::layout::LayoutCx::new(ecx.tcx.tcx, ecx.typing_env());

                let layout = layout.for_variant(&cx, variant);
                let offset = layout.fields.offset(field.index()).bytes();

                ecx.write_scalar(Scalar::from_target_usize(offset, ecx), dest)?;
            }

            _ => {
                // We haven't handled the intrinsic, let's see if we can use a fallback body.
                if ecx.tcx.intrinsic(instance.def_id()).unwrap().must_be_overridden {
                    throw_unsup_format!(
                        "intrinsic `{intrinsic_name}` is not supported at compile-time"
                    );
                }
                return interp_ok(Some(ty::Instance {
                    def: ty::InstanceKind::Item(instance.def_id()),
                    args: instance.args,
                }));
            }
        }

        // Intrinsic is done, jump to next block.
        ecx.return_to_block(target)?;
        interp_ok(None)
    }

    fn assert_panic(
        ecx: &mut InterpCx<'tcx, Self>,
        msg: &AssertMessage<'tcx>,
        _unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        use rustc_middle::mir::AssertKind::*;
        // Convert `AssertKind<Operand>` to `AssertKind<Scalar>`.
        let eval_to_int =
            |op| ecx.read_immediate(&ecx.eval_operand(op, None)?).map(|x| x.to_const_int());
        let err = match msg {
            BoundsCheck { len, index } => {
                let len = eval_to_int(len)?;
                let index = eval_to_int(index)?;
                BoundsCheck { len, index }
            }
            Overflow(op, l, r) => Overflow(*op, eval_to_int(l)?, eval_to_int(r)?),
            OverflowNeg(op) => OverflowNeg(eval_to_int(op)?),
            DivisionByZero(op) => DivisionByZero(eval_to_int(op)?),
            RemainderByZero(op) => RemainderByZero(eval_to_int(op)?),
            ResumedAfterReturn(coroutine_kind) => ResumedAfterReturn(*coroutine_kind),
            ResumedAfterPanic(coroutine_kind) => ResumedAfterPanic(*coroutine_kind),
            ResumedAfterDrop(coroutine_kind) => ResumedAfterDrop(*coroutine_kind),
            MisalignedPointerDereference { required, found } => MisalignedPointerDereference {
                required: eval_to_int(required)?,
                found: eval_to_int(found)?,
            },
            NullPointerDereference => NullPointerDereference,
            InvalidEnumConstruction(source) => InvalidEnumConstruction(eval_to_int(source)?),
        };
        Err(ConstEvalErrKind::AssertFailure(err)).into()
    }

    #[inline(always)]
    fn runtime_checks(
        _ecx: &InterpCx<'tcx, Self>,
        _r: mir::RuntimeChecks,
    ) -> InterpResult<'tcx, bool> {
        // We can't look at `tcx.sess` here as that can differ across crates, which can lead to
        // unsound differences in evaluating the same constant at different instantiation sites.
        interp_ok(true)
    }

    fn binary_ptr_op(
        _ecx: &InterpCx<'tcx, Self>,
        _bin_op: mir::BinOp,
        _left: &ImmTy<'tcx>,
        _right: &ImmTy<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx>> {
        throw_unsup_format!("pointer arithmetic or comparison is not supported at compile-time");
    }

    fn increment_const_eval_counter(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        // The step limit has already been hit in a previous call to `increment_const_eval_counter`.

        if let Some(new_steps) = ecx.machine.num_evaluated_steps.checked_add(1) {
            let (limit, start) = if ecx.tcx.sess.opts.unstable_opts.tiny_const_eval_limit {
                (TINY_LINT_TERMINATOR_LIMIT, TINY_LINT_TERMINATOR_LIMIT)
            } else {
                (LINT_TERMINATOR_LIMIT, PROGRESS_INDICATOR_START)
            };

            ecx.machine.num_evaluated_steps = new_steps;
            // By default, we have a *deny* lint kicking in after some time
            // to ensure `loop {}` doesn't just go forever.
            // In case that lint got reduced, in particular for `--cap-lint` situations, we also
            // have a hard warning shown every now and then for really long executions.
            if new_steps == limit {
                // By default, we stop after a million steps, but the user can disable this lint
                // to be able to run until the heat death of the universe or power loss, whichever
                // comes first.
                let hir_id = ecx.machine.best_lint_scope(*ecx.tcx);
                let is_error = ecx
                    .tcx
                    .lint_level_at_node(
                        rustc_session::lint::builtin::LONG_RUNNING_CONST_EVAL,
                        hir_id,
                    )
                    .level
                    .is_error();
                let span = ecx.cur_span();
                ecx.tcx.emit_node_span_lint(
                    rustc_session::lint::builtin::LONG_RUNNING_CONST_EVAL,
                    hir_id,
                    span,
                    LongRunning { item_span: ecx.tcx.span },
                );
                // If this was a hard error, don't bother continuing evaluation.
                if is_error {
                    let guard = ecx
                        .tcx
                        .dcx()
                        .span_delayed_bug(span, "The deny lint should have already errored");
                    throw_inval!(AlreadyReported(ReportedErrorInfo::allowed_in_infallible(guard)));
                }
            } else if new_steps > start && new_steps.is_power_of_two() {
                // Only report after a certain number of terminators have been evaluated and the
                // current number of evaluated terminators is a power of 2. The latter gives us a cheap
                // way to implement exponential backoff.
                let span = ecx.cur_span();
                // We store a unique number in `force_duplicate` to evade `-Z deduplicate-diagnostics`.
                // `new_steps` is guaranteed to be unique because `ecx.machine.num_evaluated_steps` is
                // always increasing.
                ecx.tcx.dcx().emit_warn(LongRunningWarn {
                    span,
                    item_span: ecx.tcx.span,
                    force_duplicate: new_steps,
                });
            }
        }

        interp_ok(())
    }

    #[inline(always)]
    fn expose_provenance(
        _ecx: &InterpCx<'tcx, Self>,
        _provenance: Self::Provenance,
    ) -> InterpResult<'tcx> {
        // This is only reachable with -Zunleash-the-miri-inside-of-you.
        throw_unsup_format!("exposing pointers is not possible at compile-time")
    }

    #[inline(always)]
    fn init_frame(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx>,
    ) -> InterpResult<'tcx, Frame<'tcx>> {
        // Enforce stack size limit. Add 1 because this is run before the new frame is pushed.
        if !ecx.recursion_limit.value_within_limit(ecx.stack().len() + 1) {
            throw_exhaust!(StackFrameLimitReached)
        } else {
            interp_ok(frame)
        }
    }

    #[inline(always)]
    fn stack<'a>(
        ecx: &'a InterpCx<'tcx, Self>,
    ) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>] {
        &ecx.machine.stack
    }

    #[inline(always)]
    fn stack_mut<'a>(
        ecx: &'a mut InterpCx<'tcx, Self>,
    ) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>> {
        &mut ecx.machine.stack
    }

    fn before_access_global(
        _tcx: TyCtxtAt<'tcx>,
        machine: &Self,
        alloc_id: AllocId,
        alloc: ConstAllocation<'tcx>,
        _static_def_id: Option<DefId>,
        is_write: bool,
    ) -> InterpResult<'tcx> {
        let alloc = alloc.inner();
        if is_write {
            // Write access. These are never allowed, but we give a targeted error message.
            match alloc.mutability {
                Mutability::Not => throw_ub!(WriteToReadOnly(alloc_id)),
                Mutability::Mut => Err(ConstEvalErrKind::ModifiedGlobal).into(),
            }
        } else {
            // Read access. These are usually allowed, with some exceptions.
            if machine.can_access_mut_global == CanAccessMutGlobal::Yes {
                // Machine configuration allows us to read from anything (e.g., `static` initializer).
                interp_ok(())
            } else if alloc.mutability == Mutability::Mut {
                // Machine configuration does not allow us to read statics (e.g., `const`
                // initializer).
                Err(ConstEvalErrKind::ConstAccessesMutGlobal).into()
            } else {
                // Immutable global, this read is fine.
                assert_eq!(alloc.mutability, Mutability::Not);
                interp_ok(())
            }
        }
    }
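
    // Hedged illustration of the read paths above: with `can_access_mut_global ==
    // No` (a `const` initializer), a read of an allocation whose mutability is
    // `Mutability::Mut`, e.g. the backing memory of a `static mut`, lands in the
    // `ConstAccessesMutGlobal` arm:
    //
    //     static mut X: i32 = 0;
    //     const C: i32 = unsafe { X };
    //
    // (In practice such code is usually rejected by earlier const checks; this
    // hook is the interpreter-level backstop.)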

    fn retag_ptr_value(
        ecx: &mut InterpCx<'tcx, Self>,
        _kind: mir::RetagKind,
        val: &ImmTy<'tcx, CtfeProvenance>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, CtfeProvenance>> {
        // If it's a frozen shared reference that's not already immutable, potentially make it immutable.
        // (Do nothing on `None` provenance, that cannot store immutability anyway.)
        if let ty::Ref(_, ty, mutbl) = val.layout.ty.kind()
            && *mutbl == Mutability::Not
            && val
                .to_scalar_and_meta()
                .0
                .to_pointer(ecx)?
                .provenance
                .is_some_and(|p| !p.immutable())
        {
            // That next check is expensive, that's why we have all the guards above.
            let is_immutable = ty.is_freeze(*ecx.tcx, ecx.typing_env());
            let place = ecx.ref_to_mplace(val)?;
            let new_place = if is_immutable {
                place.map_provenance(CtfeProvenance::as_immutable)
            } else {
                // Even if it is not immutable, remember that it is a shared reference.
                // This allows it to become part of the final value of the constant.
                // (See <https://github.com/rust-lang/rust/pull/128543> for why we allow this
                // even when there is interior mutability.)
                place.map_provenance(CtfeProvenance::as_shared_ref)
            };
            interp_ok(ImmTy::from_immediate(new_place.to_ref(ecx), val.layout))
        } else {
            interp_ok(val.clone())
        }
    }
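
    // Hedged illustration of the retag logic above: for `const A: &i32 = &42;`,
    // `i32: Freeze` holds, so the reference's provenance is marked immutable and
    // the allocation can be interned read-only. For a shared reference to an
    // interior-mutable type (where `is_freeze` is false), the provenance only
    // keeps the "shared reference" marker so the value can still become part of
    // the final constant (see the PR linked above).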

    fn before_memory_write(
        _tcx: TyCtxtAt<'tcx>,
        _machine: &mut Self,
        _alloc_extra: &mut Self::AllocExtra,
        _ptr: Pointer<Option<Self::Provenance>>,
        (_alloc_id, immutable): (AllocId, bool),
        range: AllocRange,
    ) -> InterpResult<'tcx> {
        if range.size == Size::ZERO {
            // Nothing to check.
            return interp_ok(());
        }
        // Reject writes through immutable pointers.
        if immutable {
            return Err(ConstEvalErrKind::WriteThroughImmutablePointer).into();
        }
        // Everything else is fine.
        interp_ok(())
    }

    fn before_alloc_access(
        tcx: TyCtxtAt<'tcx>,
        machine: &Self,
        alloc_id: AllocId,
    ) -> InterpResult<'tcx> {
        if machine.stack.is_empty() {
            // Get out of the way for the final copy.
            return interp_ok(());
        }
        // Check if this is the currently evaluated static.
        if Some(alloc_id) == machine.static_root_ids.map(|(id, _)| id) {
            return Err(ConstEvalErrKind::RecursiveStatic).into();
        }
        // If this is another static, make sure we fire off the query to detect cycles.
        // But only do that when checks for static recursion are enabled.
        if machine.static_root_ids.is_some() {
            if let Some(GlobalAlloc::Static(def_id)) = tcx.try_get_global_alloc(alloc_id) {
                if tcx.is_foreign_item(def_id) {
                    throw_unsup!(ExternStatic(def_id));
                }
                tcx.eval_static_initializer(def_id)?;
            }
        }
        interp_ok(())
    }

    fn cached_union_data_range<'e>(
        ecx: &'e mut InterpCx<'tcx, Self>,
        ty: Ty<'tcx>,
        compute_range: impl FnOnce() -> RangeSet,
    ) -> Cow<'e, RangeSet> {
        if ecx.tcx.sess.opts.unstable_opts.extra_const_ub_checks {
            Cow::Borrowed(ecx.machine.union_data_ranges.entry(ty).or_insert_with(compute_range))
        } else {
            // Don't bother caching, we're only doing one validation at the end anyway.
            Cow::Owned(compute_range())
        }
    }

    fn get_default_alloc_params(&self) -> <Self::Bytes as mir::interpret::AllocBytes>::AllocParams {
    }
}

// Please do not add any code below the above `Machine` trait impl. I (oli-obk) plan more cleanups
// so we can end up having a file with just that impl, but for now, let's keep the impl discoverable
// at the bottom of this file.