Skip to main content

rustc_const_eval/interpret/
validity.rs

//! Check the validity invariant of a given value, and tell the user
//! where in the value it got violated.
//! In const context, this goes even further and tries to approximate const safety.
//! That's useful because it means other passes (e.g. promotion) can rely on `const`s
//! to be const-safe.
6
7use std::borrow::Cow;
8use std::fmt::Write;
9use std::hash::Hash;
10use std::num::NonZero;
11
12use either::{Left, Right};
13use hir::def::DefKind;
14use rustc_abi::{
15    BackendRepr, FieldIdx, FieldsShape, Scalar as ScalarAbi, Size, VariantIdx, Variants,
16    WrappingRange,
17};
18use rustc_ast::Mutability;
19use rustc_data_structures::fx::FxHashSet;
20use rustc_hir as hir;
21use rustc_middle::bug;
22use rustc_middle::mir::interpret::ValidationErrorKind::{self, *};
23use rustc_middle::mir::interpret::{
24    ExpectedKind, InterpErrorKind, InvalidMetaKind, Misalignment, PointerKind, Provenance,
25    UnsupportedOpInfo, ValidationErrorInfo, alloc_range, interp_ok,
26};
27use rustc_middle::ty::layout::{LayoutCx, TyAndLayout};
28use rustc_middle::ty::{self, Ty};
29use rustc_span::{Symbol, sym};
30use tracing::trace;
31
32use super::machine::AllocMap;
33use super::{
34    AllocId, CheckInAllocMsg, GlobalAlloc, ImmTy, Immediate, InterpCx, InterpResult, MPlaceTy,
35    Machine, MemPlaceMeta, PlaceTy, Pointer, Projectable, Scalar, ValueVisitor, err_ub,
36    format_interp_error,
37};
38use crate::enter_trace_span;
39
40// for the validation errors
41#[rustfmt::skip]
42use super::InterpErrorKind::UndefinedBehavior as Ub;
43use super::InterpErrorKind::Unsupported as Unsup;
44use super::UndefinedBehaviorInfo::*;
45use super::UnsupportedOpInfo::*;
46
/// Build a `ValidationError` interp error, rendering the current `path`
/// (if non-empty) into a human-readable string for the diagnostic.
macro_rules! err_validation_failure {
    ($where:expr, $kind: expr) => {{
        let where_ = &$where;
        // Only allocate/format the path string when there actually is a path;
        // this macro is on the error path, so the allocation is acceptable.
        let path = if !where_.is_empty() {
            let mut path = String::new();
            write_path(&mut path, where_);
            Some(path)
        } else {
            None
        };

        err_ub!(ValidationError(ValidationErrorInfo { path, kind: $kind }))
    }};
}
61
/// Like `err_validation_failure!`, but immediately returns the error from the
/// enclosing function (via the unstable `do yeet` / `try_blocks` machinery).
macro_rules! throw_validation_failure {
    ($where:expr, $kind: expr) => {
        do yeet err_validation_failure!($where, $kind)
    };
}
67
/// If $e throws an error matching the pattern, throw a validation failure.
/// Other errors are passed back to the caller, unchanged -- and if they reach the root of
/// the visitor, we make sure only validation errors and `InvalidProgram` errors are left.
/// This lets you use the patterns as a kind of validation list, asserting which errors
/// can possibly happen:
///
/// ```ignore(illustrative)
/// let v = try_validation!(some_fn(), some_path, {
///     Foo | Bar | Baz => { "some failure" },
/// });
/// ```
///
/// The patterns must be of type `UndefinedBehaviorInfo`.
/// An additional expected parameter can also be added to the failure message:
///
/// ```ignore(illustrative)
/// let v = try_validation!(some_fn(), some_path, {
///     Foo | Bar | Baz => { "some failure" } expected { "something that wasn't a failure" },
/// });
/// ```
///
/// An additional nicety is that both parameters actually take format args, so you can just write
/// the format string in directly:
///
/// ```ignore(illustrative)
/// let v = try_validation!(some_fn(), some_path, {
///     Foo | Bar | Baz => { "{:?}", some_failure } expected { "{}", expected_value },
/// });
/// ```
///
macro_rules! try_validation {
    ($e:expr, $where:expr,
    $( $( $p:pat_param )|+ => $kind: expr ),+ $(,)?
    ) => {{
        $e.map_err_kind(|e| {
            // We catch the error and turn it into a validation failure. We are okay with
            // allocation here as this can only slow down builds that fail anyway.
            match e {
                $(
                    $($p)|+ => {
                        err_validation_failure!(
                            $where,
                            $kind
                        )
                    }
                ),+,
                e => e,
            }
        })?
    }};
}
119
120/// We want to show a nice path to the invalid field for diagnostics,
121/// but avoid string operations in the happy case where no error happens.
122/// So we track a `Vec<PathElem>` where `PathElem` contains all the data we
123/// need to later print something for the user.
124#[derive(#[automatically_derived]
impl ::core::marker::Copy for PathElem { }Copy, #[automatically_derived]
impl ::core::clone::Clone for PathElem {
    #[inline]
    fn clone(&self) -> PathElem {
        let _: ::core::clone::AssertParamIsClone<Symbol>;
        let _: ::core::clone::AssertParamIsClone<VariantIdx>;
        let _: ::core::clone::AssertParamIsClone<usize>;
        *self
    }
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for PathElem {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        match self {
            PathElem::Field(__self_0) =>
                ::core::fmt::Formatter::debug_tuple_field1_finish(f, "Field",
                    &__self_0),
            PathElem::Variant(__self_0) =>
                ::core::fmt::Formatter::debug_tuple_field1_finish(f,
                    "Variant", &__self_0),
            PathElem::CoroutineState(__self_0) =>
                ::core::fmt::Formatter::debug_tuple_field1_finish(f,
                    "CoroutineState", &__self_0),
            PathElem::CapturedVar(__self_0) =>
                ::core::fmt::Formatter::debug_tuple_field1_finish(f,
                    "CapturedVar", &__self_0),
            PathElem::ArrayElem(__self_0) =>
                ::core::fmt::Formatter::debug_tuple_field1_finish(f,
                    "ArrayElem", &__self_0),
            PathElem::TupleElem(__self_0) =>
                ::core::fmt::Formatter::debug_tuple_field1_finish(f,
                    "TupleElem", &__self_0),
            PathElem::Deref => ::core::fmt::Formatter::write_str(f, "Deref"),
            PathElem::EnumTag =>
                ::core::fmt::Formatter::write_str(f, "EnumTag"),
            PathElem::CoroutineTag =>
                ::core::fmt::Formatter::write_str(f, "CoroutineTag"),
            PathElem::DynDowncast =>
                ::core::fmt::Formatter::write_str(f, "DynDowncast"),
            PathElem::Vtable =>
                ::core::fmt::Formatter::write_str(f, "Vtable"),
        }
    }
}Debug)]
125pub enum PathElem {
126    Field(Symbol),
127    Variant(Symbol),
128    CoroutineState(VariantIdx),
129    CapturedVar(Symbol),
130    ArrayElem(usize),
131    TupleElem(usize),
132    Deref,
133    EnumTag,
134    CoroutineTag,
135    DynDowncast,
136    Vtable,
137}
138
139/// Extra things to check for during validation of CTFE results.
140#[derive(#[automatically_derived]
impl ::core::marker::Copy for CtfeValidationMode { }Copy, #[automatically_derived]
impl ::core::clone::Clone for CtfeValidationMode {
    #[inline]
    fn clone(&self) -> CtfeValidationMode {
        let _: ::core::clone::AssertParamIsClone<Mutability>;
        let _: ::core::clone::AssertParamIsClone<bool>;
        *self
    }
}Clone)]
141pub enum CtfeValidationMode {
142    /// Validation of a `static`
143    Static { mutbl: Mutability },
144    /// Validation of a promoted.
145    Promoted,
146    /// Validation of a `const`.
147    /// `allow_immutable_unsafe_cell` says whether we allow `UnsafeCell` in immutable memory (which is the
148    /// case for the top-level allocation of a `const`, where this is fine because the allocation will be
149    /// copied at each use site).
150    Const { allow_immutable_unsafe_cell: bool },
151}
152
153impl CtfeValidationMode {
154    fn allow_immutable_unsafe_cell(self) -> bool {
155        match self {
156            CtfeValidationMode::Static { .. } => false,
157            CtfeValidationMode::Promoted { .. } => false,
158            CtfeValidationMode::Const { allow_immutable_unsafe_cell, .. } => {
159                allow_immutable_unsafe_cell
160            }
161        }
162    }
163}
164
165/// State for tracking recursive validation of references
166pub struct RefTracking<T, PATH = ()> {
167    seen: FxHashSet<T>,
168    todo: Vec<(T, PATH)>,
169}
170
171impl<T: Clone + Eq + Hash + std::fmt::Debug, PATH: Default> RefTracking<T, PATH> {
172    pub fn empty() -> Self {
173        RefTracking { seen: FxHashSet::default(), todo: ::alloc::vec::Vec::new()vec![] }
174    }
175    pub fn new(val: T) -> Self {
176        let mut ref_tracking_for_consts =
177            RefTracking { seen: FxHashSet::default(), todo: ::alloc::boxed::box_assume_init_into_vec_unsafe(::alloc::intrinsics::write_box_via_move(::alloc::boxed::Box::new_uninit(),
        [(val.clone(), PATH::default())]))vec![(val.clone(), PATH::default())] };
178        ref_tracking_for_consts.seen.insert(val);
179        ref_tracking_for_consts
180    }
181    pub fn next(&mut self) -> Option<(T, PATH)> {
182        self.todo.pop()
183    }
184
185    fn track(&mut self, val: T, path: impl FnOnce() -> PATH) {
186        if self.seen.insert(val.clone()) {
187            {
    use ::tracing::__macro_support::Callsite as _;
    static __CALLSITE: ::tracing::callsite::DefaultCallsite =
        {
            static META: ::tracing::Metadata<'static> =
                {
                    ::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/validity.rs:187",
                        "rustc_const_eval::interpret::validity",
                        ::tracing::Level::TRACE,
                        ::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/validity.rs"),
                        ::tracing_core::__macro_support::Option::Some(187u32),
                        ::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::validity"),
                        ::tracing_core::field::FieldSet::new(&["message"],
                            ::tracing_core::callsite::Identifier(&__CALLSITE)),
                        ::tracing::metadata::Kind::EVENT)
                };
            ::tracing::callsite::DefaultCallsite::new(&META)
        };
    let enabled =
        ::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
                &&
                ::tracing::Level::TRACE <=
                    ::tracing::level_filters::LevelFilter::current() &&
            {
                let interest = __CALLSITE.interest();
                !interest.is_never() &&
                    ::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
                        interest)
            };
    if enabled {
        (|value_set: ::tracing::field::ValueSet|
                    {
                        let meta = __CALLSITE.metadata();
                        ::tracing::Event::dispatch(meta, &value_set);
                        ;
                    })({
                #[allow(unused_imports)]
                use ::tracing::field::{debug, display, Value};
                let mut iter = __CALLSITE.metadata().fields().iter();
                __CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                    ::tracing::__macro_support::Option::Some(&format_args!("Recursing below ptr {0:#?}",
                                                    val) as &dyn Value))])
            });
    } else { ; }
};trace!("Recursing below ptr {:#?}", val);
188            let path = path();
189            // Remember to come back to this later.
190            self.todo.push((val, path));
191        }
192    }
193}
194
195// FIXME make this translatable as well?
196/// Format a path
197fn write_path(out: &mut String, path: &[PathElem]) {
198    use self::PathElem::*;
199
200    for elem in path.iter() {
201        match elem {
202            Field(name) => out.write_fmt(format_args!(".{0}", name))write!(out, ".{name}"),
203            EnumTag => out.write_fmt(format_args!(".<enum-tag>"))write!(out, ".<enum-tag>"),
204            Variant(name) => out.write_fmt(format_args!(".<enum-variant({0})>", name))write!(out, ".<enum-variant({name})>"),
205            CoroutineTag => out.write_fmt(format_args!(".<coroutine-tag>"))write!(out, ".<coroutine-tag>"),
206            CoroutineState(idx) => out.write_fmt(format_args!(".<coroutine-state({0})>", idx.index()))write!(out, ".<coroutine-state({})>", idx.index()),
207            CapturedVar(name) => out.write_fmt(format_args!(".<captured-var({0})>", name))write!(out, ".<captured-var({name})>"),
208            TupleElem(idx) => out.write_fmt(format_args!(".{0}", idx))write!(out, ".{idx}"),
209            ArrayElem(idx) => out.write_fmt(format_args!("[{0}]", idx))write!(out, "[{idx}]"),
210            // `.<deref>` does not match Rust syntax, but it is more readable for long paths -- and
211            // some of the other items here also are not Rust syntax. Actually we can't
212            // even use the usual syntax because we are just showing the projections,
213            // not the root.
214            Deref => out.write_fmt(format_args!(".<deref>"))write!(out, ".<deref>"),
215            DynDowncast => out.write_fmt(format_args!(".<dyn-downcast>"))write!(out, ".<dyn-downcast>"),
216            Vtable => out.write_fmt(format_args!(".<vtable>"))write!(out, ".<vtable>"),
217        }
218        .unwrap()
219    }
220}
221
222/// Represents a set of `Size` values as a sorted list of ranges.
223// These are (offset, length) pairs, and they are sorted and mutually disjoint,
224// and never adjacent (i.e. there's always a gap between two of them).
225#[derive(#[automatically_derived]
impl ::core::fmt::Debug for RangeSet {
    #[inline]
    fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
        ::core::fmt::Formatter::debug_tuple_field1_finish(f, "RangeSet",
            &&self.0)
    }
}Debug, #[automatically_derived]
impl ::core::clone::Clone for RangeSet {
    #[inline]
    fn clone(&self) -> RangeSet {
        RangeSet(::core::clone::Clone::clone(&self.0))
    }
}Clone)]
226pub struct RangeSet(Vec<(Size, Size)>);
227
228impl RangeSet {
229    fn add_range(&mut self, offset: Size, size: Size) {
230        if size.bytes() == 0 {
231            // No need to track empty ranges.
232            return;
233        }
234        let v = &mut self.0;
235        // We scan for a partition point where the left partition is all the elements that end
236        // strictly before we start. Those are elements that are too "low" to merge with us.
237        let idx =
238            v.partition_point(|&(other_offset, other_size)| other_offset + other_size < offset);
239        // Now we want to either merge with the first element of the second partition, or insert ourselves before that.
240        if let Some(&(other_offset, other_size)) = v.get(idx)
241            && offset + size >= other_offset
242        {
243            // Their end is >= our start (otherwise it would not be in the 2nd partition) and
244            // our end is >= their start. This means we can merge the ranges.
245            let new_start = other_offset.min(offset);
246            let mut new_end = (other_offset + other_size).max(offset + size);
247            // We grew to the right, so merge with overlapping/adjacent elements.
248            // (We also may have grown to the left, but that can never make us adjacent with
249            // anything there since we selected the first such candidate via `partition_point`.)
250            let mut scan_right = 1;
251            while let Some(&(next_offset, next_size)) = v.get(idx + scan_right)
252                && new_end >= next_offset
253            {
254                // Increase our size to absorb the next element.
255                new_end = new_end.max(next_offset + next_size);
256                // Look at the next element.
257                scan_right += 1;
258            }
259            // Update the element we grew.
260            v[idx] = (new_start, new_end - new_start);
261            // Remove the elements we absorbed (if any).
262            if scan_right > 1 {
263                drop(v.drain((idx + 1)..(idx + scan_right)));
264            }
265        } else {
266            // Insert new element.
267            v.insert(idx, (offset, size));
268        }
269    }
270}
271
272struct ValidityVisitor<'rt, 'tcx, M: Machine<'tcx>> {
273    /// The `path` may be pushed to, but the part that is present when a function
274    /// starts must not be changed!  `visit_fields` and `visit_array` rely on
275    /// this stack discipline.
276    path: Vec<PathElem>,
277    ref_tracking: Option<&'rt mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Vec<PathElem>>>,
278    /// `None` indicates this is not validating for CTFE (but for runtime).
279    ctfe_mode: Option<CtfeValidationMode>,
280    ecx: &'rt mut InterpCx<'tcx, M>,
281    /// Whether provenance should be reset outside of pointers (emulating the effect of a typed
282    /// copy).
283    reset_provenance_and_padding: bool,
284    /// This tracks which byte ranges in this value contain data; the remaining bytes are padding.
285    /// The ideal representation here would be pointer-length pairs, but to keep things more compact
286    /// we only store a (range) set of offsets -- the base pointer is the same throughout the entire
287    /// visit, after all.
288    /// If this is `Some`, then `reset_provenance_and_padding` must be true (but not vice versa:
289    /// we might not track data vs padding bytes if the operand isn't stored in memory anyway).
290    data_bytes: Option<RangeSet>,
291}
292
293impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
294    fn aggregate_field_path_elem(&mut self, layout: TyAndLayout<'tcx>, field: usize) -> PathElem {
295        // First, check if we are projecting to a variant.
296        match layout.variants {
297            Variants::Multiple { tag_field, .. } => {
298                if tag_field.as_usize() == field {
299                    return match layout.ty.kind() {
300                        ty::Adt(def, ..) if def.is_enum() => PathElem::EnumTag,
301                        ty::Coroutine(..) => PathElem::CoroutineTag,
302                        _ => ::rustc_middle::util::bug::bug_fmt(format_args!("non-variant type {0:?}",
        layout.ty))bug!("non-variant type {:?}", layout.ty),
303                    };
304                }
305            }
306            Variants::Single { .. } | Variants::Empty => {}
307        }
308
309        // Now we know we are projecting to a field, so figure out which one.
310        match layout.ty.kind() {
311            // coroutines, closures, and coroutine-closures all have upvars that may be named.
312            ty::Closure(def_id, _) | ty::Coroutine(def_id, _) | ty::CoroutineClosure(def_id, _) => {
313                let mut name = None;
314                // FIXME this should be more descriptive i.e. CapturePlace instead of CapturedVar
315                // https://github.com/rust-lang/project-rfc-2229/issues/46
316                if let Some(local_def_id) = def_id.as_local() {
317                    let captures = self.ecx.tcx.closure_captures(local_def_id);
318                    if let Some(captured_place) = captures.get(field) {
319                        // Sometimes the index is beyond the number of upvars (seen
320                        // for a coroutine).
321                        let var_hir_id = captured_place.get_root_variable();
322                        let node = self.ecx.tcx.hir_node(var_hir_id);
323                        if let hir::Node::Pat(pat) = node
324                            && let hir::PatKind::Binding(_, _, ident, _) = pat.kind
325                        {
326                            name = Some(ident.name);
327                        }
328                    }
329                }
330
331                PathElem::CapturedVar(name.unwrap_or_else(|| {
332                    // Fall back to showing the field index.
333                    sym::integer(field)
334                }))
335            }
336
337            // tuples
338            ty::Tuple(_) => PathElem::TupleElem(field),
339
340            // enums
341            ty::Adt(def, ..) if def.is_enum() => {
342                // we might be projecting *to* a variant, or to a field *in* a variant.
343                match layout.variants {
344                    Variants::Single { index } => {
345                        // Inside a variant
346                        PathElem::Field(def.variant(index).fields[FieldIdx::from_usize(field)].name)
347                    }
348                    Variants::Empty => {
    ::core::panicking::panic_fmt(format_args!("there is no field in Variants::Empty types"));
}panic!("there is no field in Variants::Empty types"),
349                    Variants::Multiple { .. } => ::rustc_middle::util::bug::bug_fmt(format_args!("we handled variants above"))bug!("we handled variants above"),
350                }
351            }
352
353            // other ADTs
354            ty::Adt(def, _) => {
355                PathElem::Field(def.non_enum_variant().fields[FieldIdx::from_usize(field)].name)
356            }
357
358            // arrays/slices
359            ty::Array(..) | ty::Slice(..) => PathElem::ArrayElem(field),
360
361            // dyn traits
362            ty::Dynamic(..) => {
363                match (&field, &0) {
    (left_val, right_val) => {
        if !(*left_val == *right_val) {
            let kind = ::core::panicking::AssertKind::Eq;
            ::core::panicking::assert_failed(kind, &*left_val, &*right_val,
                ::core::option::Option::None);
        }
    }
};assert_eq!(field, 0);
364                PathElem::DynDowncast
365            }
366
367            // nothing else has an aggregate layout
368            _ => ::rustc_middle::util::bug::bug_fmt(format_args!("aggregate_field_path_elem: got non-aggregate type {0:?}",
        layout.ty))bug!("aggregate_field_path_elem: got non-aggregate type {:?}", layout.ty),
369        }
370    }
371
372    fn with_elem<R>(
373        &mut self,
374        elem: PathElem,
375        f: impl FnOnce(&mut Self) -> InterpResult<'tcx, R>,
376    ) -> InterpResult<'tcx, R> {
377        // Remember the old state
378        let path_len = self.path.len();
379        // Record new element
380        self.path.push(elem);
381        // Perform operation
382        let r = f(self)?;
383        // Undo changes
384        self.path.truncate(path_len);
385        // Done
386        interp_ok(r)
387    }
388
389    fn read_immediate(
390        &self,
391        val: &PlaceTy<'tcx, M::Provenance>,
392        expected: ExpectedKind,
393    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
394        interp_ok({
    self.ecx.read_immediate(val).map_err_kind(|e|
                {
                    match e {
                        Ub(InvalidUninitBytes(_)) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, where_);
                                        Some(path)
                                    } else { None };
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                                            path,
                                            kind: Uninit { expected },
                                        }))
                            }
                        }
                        Unsup(ReadPointerAsInt(_)) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, where_);
                                        Some(path)
                                    } else { None };
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                                            path,
                                            kind: PointerAsInt { expected },
                                        }))
                            }
                        }
                        Unsup(ReadPartialPointer(_)) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, where_);
                                        Some(path)
                                    } else { None };
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                                            path,
                                            kind: PartialPointer,
                                        }))
                            }
                        }
                        e => e,
                    }
                })?
}try_validation!(
395            self.ecx.read_immediate(val),
396            self.path,
397            Ub(InvalidUninitBytes(_)) =>
398                Uninit { expected },
399            // The `Unsup` cases can only occur during CTFE
400            Unsup(ReadPointerAsInt(_)) =>
401                PointerAsInt { expected },
402            Unsup(ReadPartialPointer(_)) =>
403                PartialPointer,
404        ))
405    }
406
407    fn read_scalar(
408        &self,
409        val: &PlaceTy<'tcx, M::Provenance>,
410        expected: ExpectedKind,
411    ) -> InterpResult<'tcx, Scalar<M::Provenance>> {
412        interp_ok(self.read_immediate(val, expected)?.to_scalar())
413    }
414
415    fn deref_pointer(
416        &mut self,
417        val: &PlaceTy<'tcx, M::Provenance>,
418        expected: ExpectedKind,
419    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
420        // Not using `ecx.deref_pointer` since we want to use our `read_immediate` wrapper.
421        let imm = self.read_immediate(val, expected)?;
422        // Reset provenance: ensure slice tail metadata does not preserve provenance,
423        // and ensure all pointers do not preserve partial provenance.
424        if self.reset_provenance_and_padding {
425            if #[allow(non_exhaustive_omitted_patterns)] match imm.layout.backend_repr {
    BackendRepr::Scalar(..) => true,
    _ => false,
}matches!(imm.layout.backend_repr, BackendRepr::Scalar(..)) {
426                // A thin pointer. If it has provenance, we don't have to do anything.
427                // If it does not, ensure we clear the provenance in memory.
428                if #[allow(non_exhaustive_omitted_patterns)] match imm.to_scalar() {
    Scalar::Int(..) => true,
    _ => false,
}matches!(imm.to_scalar(), Scalar::Int(..)) {
429                    self.ecx.clear_provenance(val)?;
430                }
431            } else {
432                // A wide pointer. This means we have to worry both about the pointer itself and the
433                // metadata. We do the lazy thing and just write back the value we got. Just
434                // clearing provenance in a targeted manner would be more efficient, but unless this
435                // is a perf hotspot it's just not worth the effort.
436                self.ecx.write_immediate_no_validate(*imm, val)?;
437            }
438            // The entire thing is data, not padding.
439            self.add_data_range_place(val);
440        }
441        // Now turn it into a place.
442        self.ecx.ref_to_mplace(&imm)
443    }
444
445    fn check_wide_ptr_meta(
446        &mut self,
447        meta: MemPlaceMeta<M::Provenance>,
448        pointee: TyAndLayout<'tcx>,
449    ) -> InterpResult<'tcx> {
450        let tail = self.ecx.tcx.struct_tail_for_codegen(pointee.ty, self.ecx.typing_env);
451        match tail.kind() {
452            ty::Dynamic(data, _) => {
453                let vtable = meta.unwrap_meta().to_pointer(self.ecx)?;
454                // Make sure it is a genuine vtable pointer for the right trait.
455                {
    self.ecx.get_ptr_vtable_ty(vtable,
                Some(data)).map_err_kind(|e|
                {
                    match e {
                        Ub(DanglingIntPointer { .. } | InvalidVTablePointer(..)) =>
                            {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, where_);
                                        Some(path)
                                    } else { None };
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                                            path,
                                            kind: InvalidVTablePtr {
                                                value: ::alloc::__export::must_use({
                                                        ::alloc::fmt::format(format_args!("{0}", vtable))
                                                    }),
                                            },
                                        }))
                            }
                        }
                        Ub(InvalidVTableTrait { vtable_dyn_type, expected_dyn_type
                            }) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, where_);
                                        Some(path)
                                    } else { None };
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                                            path,
                                            kind: {
                                                InvalidMetaWrongTrait { vtable_dyn_type, expected_dyn_type }
                                            },
                                        }))
                            }
                        }
                        e => e,
                    }
                })?
};try_validation!(
456                    self.ecx.get_ptr_vtable_ty(vtable, Some(data)),
457                    self.path,
458                    Ub(DanglingIntPointer{ .. } | InvalidVTablePointer(..)) =>
459                        InvalidVTablePtr { value: format!("{vtable}") },
460                    Ub(InvalidVTableTrait { vtable_dyn_type, expected_dyn_type }) => {
461                        InvalidMetaWrongTrait { vtable_dyn_type, expected_dyn_type }
462                    },
463                );
464            }
465            ty::Slice(..) | ty::Str => {
466                let _len = meta.unwrap_meta().to_target_usize(self.ecx)?;
467                // We do not check that `len * elem_size <= isize::MAX`:
468                // that is only required for references, and there it falls out of the
469                // "dereferenceable" check performed by Stacked Borrows.
470            }
471            ty::Foreign(..) => {
472                // Unsized, but not wide.
473            }
474            _ => ::rustc_middle::util::bug::bug_fmt(format_args!("Unexpected unsized type tail: {0:?}",
        tail))bug!("Unexpected unsized type tail: {:?}", tail),
475        }
476
477        interp_ok(())
478    }
479
480    /// Check a reference or `Box`.
481    fn check_safe_pointer(
482        &mut self,
483        value: &PlaceTy<'tcx, M::Provenance>,
484        ptr_kind: PointerKind,
485    ) -> InterpResult<'tcx> {
486        let place = self.deref_pointer(value, ptr_kind.into())?;
487        // Handle wide pointers.
488        // Check metadata early, for better diagnostics
489        if place.layout.is_unsized() {
490            self.check_wide_ptr_meta(place.meta(), place.layout)?;
491        }
492        // Make sure this is dereferenceable and all.
493        let size_and_align = {
    self.ecx.size_and_align_of_val(&place).map_err_kind(|e|
                {
                    match e {
                        Ub(InvalidMeta(msg)) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, where_);
                                        Some(path)
                                    } else { None };
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                                            path,
                                            kind: match msg {
                                                InvalidMetaKind::SliceTooBig =>
                                                    InvalidMetaSliceTooLarge { ptr_kind },
                                                InvalidMetaKind::TooBig => InvalidMetaTooLarge { ptr_kind },
                                            },
                                        }))
                            }
                        }
                        e => e,
                    }
                })?
}try_validation!(
494            self.ecx.size_and_align_of_val(&place),
495            self.path,
496            Ub(InvalidMeta(msg)) => match msg {
497                InvalidMetaKind::SliceTooBig => InvalidMetaSliceTooLarge { ptr_kind },
498                InvalidMetaKind::TooBig => InvalidMetaTooLarge { ptr_kind },
499            }
500        );
501        let (size, align) = size_and_align
502            // for the purpose of validity, consider foreign types to have
503            // alignment and size determined by the layout (size will be 0,
504            // alignment should take attributes into account).
505            .unwrap_or_else(|| (place.layout.size, place.layout.align.abi));
506        // Direct call to `check_ptr_access_align` checks alignment even on CTFE machines.
507        {
    self.ecx.check_ptr_access(place.ptr(), size,
                CheckInAllocMsg::Dereferenceable).map_err_kind(|e|
                {
                    match e {
                        Ub(DanglingIntPointer { addr: 0, .. }) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, where_);
                                        Some(path)
                                    } else { None };
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                                            path,
                                            kind: NullPtr { ptr_kind, maybe: false },
                                        }))
                            }
                        }
                        Ub(DanglingIntPointer { addr: i, .. }) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, where_);
                                        Some(path)
                                    } else { None };
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                                            path,
                                            kind: DanglingPtrNoProvenance {
                                                ptr_kind,
                                                pointer: ::alloc::__export::must_use({
                                                        ::alloc::fmt::format(format_args!("{0}",
                                                                Pointer::<Option<AllocId>>::without_provenance(i)))
                                                    }),
                                            },
                                        }))
                            }
                        }
                        Ub(PointerOutOfBounds { .. }) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, where_);
                                        Some(path)
                                    } else { None };
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                                            path,
                                            kind: DanglingPtrOutOfBounds { ptr_kind },
                                        }))
                            }
                        }
                        Ub(PointerUseAfterFree(..)) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, where_);
                                        Some(path)
                                    } else { None };
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                                            path,
                                            kind: DanglingPtrUseAfterFree { ptr_kind },
                                        }))
                            }
                        }
                        e => e,
                    }
                })?
};try_validation!(
508            self.ecx.check_ptr_access(
509                place.ptr(),
510                size,
511                CheckInAllocMsg::Dereferenceable, // will anyway be replaced by validity message
512            ),
513            self.path,
514            Ub(DanglingIntPointer { addr: 0, .. }) => NullPtr { ptr_kind, maybe: false },
515            Ub(DanglingIntPointer { addr: i, .. }) => DanglingPtrNoProvenance {
516                ptr_kind,
517                // FIXME this says "null pointer" when null but we need translate
518                pointer: format!("{}", Pointer::<Option<AllocId>>::without_provenance(i))
519            },
520            Ub(PointerOutOfBounds { .. }) => DanglingPtrOutOfBounds {
521                ptr_kind
522            },
523            Ub(PointerUseAfterFree(..)) => DanglingPtrUseAfterFree {
524                ptr_kind,
525            },
526        );
527        {
    self.ecx.check_ptr_align(place.ptr(),
                align).map_err_kind(|e|
                {
                    match e {
                        Ub(AlignmentCheckFailed(Misalignment { required, has },
                            _msg)) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, where_);
                                        Some(path)
                                    } else { None };
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                                            path,
                                            kind: UnalignedPtr {
                                                ptr_kind,
                                                required_bytes: required.bytes(),
                                                found_bytes: has.bytes(),
                                            },
                                        }))
                            }
                        }
                        e => e,
                    }
                })?
};try_validation!(
528            self.ecx.check_ptr_align(
529                place.ptr(),
530                align,
531            ),
532            self.path,
533            Ub(AlignmentCheckFailed(Misalignment { required, has }, _msg)) => UnalignedPtr {
534                ptr_kind,
535                required_bytes: required.bytes(),
536                found_bytes: has.bytes()
537            },
538        );
539        // Make sure this is non-null. We checked dereferenceability above, but if `size` is zero
540        // that does not imply non-null.
541        let scalar = Scalar::from_maybe_pointer(place.ptr(), self.ecx);
542        if self.ecx.scalar_may_be_null(scalar)? {
543            let maybe = !M::Provenance::OFFSET_IS_ADDR && #[allow(non_exhaustive_omitted_patterns)] match scalar {
    Scalar::Ptr(..) => true,
    _ => false,
}matches!(scalar, Scalar::Ptr(..));
544            do yeet {
        let where_ = &self.path;
        let path =
            if !where_.is_empty() {
                let mut path = String::new();
                write_path(&mut path, where_);
                Some(path)
            } else { None };
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                    path,
                    kind: NullPtr { ptr_kind, maybe },
                }))
    }throw_validation_failure!(self.path, NullPtr { ptr_kind, maybe })
545        }
546        // Do not allow references to uninhabited types.
547        if place.layout.is_uninhabited() {
548            let ty = place.layout.ty;
549            do yeet {
        let where_ = &self.path;
        let path =
            if !where_.is_empty() {
                let mut path = String::new();
                write_path(&mut path, where_);
                Some(path)
            } else { None };
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                    path,
                    kind: PtrToUninhabited { ptr_kind, ty },
                }))
    }throw_validation_failure!(self.path, PtrToUninhabited { ptr_kind, ty })
550        }
551        // Recursive checking
552        if let Some(ref_tracking) = self.ref_tracking.as_deref_mut() {
553            // Proceed recursively even for ZST, no reason to skip them!
554            // `!` is a ZST and we want to validate it.
555            if let Some(ctfe_mode) = self.ctfe_mode {
556                let mut skip_recursive_check = false;
557                // CTFE imposes restrictions on what references can point to.
558                if let Ok((alloc_id, _offset, _prov)) =
559                    self.ecx.ptr_try_get_alloc_id(place.ptr(), 0)
560                {
561                    // Everything should be already interned.
562                    let Some(global_alloc) = self.ecx.tcx.try_get_global_alloc(alloc_id) else {
563                        if self.ecx.memory.alloc_map.contains_key(&alloc_id) {
564                            // This can happen when interning didn't complete due to, e.g.
565                            // missing `make_global`. This must mean other errors are already
566                            // being reported.
567                            self.ecx.tcx.dcx().delayed_bug(
568                                "interning did not complete, there should be an error",
569                            );
570                            return interp_ok(());
571                        }
572                        // We can't have *any* references to non-existing allocations in const-eval
573                        // as the rest of rustc isn't happy with them... so we throw an error, even
574                        // though for zero-sized references this isn't really UB.
575                        // A potential future alternative would be to resurrect this as a zero-sized allocation
576                        // (which codegen will then compile to an aligned dummy pointer anyway).
577                        do yeet {
        let where_ = &self.path;
        let path =
            if !where_.is_empty() {
                let mut path = String::new();
                write_path(&mut path, where_);
                Some(path)
            } else { None };
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                    path,
                    kind: DanglingPtrUseAfterFree { ptr_kind },
                }))
    };throw_validation_failure!(self.path, DanglingPtrUseAfterFree { ptr_kind });
578                    };
579                    let (size, _align) =
580                        global_alloc.size_and_align(*self.ecx.tcx, self.ecx.typing_env);
581                    let alloc_actual_mutbl =
582                        global_alloc.mutability(*self.ecx.tcx, self.ecx.typing_env);
583
584                    match global_alloc {
585                        GlobalAlloc::Static(did) => {
586                            let DefKind::Static { nested, .. } = self.ecx.tcx.def_kind(did) else {
587                                ::rustc_middle::util::bug::bug_fmt(format_args!("impossible case reached"))bug!()
588                            };
589                            if !!self.ecx.tcx.is_thread_local_static(did) {
    ::core::panicking::panic("assertion failed: !self.ecx.tcx.is_thread_local_static(did)")
};assert!(!self.ecx.tcx.is_thread_local_static(did));
590                            if !self.ecx.tcx.is_static(did) {
    ::core::panicking::panic("assertion failed: self.ecx.tcx.is_static(did)")
};assert!(self.ecx.tcx.is_static(did));
591                            match ctfe_mode {
592                                CtfeValidationMode::Static { .. }
593                                | CtfeValidationMode::Promoted { .. } => {
594                                    // We skip recursively checking other statics. These statics must be sound by
595                                    // themselves, and the only way to get broken statics here is by using
596                                    // unsafe code.
597                                    // The reasons we don't check other statics is twofold. For one, in all
598                                    // sound cases, the static was already validated on its own, and second, we
599                                    // trigger cycle errors if we try to compute the value of the other static
600                                    // and that static refers back to us (potentially through a promoted).
601                                    // This could miss some UB, but that's fine.
602                                    // We still walk nested allocations, as they are fundamentally part of this validation run.
603                                    // This means we will also recurse into nested statics of *other*
604                                    // statics, even though we do not recurse into other statics directly.
605                                    // That's somewhat inconsistent but harmless.
606                                    skip_recursive_check = !nested;
607                                }
608                                CtfeValidationMode::Const { .. } => {
609                                    // If this is mutable memory or an `extern static`, there's no point in checking it -- we'd
610                                    // just get errors trying to read the value.
611                                    if alloc_actual_mutbl.is_mut()
612                                        || self.ecx.tcx.is_foreign_item(did)
613                                    {
614                                        skip_recursive_check = true;
615                                    }
616                                }
617                            }
618                        }
619                        _ => (),
620                    }
621
622                    // If this allocation has size zero, there is no actual mutability here.
623                    if size != Size::ZERO {
624                        // Determine whether this pointer expects to be pointing to something mutable.
625                        let ptr_expected_mutbl = match ptr_kind {
626                            PointerKind::Box => Mutability::Mut,
627                            PointerKind::Ref(mutbl) => {
628                                // We do not take into account interior mutability here since we cannot know if
629                                // there really is an `UnsafeCell` inside `Option<UnsafeCell>` -- so we check
630                                // that in the recursive descent behind this reference (controlled by
631                                // `allow_immutable_unsafe_cell`).
632                                mutbl
633                            }
634                        };
635                        // Mutable pointer to immutable memory is no good.
636                        if ptr_expected_mutbl == Mutability::Mut
637                            && alloc_actual_mutbl == Mutability::Not
638                        {
639                            // This can actually occur with transmutes.
640                            do yeet {
        let where_ = &self.path;
        let path =
            if !where_.is_empty() {
                let mut path = String::new();
                write_path(&mut path, where_);
                Some(path)
            } else { None };
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                    path,
                    kind: MutableRefToImmutable,
                }))
    };throw_validation_failure!(self.path, MutableRefToImmutable);
641                        }
642                    }
643                }
644                // Potentially skip recursive check.
645                if skip_recursive_check {
646                    return interp_ok(());
647                }
648            } else {
649                // This is not CTFE, so it's Miri with recursive checking.
650                // FIXME: should we skip `UnsafeCell` behind shared references? Currently that is
651                // not needed since validation reads bypass Stacked Borrows and data race checks,
652                // but is that really coherent?
653            }
654            let path = &self.path;
655            ref_tracking.track(place, || {
656                // We need to clone the path anyway, make sure it gets created
657                // with enough space for the additional `Deref`.
658                let mut new_path = Vec::with_capacity(path.len() + 1);
659                new_path.extend(path);
660                new_path.push(PathElem::Deref);
661                new_path
662            });
663        }
664        interp_ok(())
665    }
666
667    /// Check if this is a value of primitive type, and if yes check the validity of the value
668    /// at that type. Return `true` if the type is indeed primitive.
669    ///
670    /// Note that not all of these have `FieldsShape::Primitive`, e.g. wide references.
671    fn try_visit_primitive(
672        &mut self,
673        value: &PlaceTy<'tcx, M::Provenance>,
674    ) -> InterpResult<'tcx, bool> {
675        // Go over all the primitive types
676        let ty = value.layout.ty;
677        match ty.kind() {
678            ty::Bool => {
679                let scalar = self.read_scalar(value, ExpectedKind::Bool)?;
680                {
    scalar.to_bool().map_err_kind(|e|
                {
                    match e {
                        Ub(InvalidBool(..)) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, where_);
                                        Some(path)
                                    } else { None };
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                                            path,
                                            kind: ValidationErrorKind::InvalidBool {
                                                value: ::alloc::__export::must_use({
                                                        ::alloc::fmt::format(format_args!("{0:x}", scalar))
                                                    }),
                                            },
                                        }))
                            }
                        }
                        e => e,
                    }
                })?
};try_validation!(
681                    scalar.to_bool(),
682                    self.path,
683                    Ub(InvalidBool(..)) => ValidationErrorKind::InvalidBool {
684                        value: format!("{scalar:x}"),
685                    }
686                );
687                if self.reset_provenance_and_padding {
688                    self.ecx.clear_provenance(value)?;
689                    self.add_data_range_place(value);
690                }
691                interp_ok(true)
692            }
693            ty::Char => {
694                let scalar = self.read_scalar(value, ExpectedKind::Char)?;
695                {
    scalar.to_char().map_err_kind(|e|
                {
                    match e {
                        Ub(InvalidChar(..)) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, where_);
                                        Some(path)
                                    } else { None };
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                                            path,
                                            kind: ValidationErrorKind::InvalidChar {
                                                value: ::alloc::__export::must_use({
                                                        ::alloc::fmt::format(format_args!("{0:x}", scalar))
                                                    }),
                                            },
                                        }))
                            }
                        }
                        e => e,
                    }
                })?
};try_validation!(
696                    scalar.to_char(),
697                    self.path,
698                    Ub(InvalidChar(..)) => ValidationErrorKind::InvalidChar {
699                        value: format!("{scalar:x}"),
700                    }
701                );
702                if self.reset_provenance_and_padding {
703                    self.ecx.clear_provenance(value)?;
704                    self.add_data_range_place(value);
705                }
706                interp_ok(true)
707            }
708            ty::Float(_) | ty::Int(_) | ty::Uint(_) => {
709                // NOTE: Keep this in sync with the array optimization for int/float
710                // types below!
711                self.read_scalar(
712                    value,
713                    if #[allow(non_exhaustive_omitted_patterns)] match ty.kind() {
    ty::Float(..) => true,
    _ => false,
}matches!(ty.kind(), ty::Float(..)) {
714                        ExpectedKind::Float
715                    } else {
716                        ExpectedKind::Int
717                    },
718                )?;
719                if self.reset_provenance_and_padding {
720                    self.ecx.clear_provenance(value)?;
721                    self.add_data_range_place(value);
722                }
723                interp_ok(true)
724            }
725            ty::RawPtr(..) => {
726                let place = self.deref_pointer(value, ExpectedKind::RawPtr)?;
727                if place.layout.is_unsized() {
728                    self.check_wide_ptr_meta(place.meta(), place.layout)?;
729                }
730                interp_ok(true)
731            }
732            ty::Ref(_, _ty, mutbl) => {
733                self.check_safe_pointer(value, PointerKind::Ref(*mutbl))?;
734                interp_ok(true)
735            }
736            ty::FnPtr(..) => {
737                let scalar = self.read_scalar(value, ExpectedKind::FnPtr)?;
738
739                // If we check references recursively, also check that this points to a function.
740                if let Some(_) = self.ref_tracking {
741                    let ptr = scalar.to_pointer(self.ecx)?;
742                    let _fn = {
    self.ecx.get_ptr_fn(ptr).map_err_kind(|e|
                {
                    match e {
                        Ub(DanglingIntPointer { .. } | InvalidFunctionPointer(..))
                            => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, where_);
                                        Some(path)
                                    } else { None };
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                                            path,
                                            kind: InvalidFnPtr {
                                                value: ::alloc::__export::must_use({
                                                        ::alloc::fmt::format(format_args!("{0}", ptr))
                                                    }),
                                            },
                                        }))
                            }
                        }
                        e => e,
                    }
                })?
}try_validation!(
743                        self.ecx.get_ptr_fn(ptr),
744                        self.path,
745                        Ub(DanglingIntPointer{ .. } | InvalidFunctionPointer(..)) =>
746                            InvalidFnPtr { value: format!("{ptr}") },
747                    );
748                    // FIXME: Check if the signature matches
749                } else {
750                    // Otherwise (for standalone Miri and for `-Zextra-const-ub-checks`),
751                    // we have to still check it to be non-null.
752                    if self.ecx.scalar_may_be_null(scalar)? {
753                        let maybe =
754                            !M::Provenance::OFFSET_IS_ADDR && #[allow(non_exhaustive_omitted_patterns)] match scalar {
    Scalar::Ptr(..) => true,
    _ => false,
}matches!(scalar, Scalar::Ptr(..));
755                        do yeet {
        let where_ = &self.path;
        let path =
            if !where_.is_empty() {
                let mut path = String::new();
                write_path(&mut path, where_);
                Some(path)
            } else { None };
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                    path,
                    kind: NullFnPtr { maybe },
                }))
    };throw_validation_failure!(self.path, NullFnPtr { maybe });
756                    }
757                }
758                if self.reset_provenance_and_padding {
759                    // Make sure we do not preserve partial provenance. This matches the thin
760                    // pointer handling in `deref_pointer`.
761                    if #[allow(non_exhaustive_omitted_patterns)] match scalar {
    Scalar::Int(..) => true,
    _ => false,
}matches!(scalar, Scalar::Int(..)) {
762                        self.ecx.clear_provenance(value)?;
763                    }
764                    self.add_data_range_place(value);
765                }
766                interp_ok(true)
767            }
768            ty::Never => do yeet {
        let where_ = &self.path;
        let path =
            if !where_.is_empty() {
                let mut path = String::new();
                write_path(&mut path, where_);
                Some(path)
            } else { None };
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                    path,
                    kind: NeverVal,
                }))
    }throw_validation_failure!(self.path, NeverVal),
769            ty::Foreign(..) | ty::FnDef(..) => {
770                // Nothing to check.
771                interp_ok(true)
772            }
773            ty::UnsafeBinder(_) => {
    ::core::panicking::panic_fmt(format_args!("not yet implemented: {0}",
            format_args!("FIXME(unsafe_binder)")));
}todo!("FIXME(unsafe_binder)"),
774            // The above should be all the primitive types. The rest is compound, we
775            // check them by visiting their fields/variants.
776            ty::Adt(..)
777            | ty::Tuple(..)
778            | ty::Array(..)
779            | ty::Slice(..)
780            | ty::Str
781            | ty::Dynamic(..)
782            | ty::Closure(..)
783            | ty::Pat(..)
784            | ty::CoroutineClosure(..)
785            | ty::Coroutine(..) => interp_ok(false),
786            // Some types only occur during typechecking, they have no layout.
787            // We should not see them here and we could not check them anyway.
788            ty::Error(_)
789            | ty::Infer(..)
790            | ty::Placeholder(..)
791            | ty::Bound(..)
792            | ty::Param(..)
793            | ty::Alias(..)
794            | ty::CoroutineWitness(..) => ::rustc_middle::util::bug::bug_fmt(format_args!("Encountered invalid type {0:?}",
        ty))bug!("Encountered invalid type {:?}", ty),
795        }
796    }
797
798    fn visit_scalar(
799        &mut self,
800        scalar: Scalar<M::Provenance>,
801        scalar_layout: ScalarAbi,
802    ) -> InterpResult<'tcx> {
803        let size = scalar_layout.size(self.ecx);
804        let valid_range = scalar_layout.valid_range(self.ecx);
805        let WrappingRange { start, end } = valid_range;
806        let max_value = size.unsigned_int_max();
807        if !(end <= max_value) {
    ::core::panicking::panic("assertion failed: end <= max_value")
};assert!(end <= max_value);
808        let bits = match scalar.try_to_scalar_int() {
809            Ok(int) => int.to_bits(size),
810            Err(_) => {
811                // So this is a pointer then, and casting to an int failed.
812                // Can only happen during CTFE.
813                // We support 2 kinds of ranges here: full range, and excluding zero.
814                if start == 1 && end == max_value {
815                    // Only null is the niche. So make sure the ptr is NOT null.
816                    if self.ecx.scalar_may_be_null(scalar)? {
817                        do yeet {
        let where_ = &self.path;
        let path =
            if !where_.is_empty() {
                let mut path = String::new();
                write_path(&mut path, where_);
                Some(path)
            } else { None };
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                    path,
                    kind: NonnullPtrMaybeNull,
                }))
    }throw_validation_failure!(self.path, NonnullPtrMaybeNull)
818                    } else {
819                        return interp_ok(());
820                    }
821                } else if scalar_layout.is_always_valid(self.ecx) {
822                    // Easy. (This is reachable if `enforce_number_validity` is set.)
823                    return interp_ok(());
824                } else {
825                    // Conservatively, we reject, because the pointer *could* have a bad
826                    // value.
827                    do yeet {
        let where_ = &self.path;
        let path =
            if !where_.is_empty() {
                let mut path = String::new();
                write_path(&mut path, where_);
                Some(path)
            } else { None };
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                    path,
                    kind: PtrOutOfRange { range: valid_range, max_value },
                }))
    }throw_validation_failure!(
828                        self.path,
829                        PtrOutOfRange { range: valid_range, max_value }
830                    )
831                }
832            }
833        };
834        // Now compare.
835        if valid_range.contains(bits) {
836            interp_ok(())
837        } else {
838            do yeet {
        let where_ = &self.path;
        let path =
            if !where_.is_empty() {
                let mut path = String::new();
                write_path(&mut path, where_);
                Some(path)
            } else { None };
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                    path,
                    kind: OutOfRange {
                        value: ::alloc::__export::must_use({
                                ::alloc::fmt::format(format_args!("{0}", bits))
                            }),
                        range: valid_range,
                        max_value,
                    },
                }))
    }throw_validation_failure!(
839                self.path,
840                OutOfRange { value: format!("{bits}"), range: valid_range, max_value }
841            )
842        }
843    }
844
845    fn in_mutable_memory(&self, val: &PlaceTy<'tcx, M::Provenance>) -> bool {
846        if true {
    if !self.ctfe_mode.is_some() {
        ::core::panicking::panic("assertion failed: self.ctfe_mode.is_some()")
    };
};debug_assert!(self.ctfe_mode.is_some());
847        if let Some(mplace) = val.as_mplace_or_local().left() {
848            if let Some(alloc_id) = mplace.ptr().provenance.and_then(|p| p.get_alloc_id()) {
849                let tcx = *self.ecx.tcx;
850                // Everything must be already interned.
851                let mutbl = tcx.global_alloc(alloc_id).mutability(tcx, self.ecx.typing_env);
852                if let Some((_, alloc)) = self.ecx.memory.alloc_map.get(alloc_id) {
853                    match (&alloc.mutability, &mutbl) {
    (left_val, right_val) => {
        if !(*left_val == *right_val) {
            let kind = ::core::panicking::AssertKind::Eq;
            ::core::panicking::assert_failed(kind, &*left_val, &*right_val,
                ::core::option::Option::None);
        }
    }
};assert_eq!(alloc.mutability, mutbl);
854                }
855                mutbl.is_mut()
856            } else {
857                // No memory at all.
858                false
859            }
860        } else {
861            // A local variable -- definitely mutable.
862            true
863        }
864    }
865
866    /// Add the given pointer-length pair to the "data" range of this visit.
867    fn add_data_range(&mut self, ptr: Pointer<Option<M::Provenance>>, size: Size) {
868        if let Some(data_bytes) = self.data_bytes.as_mut() {
869            // We only have to store the offset, the rest is the same for all pointers here.
870            // The logic is agnostic to whether the offset is relative or absolute as long as
871            // it is consistent.
872            let (_prov, offset) = ptr.into_raw_parts();
873            // Add this.
874            data_bytes.add_range(offset, size);
875        };
876    }
877
878    /// Add the entire given place to the "data" range of this visit.
879    fn add_data_range_place(&mut self, place: &PlaceTy<'tcx, M::Provenance>) {
880        // Only sized places can be added this way.
881        if true {
    if !place.layout.is_sized() {
        ::core::panicking::panic("assertion failed: place.layout.is_sized()")
    };
};debug_assert!(place.layout.is_sized());
882        if let Some(data_bytes) = self.data_bytes.as_mut() {
883            let offset = Self::data_range_offset(self.ecx, place);
884            data_bytes.add_range(offset, place.layout.size);
885        }
886    }
887
888    /// Convert a place into the offset it starts at, for the purpose of data_range tracking.
889    /// Must only be called if `data_bytes` is `Some(_)`.
890    fn data_range_offset(ecx: &InterpCx<'tcx, M>, place: &PlaceTy<'tcx, M::Provenance>) -> Size {
891        // The presence of `data_bytes` implies that our place is in memory.
892        let ptr = ecx
893            .place_to_op(place)
894            .expect("place must be in memory")
895            .as_mplace_or_imm()
896            .expect_left("place must be in memory")
897            .ptr();
898        let (_prov, offset) = ptr.into_raw_parts();
899        offset
900    }
901
902    fn reset_padding(&mut self, place: &PlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
903        let Some(data_bytes) = self.data_bytes.as_mut() else { return interp_ok(()) };
904        // Our value must be in memory, otherwise we would not have set up `data_bytes`.
905        let mplace = self.ecx.force_allocation(place)?;
906        // Determine starting offset and size.
907        let (_prov, start_offset) = mplace.ptr().into_raw_parts();
908        let (size, _align) = self
909            .ecx
910            .size_and_align_of_val(&mplace)?
911            .unwrap_or((mplace.layout.size, mplace.layout.align.abi));
912        // If there is no padding at all, we can skip the rest: check for
913        // a single data range covering the entire value.
914        if data_bytes.0 == &[(start_offset, size)] {
915            return interp_ok(());
916        }
917        // Get a handle for the allocation. Do this only once, to avoid looking up the same
918        // allocation over and over again. (Though to be fair, iterating the value already does
919        // exactly that.)
920        let Some(mut alloc) = self.ecx.get_ptr_alloc_mut(mplace.ptr(), size)? else {
921            // A ZST, no padding to clear.
922            return interp_ok(());
923        };
924        // Add a "finalizer" data range at the end, so that the iteration below finds all gaps
925        // between ranges.
926        data_bytes.0.push((start_offset + size, Size::ZERO));
927        // Iterate, and reset gaps.
928        let mut padding_cleared_until = start_offset;
929        for &(offset, size) in data_bytes.0.iter() {
930            if !(offset >= padding_cleared_until) {
    {
        ::core::panicking::panic_fmt(format_args!("reset_padding on {0}: previous field ended at offset {1}, next field starts at {2} (and has a size of {3} bytes)",
                mplace.layout.ty,
                (padding_cleared_until - start_offset).bytes(),
                (offset - start_offset).bytes(), size.bytes()));
    }
};assert!(
931                offset >= padding_cleared_until,
932                "reset_padding on {}: previous field ended at offset {}, next field starts at {} (and has a size of {} bytes)",
933                mplace.layout.ty,
934                (padding_cleared_until - start_offset).bytes(),
935                (offset - start_offset).bytes(),
936                size.bytes(),
937            );
938            if offset > padding_cleared_until {
939                // We found padding. Adjust the range to be relative to `alloc`, and make it uninit.
940                let padding_start = padding_cleared_until - start_offset;
941                let padding_size = offset - padding_cleared_until;
942                let range = alloc_range(padding_start, padding_size);
943                {
    use ::tracing::__macro_support::Callsite as _;
    static __CALLSITE: ::tracing::callsite::DefaultCallsite =
        {
            static META: ::tracing::Metadata<'static> =
                {
                    ::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/validity.rs:943",
                        "rustc_const_eval::interpret::validity",
                        ::tracing::Level::TRACE,
                        ::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/validity.rs"),
                        ::tracing_core::__macro_support::Option::Some(943u32),
                        ::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::validity"),
                        ::tracing_core::field::FieldSet::new(&["message"],
                            ::tracing_core::callsite::Identifier(&__CALLSITE)),
                        ::tracing::metadata::Kind::EVENT)
                };
            ::tracing::callsite::DefaultCallsite::new(&META)
        };
    let enabled =
        ::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
                &&
                ::tracing::Level::TRACE <=
                    ::tracing::level_filters::LevelFilter::current() &&
            {
                let interest = __CALLSITE.interest();
                !interest.is_never() &&
                    ::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
                        interest)
            };
    if enabled {
        (|value_set: ::tracing::field::ValueSet|
                    {
                        let meta = __CALLSITE.metadata();
                        ::tracing::Event::dispatch(meta, &value_set);
                        ;
                    })({
                #[allow(unused_imports)]
                use ::tracing::field::{debug, display, Value};
                let mut iter = __CALLSITE.metadata().fields().iter();
                __CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                    ::tracing::__macro_support::Option::Some(&format_args!("reset_padding on {0}: resetting padding range {1:?}",
                                                    mplace.layout.ty, range) as &dyn Value))])
            });
    } else { ; }
};trace!("reset_padding on {}: resetting padding range {range:?}", mplace.layout.ty);
944                alloc.write_uninit(range);
945            }
946            padding_cleared_until = offset + size;
947        }
948        if !(padding_cleared_until == start_offset + size) {
    ::core::panicking::panic("assertion failed: padding_cleared_until == start_offset + size")
};assert!(padding_cleared_until == start_offset + size);
949        interp_ok(())
950    }
951
952    /// Computes the data range of this union type:
953    /// which bytes are inside a field (i.e., not padding.)
954    fn union_data_range<'e>(
955        ecx: &'e mut InterpCx<'tcx, M>,
956        layout: TyAndLayout<'tcx>,
957    ) -> Cow<'e, RangeSet> {
958        if !layout.ty.is_union() {
    ::core::panicking::panic("assertion failed: layout.ty.is_union()")
};assert!(layout.ty.is_union());
959        if !layout.is_sized() {
    {
        ::core::panicking::panic_fmt(format_args!("there are no unsized unions"));
    }
};assert!(layout.is_sized(), "there are no unsized unions");
960        let layout_cx = LayoutCx::new(*ecx.tcx, ecx.typing_env);
961        return M::cached_union_data_range(ecx, layout.ty, || {
962            let mut out = RangeSet(Vec::new());
963            union_data_range_uncached(&layout_cx, layout, Size::ZERO, &mut out);
964            out
965        });
966
967        /// Helper for recursive traversal: add data ranges of the given type to `out`.
968        fn union_data_range_uncached<'tcx>(
969            cx: &LayoutCx<'tcx>,
970            layout: TyAndLayout<'tcx>,
971            base_offset: Size,
972            out: &mut RangeSet,
973        ) {
974            // If this is a ZST, we don't contain any data. In particular, this helps us to quickly
975            // skip over huge arrays of ZST.
976            if layout.is_zst() {
977                return;
978            }
979            // Just recursively add all the fields of everything to the output.
980            match &layout.fields {
981                FieldsShape::Primitive => {
982                    out.add_range(base_offset, layout.size);
983                }
984                &FieldsShape::Union(fields) => {
985                    // Currently, all fields start at offset 0 (relative to `base_offset`).
986                    for field in 0..fields.get() {
987                        let field = layout.field(cx, field);
988                        union_data_range_uncached(cx, field, base_offset, out);
989                    }
990                }
991                &FieldsShape::Array { stride, count } => {
992                    let elem = layout.field(cx, 0);
993
994                    // Fast-path for large arrays of simple types that do not contain any padding.
995                    if elem.backend_repr.is_scalar() {
996                        out.add_range(base_offset, elem.size * count);
997                    } else {
998                        for idx in 0..count {
999                            // This repeats the same computation for every array element... but the alternative
1000                            // is to allocate temporary storage for a dedicated `out` set for the array element,
1001                            // and replicating that N times. Is that better?
1002                            union_data_range_uncached(cx, elem, base_offset + idx * stride, out);
1003                        }
1004                    }
1005                }
1006                FieldsShape::Arbitrary { offsets, .. } => {
1007                    for (field, &offset) in offsets.iter_enumerated() {
1008                        let field = layout.field(cx, field.as_usize());
1009                        union_data_range_uncached(cx, field, base_offset + offset, out);
1010                    }
1011                }
1012            }
1013            // Don't forget potential other variants.
1014            match &layout.variants {
1015                Variants::Single { .. } | Variants::Empty => {
1016                    // Fully handled above.
1017                }
1018                Variants::Multiple { variants, .. } => {
1019                    for variant in variants.indices() {
1020                        let variant = layout.for_variant(cx, variant);
1021                        union_data_range_uncached(cx, variant, base_offset, out);
1022                    }
1023                }
1024            }
1025        }
1026    }
1027}
1028
1029impl<'rt, 'tcx, M: Machine<'tcx>> ValueVisitor<'tcx, M> for ValidityVisitor<'rt, 'tcx, M> {
1030    type V = PlaceTy<'tcx, M::Provenance>;
1031
1032    #[inline(always)]
1033    fn ecx(&self) -> &InterpCx<'tcx, M> {
1034        self.ecx
1035    }
1036
1037    fn read_discriminant(
1038        &mut self,
1039        val: &PlaceTy<'tcx, M::Provenance>,
1040    ) -> InterpResult<'tcx, VariantIdx> {
1041        self.with_elem(PathElem::EnumTag, move |this| {
1042            interp_ok({
    this.ecx.read_discriminant(val).map_err_kind(|e|
                {
                    match e {
                        Ub(InvalidTag(val)) => {
                            {
                                let where_ = &this.path;
                                let path =
                                    if !where_.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, where_);
                                        Some(path)
                                    } else { None };
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                                            path,
                                            kind: InvalidEnumTag {
                                                value: ::alloc::__export::must_use({
                                                        ::alloc::fmt::format(format_args!("{0:x}", val))
                                                    }),
                                            },
                                        }))
                            }
                        }
                        Ub(UninhabitedEnumVariantRead(_)) => {
                            {
                                let where_ = &this.path;
                                let path =
                                    if !where_.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, where_);
                                        Some(path)
                                    } else { None };
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                                            path,
                                            kind: UninhabitedEnumVariant,
                                        }))
                            }
                        }
                        e => e,
                    }
                })?
}try_validation!(
1043                this.ecx.read_discriminant(val),
1044                this.path,
1045                Ub(InvalidTag(val)) => InvalidEnumTag {
1046                    value: format!("{val:x}"),
1047                },
1048                Ub(UninhabitedEnumVariantRead(_)) => UninhabitedEnumVariant,
1049                // Uninit / bad provenance are not possible since the field was already previously
1050                // checked at its integer type.
1051            ))
1052        })
1053    }
1054
1055    #[inline]
1056    fn visit_field(
1057        &mut self,
1058        old_val: &PlaceTy<'tcx, M::Provenance>,
1059        field: usize,
1060        new_val: &PlaceTy<'tcx, M::Provenance>,
1061    ) -> InterpResult<'tcx> {
1062        let elem = self.aggregate_field_path_elem(old_val.layout, field);
1063        self.with_elem(elem, move |this| this.visit_value(new_val))
1064    }
1065
1066    #[inline]
1067    fn visit_variant(
1068        &mut self,
1069        old_val: &PlaceTy<'tcx, M::Provenance>,
1070        variant_id: VariantIdx,
1071        new_val: &PlaceTy<'tcx, M::Provenance>,
1072    ) -> InterpResult<'tcx> {
1073        let name = match old_val.layout.ty.kind() {
1074            ty::Adt(adt, _) => PathElem::Variant(adt.variant(variant_id).name),
1075            // Coroutines also have variants
1076            ty::Coroutine(..) => PathElem::CoroutineState(variant_id),
1077            _ => ::rustc_middle::util::bug::bug_fmt(format_args!("Unexpected type with variant: {0:?}",
        old_val.layout.ty))bug!("Unexpected type with variant: {:?}", old_val.layout.ty),
1078        };
1079        self.with_elem(name, move |this| this.visit_value(new_val))
1080    }
1081
1082    #[inline(always)]
1083    fn visit_union(
1084        &mut self,
1085        val: &PlaceTy<'tcx, M::Provenance>,
1086        _fields: NonZero<usize>,
1087    ) -> InterpResult<'tcx> {
1088        // Special check for CTFE validation, preventing `UnsafeCell` inside unions in immutable memory.
1089        if self.ctfe_mode.is_some_and(|c| !c.allow_immutable_unsafe_cell()) {
1090            // Unsized unions are currently not a thing, but let's keep this code consistent with
1091            // the check in `visit_value`.
1092            let zst = self.ecx.size_and_align_of_val(val)?.is_some_and(|(s, _a)| s.bytes() == 0);
1093            if !zst && !val.layout.ty.is_freeze(*self.ecx.tcx, self.ecx.typing_env) {
1094                if !self.in_mutable_memory(val) {
1095                    do yeet {
        let where_ = &self.path;
        let path =
            if !where_.is_empty() {
                let mut path = String::new();
                write_path(&mut path, where_);
                Some(path)
            } else { None };
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                    path,
                    kind: UnsafeCellInImmutable,
                }))
    };throw_validation_failure!(self.path, UnsafeCellInImmutable);
1096                }
1097            }
1098        }
1099        if self.reset_provenance_and_padding
1100            && let Some(data_bytes) = self.data_bytes.as_mut()
1101        {
1102            let base_offset = Self::data_range_offset(self.ecx, val);
1103            // Determine and add data range for this union.
1104            let union_data_range = Self::union_data_range(self.ecx, val.layout);
1105            for &(offset, size) in union_data_range.0.iter() {
1106                data_bytes.add_range(base_offset + offset, size);
1107            }
1108        }
1109        interp_ok(())
1110    }
1111
1112    #[inline]
1113    fn visit_box(
1114        &mut self,
1115        _box_ty: Ty<'tcx>,
1116        val: &PlaceTy<'tcx, M::Provenance>,
1117    ) -> InterpResult<'tcx> {
1118        self.check_safe_pointer(val, PointerKind::Box)?;
1119        interp_ok(())
1120    }
1121
1122    #[inline]
1123    fn visit_value(&mut self, val: &PlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
1124        {
    use ::tracing::__macro_support::Callsite as _;
    static __CALLSITE: ::tracing::callsite::DefaultCallsite =
        {
            static META: ::tracing::Metadata<'static> =
                {
                    ::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/validity.rs:1124",
                        "rustc_const_eval::interpret::validity",
                        ::tracing::Level::TRACE,
                        ::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/validity.rs"),
                        ::tracing_core::__macro_support::Option::Some(1124u32),
                        ::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::validity"),
                        ::tracing_core::field::FieldSet::new(&["message"],
                            ::tracing_core::callsite::Identifier(&__CALLSITE)),
                        ::tracing::metadata::Kind::EVENT)
                };
            ::tracing::callsite::DefaultCallsite::new(&META)
        };
    let enabled =
        ::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
                &&
                ::tracing::Level::TRACE <=
                    ::tracing::level_filters::LevelFilter::current() &&
            {
                let interest = __CALLSITE.interest();
                !interest.is_never() &&
                    ::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
                        interest)
            };
    if enabled {
        (|value_set: ::tracing::field::ValueSet|
                    {
                        let meta = __CALLSITE.metadata();
                        ::tracing::Event::dispatch(meta, &value_set);
                        ;
                    })({
                #[allow(unused_imports)]
                use ::tracing::field::{debug, display, Value};
                let mut iter = __CALLSITE.metadata().fields().iter();
                __CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                    ::tracing::__macro_support::Option::Some(&format_args!("visit_value: {0:?}, {1:?}",
                                                    *val, val.layout) as &dyn Value))])
            });
    } else { ; }
};trace!("visit_value: {:?}, {:?}", *val, val.layout);
1125
1126        // Check primitive types -- the leaves of our recursive descent.
1127        // This is called even for enum discriminants (which are "fields" of their enum),
1128        // so for integer-typed discriminants the provenance reset will happen here.
1129        // We assume that the Scalar validity range does not restrict these values
1130        // any further than `try_visit_primitive` does!
1131        if self.try_visit_primitive(val)? {
1132            return interp_ok(());
1133        }
1134
1135        // Special check preventing `UnsafeCell` in the inner part of constants
1136        if self.ctfe_mode.is_some_and(|c| !c.allow_immutable_unsafe_cell()) {
1137            // Exclude ZST values. We need to compute the dynamic size/align to properly
1138            // handle slices and trait objects.
1139            let zst = self.ecx.size_and_align_of_val(val)?.is_some_and(|(s, _a)| s.bytes() == 0);
1140            if !zst
1141                && let Some(def) = val.layout.ty.ty_adt_def()
1142                && def.is_unsafe_cell()
1143            {
1144                if !self.in_mutable_memory(val) {
1145                    do yeet {
        let where_ = &self.path;
        let path =
            if !where_.is_empty() {
                let mut path = String::new();
                write_path(&mut path, where_);
                Some(path)
            } else { None };
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                    path,
                    kind: UnsafeCellInImmutable,
                }))
    };throw_validation_failure!(self.path, UnsafeCellInImmutable);
1146                }
1147            }
1148        }
1149
1150        // Recursively walk the value at its type. Apply optimizations for some large types.
1151        match val.layout.ty.kind() {
1152            ty::Str => {
1153                let mplace = val.assert_mem_place(); // strings are unsized and hence never immediate
1154                let len = mplace.len(self.ecx)?;
1155                {
    self.ecx.read_bytes_ptr_strip_provenance(mplace.ptr(),
                Size::from_bytes(len)).map_err_kind(|e|
                {
                    match e {
                        Ub(InvalidUninitBytes(..)) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, where_);
                                        Some(path)
                                    } else { None };
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                                            path,
                                            kind: Uninit { expected: ExpectedKind::Str },
                                        }))
                            }
                        }
                        Unsup(ReadPointerAsInt(_)) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, where_);
                                        Some(path)
                                    } else { None };
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                                            path,
                                            kind: PointerAsInt { expected: ExpectedKind::Str },
                                        }))
                            }
                        }
                        e => e,
                    }
                })?
};try_validation!(
1156                    self.ecx.read_bytes_ptr_strip_provenance(mplace.ptr(), Size::from_bytes(len)),
1157                    self.path,
1158                    Ub(InvalidUninitBytes(..)) => Uninit { expected: ExpectedKind::Str },
1159                    Unsup(ReadPointerAsInt(_)) => PointerAsInt { expected: ExpectedKind::Str }
1160                );
1161            }
1162            ty::Array(tys, ..) | ty::Slice(tys)
1163                // This optimization applies for types that can hold arbitrary non-provenance bytes (such as
1164                // integer and floating point types).
1165                // FIXME(wesleywiser) This logic could be extended further to arbitrary structs or
1166                // tuples made up of integer/floating point types or inhabited ZSTs with no padding.
1167                if #[allow(non_exhaustive_omitted_patterns)] match tys.kind() {
    ty::Int(..) | ty::Uint(..) | ty::Float(..) => true,
    _ => false,
}matches!(tys.kind(), ty::Int(..) | ty::Uint(..) | ty::Float(..))
1168                =>
1169            {
1170                let expected = if tys.is_integral() { ExpectedKind::Int } else { ExpectedKind::Float };
1171                // Optimized handling for arrays of integer/float type.
1172
1173                // This is the length of the array/slice.
1174                let len = val.len(self.ecx)?;
1175                // This is the element type size.
1176                let layout = self.ecx.layout_of(*tys)?;
1177                // This is the size in bytes of the whole array. (This checks for overflow.)
1178                let size = layout.size * len;
1179                // If the size is 0, there is nothing to check.
1180                // (`size` can only be 0 if `len` is 0, and empty arrays are always valid.)
1181                if size == Size::ZERO {
1182                    return interp_ok(());
1183                }
1184                // Now that we definitely have a non-ZST array, we know it lives in memory -- except it may
1185                // be an uninitialized local variable, those are also "immediate".
1186                let mplace = match val.to_op(self.ecx)?.as_mplace_or_imm() {
1187                    Left(mplace) => mplace,
1188                    Right(imm) => match *imm {
1189                        Immediate::Uninit =>
1190                            do yeet {
        let where_ = &self.path;
        let path =
            if !where_.is_empty() {
                let mut path = String::new();
                write_path(&mut path, where_);
                Some(path)
            } else { None };
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                    path,
                    kind: Uninit { expected },
                }))
    }throw_validation_failure!(self.path, Uninit { expected }),
1191                        Immediate::Scalar(..) | Immediate::ScalarPair(..) =>
1192                            ::rustc_middle::util::bug::bug_fmt(format_args!("arrays/slices can never have Scalar/ScalarPair layout"))bug!("arrays/slices can never have Scalar/ScalarPair layout"),
1193                    }
1194                };
1195
1196                // Optimization: we just check the entire range at once.
1197                // NOTE: Keep this in sync with the handling of integer and float
1198                // types above, in `visit_primitive`.
1199                // No need for an alignment check here, this is not an actual memory access.
1200                let alloc = self.ecx.get_ptr_alloc(mplace.ptr(), size)?.expect("we already excluded size 0");
1201
1202                alloc.get_bytes_strip_provenance().map_err_kind(|kind| {
1203                    // Some error happened, try to provide a more detailed description.
1204                    // For some errors we might be able to provide extra information.
1205                    // (This custom logic does not fit the `try_validation!` macro.)
1206                    match kind {
1207                        Ub(InvalidUninitBytes(Some((_alloc_id, access)))) | Unsup(ReadPointerAsInt(Some((_alloc_id, access)))) => {
1208                            // Some byte was uninitialized, determine which
1209                            // element that byte belongs to so we can
1210                            // provide an index.
1211                            let i = usize::try_from(
1212                                access.bad.start.bytes() / layout.size.bytes(),
1213                            )
1214                            .unwrap();
1215                            self.path.push(PathElem::ArrayElem(i));
1216
1217                            if #[allow(non_exhaustive_omitted_patterns)] match kind {
    Ub(InvalidUninitBytes(_)) => true,
    _ => false,
}matches!(kind, Ub(InvalidUninitBytes(_))) {
1218                                {
    let where_ = &self.path;
    let path =
        if !where_.is_empty() {
            let mut path = String::new();
            write_path(&mut path, where_);
            Some(path)
        } else { None };
    ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                path,
                kind: Uninit { expected },
            }))
}err_validation_failure!(self.path, Uninit { expected })
1219                            } else {
1220                                {
    let where_ = &self.path;
    let path =
        if !where_.is_empty() {
            let mut path = String::new();
            write_path(&mut path, where_);
            Some(path)
        } else { None };
    ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                path,
                kind: PointerAsInt { expected },
            }))
}err_validation_failure!(self.path, PointerAsInt { expected })
1221                            }
1222                        }
1223
1224                        // Propagate upwards (that will also check for unexpected errors).
1225                        err => err,
1226                    }
1227                })?;
1228
1229                // Don't forget that these are all non-pointer types, and thus do not preserve
1230                // provenance.
1231                if self.reset_provenance_and_padding {
1232                    // We can't share this with above as above, we might be looking at read-only memory.
1233                    let mut alloc = self.ecx.get_ptr_alloc_mut(mplace.ptr(), size)?.expect("we already excluded size 0");
1234                    alloc.clear_provenance();
1235                    // Also, mark this as containing data, not padding.
1236                    self.add_data_range(mplace.ptr(), size);
1237                }
1238            }
1239            // Fast path for arrays and slices of ZSTs. We only need to check a single ZST element
1240            // of an array and not all of them, because there's only a single value of a specific
1241            // ZST type, so either validation fails for all elements or none.
1242            ty::Array(tys, ..) | ty::Slice(tys) if self.ecx.layout_of(*tys)?.is_zst() => {
1243                // Validate just the first element (if any).
1244                if val.len(self.ecx)? > 0 {
1245                    self.visit_field(val, 0, &self.ecx.project_index(val, 0)?)?;
1246                }
1247            }
1248            ty::Pat(base, pat) => {
1249                // First check that the base type is valid
1250                self.visit_value(&val.transmute(self.ecx.layout_of(*base)?, self.ecx)?)?;
1251                // When you extend this match, make sure to also add tests to
1252                // tests/ui/type/pattern_types/validity.rs((
1253                match **pat {
1254                    // Range and non-null patterns are precisely reflected into `valid_range` and thus
1255                    // handled fully by `visit_scalar` (called below).
1256                    ty::PatternKind::Range { .. } => {},
1257                    ty::PatternKind::NotNull => {},
1258
1259                    // FIXME(pattern_types): check that the value is covered by one of the variants.
1260                    // For now, we rely on layout computation setting the scalar's `valid_range` to
1261                    // match the pattern. However, this cannot always work; the layout may
1262                    // pessimistically cover actually illegal ranges and Miri would miss that UB.
1263                    // The consolation here is that codegen also will miss that UB, so at least
1264                    // we won't see optimizations actually breaking such programs.
1265                    ty::PatternKind::Or(_patterns) => {}
1266                }
1267            }
1268            _ => {
1269                // default handler
1270                {
    self.walk_value(val).map_err_kind(|e|
                {
                    match e {
                        Ub(InvalidVTableTrait { vtable_dyn_type, expected_dyn_type
                            }) => {
                            {
                                let where_ = &self.path;
                                let path =
                                    if !where_.is_empty() {
                                        let mut path = String::new();
                                        write_path(&mut path, where_);
                                        Some(path)
                                    } else { None };
                                ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                                            path,
                                            kind: {
                                                InvalidMetaWrongTrait { vtable_dyn_type, expected_dyn_type }
                                            },
                                        }))
                            }
                        }
                        e => e,
                    }
                })?
};try_validation!(
1271                    self.walk_value(val),
1272                    self.path,
1273                    // It's not great to catch errors here, since we can't give a very good path,
1274                    // but it's better than ICEing.
1275                    Ub(InvalidVTableTrait { vtable_dyn_type, expected_dyn_type }) => {
1276                        InvalidMetaWrongTrait { vtable_dyn_type, expected_dyn_type }
1277                    },
1278                );
1279            }
1280        }
1281
1282        // *After* all of this, check further information stored in the layout. We need to check
1283        // this to handle types like `NonNull` where the `Scalar` info is more restrictive than what
1284        // the fields say (`rustc_layout_scalar_valid_range_start`). But in most cases, this will
1285        // just propagate what the fields say, and then we want the error to point at the field --
1286        // so, we first recurse, then we do this check.
1287        //
1288        // FIXME: We could avoid some redundant checks here. For newtypes wrapping
1289        // scalars, we do the same check on every "level" (e.g., first we check
1290        // MyNewtype and then the scalar in there).
1291        if val.layout.is_uninhabited() {
1292            let ty = val.layout.ty;
1293            do yeet {
        let where_ = &self.path;
        let path =
            if !where_.is_empty() {
                let mut path = String::new();
                write_path(&mut path, where_);
                Some(path)
            } else { None };
        ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
                    path,
                    kind: UninhabitedVal { ty },
                }))
    };throw_validation_failure!(self.path, UninhabitedVal { ty });
1294        }
1295        match val.layout.backend_repr {
1296            BackendRepr::Scalar(scalar_layout) => {
1297                if !scalar_layout.is_uninit_valid() {
1298                    // There is something to check here.
1299                    let scalar = self.read_scalar(val, ExpectedKind::InitScalar)?;
1300                    self.visit_scalar(scalar, scalar_layout)?;
1301                }
1302            }
1303            BackendRepr::ScalarPair(a_layout, b_layout) => {
1304                // We can only proceed if *both* scalars need to be initialized.
1305                // FIXME: find a way to also check ScalarPair when one side can be uninit but
1306                // the other must be init.
1307                if !a_layout.is_uninit_valid() && !b_layout.is_uninit_valid() {
1308                    let (a, b) =
1309                        self.read_immediate(val, ExpectedKind::InitScalar)?.to_scalar_pair();
1310                    self.visit_scalar(a, a_layout)?;
1311                    self.visit_scalar(b, b_layout)?;
1312                }
1313            }
1314            BackendRepr::SimdVector { .. } | BackendRepr::ScalableVector { .. } => {
1315                // No checks here, we assume layout computation gets this right.
1316                // (This is harder to check since Miri does not represent these as `Immediate`. We
1317                // also cannot use field projections since this might be a newtype around a vector.)
1318            }
1319            BackendRepr::Memory { .. } => {
1320                // Nothing to do.
1321            }
1322        }
1323
1324        interp_ok(())
1325    }
1326}
1327
1328impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
1329    fn validate_operand_internal(
1330        &mut self,
1331        val: &PlaceTy<'tcx, M::Provenance>,
1332        path: Vec<PathElem>,
1333        ref_tracking: Option<&mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Vec<PathElem>>>,
1334        ctfe_mode: Option<CtfeValidationMode>,
1335        reset_provenance_and_padding: bool,
1336    ) -> InterpResult<'tcx> {
1337        {
    use ::tracing::__macro_support::Callsite as _;
    static __CALLSITE: ::tracing::callsite::DefaultCallsite =
        {
            static META: ::tracing::Metadata<'static> =
                {
                    ::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/validity.rs:1337",
                        "rustc_const_eval::interpret::validity",
                        ::tracing::Level::TRACE,
                        ::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/validity.rs"),
                        ::tracing_core::__macro_support::Option::Some(1337u32),
                        ::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::validity"),
                        ::tracing_core::field::FieldSet::new(&["message"],
                            ::tracing_core::callsite::Identifier(&__CALLSITE)),
                        ::tracing::metadata::Kind::EVENT)
                };
            ::tracing::callsite::DefaultCallsite::new(&META)
        };
    let enabled =
        ::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
                &&
                ::tracing::Level::TRACE <=
                    ::tracing::level_filters::LevelFilter::current() &&
            {
                let interest = __CALLSITE.interest();
                !interest.is_never() &&
                    ::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
                        interest)
            };
    if enabled {
        (|value_set: ::tracing::field::ValueSet|
                    {
                        let meta = __CALLSITE.metadata();
                        ::tracing::Event::dispatch(meta, &value_set);
                        ;
                    })({
                #[allow(unused_imports)]
                use ::tracing::field::{debug, display, Value};
                let mut iter = __CALLSITE.metadata().fields().iter();
                __CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                    ::tracing::__macro_support::Option::Some(&format_args!("validate_operand_internal: {0:?}, {1:?}",
                                                    *val, val.layout.ty) as &dyn Value))])
            });
    } else { ; }
};trace!("validate_operand_internal: {:?}, {:?}", *val, val.layout.ty);
1338
1339        // Run the visitor.
1340        self.run_for_validation_mut(|ecx| {
1341            let reset_padding = reset_provenance_and_padding && {
1342                // Check if `val` is actually stored in memory. If not, padding is not even
1343                // represented and we need not reset it.
1344                ecx.place_to_op(val)?.as_mplace_or_imm().is_left()
1345            };
1346            let mut v = ValidityVisitor {
1347                path,
1348                ref_tracking,
1349                ctfe_mode,
1350                ecx,
1351                reset_provenance_and_padding,
1352                data_bytes: reset_padding.then_some(RangeSet(Vec::new())),
1353            };
1354            v.visit_value(val)?;
1355            v.reset_padding(val)?;
1356            interp_ok(())
1357        })
1358        .map_err_info(|err| {
1359            if !#[allow(non_exhaustive_omitted_patterns)] match err.kind() {
    ::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
        .. }) | InterpErrorKind::InvalidProgram(_) |
        InterpErrorKind::Unsupported(UnsupportedOpInfo::ExternTypeField) =>
        true,
    _ => false,
}matches!(
1360                err.kind(),
1361                err_ub!(ValidationError { .. })
1362                    | InterpErrorKind::InvalidProgram(_)
1363                    | InterpErrorKind::Unsupported(UnsupportedOpInfo::ExternTypeField)
1364            ) {
1365                ::rustc_middle::util::bug::bug_fmt(format_args!("Unexpected error during validation: {0}",
        format_interp_error(self.tcx.dcx(), err)));bug!(
1366                    "Unexpected error during validation: {}",
1367                    format_interp_error(self.tcx.dcx(), err)
1368                );
1369            }
1370            err
1371        })
1372    }
1373
1374    /// This function checks the data at `val` to be const-valid.
1375    /// `val` is assumed to cover valid memory if it is an indirect operand.
1376    /// It will error if the bits at the destination do not match the ones described by the layout.
1377    ///
1378    /// `ref_tracking` is used to record references that we encounter so that they
1379    /// can be checked recursively by an outside driving loop.
1380    ///
1381    /// `constant` controls whether this must satisfy the rules for constants:
1382    /// - no pointers to statics.
1383    /// - no `UnsafeCell` or non-ZST `&mut`.
1384    #[inline(always)]
1385    pub(crate) fn const_validate_operand(
1386        &mut self,
1387        val: &PlaceTy<'tcx, M::Provenance>,
1388        path: Vec<PathElem>,
1389        ref_tracking: &mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Vec<PathElem>>,
1390        ctfe_mode: CtfeValidationMode,
1391    ) -> InterpResult<'tcx> {
1392        self.validate_operand_internal(
1393            val,
1394            path,
1395            Some(ref_tracking),
1396            Some(ctfe_mode),
1397            /*reset_provenance*/ false,
1398        )
1399    }
1400
1401    /// This function checks the data at `val` to be runtime-valid.
1402    /// `val` is assumed to cover valid memory if it is an indirect operand.
1403    /// It will error if the bits at the destination do not match the ones described by the layout.
1404    #[inline(always)]
1405    pub fn validate_operand(
1406        &mut self,
1407        val: &PlaceTy<'tcx, M::Provenance>,
1408        recursive: bool,
1409        reset_provenance_and_padding: bool,
1410    ) -> InterpResult<'tcx> {
1411        let _trace = <M as
        crate::interpret::Machine>::enter_trace_span(||
        {
            use ::tracing::__macro_support::Callsite as _;
            static __CALLSITE: ::tracing::callsite::DefaultCallsite =
                {
                    static META: ::tracing::Metadata<'static> =
                        {
                            ::tracing_core::metadata::Metadata::new("validate_operand",
                                "rustc_const_eval::interpret::validity",
                                ::tracing::Level::INFO,
                                ::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/validity.rs"),
                                ::tracing_core::__macro_support::Option::Some(1411u32),
                                ::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::validity"),
                                ::tracing_core::field::FieldSet::new(&["recursive",
                                                "reset_provenance_and_padding", "val"],
                                    ::tracing_core::callsite::Identifier(&__CALLSITE)),
                                ::tracing::metadata::Kind::SPAN)
                        };
                    ::tracing::callsite::DefaultCallsite::new(&META)
                };
            let mut interest = ::tracing::subscriber::Interest::never();
            if ::tracing::Level::INFO <=
                                ::tracing::level_filters::STATIC_MAX_LEVEL &&
                            ::tracing::Level::INFO <=
                                ::tracing::level_filters::LevelFilter::current() &&
                        { interest = __CALLSITE.interest(); !interest.is_never() }
                    &&
                    ::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
                        interest) {
                let meta = __CALLSITE.metadata();
                ::tracing::Span::new(meta,
                    &{
                            #[allow(unused_imports)]
                            use ::tracing::field::{debug, display, Value};
                            let mut iter = meta.fields().iter();
                            meta.fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                                ::tracing::__macro_support::Option::Some(&recursive as
                                                        &dyn Value)),
                                            (&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                                ::tracing::__macro_support::Option::Some(&reset_provenance_and_padding
                                                        as &dyn Value)),
                                            (&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
                                                ::tracing::__macro_support::Option::Some(&debug(&val) as
                                                        &dyn Value))])
                        })
            } else {
                let span =
                    ::tracing::__macro_support::__disabled_span(__CALLSITE.metadata());
                {};
                span
            }
        })enter_trace_span!(
1412            M,
1413            "validate_operand",
1414            recursive,
1415            reset_provenance_and_padding,
1416            ?val,
1417        );
1418
1419        // Note that we *could* actually be in CTFE here with `-Zextra-const-ub-checks`, but it's
1420        // still correct to not use `ctfe_mode`: that mode is for validation of the final constant
1421        // value, it rules out things like `UnsafeCell` in awkward places.
1422        if !recursive {
1423            return self.validate_operand_internal(
1424                val,
1425                ::alloc::vec::Vec::new()vec![],
1426                None,
1427                None,
1428                reset_provenance_and_padding,
1429            );
1430        }
1431        // Do a recursive check.
1432        let mut ref_tracking = RefTracking::empty();
1433        self.validate_operand_internal(
1434            val,
1435            ::alloc::vec::Vec::new()vec![],
1436            Some(&mut ref_tracking),
1437            None,
1438            reset_provenance_and_padding,
1439        )?;
1440        while let Some((mplace, path)) = ref_tracking.todo.pop() {
1441            // Things behind reference do *not* have the provenance reset.
1442            self.validate_operand_internal(
1443                &mplace.into(),
1444                path,
1445                Some(&mut ref_tracking),
1446                None,
1447                /*reset_provenance_and_padding*/ false,
1448            )?;
1449        }
1450        interp_ok(())
1451    }
1452}