1//! Check the validity invariant of a given value, and tell the user
2//! where in the value it got violated.
3//! In const context, this goes even further and tries to approximate const safety.
4//! That's useful because it means other passes (e.g. promotion) can rely on `const`s
5//! to be const-safe.
67use std::borrow::Cow;
8use std::fmt::Write;
9use std::hash::Hash;
10use std::mem;
11use std::num::NonZero;
1213use either::{Left, Right};
14use hir::def::DefKind;
15use rustc_abi::{
16BackendRepr, FieldIdx, FieldsShape, Scalaras ScalarAbi, Size, VariantIdx, Variants,
17WrappingRange,
18};
19use rustc_ast::Mutability;
20use rustc_data_structures::fx::FxHashSet;
21use rustc_hiras hir;
22use rustc_middle::bug;
23use rustc_middle::mir::interpret::ValidationErrorKind::{self, *};
24use rustc_middle::mir::interpret::{
25ExpectedKind, InterpErrorKind, InvalidMetaKind, Misalignment, PointerKind, Provenance,
26UnsupportedOpInfo, ValidationErrorInfo, alloc_range, interp_ok,
27};
28use rustc_middle::ty::layout::{LayoutCx, TyAndLayout};
29use rustc_middle::ty::{self, Ty};
30use rustc_span::{Symbol, sym};
31use tracing::trace;
3233use super::machine::AllocMap;
34use super::{
35AllocId, CheckInAllocMsg, GlobalAlloc, ImmTy, Immediate, InterpCx, InterpResult, MPlaceTy,
36Machine, MemPlaceMeta, PlaceTy, Pointer, Projectable, Scalar, ValueVisitor, err_ub,
37format_interp_error,
38};
39use crate::enter_trace_span;
4041// for the validation errors
42#[rustfmt::skip]
43use super::InterpErrorKind::UndefinedBehavioras Ub;
44use super::InterpErrorKind::Unsupportedas Unsup;
45use super::UndefinedBehaviorInfo::*;
46use super::UnsupportedOpInfo::*;
/// Construct (but do not throw) a validation-failure error.
/// `$where` is the `Vec<PathElem>` path to the offending field; it is rendered
/// into a human-readable string only on the error path, keeping the happy path
/// allocation-free.
macro_rules! err_validation_failure {
    ($where:expr, $kind: expr) => {{
        let where_ = &$where;
        // Only build the path string if there is a path to show.
        let path = if !where_.is_empty() {
            let mut path = String::new();
            write_path(&mut path, where_);
            Some(path)
        } else {
            None
        };

        err_ub!(ValidationError(ValidationErrorInfo { path, kind: $kind }))
    }};
}
/// Like `err_validation_failure!`, but immediately throws the error via
/// `do yeet` (early-returns it from the enclosing `InterpResult` function).
macro_rules! throw_validation_failure {
    ($where:expr, $kind: expr) => {
        do yeet err_validation_failure!($where, $kind)
    };
}
/// If $e throws an error matching the pattern, throw a validation failure.
/// Other errors are passed back to the caller, unchanged -- and if they reach the root of
/// the visitor, we make sure only validation errors and `InvalidProgram` errors are left.
/// This lets you use the patterns as a kind of validation list, asserting which errors
/// can possibly happen:
///
/// ```ignore(illustrative)
/// let v = try_validation!(some_fn(), some_path, {
///     Foo | Bar | Baz => { "some failure" },
/// });
/// ```
///
/// The patterns must be of type `UndefinedBehaviorInfo`.
/// An additional expected parameter can also be added to the failure message:
///
/// ```ignore(illustrative)
/// let v = try_validation!(some_fn(), some_path, {
///     Foo | Bar | Baz => { "some failure" } expected { "something that wasn't a failure" },
/// });
/// ```
///
/// An additional nicety is that both parameters actually take format args, so you can just write
/// the format string in directly:
///
/// ```ignore(illustrative)
/// let v = try_validation!(some_fn(), some_path, {
///     Foo | Bar | Baz => { "{:?}", some_failure } expected { "{}", expected_value },
/// });
/// ```
///
macro_rules! try_validation {
    ($e:expr, $where:expr,
     $( $( $p:pat_param )|+ => $kind: expr ),+ $(,)?
    ) => {{
        $e.map_err_kind(|e| {
            // We catch the error and turn it into a validation failure. We are okay with
            // allocation here as this can only slow down builds that fail anyway.
            match e {
                $(
                    $($p)|+ => {
                        err_validation_failure!(
                            $where,
                            $kind
                        )
                    }
                ),+,
                e => e,
            }
        })?
    }};
}
120121/// We want to show a nice path to the invalid field for diagnostics,
122/// but avoid string operations in the happy case where no error happens.
123/// So we track a `Vec<PathElem>` where `PathElem` contains all the data we
124/// need to later print something for the user.
125#[derive(#[automatically_derived]
impl ::core::marker::Copy for PathElem { }Copy, #[automatically_derived]
impl ::core::clone::Clone for PathElem {
#[inline]
fn clone(&self) -> PathElem {
let _: ::core::clone::AssertParamIsClone<Symbol>;
let _: ::core::clone::AssertParamIsClone<VariantIdx>;
let _: ::core::clone::AssertParamIsClone<usize>;
*self
}
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for PathElem {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match self {
PathElem::Field(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Field",
&__self_0),
PathElem::Variant(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"Variant", &__self_0),
PathElem::CoroutineState(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"CoroutineState", &__self_0),
PathElem::CapturedVar(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"CapturedVar", &__self_0),
PathElem::ArrayElem(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"ArrayElem", &__self_0),
PathElem::TupleElem(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"TupleElem", &__self_0),
PathElem::Deref => ::core::fmt::Formatter::write_str(f, "Deref"),
PathElem::EnumTag =>
::core::fmt::Formatter::write_str(f, "EnumTag"),
PathElem::CoroutineTag =>
::core::fmt::Formatter::write_str(f, "CoroutineTag"),
PathElem::DynDowncast =>
::core::fmt::Formatter::write_str(f, "DynDowncast"),
PathElem::Vtable =>
::core::fmt::Formatter::write_str(f, "Vtable"),
}
}
}Debug)]
126pub enum PathElem {
127 Field(Symbol),
128 Variant(Symbol),
129 CoroutineState(VariantIdx),
130 CapturedVar(Symbol),
131 ArrayElem(usize),
132 TupleElem(usize),
133 Deref,
134 EnumTag,
135 CoroutineTag,
136 DynDowncast,
137 Vtable,
138}
139140/// Extra things to check for during validation of CTFE results.
141#[derive(#[automatically_derived]
impl ::core::marker::Copy for CtfeValidationMode { }Copy, #[automatically_derived]
impl ::core::clone::Clone for CtfeValidationMode {
#[inline]
fn clone(&self) -> CtfeValidationMode {
let _: ::core::clone::AssertParamIsClone<Mutability>;
let _: ::core::clone::AssertParamIsClone<bool>;
*self
}
}Clone)]
142pub enum CtfeValidationMode {
143/// Validation of a `static`
144Static { mutbl: Mutability },
145/// Validation of a promoted.
146Promoted,
147/// Validation of a `const`.
148 /// `allow_immutable_unsafe_cell` says whether we allow `UnsafeCell` in immutable memory (which is the
149 /// case for the top-level allocation of a `const`, where this is fine because the allocation will be
150 /// copied at each use site).
151Const { allow_immutable_unsafe_cell: bool },
152}
153154impl CtfeValidationMode {
155fn allow_immutable_unsafe_cell(self) -> bool {
156match self {
157 CtfeValidationMode::Static { .. } => false,
158 CtfeValidationMode::Promoted { .. } => false,
159 CtfeValidationMode::Const { allow_immutable_unsafe_cell, .. } => {
160allow_immutable_unsafe_cell161 }
162 }
163 }
164}
165166/// State for tracking recursive validation of references
167pub struct RefTracking<T, PATH = ()> {
168 seen: FxHashSet<T>,
169 todo: Vec<(T, PATH)>,
170}
171172impl<T: Clone + Eq + Hash + std::fmt::Debug, PATH: Default> RefTracking<T, PATH> {
173pub fn empty() -> Self {
174RefTracking { seen: FxHashSet::default(), todo: ::alloc::vec::Vec::new()vec![] }
175 }
176pub fn new(val: T) -> Self {
177let mut ref_tracking_for_consts =
178RefTracking { seen: FxHashSet::default(), todo: ::alloc::boxed::box_assume_init_into_vec_unsafe(::alloc::intrinsics::write_box_via_move(::alloc::boxed::Box::new_uninit(),
[(val.clone(), PATH::default())]))vec![(val.clone(), PATH::default())] };
179ref_tracking_for_consts.seen.insert(val);
180ref_tracking_for_consts181 }
182pub fn next(&mut self) -> Option<(T, PATH)> {
183self.todo.pop()
184 }
185186fn track(&mut self, val: T, path: impl FnOnce() -> PATH) {
187if self.seen.insert(val.clone()) {
188{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/validity.rs:188",
"rustc_const_eval::interpret::validity",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/validity.rs"),
::tracing_core::__macro_support::Option::Some(188u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::validity"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("Recursing below ptr {0:#?}",
val) as &dyn Value))])
});
} else { ; }
};trace!("Recursing below ptr {:#?}", val);
189let path = path();
190// Remember to come back to this later.
191self.todo.push((val, path));
192 }
193 }
194}
195196// FIXME make this translatable as well?
197/// Format a path
198fn write_path(out: &mut String, path: &[PathElem]) {
199use self::PathElem::*;
200201for elem in path.iter() {
202match elem {
203 Field(name) => out.write_fmt(format_args!(".{0}", name))write!(out, ".{name}"),
204 EnumTag => out.write_fmt(format_args!(".<enum-tag>"))write!(out, ".<enum-tag>"),
205 Variant(name) => out.write_fmt(format_args!(".<enum-variant({0})>", name))write!(out, ".<enum-variant({name})>"),
206 CoroutineTag => out.write_fmt(format_args!(".<coroutine-tag>"))write!(out, ".<coroutine-tag>"),
207 CoroutineState(idx) => out.write_fmt(format_args!(".<coroutine-state({0})>", idx.index()))write!(out, ".<coroutine-state({})>", idx.index()),
208 CapturedVar(name) => out.write_fmt(format_args!(".<captured-var({0})>", name))write!(out, ".<captured-var({name})>"),
209 TupleElem(idx) => out.write_fmt(format_args!(".{0}", idx))write!(out, ".{idx}"),
210 ArrayElem(idx) => out.write_fmt(format_args!("[{0}]", idx))write!(out, "[{idx}]"),
211// `.<deref>` does not match Rust syntax, but it is more readable for long paths -- and
212 // some of the other items here also are not Rust syntax. Actually we can't
213 // even use the usual syntax because we are just showing the projections,
214 // not the root.
215 Deref => out.write_fmt(format_args!(".<deref>"))write!(out, ".<deref>"),
216 DynDowncast => out.write_fmt(format_args!(".<dyn-downcast>"))write!(out, ".<dyn-downcast>"),
217 Vtable => out.write_fmt(format_args!(".<vtable>"))write!(out, ".<vtable>"),
218 }
219 .unwrap()
220 }
221}
222223/// Represents a set of `Size` values as a sorted list of ranges.
224// These are (offset, length) pairs, and they are sorted and mutually disjoint,
225// and never adjacent (i.e. there's always a gap between two of them).
226#[derive(#[automatically_derived]
impl ::core::fmt::Debug for RangeSet {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_tuple_field1_finish(f, "RangeSet",
&&self.0)
}
}Debug, #[automatically_derived]
impl ::core::clone::Clone for RangeSet {
#[inline]
fn clone(&self) -> RangeSet {
RangeSet(::core::clone::Clone::clone(&self.0))
}
}Clone)]
227pub struct RangeSet(Vec<(Size, Size)>);
228229impl RangeSet {
230fn add_range(&mut self, offset: Size, size: Size) {
231if size.bytes() == 0 {
232// No need to track empty ranges.
233return;
234 }
235let v = &mut self.0;
236// We scan for a partition point where the left partition is all the elements that end
237 // strictly before we start. Those are elements that are too "low" to merge with us.
238let idx =
239v.partition_point(|&(other_offset, other_size)| other_offset + other_size < offset);
240// Now we want to either merge with the first element of the second partition, or insert ourselves before that.
241if let Some(&(other_offset, other_size)) = v.get(idx)
242 && offset + size >= other_offset243 {
244// Their end is >= our start (otherwise it would not be in the 2nd partition) and
245 // our end is >= their start. This means we can merge the ranges.
246let new_start = other_offset.min(offset);
247let mut new_end = (other_offset + other_size).max(offset + size);
248// We grew to the right, so merge with overlapping/adjacent elements.
249 // (We also may have grown to the left, but that can never make us adjacent with
250 // anything there since we selected the first such candidate via `partition_point`.)
251let mut scan_right = 1;
252while let Some(&(next_offset, next_size)) = v.get(idx + scan_right)
253 && new_end >= next_offset
254 {
255// Increase our size to absorb the next element.
256new_end = new_end.max(next_offset + next_size);
257// Look at the next element.
258scan_right += 1;
259 }
260// Update the element we grew.
261v[idx] = (new_start, new_end - new_start);
262// Remove the elements we absorbed (if any).
263if scan_right > 1 {
264drop(v.drain((idx + 1)..(idx + scan_right)));
265 }
266 } else {
267// Insert new element.
268v.insert(idx, (offset, size));
269 }
270 }
271}
272273struct ValidityVisitor<'rt, 'tcx, M: Machine<'tcx>> {
274/// The `path` may be pushed to, but the part that is present when a function
275 /// starts must not be changed! `visit_fields` and `visit_array` rely on
276 /// this stack discipline.
277path: Vec<PathElem>,
278 ref_tracking: Option<&'rt mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Vec<PathElem>>>,
279/// `None` indicates this is not validating for CTFE (but for runtime).
280ctfe_mode: Option<CtfeValidationMode>,
281 ecx: &'rt mut InterpCx<'tcx, M>,
282/// Whether provenance should be reset outside of pointers (emulating the effect of a typed
283 /// copy).
284reset_provenance_and_padding: bool,
285/// This tracks which byte ranges in this value contain data; the remaining bytes are padding.
286 /// The ideal representation here would be pointer-length pairs, but to keep things more compact
287 /// we only store a (range) set of offsets -- the base pointer is the same throughout the entire
288 /// visit, after all.
289 /// If this is `Some`, then `reset_provenance_and_padding` must be true (but not vice versa:
290 /// we might not track data vs padding bytes if the operand isn't stored in memory anyway).
291data_bytes: Option<RangeSet>,
292/// True if we are inside of `MaybeDangling`. This disables pointer access checks.
293may_dangle: bool,
294}
295296impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
297fn aggregate_field_path_elem(&mut self, layout: TyAndLayout<'tcx>, field: usize) -> PathElem {
298// First, check if we are projecting to a variant.
299match layout.variants {
300 Variants::Multiple { tag_field, .. } => {
301if tag_field.as_usize() == field {
302return match layout.ty.kind() {
303 ty::Adt(def, ..) if def.is_enum() => PathElem::EnumTag,
304 ty::Coroutine(..) => PathElem::CoroutineTag,
305_ => ::rustc_middle::util::bug::bug_fmt(format_args!("non-variant type {0:?}",
layout.ty))bug!("non-variant type {:?}", layout.ty),
306 };
307 }
308 }
309 Variants::Single { .. } | Variants::Empty => {}
310 }
311312// Now we know we are projecting to a field, so figure out which one.
313match layout.ty.kind() {
314// coroutines, closures, and coroutine-closures all have upvars that may be named.
315ty::Closure(def_id, _) | ty::Coroutine(def_id, _) | ty::CoroutineClosure(def_id, _) => {
316let mut name = None;
317// FIXME this should be more descriptive i.e. CapturePlace instead of CapturedVar
318 // https://github.com/rust-lang/project-rfc-2229/issues/46
319if let Some(local_def_id) = def_id.as_local() {
320let captures = self.ecx.tcx.closure_captures(local_def_id);
321if let Some(captured_place) = captures.get(field) {
322// Sometimes the index is beyond the number of upvars (seen
323 // for a coroutine).
324let var_hir_id = captured_place.get_root_variable();
325let node = self.ecx.tcx.hir_node(var_hir_id);
326if let hir::Node::Pat(pat) = node327 && let hir::PatKind::Binding(_, _, ident, _) = pat.kind
328 {
329name = Some(ident.name);
330 }
331 }
332 }
333334 PathElem::CapturedVar(name.unwrap_or_else(|| {
335// Fall back to showing the field index.
336sym::integer(field)
337 }))
338 }
339340// tuples
341ty::Tuple(_) => PathElem::TupleElem(field),
342343// enums
344ty::Adt(def, ..) if def.is_enum() => {
345// we might be projecting *to* a variant, or to a field *in* a variant.
346match layout.variants {
347 Variants::Single { index } => {
348// Inside a variant
349PathElem::Field(def.variant(index).fields[FieldIdx::from_usize(field)].name)
350 }
351 Variants::Empty => {
::core::panicking::panic_fmt(format_args!("there is no field in Variants::Empty types"));
}panic!("there is no field in Variants::Empty types"),
352 Variants::Multiple { .. } => ::rustc_middle::util::bug::bug_fmt(format_args!("we handled variants above"))bug!("we handled variants above"),
353 }
354 }
355356// other ADTs
357ty::Adt(def, _) => {
358 PathElem::Field(def.non_enum_variant().fields[FieldIdx::from_usize(field)].name)
359 }
360361// arrays/slices
362ty::Array(..) | ty::Slice(..) => PathElem::ArrayElem(field),
363364// dyn traits
365ty::Dynamic(..) => {
366match (&field, &0) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val, &*right_val,
::core::option::Option::None);
}
}
};assert_eq!(field, 0);
367 PathElem::DynDowncast368 }
369370// nothing else has an aggregate layout
371_ => ::rustc_middle::util::bug::bug_fmt(format_args!("aggregate_field_path_elem: got non-aggregate type {0:?}",
layout.ty))bug!("aggregate_field_path_elem: got non-aggregate type {:?}", layout.ty),
372 }
373 }
374375fn with_elem<R>(
376&mut self,
377 elem: PathElem,
378 f: impl FnOnce(&mut Self) -> InterpResult<'tcx, R>,
379 ) -> InterpResult<'tcx, R> {
380// Remember the old state
381let path_len = self.path.len();
382// Record new element
383self.path.push(elem);
384// Perform operation
385let r = f(self)?;
386// Undo changes
387self.path.truncate(path_len);
388// Done
389interp_ok(r)
390 }
391392fn read_immediate(
393&self,
394 val: &PlaceTy<'tcx, M::Provenance>,
395 expected: ExpectedKind,
396 ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
397interp_ok({
self.ecx.read_immediate(val).map_err_kind(|e|
{
match e {
Ub(InvalidUninitBytes(_)) => {
{
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: Uninit { expected },
}))
}
}
Unsup(ReadPointerAsInt(_)) => {
{
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: PointerAsInt { expected },
}))
}
}
Unsup(ReadPartialPointer(_)) => {
{
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: PartialPointer,
}))
}
}
e => e,
}
})?
}try_validation!(
398self.ecx.read_immediate(val),
399self.path,
400 Ub(InvalidUninitBytes(_)) =>
401 Uninit { expected },
402// The `Unsup` cases can only occur during CTFE
403Unsup(ReadPointerAsInt(_)) =>
404 PointerAsInt { expected },
405 Unsup(ReadPartialPointer(_)) =>
406 PartialPointer,
407 ))
408 }
409410fn read_scalar(
411&self,
412 val: &PlaceTy<'tcx, M::Provenance>,
413 expected: ExpectedKind,
414 ) -> InterpResult<'tcx, Scalar<M::Provenance>> {
415interp_ok(self.read_immediate(val, expected)?.to_scalar())
416 }
417418fn deref_pointer(
419&mut self,
420 val: &PlaceTy<'tcx, M::Provenance>,
421 expected: ExpectedKind,
422 ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
423// Not using `ecx.deref_pointer` since we want to use our `read_immediate` wrapper.
424let imm = self.read_immediate(val, expected)?;
425// Reset provenance: ensure slice tail metadata does not preserve provenance,
426 // and ensure all pointers do not preserve partial provenance.
427if self.reset_provenance_and_padding {
428if #[allow(non_exhaustive_omitted_patterns)] match imm.layout.backend_repr {
BackendRepr::Scalar(..) => true,
_ => false,
}matches!(imm.layout.backend_repr, BackendRepr::Scalar(..)) {
429// A thin pointer. If it has provenance, we don't have to do anything.
430 // If it does not, ensure we clear the provenance in memory.
431if #[allow(non_exhaustive_omitted_patterns)] match imm.to_scalar() {
Scalar::Int(..) => true,
_ => false,
}matches!(imm.to_scalar(), Scalar::Int(..)) {
432self.ecx.clear_provenance(val)?;
433 }
434 } else {
435// A wide pointer. This means we have to worry both about the pointer itself and the
436 // metadata. We do the lazy thing and just write back the value we got. Just
437 // clearing provenance in a targeted manner would be more efficient, but unless this
438 // is a perf hotspot it's just not worth the effort.
439self.ecx.write_immediate_no_validate(*imm, val)?;
440 }
441// The entire thing is data, not padding.
442self.add_data_range_place(val);
443 }
444// Now turn it into a place.
445self.ecx.ref_to_mplace(&imm)
446 }
447448fn check_wide_ptr_meta(
449&mut self,
450 meta: MemPlaceMeta<M::Provenance>,
451 pointee: TyAndLayout<'tcx>,
452 ) -> InterpResult<'tcx> {
453let tail = self.ecx.tcx.struct_tail_for_codegen(pointee.ty, self.ecx.typing_env);
454match tail.kind() {
455 ty::Dynamic(data, _) => {
456let vtable = meta.unwrap_meta().to_pointer(self.ecx)?;
457// Make sure it is a genuine vtable pointer for the right trait.
458{
self.ecx.get_ptr_vtable_ty(vtable,
Some(data)).map_err_kind(|e|
{
match e {
Ub(DanglingIntPointer { .. } | InvalidVTablePointer(..)) =>
{
{
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: InvalidVTablePtr {
value: ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0}", vtable))
}),
},
}))
}
}
Ub(InvalidVTableTrait { vtable_dyn_type, expected_dyn_type
}) => {
{
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: {
InvalidMetaWrongTrait { vtable_dyn_type, expected_dyn_type }
},
}))
}
}
e => e,
}
})?
};try_validation!(
459self.ecx.get_ptr_vtable_ty(vtable, Some(data)),
460self.path,
461 Ub(DanglingIntPointer{ .. } | InvalidVTablePointer(..)) =>
462 InvalidVTablePtr { value: format!("{vtable}") },
463 Ub(InvalidVTableTrait { vtable_dyn_type, expected_dyn_type }) => {
464 InvalidMetaWrongTrait { vtable_dyn_type, expected_dyn_type }
465 },
466 );
467 }
468 ty::Slice(..) | ty::Str => {
469let _len = meta.unwrap_meta().to_target_usize(self.ecx)?;
470// We do not check that `len * elem_size <= isize::MAX`:
471 // that is only required for references, and there it falls out of the
472 // "dereferenceable" check performed by Stacked Borrows.
473}
474 ty::Foreign(..) => {
475// Unsized, but not wide.
476}
477_ => ::rustc_middle::util::bug::bug_fmt(format_args!("Unexpected unsized type tail: {0:?}",
tail))bug!("Unexpected unsized type tail: {:?}", tail),
478 }
479480interp_ok(())
481 }
482483/// Check a reference or `Box`.
484fn check_safe_pointer(
485&mut self,
486 value: &PlaceTy<'tcx, M::Provenance>,
487 ptr_kind: PointerKind,
488 ) -> InterpResult<'tcx> {
489let place = self.deref_pointer(value, ptr_kind.into())?;
490// Handle wide pointers.
491 // Check metadata early, for better diagnostics
492if place.layout.is_unsized() {
493self.check_wide_ptr_meta(place.meta(), place.layout)?;
494 }
495496// Determine size and alignment of pointee.
497let size_and_align = {
self.ecx.size_and_align_of_val(&place).map_err_kind(|e|
{
match e {
Ub(InvalidMeta(msg)) => {
{
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: match msg {
InvalidMetaKind::SliceTooBig =>
InvalidMetaSliceTooLarge { ptr_kind },
InvalidMetaKind::TooBig => InvalidMetaTooLarge { ptr_kind },
},
}))
}
}
e => e,
}
})?
}try_validation!(
498self.ecx.size_and_align_of_val(&place),
499self.path,
500 Ub(InvalidMeta(msg)) => match msg {
501 InvalidMetaKind::SliceTooBig => InvalidMetaSliceTooLarge { ptr_kind },
502 InvalidMetaKind::TooBig => InvalidMetaTooLarge { ptr_kind },
503 }
504 );
505let (size, align) = size_and_align506// for the purpose of validity, consider foreign types to have
507 // alignment and size determined by the layout (size will be 0,
508 // alignment should take attributes into account).
509.unwrap_or_else(|| (place.layout.size, place.layout.align.abi));
510511if !self.may_dangle {
512// Make sure this is dereferenceable and all.
513514 // Direct call to `check_ptr_access_align` checks alignment even on CTFE machines.
515 // Call `check_ptr_access` to avoid checking alignment here.
516{
self.ecx.check_ptr_access(place.ptr(), size,
CheckInAllocMsg::Dereferenceable).map_err_kind(|e|
{
match e {
Ub(DanglingIntPointer { addr: 0, .. }) => {
{
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: NullPtr { ptr_kind, maybe: false },
}))
}
}
Ub(DanglingIntPointer { addr: i, .. }) => {
{
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: DanglingPtrNoProvenance {
ptr_kind,
pointer: ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0}",
Pointer::<Option<AllocId>>::without_provenance(i)))
}),
},
}))
}
}
Ub(PointerOutOfBounds { .. }) => {
{
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: DanglingPtrOutOfBounds { ptr_kind },
}))
}
}
Ub(PointerUseAfterFree(..)) => {
{
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: DanglingPtrUseAfterFree { ptr_kind },
}))
}
}
e => e,
}
})?
};try_validation!(
517self.ecx.check_ptr_access(
518 place.ptr(),
519 size,
520 CheckInAllocMsg::Dereferenceable, // will anyway be replaced by validity message
521),
522self.path,
523 Ub(DanglingIntPointer { addr: 0, .. }) => NullPtr { ptr_kind, maybe: false },
524 Ub(DanglingIntPointer { addr: i, .. }) => DanglingPtrNoProvenance {
525 ptr_kind,
526 pointer: format!("{}", Pointer::<Option<AllocId>>::without_provenance(i))
527 },
528 Ub(PointerOutOfBounds { .. }) => DanglingPtrOutOfBounds {
529 ptr_kind
530 },
531 Ub(PointerUseAfterFree(..)) => DanglingPtrUseAfterFree {
532 ptr_kind,
533 },
534 );
535 }
536537{
self.ecx.check_ptr_align(place.ptr(),
align).map_err_kind(|e|
{
match e {
Ub(AlignmentCheckFailed(Misalignment { required, has },
_msg)) => {
{
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: UnalignedPtr {
ptr_kind,
required_bytes: required.bytes(),
found_bytes: has.bytes(),
},
}))
}
}
e => e,
}
})?
};try_validation!(
538self.ecx.check_ptr_align(
539 place.ptr(),
540 align,
541 ),
542self.path,
543 Ub(AlignmentCheckFailed(Misalignment { required, has }, _msg)) => UnalignedPtr {
544 ptr_kind,
545 required_bytes: required.bytes(),
546 found_bytes: has.bytes()
547 },
548 );
549550// Make sure this is non-null. This is obviously needed when `may_dangle` is set,
551 // but even if we did check dereferenceability above that would still allow null
552 // pointers if `size` is zero.
553let scalar = Scalar::from_maybe_pointer(place.ptr(), self.ecx);
554if self.ecx.scalar_may_be_null(scalar)? {
555let maybe = !M::Provenance::OFFSET_IS_ADDR && #[allow(non_exhaustive_omitted_patterns)] match scalar {
Scalar::Ptr(..) => true,
_ => false,
}matches!(scalar, Scalar::Ptr(..));
556do yeet {
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: NullPtr { ptr_kind, maybe },
}))
}throw_validation_failure!(self.path, NullPtr { ptr_kind, maybe })557 }
558// Do not allow references to uninhabited types.
559if place.layout.is_uninhabited() {
560let ty = place.layout.ty;
561do yeet {
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: PtrToUninhabited { ptr_kind, ty },
}))
}throw_validation_failure!(self.path, PtrToUninhabited { ptr_kind, ty })562 }
563// Recursive checking
564if let Some(ref_tracking) = self.ref_tracking.as_deref_mut() {
565// Proceed recursively even for ZST, no reason to skip them!
566 // `!` is a ZST and we want to validate it.
567if let Some(ctfe_mode) = self.ctfe_mode {
568let mut skip_recursive_check = false;
569// CTFE imposes restrictions on what references can point to.
570if let Ok((alloc_id, _offset, _prov)) =
571self.ecx.ptr_try_get_alloc_id(place.ptr(), 0)
572 {
573// Everything should be already interned.
574let Some(global_alloc) = self.ecx.tcx.try_get_global_alloc(alloc_id) else {
575if self.ecx.memory.alloc_map.contains_key(&alloc_id) {
576// This can happen when interning didn't complete due to, e.g.
577 // missing `make_global`. This must mean other errors are already
578 // being reported.
579self.ecx.tcx.dcx().delayed_bug(
580"interning did not complete, there should be an error",
581 );
582return interp_ok(());
583 }
584// We can't have *any* references to non-existing allocations in const-eval
585 // as the rest of rustc isn't happy with them... so we throw an error, even
586 // though for zero-sized references this isn't really UB.
587 // A potential future alternative would be to resurrect this as a zero-sized allocation
588 // (which codegen will then compile to an aligned dummy pointer anyway).
589do yeet {
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: DanglingPtrUseAfterFree { ptr_kind },
}))
};throw_validation_failure!(self.path, DanglingPtrUseAfterFree { ptr_kind });
590 };
591let (size, _align) =
592global_alloc.size_and_align(*self.ecx.tcx, self.ecx.typing_env);
593let alloc_actual_mutbl =
594global_alloc.mutability(*self.ecx.tcx, self.ecx.typing_env);
595596match global_alloc {
597 GlobalAlloc::Static(did) => {
598let DefKind::Static { nested, .. } = self.ecx.tcx.def_kind(did) else {
599::rustc_middle::util::bug::bug_fmt(format_args!("impossible case reached"))bug!()600 };
601if !!self.ecx.tcx.is_thread_local_static(did) {
::core::panicking::panic("assertion failed: !self.ecx.tcx.is_thread_local_static(did)")
};assert!(!self.ecx.tcx.is_thread_local_static(did));
602if !self.ecx.tcx.is_static(did) {
::core::panicking::panic("assertion failed: self.ecx.tcx.is_static(did)")
};assert!(self.ecx.tcx.is_static(did));
603match ctfe_mode {
604 CtfeValidationMode::Static { .. }
605 | CtfeValidationMode::Promoted { .. } => {
606// We skip recursively checking other statics. These statics must be sound by
607 // themselves, and the only way to get broken statics here is by using
608 // unsafe code.
609 // The reasons we don't check other statics is twofold. For one, in all
610 // sound cases, the static was already validated on its own, and second, we
611 // trigger cycle errors if we try to compute the value of the other static
612 // and that static refers back to us (potentially through a promoted).
613 // This could miss some UB, but that's fine.
614 // We still walk nested allocations, as they are fundamentally part of this validation run.
615 // This means we will also recurse into nested statics of *other*
616 // statics, even though we do not recurse into other statics directly.
617 // That's somewhat inconsistent but harmless.
618skip_recursive_check = !nested;
619 }
620 CtfeValidationMode::Const { .. } => {
621// If this is mutable memory or an `extern static`, there's no point in checking it -- we'd
622 // just get errors trying to read the value.
623if alloc_actual_mutbl.is_mut()
624 || self.ecx.tcx.is_foreign_item(did)
625 {
626skip_recursive_check = true;
627 }
628 }
629 }
630 }
631_ => (),
632 }
633634// If this allocation has size zero, there is no actual mutability here.
635if size != Size::ZERO {
636// Determine whether this pointer expects to be pointing to something mutable.
637let ptr_expected_mutbl = match ptr_kind {
638 PointerKind::Box => Mutability::Mut,
639 PointerKind::Ref(mutbl) => {
640// We do not take into account interior mutability here since we cannot know if
641 // there really is an `UnsafeCell` inside `Option<UnsafeCell>` -- so we check
642 // that in the recursive descent behind this reference (controlled by
643 // `allow_immutable_unsafe_cell`).
644mutbl645 }
646 };
647// Mutable pointer to immutable memory is no good.
648if ptr_expected_mutbl == Mutability::Mut649 && alloc_actual_mutbl == Mutability::Not650 {
651// This can actually occur with transmutes.
652do yeet {
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: MutableRefToImmutable,
}))
};throw_validation_failure!(self.path, MutableRefToImmutable);
653 }
654 }
655 }
656// Potentially skip recursive check.
657if skip_recursive_check {
658return interp_ok(());
659 }
660 } else {
661// This is not CTFE, so it's Miri with recursive checking.
662 // FIXME: should we skip `UnsafeCell` behind shared references? Currently that is
663 // not needed since validation reads bypass Stacked Borrows and data race checks,
664 // but is that really coherent?
665}
666let path = &self.path;
667ref_tracking.track(place, || {
668// We need to clone the path anyway, make sure it gets created
669 // with enough space for the additional `Deref`.
670let mut new_path = Vec::with_capacity(path.len() + 1);
671new_path.extend(path);
672new_path.push(PathElem::Deref);
673new_path674 });
675 }
676interp_ok(())
677 }
678679/// Check if this is a value of primitive type, and if yes check the validity of the value
680 /// at that type. Return `true` if the type is indeed primitive.
681 ///
682 /// Note that not all of these have `FieldsShape::Primitive`, e.g. wide references.
683fn try_visit_primitive(
684&mut self,
685 value: &PlaceTy<'tcx, M::Provenance>,
686 ) -> InterpResult<'tcx, bool> {
687// Go over all the primitive types
688let ty = value.layout.ty;
689match ty.kind() {
690 ty::Bool => {
691let scalar = self.read_scalar(value, ExpectedKind::Bool)?;
692{
scalar.to_bool().map_err_kind(|e|
{
match e {
Ub(InvalidBool(..)) => {
{
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: ValidationErrorKind::InvalidBool {
value: ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0:x}", scalar))
}),
},
}))
}
}
e => e,
}
})?
};try_validation!(
693 scalar.to_bool(),
694self.path,
695 Ub(InvalidBool(..)) => ValidationErrorKind::InvalidBool {
696 value: format!("{scalar:x}"),
697 }
698 );
699if self.reset_provenance_and_padding {
700self.ecx.clear_provenance(value)?;
701self.add_data_range_place(value);
702 }
703interp_ok(true)
704 }
705 ty::Char => {
706let scalar = self.read_scalar(value, ExpectedKind::Char)?;
707{
scalar.to_char().map_err_kind(|e|
{
match e {
Ub(InvalidChar(..)) => {
{
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: ValidationErrorKind::InvalidChar {
value: ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0:x}", scalar))
}),
},
}))
}
}
e => e,
}
})?
};try_validation!(
708 scalar.to_char(),
709self.path,
710 Ub(InvalidChar(..)) => ValidationErrorKind::InvalidChar {
711 value: format!("{scalar:x}"),
712 }
713 );
714if self.reset_provenance_and_padding {
715self.ecx.clear_provenance(value)?;
716self.add_data_range_place(value);
717 }
718interp_ok(true)
719 }
720 ty::Float(_) | ty::Int(_) | ty::Uint(_) => {
721// NOTE: Keep this in sync with the array optimization for int/float
722 // types below!
723self.read_scalar(
724value,
725if #[allow(non_exhaustive_omitted_patterns)] match ty.kind() {
ty::Float(..) => true,
_ => false,
}matches!(ty.kind(), ty::Float(..)) {
726 ExpectedKind::Float727 } else {
728 ExpectedKind::Int729 },
730 )?;
731if self.reset_provenance_and_padding {
732self.ecx.clear_provenance(value)?;
733self.add_data_range_place(value);
734 }
735interp_ok(true)
736 }
737 ty::RawPtr(..) => {
738let place = self.deref_pointer(value, ExpectedKind::RawPtr)?;
739if place.layout.is_unsized() {
740self.check_wide_ptr_meta(place.meta(), place.layout)?;
741 }
742interp_ok(true)
743 }
744 ty::Ref(_, _ty, mutbl) => {
745self.check_safe_pointer(value, PointerKind::Ref(*mutbl))?;
746interp_ok(true)
747 }
748 ty::FnPtr(..) => {
749let scalar = self.read_scalar(value, ExpectedKind::FnPtr)?;
750751// If we check references recursively, also check that this points to a function.
752if let Some(_) = self.ref_tracking {
753let ptr = scalar.to_pointer(self.ecx)?;
754let _fn = {
self.ecx.get_ptr_fn(ptr).map_err_kind(|e|
{
match e {
Ub(DanglingIntPointer { .. } | InvalidFunctionPointer(..))
=> {
{
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: InvalidFnPtr {
value: ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0}", ptr))
}),
},
}))
}
}
e => e,
}
})?
}try_validation!(
755self.ecx.get_ptr_fn(ptr),
756self.path,
757 Ub(DanglingIntPointer{ .. } | InvalidFunctionPointer(..)) =>
758 InvalidFnPtr { value: format!("{ptr}") },
759 );
760// FIXME: Check if the signature matches
761} else {
762// Otherwise (for standalone Miri and for `-Zextra-const-ub-checks`),
763 // we have to still check it to be non-null.
764if self.ecx.scalar_may_be_null(scalar)? {
765let maybe =
766 !M::Provenance::OFFSET_IS_ADDR && #[allow(non_exhaustive_omitted_patterns)] match scalar {
Scalar::Ptr(..) => true,
_ => false,
}matches!(scalar, Scalar::Ptr(..));
767do yeet {
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: NullFnPtr { maybe },
}))
};throw_validation_failure!(self.path, NullFnPtr { maybe });
768 }
769 }
770if self.reset_provenance_and_padding {
771// Make sure we do not preserve partial provenance. This matches the thin
772 // pointer handling in `deref_pointer`.
773if #[allow(non_exhaustive_omitted_patterns)] match scalar {
Scalar::Int(..) => true,
_ => false,
}matches!(scalar, Scalar::Int(..)) {
774self.ecx.clear_provenance(value)?;
775 }
776self.add_data_range_place(value);
777 }
778interp_ok(true)
779 }
780 ty::Never => do yeet {
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: NeverVal,
}))
}throw_validation_failure!(self.path, NeverVal),
781 ty::Foreign(..) | ty::FnDef(..) => {
782// Nothing to check.
783interp_ok(true)
784 }
785 ty::UnsafeBinder(_) => {
::core::panicking::panic_fmt(format_args!("not yet implemented: {0}",
format_args!("FIXME(unsafe_binder)")));
}todo!("FIXME(unsafe_binder)"),
786// The above should be all the primitive types. The rest is compound, we
787 // check them by visiting their fields/variants.
788ty::Adt(..)
789 | ty::Tuple(..)
790 | ty::Array(..)
791 | ty::Slice(..)
792 | ty::Str793 | ty::Dynamic(..)
794 | ty::Closure(..)
795 | ty::Pat(..)
796 | ty::CoroutineClosure(..)
797 | ty::Coroutine(..) => interp_ok(false),
798// Some types only occur during typechecking, they have no layout.
799 // We should not see them here and we could not check them anyway.
800ty::Error(_)
801 | ty::Infer(..)
802 | ty::Placeholder(..)
803 | ty::Bound(..)
804 | ty::Param(..)
805 | ty::Alias(..)
806 | ty::CoroutineWitness(..) => ::rustc_middle::util::bug::bug_fmt(format_args!("Encountered invalid type {0:?}",
ty))bug!("Encountered invalid type {:?}", ty),
807 }
808 }
809810fn visit_scalar(
811&mut self,
812 scalar: Scalar<M::Provenance>,
813 scalar_layout: ScalarAbi,
814 ) -> InterpResult<'tcx> {
815let size = scalar_layout.size(self.ecx);
816let valid_range = scalar_layout.valid_range(self.ecx);
817let WrappingRange { start, end } = valid_range;
818let max_value = size.unsigned_int_max();
819if !(end <= max_value) {
::core::panicking::panic("assertion failed: end <= max_value")
};assert!(end <= max_value);
820let bits = match scalar.try_to_scalar_int() {
821Ok(int) => int.to_bits(size),
822Err(_) => {
823// So this is a pointer then, and casting to an int failed.
824 // Can only happen during CTFE.
825 // We support 2 kinds of ranges here: full range, and excluding zero.
826if start == 1 && end == max_value {
827// Only null is the niche. So make sure the ptr is NOT null.
828if self.ecx.scalar_may_be_null(scalar)? {
829do yeet {
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: NonnullPtrMaybeNull,
}))
}throw_validation_failure!(self.path, NonnullPtrMaybeNull)830 } else {
831return interp_ok(());
832 }
833 } else if scalar_layout.is_always_valid(self.ecx) {
834// Easy. (This is reachable if `enforce_number_validity` is set.)
835return interp_ok(());
836 } else {
837// Conservatively, we reject, because the pointer *could* have a bad
838 // value.
839do yeet {
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: PtrOutOfRange { range: valid_range, max_value },
}))
}throw_validation_failure!(
840self.path,
841 PtrOutOfRange { range: valid_range, max_value }
842 )843 }
844 }
845 };
846// Now compare.
847if valid_range.contains(bits) {
848interp_ok(())
849 } else {
850do yeet {
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: OutOfRange {
value: ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0}", bits))
}),
range: valid_range,
max_value,
},
}))
}throw_validation_failure!(
851self.path,
852 OutOfRange { value: format!("{bits}"), range: valid_range, max_value }
853 )854 }
855 }
856857fn in_mutable_memory(&self, val: &PlaceTy<'tcx, M::Provenance>) -> bool {
858if true {
if !self.ctfe_mode.is_some() {
::core::panicking::panic("assertion failed: self.ctfe_mode.is_some()")
};
};debug_assert!(self.ctfe_mode.is_some());
859if let Some(mplace) = val.as_mplace_or_local().left() {
860if let Some(alloc_id) = mplace.ptr().provenance.and_then(|p| p.get_alloc_id()) {
861let tcx = *self.ecx.tcx;
862// Everything must be already interned.
863let mutbl = tcx.global_alloc(alloc_id).mutability(tcx, self.ecx.typing_env);
864if let Some((_, alloc)) = self.ecx.memory.alloc_map.get(alloc_id) {
865match (&alloc.mutability, &mutbl) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val, &*right_val,
::core::option::Option::None);
}
}
};assert_eq!(alloc.mutability, mutbl);
866 }
867mutbl.is_mut()
868 } else {
869// No memory at all.
870false
871}
872 } else {
873// A local variable -- definitely mutable.
874true
875}
876 }
877878/// Add the given pointer-length pair to the "data" range of this visit.
879fn add_data_range(&mut self, ptr: Pointer<Option<M::Provenance>>, size: Size) {
880if let Some(data_bytes) = self.data_bytes.as_mut() {
881// We only have to store the offset, the rest is the same for all pointers here.
882 // The logic is agnostic to whether the offset is relative or absolute as long as
883 // it is consistent.
884let (_prov, offset) = ptr.into_raw_parts();
885// Add this.
886data_bytes.add_range(offset, size);
887 };
888 }
889890/// Add the entire given place to the "data" range of this visit.
891fn add_data_range_place(&mut self, place: &PlaceTy<'tcx, M::Provenance>) {
892// Only sized places can be added this way.
893if true {
if !place.layout.is_sized() {
::core::panicking::panic("assertion failed: place.layout.is_sized()")
};
};debug_assert!(place.layout.is_sized());
894if let Some(data_bytes) = self.data_bytes.as_mut() {
895let offset = Self::data_range_offset(self.ecx, place);
896data_bytes.add_range(offset, place.layout.size);
897 }
898 }
899900/// Convert a place into the offset it starts at, for the purpose of data_range tracking.
901 /// Must only be called if `data_bytes` is `Some(_)`.
902fn data_range_offset(ecx: &InterpCx<'tcx, M>, place: &PlaceTy<'tcx, M::Provenance>) -> Size {
903// The presence of `data_bytes` implies that our place is in memory.
904let ptr = ecx905 .place_to_op(place)
906 .expect("place must be in memory")
907 .as_mplace_or_imm()
908 .expect_left("place must be in memory")
909 .ptr();
910let (_prov, offset) = ptr.into_raw_parts();
911offset912 }
913914fn reset_padding(&mut self, place: &PlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
915let Some(data_bytes) = self.data_bytes.as_mut() else { return interp_ok(()) };
916// Our value must be in memory, otherwise we would not have set up `data_bytes`.
917let mplace = self.ecx.force_allocation(place)?;
918// Determine starting offset and size.
919let (_prov, start_offset) = mplace.ptr().into_raw_parts();
920let (size, _align) = self921 .ecx
922 .size_and_align_of_val(&mplace)?
923.unwrap_or((mplace.layout.size, mplace.layout.align.abi));
924// If there is no padding at all, we can skip the rest: check for
925 // a single data range covering the entire value.
926if data_bytes.0 == &[(start_offset, size)] {
927return interp_ok(());
928 }
929// Get a handle for the allocation. Do this only once, to avoid looking up the same
930 // allocation over and over again. (Though to be fair, iterating the value already does
931 // exactly that.)
932let Some(mut alloc) = self.ecx.get_ptr_alloc_mut(mplace.ptr(), size)? else {
933// A ZST, no padding to clear.
934return interp_ok(());
935 };
936// Add a "finalizer" data range at the end, so that the iteration below finds all gaps
937 // between ranges.
938data_bytes.0.push((start_offset + size, Size::ZERO));
939// Iterate, and reset gaps.
940let mut padding_cleared_until = start_offset;
941for &(offset, size) in data_bytes.0.iter() {
942if !(offset >= padding_cleared_until) {
{
::core::panicking::panic_fmt(format_args!("reset_padding on {0}: previous field ended at offset {1}, next field starts at {2} (and has a size of {3} bytes)",
mplace.layout.ty,
(padding_cleared_until - start_offset).bytes(),
(offset - start_offset).bytes(), size.bytes()));
}
};assert!(
943 offset >= padding_cleared_until,
944"reset_padding on {}: previous field ended at offset {}, next field starts at {} (and has a size of {} bytes)",
945 mplace.layout.ty,
946 (padding_cleared_until - start_offset).bytes(),
947 (offset - start_offset).bytes(),
948 size.bytes(),
949 );
950if offset > padding_cleared_until {
951// We found padding. Adjust the range to be relative to `alloc`, and make it uninit.
952let padding_start = padding_cleared_until - start_offset;
953let padding_size = offset - padding_cleared_until;
954let range = alloc_range(padding_start, padding_size);
955{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/validity.rs:955",
"rustc_const_eval::interpret::validity",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/validity.rs"),
::tracing_core::__macro_support::Option::Some(955u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::validity"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("reset_padding on {0}: resetting padding range {1:?}",
mplace.layout.ty, range) as &dyn Value))])
});
} else { ; }
};trace!("reset_padding on {}: resetting padding range {range:?}", mplace.layout.ty);
956 alloc.write_uninit(range);
957 }
958 padding_cleared_until = offset + size;
959 }
960if !(padding_cleared_until == start_offset + size) {
::core::panicking::panic("assertion failed: padding_cleared_until == start_offset + size")
};assert!(padding_cleared_until == start_offset + size);
961interp_ok(())
962 }
963964/// Computes the data range of this union type:
965 /// which bytes are inside a field (i.e., not padding.)
966fn union_data_range<'e>(
967 ecx: &'e mut InterpCx<'tcx, M>,
968 layout: TyAndLayout<'tcx>,
969 ) -> Cow<'e, RangeSet> {
970if !layout.ty.is_union() {
::core::panicking::panic("assertion failed: layout.ty.is_union()")
};assert!(layout.ty.is_union());
971if !layout.is_sized() {
{
::core::panicking::panic_fmt(format_args!("there are no unsized unions"));
}
};assert!(layout.is_sized(), "there are no unsized unions");
972let layout_cx = LayoutCx::new(*ecx.tcx, ecx.typing_env);
973return M::cached_union_data_range(ecx, layout.ty, || {
974let mut out = RangeSet(Vec::new());
975union_data_range_uncached(&layout_cx, layout, Size::ZERO, &mut out);
976out977 });
978979/// Helper for recursive traversal: add data ranges of the given type to `out`.
980fn union_data_range_uncached<'tcx>(
981 cx: &LayoutCx<'tcx>,
982 layout: TyAndLayout<'tcx>,
983 base_offset: Size,
984 out: &mut RangeSet,
985 ) {
986// If this is a ZST, we don't contain any data. In particular, this helps us to quickly
987 // skip over huge arrays of ZST.
988if layout.is_zst() {
989return;
990 }
991// Just recursively add all the fields of everything to the output.
992match &layout.fields {
993 FieldsShape::Primitive => {
994out.add_range(base_offset, layout.size);
995 }
996&FieldsShape::Union(fields) => {
997// Currently, all fields start at offset 0 (relative to `base_offset`).
998for field in 0..fields.get() {
999let field = layout.field(cx, field);
1000 union_data_range_uncached(cx, field, base_offset, out);
1001 }
1002 }
1003&FieldsShape::Array { stride, count } => {
1004let elem = layout.field(cx, 0);
10051006// Fast-path for large arrays of simple types that do not contain any padding.
1007if elem.backend_repr.is_scalar() {
1008out.add_range(base_offset, elem.size * count);
1009 } else {
1010for idx in 0..count {
1011// This repeats the same computation for every array element... but the alternative
1012 // is to allocate temporary storage for a dedicated `out` set for the array element,
1013 // and replicating that N times. Is that better?
1014union_data_range_uncached(cx, elem, base_offset + idx * stride, out);
1015 }
1016 }
1017 }
1018 FieldsShape::Arbitrary { offsets, .. } => {
1019for (field, &offset) in offsets.iter_enumerated() {
1020let field = layout.field(cx, field.as_usize());
1021 union_data_range_uncached(cx, field, base_offset + offset, out);
1022 }
1023 }
1024 }
1025// Don't forget potential other variants.
1026match &layout.variants {
1027 Variants::Single { .. } | Variants::Empty => {
1028// Fully handled above.
1029}
1030 Variants::Multiple { variants, .. } => {
1031for variant in variants.indices() {
1032let variant = layout.for_variant(cx, variant);
1033 union_data_range_uncached(cx, variant, base_offset, out);
1034 }
1035 }
1036 }
1037 }
1038 }
1039}
10401041impl<'rt, 'tcx, M: Machine<'tcx>> ValueVisitor<'tcx, M> for ValidityVisitor<'rt, 'tcx, M> {
1042type V = PlaceTy<'tcx, M::Provenance>;
10431044#[inline(always)]
1045fn ecx(&self) -> &InterpCx<'tcx, M> {
1046self.ecx
1047 }
10481049fn read_discriminant(
1050&mut self,
1051 val: &PlaceTy<'tcx, M::Provenance>,
1052 ) -> InterpResult<'tcx, VariantIdx> {
1053self.with_elem(PathElem::EnumTag, move |this| {
1054interp_ok({
this.ecx.read_discriminant(val).map_err_kind(|e|
{
match e {
Ub(InvalidTag(val)) => {
{
let where_ = &this.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: InvalidEnumTag {
value: ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0:x}", val))
}),
},
}))
}
}
Ub(UninhabitedEnumVariantRead(_)) => {
{
let where_ = &this.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: UninhabitedEnumVariant,
}))
}
}
e => e,
}
})?
}try_validation!(
1055 this.ecx.read_discriminant(val),
1056 this.path,
1057 Ub(InvalidTag(val)) => InvalidEnumTag {
1058 value: format!("{val:x}"),
1059 },
1060 Ub(UninhabitedEnumVariantRead(_)) => UninhabitedEnumVariant,
1061// Uninit / bad provenance are not possible since the field was already previously
1062 // checked at its integer type.
1063))
1064 })
1065 }
10661067#[inline]
1068fn visit_field(
1069&mut self,
1070 old_val: &PlaceTy<'tcx, M::Provenance>,
1071 field: usize,
1072 new_val: &PlaceTy<'tcx, M::Provenance>,
1073 ) -> InterpResult<'tcx> {
1074let elem = self.aggregate_field_path_elem(old_val.layout, field);
1075self.with_elem(elem, move |this| this.visit_value(new_val))
1076 }
10771078#[inline]
1079fn visit_variant(
1080&mut self,
1081 old_val: &PlaceTy<'tcx, M::Provenance>,
1082 variant_id: VariantIdx,
1083 new_val: &PlaceTy<'tcx, M::Provenance>,
1084 ) -> InterpResult<'tcx> {
1085let name = match old_val.layout.ty.kind() {
1086 ty::Adt(adt, _) => PathElem::Variant(adt.variant(variant_id).name),
1087// Coroutines also have variants
1088ty::Coroutine(..) => PathElem::CoroutineState(variant_id),
1089_ => ::rustc_middle::util::bug::bug_fmt(format_args!("Unexpected type with variant: {0:?}",
old_val.layout.ty))bug!("Unexpected type with variant: {:?}", old_val.layout.ty),
1090 };
1091self.with_elem(name, move |this| this.visit_value(new_val))
1092 }
10931094#[inline(always)]
1095fn visit_union(
1096&mut self,
1097 val: &PlaceTy<'tcx, M::Provenance>,
1098 _fields: NonZero<usize>,
1099 ) -> InterpResult<'tcx> {
1100// Special check for CTFE validation, preventing `UnsafeCell` inside unions in immutable memory.
1101if self.ctfe_mode.is_some_and(|c| !c.allow_immutable_unsafe_cell()) {
1102// Unsized unions are currently not a thing, but let's keep this code consistent with
1103 // the check in `visit_value`.
1104let zst = self.ecx.size_and_align_of_val(val)?.is_some_and(|(s, _a)| s.bytes() == 0);
1105if !zst && !val.layout.ty.is_freeze(*self.ecx.tcx, self.ecx.typing_env) {
1106if !self.in_mutable_memory(val) {
1107do yeet {
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: UnsafeCellInImmutable,
}))
};throw_validation_failure!(self.path, UnsafeCellInImmutable);
1108 }
1109 }
1110 }
1111if self.reset_provenance_and_padding
1112 && let Some(data_bytes) = self.data_bytes.as_mut()
1113 {
1114let base_offset = Self::data_range_offset(self.ecx, val);
1115// Determine and add data range for this union.
1116let union_data_range = Self::union_data_range(self.ecx, val.layout);
1117for &(offset, size) in union_data_range.0.iter() {
1118 data_bytes.add_range(base_offset + offset, size);
1119 }
1120 }
1121interp_ok(())
1122 }
11231124#[inline]
1125fn visit_box(
1126&mut self,
1127 _box_ty: Ty<'tcx>,
1128 val: &PlaceTy<'tcx, M::Provenance>,
1129 ) -> InterpResult<'tcx> {
1130self.check_safe_pointer(val, PointerKind::Box)?;
1131interp_ok(())
1132 }
11331134#[inline]
1135fn visit_value(&mut self, val: &PlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
1136{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/validity.rs:1136",
"rustc_const_eval::interpret::validity",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/validity.rs"),
::tracing_core::__macro_support::Option::Some(1136u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::validity"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("visit_value: {0:?}, {1:?}",
*val, val.layout) as &dyn Value))])
});
} else { ; }
};trace!("visit_value: {:?}, {:?}", *val, val.layout);
11371138// Check primitive types -- the leaves of our recursive descent.
1139 // This is called even for enum discriminants (which are "fields" of their enum),
1140 // so for integer-typed discriminants the provenance reset will happen here.
1141 // We assume that the Scalar validity range does not restrict these values
1142 // any further than `try_visit_primitive` does!
1143if self.try_visit_primitive(val)? {
1144return interp_ok(());
1145 }
11461147// Special check preventing `UnsafeCell` in the inner part of constants
1148if self.ctfe_mode.is_some_and(|c| !c.allow_immutable_unsafe_cell()) {
1149// Exclude ZST values. We need to compute the dynamic size/align to properly
1150 // handle slices and trait objects.
1151let zst = self.ecx.size_and_align_of_val(val)?.is_some_and(|(s, _a)| s.bytes() == 0);
1152if !zst1153 && let Some(def) = val.layout.ty.ty_adt_def()
1154 && def.is_unsafe_cell()
1155 {
1156if !self.in_mutable_memory(val) {
1157do yeet {
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: UnsafeCellInImmutable,
}))
};throw_validation_failure!(self.path, UnsafeCellInImmutable);
1158 }
1159 }
1160 }
11611162// Recursively walk the value at its type. Apply optimizations for some large types.
1163match val.layout.ty.kind() {
1164 ty::Str => {
1165let mplace = val.assert_mem_place(); // strings are unsized and hence never immediate
1166let len = mplace.len(self.ecx)?;
1167{
self.ecx.read_bytes_ptr_strip_provenance(mplace.ptr(),
Size::from_bytes(len)).map_err_kind(|e|
{
match e {
Ub(InvalidUninitBytes(..)) => {
{
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: Uninit { expected: ExpectedKind::Str },
}))
}
}
Unsup(ReadPointerAsInt(_)) => {
{
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: PointerAsInt { expected: ExpectedKind::Str },
}))
}
}
e => e,
}
})?
};try_validation!(
1168self.ecx.read_bytes_ptr_strip_provenance(mplace.ptr(), Size::from_bytes(len)),
1169self.path,
1170 Ub(InvalidUninitBytes(..)) => Uninit { expected: ExpectedKind::Str },
1171 Unsup(ReadPointerAsInt(_)) => PointerAsInt { expected: ExpectedKind::Str }
1172 );
1173 }
1174 ty::Array(tys, ..) | ty::Slice(tys)
1175// This optimization applies for types that can hold arbitrary non-provenance bytes (such as
1176 // integer and floating point types).
1177 // FIXME(wesleywiser) This logic could be extended further to arbitrary structs or
1178 // tuples made up of integer/floating point types or inhabited ZSTs with no padding.
1179if #[allow(non_exhaustive_omitted_patterns)] match tys.kind() {
ty::Int(..) | ty::Uint(..) | ty::Float(..) => true,
_ => false,
}matches!(tys.kind(), ty::Int(..) | ty::Uint(..) | ty::Float(..))1180 =>
1181 {
1182let expected = if tys.is_integral() { ExpectedKind::Int } else { ExpectedKind::Float };
1183// Optimized handling for arrays of integer/float type.
11841185 // This is the length of the array/slice.
1186let len = val.len(self.ecx)?;
1187// This is the element type size.
1188let layout = self.ecx.layout_of(*tys)?;
1189// This is the size in bytes of the whole array. (This checks for overflow.)
1190let size = layout.size * len;
1191// If the size is 0, there is nothing to check.
1192 // (`size` can only be 0 if `len` is 0, and empty arrays are always valid.)
1193if size == Size::ZERO {
1194return interp_ok(());
1195 }
1196// Now that we definitely have a non-ZST array, we know it lives in memory -- except it may
1197 // be an uninitialized local variable, those are also "immediate".
1198let mplace = match val.to_op(self.ecx)?.as_mplace_or_imm() {
1199Left(mplace) => mplace,
1200Right(imm) => match *imm {
1201 Immediate::Uninit =>
1202do yeet {
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: Uninit { expected },
}))
}throw_validation_failure!(self.path, Uninit { expected }),
1203 Immediate::Scalar(..) | Immediate::ScalarPair(..) =>
1204::rustc_middle::util::bug::bug_fmt(format_args!("arrays/slices can never have Scalar/ScalarPair layout"))bug!("arrays/slices can never have Scalar/ScalarPair layout"),
1205 }
1206 };
12071208// Optimization: we just check the entire range at once.
1209 // NOTE: Keep this in sync with the handling of integer and float
1210 // types above, in `visit_primitive`.
1211 // No need for an alignment check here, this is not an actual memory access.
1212let alloc = self.ecx.get_ptr_alloc(mplace.ptr(), size)?.expect("we already excluded size 0");
12131214alloc.get_bytes_strip_provenance().map_err_kind(|kind| {
1215// Some error happened, try to provide a more detailed description.
1216 // For some errors we might be able to provide extra information.
1217 // (This custom logic does not fit the `try_validation!` macro.)
1218match kind {
1219Ub(InvalidUninitBytes(Some((_alloc_id, access)))) | Unsup(ReadPointerAsInt(Some((_alloc_id, access)))) => {
1220// Some byte was uninitialized, determine which
1221 // element that byte belongs to so we can
1222 // provide an index.
1223let i = usize::try_from(
1224access.bad.start.bytes() / layout.size.bytes(),
1225 )
1226 .unwrap();
1227self.path.push(PathElem::ArrayElem(i));
12281229if #[allow(non_exhaustive_omitted_patterns)] match kind {
Ub(InvalidUninitBytes(_)) => true,
_ => false,
}matches!(kind, Ub(InvalidUninitBytes(_))) {
1230{
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: Uninit { expected },
}))
}err_validation_failure!(self.path, Uninit { expected })1231 } else {
1232{
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: PointerAsInt { expected },
}))
}err_validation_failure!(self.path, PointerAsInt { expected })1233 }
1234 }
12351236// Propagate upwards (that will also check for unexpected errors).
1237err => err,
1238 }
1239 })?;
12401241// Don't forget that these are all non-pointer types, and thus do not preserve
1242 // provenance.
1243if self.reset_provenance_and_padding {
1244// We can't share this with above as above, we might be looking at read-only memory.
1245let mut alloc = self.ecx.get_ptr_alloc_mut(mplace.ptr(), size)?.expect("we already excluded size 0");
1246alloc.clear_provenance();
1247// Also, mark this as containing data, not padding.
1248self.add_data_range(mplace.ptr(), size);
1249 }
1250 }
1251// Fast path for arrays and slices of ZSTs. We only need to check a single ZST element
1252 // of an array and not all of them, because there's only a single value of a specific
1253 // ZST type, so either validation fails for all elements or none.
1254ty::Array(tys, ..) | ty::Slice(tys) if self.ecx.layout_of(*tys)?.is_zst() => {
1255// Validate just the first element (if any).
1256if val.len(self.ecx)? > 0 {
1257self.visit_field(val, 0, &self.ecx.project_index(val, 0)?)?;
1258 }
1259 }
1260 ty::Pat(base, pat) => {
1261// First check that the base type is valid
1262self.visit_value(&val.transmute(self.ecx.layout_of(*base)?, self.ecx)?)?;
1263// When you extend this match, make sure to also add tests to
1264 // tests/ui/type/pattern_types/validity.rs((
1265match **pat {
1266// Range and non-null patterns are precisely reflected into `valid_range` and thus
1267 // handled fully by `visit_scalar` (called below).
1268ty::PatternKind::Range { .. } => {},
1269 ty::PatternKind::NotNull => {},
12701271// FIXME(pattern_types): check that the value is covered by one of the variants.
1272 // For now, we rely on layout computation setting the scalar's `valid_range` to
1273 // match the pattern. However, this cannot always work; the layout may
1274 // pessimistically cover actually illegal ranges and Miri would miss that UB.
1275 // The consolation here is that codegen also will miss that UB, so at least
1276 // we won't see optimizations actually breaking such programs.
1277ty::PatternKind::Or(_patterns) => {}
1278 }
1279 }
1280 ty::Adt(adt, _) if adt.is_maybe_dangling() => {
1281let old_may_dangle = mem::replace(&mut self.may_dangle, true);
12821283let inner = self.ecx.project_field(val, FieldIdx::ZERO)?;
1284self.visit_value(&inner)?;
12851286self.may_dangle = old_may_dangle;
1287 }
1288_ => {
1289// default handler
1290{
self.walk_value(val).map_err_kind(|e|
{
match e {
Ub(InvalidVTableTrait { vtable_dyn_type, expected_dyn_type
}) => {
{
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: {
InvalidMetaWrongTrait { vtable_dyn_type, expected_dyn_type }
},
}))
}
}
e => e,
}
})?
};try_validation!(
1291self.walk_value(val),
1292self.path,
1293// It's not great to catch errors here, since we can't give a very good path,
1294 // but it's better than ICEing.
1295Ub(InvalidVTableTrait { vtable_dyn_type, expected_dyn_type }) => {
1296 InvalidMetaWrongTrait { vtable_dyn_type, expected_dyn_type }
1297 },
1298 );
1299 }
1300 }
13011302// *After* all of this, check further information stored in the layout. We need to check
1303 // this to handle types like `NonNull` where the `Scalar` info is more restrictive than what
1304 // the fields say (`rustc_layout_scalar_valid_range_start`). But in most cases, this will
1305 // just propagate what the fields say, and then we want the error to point at the field --
1306 // so, we first recurse, then we do this check.
1307 //
1308 // FIXME: We could avoid some redundant checks here. For newtypes wrapping
1309 // scalars, we do the same check on every "level" (e.g., first we check
1310 // MyNewtype and then the scalar in there).
1311if val.layout.is_uninhabited() {
1312let ty = val.layout.ty;
1313do yeet {
let where_ = &self.path;
let path =
if !where_.is_empty() {
let mut path = String::new();
write_path(&mut path, where_);
Some(path)
} else { None };
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError(ValidationErrorInfo {
path,
kind: UninhabitedVal { ty },
}))
};throw_validation_failure!(self.path, UninhabitedVal { ty });
1314 }
1315match val.layout.backend_repr {
1316 BackendRepr::Scalar(scalar_layout) => {
1317if !scalar_layout.is_uninit_valid() {
1318// There is something to check here.
1319let scalar = self.read_scalar(val, ExpectedKind::InitScalar)?;
1320self.visit_scalar(scalar, scalar_layout)?;
1321 }
1322 }
1323 BackendRepr::ScalarPair(a_layout, b_layout) => {
1324// We can only proceed if *both* scalars need to be initialized.
1325 // FIXME: find a way to also check ScalarPair when one side can be uninit but
1326 // the other must be init.
1327if !a_layout.is_uninit_valid() && !b_layout.is_uninit_valid() {
1328let (a, b) =
1329self.read_immediate(val, ExpectedKind::InitScalar)?.to_scalar_pair();
1330self.visit_scalar(a, a_layout)?;
1331self.visit_scalar(b, b_layout)?;
1332 }
1333 }
1334 BackendRepr::SimdVector { .. } | BackendRepr::SimdScalableVector { .. } => {
1335// No checks here, we assume layout computation gets this right.
1336 // (This is harder to check since Miri does not represent these as `Immediate`. We
1337 // also cannot use field projections since this might be a newtype around a vector.)
1338}
1339 BackendRepr::Memory { .. } => {
1340// Nothing to do.
1341}
1342 }
13431344interp_ok(())
1345 }
1346}
13471348impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
1349fn validate_operand_internal(
1350&mut self,
1351 val: &PlaceTy<'tcx, M::Provenance>,
1352 path: Vec<PathElem>,
1353 ref_tracking: Option<&mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Vec<PathElem>>>,
1354 ctfe_mode: Option<CtfeValidationMode>,
1355 reset_provenance_and_padding: bool,
1356 ) -> InterpResult<'tcx> {
1357{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/validity.rs:1357",
"rustc_const_eval::interpret::validity",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/validity.rs"),
::tracing_core::__macro_support::Option::Some(1357u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::validity"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("validate_operand_internal: {0:?}, {1:?}",
*val, val.layout.ty) as &dyn Value))])
});
} else { ; }
};trace!("validate_operand_internal: {:?}, {:?}", *val, val.layout.ty);
13581359// Run the visitor.
1360self.run_for_validation_mut(|ecx| {
1361let reset_padding = reset_provenance_and_padding && {
1362// Check if `val` is actually stored in memory. If not, padding is not even
1363 // represented and we need not reset it.
1364ecx.place_to_op(val)?.as_mplace_or_imm().is_left()
1365 };
1366let mut v = ValidityVisitor {
1367path,
1368ref_tracking,
1369ctfe_mode,
1370ecx,
1371reset_provenance_and_padding,
1372 data_bytes: reset_padding.then_some(RangeSet(Vec::new())),
1373 may_dangle: false,
1374 };
1375v.visit_value(val)?;
1376v.reset_padding(val)?;
1377interp_ok(())
1378 })
1379 .map_err_info(|err| {
1380if !#[allow(non_exhaustive_omitted_patterns)] match err.kind() {
::rustc_middle::mir::interpret::InterpErrorKind::UndefinedBehavior(::rustc_middle::mir::interpret::UndefinedBehaviorInfo::ValidationError {
.. }) | InterpErrorKind::InvalidProgram(_) |
InterpErrorKind::Unsupported(UnsupportedOpInfo::ExternTypeField) =>
true,
_ => false,
}matches!(
1381 err.kind(),
1382err_ub!(ValidationError { .. })
1383 | InterpErrorKind::InvalidProgram(_)
1384 | InterpErrorKind::Unsupported(UnsupportedOpInfo::ExternTypeField)
1385 ) {
1386::rustc_middle::util::bug::bug_fmt(format_args!("Unexpected error during validation: {0}",
format_interp_error(self.tcx.dcx(), err)));bug!(
1387"Unexpected error during validation: {}",
1388 format_interp_error(self.tcx.dcx(), err)
1389 );
1390 }
1391err1392 })
1393 }
13941395/// This function checks the data at `val` to be const-valid.
1396 /// `val` is assumed to cover valid memory if it is an indirect operand.
1397 /// It will error if the bits at the destination do not match the ones described by the layout.
1398 ///
1399 /// `ref_tracking` is used to record references that we encounter so that they
1400 /// can be checked recursively by an outside driving loop.
1401 ///
1402 /// `constant` controls whether this must satisfy the rules for constants:
1403 /// - no pointers to statics.
1404 /// - no `UnsafeCell` or non-ZST `&mut`.
1405#[inline(always)]
1406pub(crate) fn const_validate_operand(
1407&mut self,
1408 val: &PlaceTy<'tcx, M::Provenance>,
1409 path: Vec<PathElem>,
1410 ref_tracking: &mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Vec<PathElem>>,
1411 ctfe_mode: CtfeValidationMode,
1412 ) -> InterpResult<'tcx> {
1413self.validate_operand_internal(
1414val,
1415path,
1416Some(ref_tracking),
1417Some(ctfe_mode),
1418/*reset_provenance*/ false,
1419 )
1420 }
14211422/// This function checks the data at `val` to be runtime-valid.
1423 /// `val` is assumed to cover valid memory if it is an indirect operand.
1424 /// It will error if the bits at the destination do not match the ones described by the layout.
1425#[inline(always)]
1426pub fn validate_operand(
1427&mut self,
1428 val: &PlaceTy<'tcx, M::Provenance>,
1429 recursive: bool,
1430 reset_provenance_and_padding: bool,
1431 ) -> InterpResult<'tcx> {
1432let _trace = <M as
crate::interpret::Machine>::enter_trace_span(||
{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("validate_operand",
"rustc_const_eval::interpret::validity",
::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/validity.rs"),
::tracing_core::__macro_support::Option::Some(1432u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::validity"),
::tracing_core::field::FieldSet::new(&["recursive",
"reset_provenance_and_padding", "val"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::SPAN)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let mut interest = ::tracing::subscriber::Interest::never();
if ::tracing::Level::INFO <=
::tracing::level_filters::STATIC_MAX_LEVEL &&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{ interest = __CALLSITE.interest(); !interest.is_never() }
&&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest) {
let meta = __CALLSITE.metadata();
::tracing::Span::new(meta,
&{
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = meta.fields().iter();
meta.fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&recursive as
&dyn Value)),
(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&reset_provenance_and_padding
as &dyn Value)),
(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&debug(&val) as
&dyn Value))])
})
} else {
let span =
::tracing::__macro_support::__disabled_span(__CALLSITE.metadata());
{};
span
}
})enter_trace_span!(
1433 M,
1434"validate_operand",
1435 recursive,
1436 reset_provenance_and_padding,
1437?val,
1438 );
14391440// Note that we *could* actually be in CTFE here with `-Zextra-const-ub-checks`, but it's
1441 // still correct to not use `ctfe_mode`: that mode is for validation of the final constant
1442 // value, it rules out things like `UnsafeCell` in awkward places.
1443if !recursive {
1444return self.validate_operand_internal(
1445val,
1446::alloc::vec::Vec::new()vec![],
1447None,
1448None,
1449reset_provenance_and_padding,
1450 );
1451 }
1452// Do a recursive check.
1453let mut ref_tracking = RefTracking::empty();
1454self.validate_operand_internal(
1455val,
1456::alloc::vec::Vec::new()vec![],
1457Some(&mut ref_tracking),
1458None,
1459reset_provenance_and_padding,
1460 )?;
1461while let Some((mplace, path)) = ref_tracking.todo.pop() {
1462// Things behind reference do *not* have the provenance reset.
1463self.validate_operand_internal(
1464&mplace.into(),
1465 path,
1466Some(&mut ref_tracking),
1467None,
1468/*reset_provenance_and_padding*/ false,
1469 )?;
1470 }
1471interp_ok(())
1472 }
1473}