//! Check the validity invariant of a given value, and tell the user
//! where in the value it got violated.
//! In const context, this goes even further and tries to approximate const safety.
//! That's useful because it means other passes (e.g. promotion) can rely on `const`s
//! to be const-safe.

use std::borrow::Cow;
use std::fmt::Write;
use std::hash::Hash;
use std::num::NonZero;

use either::{Left, Right};
use hir::def::DefKind;
use rustc_abi::{
    BackendRepr, FieldIdx, FieldsShape, Scalar as ScalarAbi, Size, VariantIdx, Variants,
    WrappingRange,
};
use rustc_ast::Mutability;
use rustc_data_structures::fx::FxHashSet;
use rustc_hir as hir;
use rustc_middle::bug;
use rustc_middle::mir::interpret::ValidationErrorKind::{self, *};
use rustc_middle::mir::interpret::{
    ExpectedKind, InterpErrorKind, InvalidMetaKind, Misalignment, PointerKind, Provenance,
    UnsupportedOpInfo, ValidationErrorInfo, alloc_range, interp_ok,
};
use rustc_middle::ty::layout::{LayoutCx, TyAndLayout};
use rustc_middle::ty::{self, Ty};
use rustc_span::{Symbol, sym};
use tracing::trace;

use super::machine::AllocMap;
use super::{
    AllocId, CheckInAllocMsg, GlobalAlloc, ImmTy, Immediate, InterpCx, InterpResult, MPlaceTy,
    Machine, MemPlaceMeta, PlaceTy, Pointer, Projectable, Scalar, ValueVisitor, err_ub,
    format_interp_error,
};
use crate::enter_trace_span;

// for the validation errors
#[rustfmt::skip]
use super::InterpErrorKind::UndefinedBehavior as Ub;
use super::InterpErrorKind::Unsupported as Unsup;
use super::UndefinedBehaviorInfo::*;
use super::UnsupportedOpInfo::*;

macro_rules! err_validation_failure {
    ($where:expr, $kind: expr) => {{
        let where_ = &$where;
        let path = if !where_.is_empty() {
            let mut path = String::new();
            write_path(&mut path, where_);
            Some(path)
        } else {
            None
        };

        err_ub!(ValidationError(ValidationErrorInfo { path, kind: $kind }))
    }};
}
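
/// Shorthand for throwing (via `do yeet`) the error constructed by
/// `err_validation_failure!`. Illustrative example (this is how the `ty::Never`
/// arm later in this file uses it):
/// ```ignore(illustrative)
/// throw_validation_failure!(self.path, NeverVal)
/// ```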
macro_rules! throw_validation_failure {
    ($where:expr, $kind: expr) => {
        do yeet err_validation_failure!($where, $kind)
    };
}

/// If $e throws an error matching the pattern, throw a validation failure.
/// Other errors are passed back to the caller, unchanged -- and if they reach the root of
/// the visitor, we make sure only validation errors and `InvalidProgram` errors are left.
/// This lets you use the patterns as a kind of validation list, asserting which errors
/// can possibly happen:
///
/// ```ignore(illustrative)
/// let v = try_validation!(some_fn(), some_path, {
///     Foo | Bar | Baz => { "some failure" },
/// });
/// ```
///
/// The patterns must be of type `UndefinedBehaviorInfo`.
/// An additional expected parameter can also be added to the failure message:
///
/// ```ignore(illustrative)
/// let v = try_validation!(some_fn(), some_path, {
///     Foo | Bar | Baz => { "some failure" } expected { "something that wasn't a failure" },
/// });
/// ```
///
/// An additional nicety is that both parameters actually take format args, so you can just write
/// the format string in directly:
///
/// ```ignore(illustrative)
/// let v = try_validation!(some_fn(), some_path, {
///     Foo | Bar | Baz => { "{:?}", some_failure } expected { "{}", expected_value },
/// });
/// ```
///
macro_rules! try_validation {
    ($e:expr, $where:expr,
     $( $( $p:pat_param )|+ => $kind: expr ),+ $(,)?
    ) => {{
        $e.map_err_kind(|e| {
            // We catch the error and turn it into a validation failure. We are okay with
            // allocation here as this can only slow down builds that fail anyway.
            match e {
                $(
                    $($p)|+ => {
                        err_validation_failure!(
                            $where,
                            $kind
                        )
                    }
                ),+,
                e => e,
            }
        })?
    }};
}

/// We want to show a nice path to the invalid field for diagnostics,
/// but avoid string operations in the happy case where no error happens.
/// So we track a `Vec<PathElem>` where `PathElem` contains all the data we
/// need to later print something for the user.
#[derive(Copy, Clone, Debug)]
pub enum PathElem {
    Field(Symbol),
    Variant(Symbol),
    CoroutineState(VariantIdx),
    CapturedVar(Symbol),
    ArrayElem(usize),
    TupleElem(usize),
    Deref,
    EnumTag,
    CoroutineTag,
    DynDowncast,
    Vtable,
}

/// Extra things to check for during validation of CTFE results.
#[derive(Copy, Clone)]
pub enum CtfeValidationMode {
    /// Validation of a `static`
    Static { mutbl: Mutability },
    /// Validation of a promoted.
    Promoted,
    /// Validation of a `const`.
    /// `allow_immutable_unsafe_cell` says whether we allow `UnsafeCell` in immutable memory (which is the
    /// case for the top-level allocation of a `const`, where this is fine because the allocation will be
    /// copied at each use site).
    Const { allow_immutable_unsafe_cell: bool },
}

impl CtfeValidationMode {
    fn allow_immutable_unsafe_cell(self) -> bool {
        match self {
            CtfeValidationMode::Static { .. } => false,
            CtfeValidationMode::Promoted { .. } => false,
            CtfeValidationMode::Const { allow_immutable_unsafe_cell, .. } => {
                allow_immutable_unsafe_cell
            }
        }
    }
}

/// State for tracking recursive validation of references
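///
/// Illustrative sketch of the worklist pattern this type supports (hypothetical
/// driver loop, not the real caller):
/// ```ignore(illustrative)
/// let mut ref_tracking = RefTracking::new(root_place);
/// while let Some((place, path)) = ref_tracking.next() {
///     // ... validate `place`; validation calls `track` for newly found references ...
/// }
/// ```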
pub struct RefTracking<T, PATH = ()> {
    seen: FxHashSet<T>,
    todo: Vec<(T, PATH)>,
}

impl<T: Clone + Eq + Hash + std::fmt::Debug, PATH: Default> RefTracking<T, PATH> {
    pub fn empty() -> Self {
        RefTracking { seen: FxHashSet::default(), todo: vec![] }
    }
    pub fn new(val: T) -> Self {
        let mut ref_tracking_for_consts =
            RefTracking { seen: FxHashSet::default(), todo: vec![(val.clone(), PATH::default())] };
        ref_tracking_for_consts.seen.insert(val);
        ref_tracking_for_consts
    }
    pub fn next(&mut self) -> Option<(T, PATH)> {
        self.todo.pop()
    }

    fn track(&mut self, val: T, path: impl FnOnce() -> PATH) {
        if self.seen.insert(val.clone()) {
            trace!("Recursing below ptr {:#?}", val);
            let path = path();
            // Remember to come back to this later.
            self.todo.push((val, path));
        }
    }
}

// FIXME make this translatable as well?
/// Format a path
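///
/// Illustrative example (symbols shown as strings): for the path
/// `[Field("foo"), Deref, ArrayElem(7), EnumTag]` this writes
/// `.foo.<deref>[7].<enum-tag>` into `out`.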
fn write_path(out: &mut String, path: &[PathElem]) {
    use self::PathElem::*;

    for elem in path.iter() {
        match elem {
            Field(name) => write!(out, ".{name}"),
            EnumTag => write!(out, ".<enum-tag>"),
            Variant(name) => write!(out, ".<enum-variant({name})>"),
            CoroutineTag => write!(out, ".<coroutine-tag>"),
            CoroutineState(idx) => write!(out, ".<coroutine-state({})>", idx.index()),
            CapturedVar(name) => write!(out, ".<captured-var({name})>"),
            TupleElem(idx) => write!(out, ".{idx}"),
            ArrayElem(idx) => write!(out, "[{idx}]"),
            // `.<deref>` does not match Rust syntax, but it is more readable for long paths -- and
            // some of the other items here also are not Rust syntax. Actually we can't
            // even use the usual syntax because we are just showing the projections,
            // not the root.
            Deref => write!(out, ".<deref>"),
            DynDowncast => write!(out, ".<dyn-downcast>"),
            Vtable => write!(out, ".<vtable>"),
        }
        .unwrap()
    }
}

/// Represents a set of `Size` values as a sorted list of ranges.
// These are (offset, length) pairs, and they are sorted and mutually disjoint,
// and never adjacent (i.e. there's always a gap between two of them).
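// Illustrative example of the invariant: adding [0..4), then [8..12), then
// [4..8) leaves a single merged entry (0, 12); adding [0..4) and [5..6) keeps
// two entries, since a one-byte gap separates them.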
#[derive(Debug, Clone)]
pub struct RangeSet(Vec<(Size, Size)>);

impl RangeSet {
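    /// Add the given (offset, size) range to the set, merging it with any
    /// overlapping or adjacent existing ranges so the invariant above holds.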
    fn add_range(&mut self, offset: Size, size: Size) {
        if size.bytes() == 0 {
            // No need to track empty ranges.
            return;
        }
        let v = &mut self.0;
        // We scan for a partition point where the left partition is all the elements that end
        // strictly before we start. Those are elements that are too "low" to merge with us.
        let idx =
            v.partition_point(|&(other_offset, other_size)| other_offset + other_size < offset);
        // Now we want to either merge with the first element of the second partition, or insert ourselves before that.
        if let Some(&(other_offset, other_size)) = v.get(idx)
            && offset + size >= other_offset
        {
            // Their end is >= our start (otherwise it would not be in the 2nd partition) and
            // our end is >= their start. This means we can merge the ranges.
            let new_start = other_offset.min(offset);
            let mut new_end = (other_offset + other_size).max(offset + size);
            // We grew to the right, so merge with overlapping/adjacent elements.
            // (We also may have grown to the left, but that can never make us adjacent with
            // anything there since we selected the first such candidate via `partition_point`.)
            let mut scan_right = 1;
            while let Some(&(next_offset, next_size)) = v.get(idx + scan_right)
                && new_end >= next_offset
            {
                // Increase our size to absorb the next element.
                new_end = new_end.max(next_offset + next_size);
                // Look at the next element.
                scan_right += 1;
            }
            // Update the element we grew.
            v[idx] = (new_start, new_end - new_start);
            // Remove the elements we absorbed (if any).
            if scan_right > 1 {
                drop(v.drain((idx + 1)..(idx + scan_right)));
            }
        } else {
            // Insert new element.
            v.insert(idx, (offset, size));
        }
    }
}

struct ValidityVisitor<'rt, 'tcx, M: Machine<'tcx>> {
    /// The `path` may be pushed to, but the part that is present when a function
    /// starts must not be changed! `visit_fields` and `visit_array` rely on
    /// this stack discipline.
    path: Vec<PathElem>,
    ref_tracking: Option<&'rt mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Vec<PathElem>>>,
    /// `None` indicates this is not validating for CTFE (but for runtime).
    ctfe_mode: Option<CtfeValidationMode>,
    ecx: &'rt mut InterpCx<'tcx, M>,
    /// Whether provenance should be reset outside of pointers (emulating the effect of a typed
    /// copy).
    reset_provenance_and_padding: bool,
    /// This tracks which byte ranges in this value contain data; the remaining bytes are padding.
    /// The ideal representation here would be pointer-length pairs, but to keep things more compact
    /// we only store a (range) set of offsets -- the base pointer is the same throughout the entire
    /// visit, after all.
    /// If this is `Some`, then `reset_provenance_and_padding` must be true (but not vice versa:
    /// we might not track data vs padding bytes if the operand isn't stored in memory anyway).
    data_bytes: Option<RangeSet>,
}

impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
    fn aggregate_field_path_elem(&mut self, layout: TyAndLayout<'tcx>, field: usize) -> PathElem {
        // First, check if we are projecting to a variant.
        match layout.variants {
            Variants::Multiple { tag_field, .. } => {
                if tag_field.as_usize() == field {
                    return match layout.ty.kind() {
                        ty::Adt(def, ..) if def.is_enum() => PathElem::EnumTag,
                        ty::Coroutine(..) => PathElem::CoroutineTag,
                        _ => bug!("non-variant type {:?}", layout.ty),
                    };
                }
            }
            Variants::Single { .. } | Variants::Empty => {}
        }

        // Now we know we are projecting to a field, so figure out which one.
        match layout.ty.kind() {
            // coroutines, closures, and coroutine-closures all have upvars that may be named.
            ty::Closure(def_id, _) | ty::Coroutine(def_id, _) | ty::CoroutineClosure(def_id, _) => {
                let mut name = None;
                // FIXME this should be more descriptive i.e. CapturePlace instead of CapturedVar
                // https://github.com/rust-lang/project-rfc-2229/issues/46
                if let Some(local_def_id) = def_id.as_local() {
                    let captures = self.ecx.tcx.closure_captures(local_def_id);
                    if let Some(captured_place) = captures.get(field) {
                        // Sometimes the index is beyond the number of upvars (seen
                        // for a coroutine).
                        let var_hir_id = captured_place.get_root_variable();
                        let node = self.ecx.tcx.hir_node(var_hir_id);
                        if let hir::Node::Pat(pat) = node
                            && let hir::PatKind::Binding(_, _, ident, _) = pat.kind
                        {
                            name = Some(ident.name);
                        }
                    }
                }

                PathElem::CapturedVar(name.unwrap_or_else(|| {
                    // Fall back to showing the field index.
                    sym::integer(field)
                }))
            }

            // tuples
            ty::Tuple(_) => PathElem::TupleElem(field),

            // enums
            ty::Adt(def, ..) if def.is_enum() => {
                // we might be projecting *to* a variant, or to a field *in* a variant.
                match layout.variants {
                    Variants::Single { index } => {
                        // Inside a variant
                        PathElem::Field(def.variant(index).fields[FieldIdx::from_usize(field)].name)
                    }
                    Variants::Empty => panic!("there is no field in Variants::Empty types"),
                    Variants::Multiple { .. } => bug!("we handled variants above"),
                }
            }

            // other ADTs
            ty::Adt(def, _) => {
                PathElem::Field(def.non_enum_variant().fields[FieldIdx::from_usize(field)].name)
            }

            // arrays/slices
            ty::Array(..) | ty::Slice(..) => PathElem::ArrayElem(field),

            // dyn traits
            ty::Dynamic(..) => {
                assert_eq!(field, 0);
                PathElem::DynDowncast
            }

            // nothing else has an aggregate layout
            _ => bug!("aggregate_field_path_elem: got non-aggregate type {:?}", layout.ty),
        }
    }

    fn with_elem<R>(
        &mut self,
        elem: PathElem,
        f: impl FnOnce(&mut Self) -> InterpResult<'tcx, R>,
    ) -> InterpResult<'tcx, R> {
        // Remember the old state
        let path_len = self.path.len();
        // Record new element
        self.path.push(elem);
        // Perform operation
        let r = f(self)?;
        // Undo changes
        self.path.truncate(path_len);
        // Done
        interp_ok(r)
    }

    fn read_immediate(
        &self,
        val: &PlaceTy<'tcx, M::Provenance>,
        expected: ExpectedKind,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        interp_ok(try_validation!(
            self.ecx.read_immediate(val),
            self.path,
            Ub(InvalidUninitBytes(_)) =>
                Uninit { expected },
            // The `Unsup` cases can only occur during CTFE
            Unsup(ReadPointerAsInt(_)) =>
                PointerAsInt { expected },
            Unsup(ReadPartialPointer(_)) =>
                PartialPointer,
        ))
    }

    fn read_scalar(
        &self,
        val: &PlaceTy<'tcx, M::Provenance>,
        expected: ExpectedKind,
    ) -> InterpResult<'tcx, Scalar<M::Provenance>> {
        interp_ok(self.read_immediate(val, expected)?.to_scalar())
    }

    fn deref_pointer(
        &mut self,
        val: &PlaceTy<'tcx, M::Provenance>,
        expected: ExpectedKind,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
        // Not using `ecx.deref_pointer` since we want to use our `read_immediate` wrapper.
        let imm = self.read_immediate(val, expected)?;
        // Reset provenance: ensure slice tail metadata does not preserve provenance,
        // and ensure all pointers do not preserve partial provenance.
        if self.reset_provenance_and_padding {
            if matches!(imm.layout.backend_repr, BackendRepr::Scalar(..)) {
                // A thin pointer. If it has provenance, we don't have to do anything.
                // If it does not, ensure we clear the provenance in memory.
                if matches!(imm.to_scalar(), Scalar::Int(..)) {
                    self.ecx.clear_provenance(val)?;
                }
            } else {
                // A wide pointer. This means we have to worry both about the pointer itself and the
                // metadata. We do the lazy thing and just write back the value we got. Just
                // clearing provenance in a targeted manner would be more efficient, but unless this
                // is a perf hotspot it's just not worth the effort.
                self.ecx.write_immediate_no_validate(*imm, val)?;
            }
            // The entire thing is data, not padding.
            self.add_data_range_place(val);
        }
        // Now turn it into a place.
        self.ecx.ref_to_mplace(&imm)
    }

    fn check_wide_ptr_meta(
        &mut self,
        meta: MemPlaceMeta<M::Provenance>,
        pointee: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx> {
        let tail = self.ecx.tcx.struct_tail_for_codegen(pointee.ty, self.ecx.typing_env);
        match tail.kind() {
            ty::Dynamic(data, _) => {
                let vtable = meta.unwrap_meta().to_pointer(self.ecx)?;
                // Make sure it is a genuine vtable pointer for the right trait.
                try_validation!(
                    self.ecx.get_ptr_vtable_ty(vtable, Some(data)),
                    self.path,
                    Ub(DanglingIntPointer{ .. } | InvalidVTablePointer(..)) =>
                        InvalidVTablePtr { value: format!("{vtable}") },
                    Ub(InvalidVTableTrait { vtable_dyn_type, expected_dyn_type }) => {
                        InvalidMetaWrongTrait { vtable_dyn_type, expected_dyn_type }
                    },
                );
            }
            ty::Slice(..) | ty::Str => {
                let _len = meta.unwrap_meta().to_target_usize(self.ecx)?;
                // We do not check that `len * elem_size <= isize::MAX`:
                // that is only required for references, and there it falls out of the
                // "dereferenceable" check performed by Stacked Borrows.
            }
            ty::Foreign(..) => {
                // Unsized, but not wide.
            }
            _ => bug!("Unexpected unsized type tail: {:?}", tail),
        }

        interp_ok(())
    }

    /// Check a reference or `Box`.
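    ///
    /// Summary of the checks performed below, in order: wide-pointer metadata,
    /// dereferenceability, alignment, non-null, pointee not uninhabited, and,
    /// if `ref_tracking` is set, enqueueing the pointee for recursive validation.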
    fn check_safe_pointer(
        &mut self,
        value: &PlaceTy<'tcx, M::Provenance>,
        ptr_kind: PointerKind,
    ) -> InterpResult<'tcx> {
        let place = self.deref_pointer(value, ptr_kind.into())?;
        // Handle wide pointers.
        // Check metadata early, for better diagnostics
        if place.layout.is_unsized() {
            self.check_wide_ptr_meta(place.meta(), place.layout)?;
        }
        // Make sure this is dereferenceable and all.
        let size_and_align = try_validation!(
            self.ecx.size_and_align_of_val(&place),
            self.path,
            Ub(InvalidMeta(msg)) => match msg {
                InvalidMetaKind::SliceTooBig => InvalidMetaSliceTooLarge { ptr_kind },
                InvalidMetaKind::TooBig => InvalidMetaTooLarge { ptr_kind },
            }
        );
        let (size, align) = size_and_align
            // for the purpose of validity, consider foreign types to have
            // alignment and size determined by the layout (size will be 0,
            // alignment should take attributes into account).
            .unwrap_or_else(|| (place.layout.size, place.layout.align.abi));
        // Direct call to `check_ptr_access_align` checks alignment even on CTFE machines.
        try_validation!(
            self.ecx.check_ptr_access(
                place.ptr(),
                size,
                CheckInAllocMsg::Dereferenceable, // will anyway be replaced by validity message
            ),
            self.path,
            Ub(DanglingIntPointer { addr: 0, .. }) => NullPtr { ptr_kind, maybe: false },
            Ub(DanglingIntPointer { addr: i, .. }) => DanglingPtrNoProvenance {
                ptr_kind,
                // FIXME this says "null pointer" when null but we need translate
                pointer: format!("{}", Pointer::<Option<AllocId>>::without_provenance(i))
            },
            Ub(PointerOutOfBounds { .. }) => DanglingPtrOutOfBounds {
                ptr_kind
            },
            Ub(PointerUseAfterFree(..)) => DanglingPtrUseAfterFree {
                ptr_kind,
            },
        );
        try_validation!(
            self.ecx.check_ptr_align(
                place.ptr(),
                align,
            ),
            self.path,
            Ub(AlignmentCheckFailed(Misalignment { required, has }, _msg)) => UnalignedPtr {
                ptr_kind,
                required_bytes: required.bytes(),
                found_bytes: has.bytes()
            },
        );
        // Make sure this is non-null. We checked dereferenceability above, but if `size` is zero
        // that does not imply non-null.
        let scalar = Scalar::from_maybe_pointer(place.ptr(), self.ecx);
        if self.ecx.scalar_may_be_null(scalar)? {
            let maybe = !M::Provenance::OFFSET_IS_ADDR && matches!(scalar, Scalar::Ptr(..));
            throw_validation_failure!(self.path, NullPtr { ptr_kind, maybe })
        }
        // Do not allow references to uninhabited types.
        if place.layout.is_uninhabited() {
            let ty = place.layout.ty;
            throw_validation_failure!(self.path, PtrToUninhabited { ptr_kind, ty })
        }
        // Recursive checking
        if let Some(ref_tracking) = self.ref_tracking.as_deref_mut() {
            // Proceed recursively even for ZST, no reason to skip them!
            // `!` is a ZST and we want to validate it.
            if let Some(ctfe_mode) = self.ctfe_mode {
                let mut skip_recursive_check = false;
                // CTFE imposes restrictions on what references can point to.
                if let Ok((alloc_id, _offset, _prov)) =
                    self.ecx.ptr_try_get_alloc_id(place.ptr(), 0)
                {
                    // Everything should be already interned.
                    let Some(global_alloc) = self.ecx.tcx.try_get_global_alloc(alloc_id) else {
                        if self.ecx.memory.alloc_map.contains_key(&alloc_id) {
                            // This can happen when interning didn't complete due to, e.g.
                            // missing `make_global`. This must mean other errors are already
                            // being reported.
                            self.ecx.tcx.dcx().delayed_bug(
                                "interning did not complete, there should be an error",
                            );
                            return interp_ok(());
                        }
                        // We can't have *any* references to non-existing allocations in const-eval
                        // as the rest of rustc isn't happy with them... so we throw an error, even
                        // though for zero-sized references this isn't really UB.
                        // A potential future alternative would be to resurrect this as a zero-sized allocation
                        // (which codegen will then compile to an aligned dummy pointer anyway).
                        throw_validation_failure!(self.path, DanglingPtrUseAfterFree { ptr_kind });
                    };
                    let (size, _align) =
                        global_alloc.size_and_align(*self.ecx.tcx, self.ecx.typing_env);
                    let alloc_actual_mutbl =
                        global_alloc.mutability(*self.ecx.tcx, self.ecx.typing_env);

                    match global_alloc {
                        GlobalAlloc::Static(did) => {
                            let DefKind::Static { nested, .. } = self.ecx.tcx.def_kind(did) else {
                                bug!()
                            };
                            assert!(!self.ecx.tcx.is_thread_local_static(did));
                            assert!(self.ecx.tcx.is_static(did));
                            match ctfe_mode {
                                CtfeValidationMode::Static { .. }
                                | CtfeValidationMode::Promoted { .. } => {
                                    // We skip recursively checking other statics. These statics must be sound by
                                    // themselves, and the only way to get broken statics here is by using
                                    // unsafe code.
                                    // The reasons we don't check other statics is twofold. For one, in all
                                    // sound cases, the static was already validated on its own, and second, we
                                    // trigger cycle errors if we try to compute the value of the other static
                                    // and that static refers back to us (potentially through a promoted).
                                    // This could miss some UB, but that's fine.
                                    // We still walk nested allocations, as they are fundamentally part of this validation run.
                                    // This means we will also recurse into nested statics of *other*
                                    // statics, even though we do not recurse into other statics directly.
                                    // That's somewhat inconsistent but harmless.
                                    skip_recursive_check = !nested;
                                }
                                CtfeValidationMode::Const { .. } => {
                                    // If this is mutable memory or an `extern static`, there's no point in checking it -- we'd
                                    // just get errors trying to read the value.
                                    if alloc_actual_mutbl.is_mut()
                                        || self.ecx.tcx.is_foreign_item(did)
                                    {
                                        skip_recursive_check = true;
                                    }
                                }
                            }
                        }
                        _ => (),
                    }

                    // If this allocation has size zero, there is no actual mutability here.
                    if size != Size::ZERO {
                        // Determine whether this pointer expects to be pointing to something mutable.
                        let ptr_expected_mutbl = match ptr_kind {
                            PointerKind::Box => Mutability::Mut,
                            PointerKind::Ref(mutbl) => {
                                // We do not take into account interior mutability here since we cannot know if
                                // there really is an `UnsafeCell` inside `Option<UnsafeCell>` -- so we check
                                // that in the recursive descent behind this reference (controlled by
                                // `allow_immutable_unsafe_cell`).
                                mutbl
                            }
                        };
                        // Mutable pointer to immutable memory is no good.
                        if ptr_expected_mutbl == Mutability::Mut
                            && alloc_actual_mutbl == Mutability::Not
                        {
                            // This can actually occur with transmutes.
                            throw_validation_failure!(self.path, MutableRefToImmutable);
                        }
                    }
                }
                // Potentially skip recursive check.
                if skip_recursive_check {
                    return interp_ok(());
                }
            } else {
                // This is not CTFE, so it's Miri with recursive checking.
                // FIXME: we do *not* check behind boxes, since creating a new box first creates it uninitialized
                // and then puts the value in there, so briefly we have a box with uninit contents.
                // FIXME: should we also skip `UnsafeCell` behind shared references? Currently that is not
                // needed since validation reads bypass Stacked Borrows and data race checks.
                if matches!(ptr_kind, PointerKind::Box) {
                    return interp_ok(());
                }
            }
            let path = &self.path;
            ref_tracking.track(place, || {
                // We need to clone the path anyway, make sure it gets created
                // with enough space for the additional `Deref`.
                let mut new_path = Vec::with_capacity(path.len() + 1);
                new_path.extend(path);
                new_path.push(PathElem::Deref);
                new_path
            });
        }
        interp_ok(())
    }

    /// Check if this is a value of primitive type, and if yes check the validity of the value
    /// at that type. Return `true` if the type is indeed primitive.
    ///
    /// Note that not all of these have `FieldsShape::Primitive`, e.g. wide references.
    fn try_visit_primitive(
        &mut self,
        value: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, bool> {
        // Go over all the primitive types
        let ty = value.layout.ty;
        match ty.kind() {
            ty::Bool => {
                let scalar = self.read_scalar(value, ExpectedKind::Bool)?;
                try_validation!(
                    scalar.to_bool(),
                    self.path,
                    Ub(InvalidBool(..)) => ValidationErrorKind::InvalidBool {
                        value: format!("{scalar:x}"),
                    }
                );
                if self.reset_provenance_and_padding {
                    self.ecx.clear_provenance(value)?;
                    self.add_data_range_place(value);
                }
                interp_ok(true)
            }
            ty::Char => {
                let scalar = self.read_scalar(value, ExpectedKind::Char)?;
                try_validation!(
                    scalar.to_char(),
                    self.path,
                    Ub(InvalidChar(..)) => ValidationErrorKind::InvalidChar {
                        value: format!("{scalar:x}"),
                    }
                );
                if self.reset_provenance_and_padding {
                    self.ecx.clear_provenance(value)?;
                    self.add_data_range_place(value);
                }
                interp_ok(true)
            }
            ty::Float(_) | ty::Int(_) | ty::Uint(_) => {
                // NOTE: Keep this in sync with the array optimization for int/float
                // types below!
                self.read_scalar(
                    value,
                    if matches!(ty.kind(), ty::Float(..)) {
                        ExpectedKind::Float
                    } else {
                        ExpectedKind::Int
                    },
                )?;
                if self.reset_provenance_and_padding {
                    self.ecx.clear_provenance(value)?;
                    self.add_data_range_place(value);
                }
                interp_ok(true)
            }
            ty::RawPtr(..) => {
                let place = self.deref_pointer(value, ExpectedKind::RawPtr)?;
                if place.layout.is_unsized() {
                    self.check_wide_ptr_meta(place.meta(), place.layout)?;
                }
                interp_ok(true)
            }
            ty::Ref(_, _ty, mutbl) => {
                self.check_safe_pointer(value, PointerKind::Ref(*mutbl))?;
                interp_ok(true)
            }
            ty::FnPtr(..) => {
                let scalar = self.read_scalar(value, ExpectedKind::FnPtr)?;

                // If we check references recursively, also check that this points to a function.
                if let Some(_) = self.ref_tracking {
                    let ptr = scalar.to_pointer(self.ecx)?;
                    let _fn = try_validation!(
                        self.ecx.get_ptr_fn(ptr),
                        self.path,
                        Ub(DanglingIntPointer{ .. } | InvalidFunctionPointer(..)) =>
                            InvalidFnPtr { value: format!("{ptr}") },
                    );
                    // FIXME: Check if the signature matches
                } else {
                    // Otherwise (for standalone Miri and for `-Zextra-const-ub-checks`),
                    // we have to still check it to be non-null.
                    if self.ecx.scalar_may_be_null(scalar)? {
                        let maybe =
                            !M::Provenance::OFFSET_IS_ADDR && matches!(scalar, Scalar::Ptr(..));
                        throw_validation_failure!(self.path, NullFnPtr { maybe });
                    }
                }
                if self.reset_provenance_and_padding {
                    // Make sure we do not preserve partial provenance. This matches the thin
                    // pointer handling in `deref_pointer`.
                    if matches!(scalar, Scalar::Int(..)) {
                        self.ecx.clear_provenance(value)?;
                    }
                    self.add_data_range_place(value);
                }
                interp_ok(true)
            }
            ty::Never => throw_validation_failure!(self.path, NeverVal),
            ty::Foreign(..) | ty::FnDef(..) => {
                // Nothing to check.
                interp_ok(true)
            }
            ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binder)"),
            // The above should be all the primitive types. The rest is compound, we
            // check them by visiting their fields/variants.
            ty::Adt(..)
            | ty::Tuple(..)
            | ty::Array(..)
            | ty::Slice(..)
            | ty::Str
            | ty::Dynamic(..)
            | ty::Closure(..)
            | ty::Pat(..)
            | ty::CoroutineClosure(..)
            | ty::Coroutine(..) => interp_ok(false),
            // Some types only occur during typechecking, they have no layout.
            // We should not see them here and we could not check them anyway.
            ty::Error(_)
            | ty::Infer(..)
            | ty::Placeholder(..)
            | ty::Bound(..)
            | ty::Param(..)
            | ty::Alias(..)
            | ty::CoroutineWitness(..) => bug!("Encountered invalid type {:?}", ty),
        }
    }
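
    /// Check that `scalar` is allowed by `scalar_layout`'s valid range.
    /// Illustrative example: the scalar of a `NonZero<u32>` has
    /// `valid_range = 1..=u32::MAX`, so an all-zero bit pattern is rejected here.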
    fn visit_scalar(
        &mut self,
        scalar: Scalar<M::Provenance>,
        scalar_layout: ScalarAbi,
    ) -> InterpResult<'tcx> {
        let size = scalar_layout.size(self.ecx);
        let valid_range = scalar_layout.valid_range(self.ecx);
        let WrappingRange { start, end } = valid_range;
        let max_value = size.unsigned_int_max();
        assert!(end <= max_value);
        let bits = match scalar.try_to_scalar_int() {
            Ok(int) => int.to_bits(size),
            Err(_) => {
                // So this is a pointer then, and casting to an int failed.
                // Can only happen during CTFE.
                // We support 2 kinds of ranges here: full range, and excluding zero.
                if start == 1 && end == max_value {
                    // Only null is the niche. So make sure the ptr is NOT null.
                    if self.ecx.scalar_may_be_null(scalar)? {
                        throw_validation_failure!(self.path, NonnullPtrMaybeNull)
                    } else {
                        return interp_ok(());
                    }
                } else if scalar_layout.is_always_valid(self.ecx) {
                    // Easy. (This is reachable if `enforce_number_validity` is set.)
                    return interp_ok(());
                } else {
                    // Conservatively, we reject, because the pointer *could* have a bad
                    // value.
                    throw_validation_failure!(
                        self.path,
                        PtrOutOfRange { range: valid_range, max_value }
                    )
                }
            }
        };
        // Now compare.
        if valid_range.contains(bits) {
            interp_ok(())
        } else {
            throw_validation_failure!(
                self.path,
                OutOfRange { value: format!("{bits}"), range: valid_range, max_value }
            )
        }
    }

    fn in_mutable_memory(&self, val: &PlaceTy<'tcx, M::Provenance>) -> bool {
        debug_assert!(self.ctfe_mode.is_some());
        if let Some(mplace) = val.as_mplace_or_local().left() {
            if let Some(alloc_id) = mplace.ptr().provenance.and_then(|p| p.get_alloc_id()) {
                let tcx = *self.ecx.tcx;
                // Everything must be already interned.
                let mutbl = tcx.global_alloc(alloc_id).mutability(tcx, self.ecx.typing_env);
                if let Some((_, alloc)) = self.ecx.memory.alloc_map.get(alloc_id) {
                    assert_eq!(alloc.mutability, mutbl);
                }
                mutbl.is_mut()
            } else {
                // No memory at all.
                false
            }
        } else {
            // A local variable -- definitely mutable.
            true
        }
    }

    /// Add the given pointer-length pair to the "data" range of this visit.
    fn add_data_range(&mut self, ptr: Pointer<Option<M::Provenance>>, size: Size) {
        if let Some(data_bytes) = self.data_bytes.as_mut() {
            // We only have to store the offset, the rest is the same for all pointers here.
            // The logic is agnostic to whether the offset is relative or absolute as long as
            // it is consistent.
            let (_prov, offset) = ptr.into_raw_parts();
            // Add this.
            data_bytes.add_range(offset, size);
        };
    }

    /// Add the entire given place to the "data" range of this visit.
    fn add_data_range_place(&mut self, place: &PlaceTy<'tcx, M::Provenance>) {
        // Only sized places can be added this way.
        debug_assert!(place.layout.is_sized());
        if let Some(data_bytes) = self.data_bytes.as_mut() {
            let offset = Self::data_range_offset(self.ecx, place);
            data_bytes.add_range(offset, place.layout.size);
        }
    }

    /// Convert a place into the offset it starts at, for the purpose of data_range tracking.
    /// Must only be called if `data_bytes` is `Some(_)`.
    fn data_range_offset(ecx: &InterpCx<'tcx, M>, place: &PlaceTy<'tcx, M::Provenance>) -> Size {
        // The presence of `data_bytes` implies that our place is in memory.
        let ptr = ecx
            .place_to_op(place)
            .expect("place must be in memory")
            .as_mplace_or_imm()
            .expect_left("place must be in memory")
            .ptr();
        let (_prov, offset) = ptr.into_raw_parts();
        offset
    }
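
    /// Reset all padding bytes of `place` (i.e., all bytes not covered by `data_bytes`)
    /// to "uninitialized", emulating the effect of a typed copy.
    /// Illustrative example: if the value occupies `[0..12)` and `data_bytes` is
    /// `{[0..4), [8..12)}`, the gap `[4..8)` is padding and gets de-initialized.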
    fn reset_padding(&mut self, place: &PlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
        let Some(data_bytes) = self.data_bytes.as_mut() else { return interp_ok(()) };
        // Our value must be in memory, otherwise we would not have set up `data_bytes`.
        let mplace = self.ecx.force_allocation(place)?;
        // Determine starting offset and size.
        let (_prov, start_offset) = mplace.ptr().into_raw_parts();
        let (size, _align) = self
            .ecx
            .size_and_align_of_val(&mplace)?
            .unwrap_or((mplace.layout.size, mplace.layout.align.abi));
        // If there is no padding at all, we can skip the rest: check for
        // a single data range covering the entire value.
        if data_bytes.0 == &[(start_offset, size)] {
            return interp_ok(());
        }
        // Get a handle for the allocation. Do this only once, to avoid looking up the same
        // allocation over and over again. (Though to be fair, iterating the value already does
        // exactly that.)
        let Some(mut alloc) = self.ecx.get_ptr_alloc_mut(mplace.ptr(), size)? else {
            // A ZST, no padding to clear.
            return interp_ok(());
        };
        // Add a "finalizer" data range at the end, so that the iteration below finds all gaps
        // between ranges.
        data_bytes.0.push((start_offset + size, Size::ZERO));
        // Iterate, and reset gaps.
        let mut padding_cleared_until = start_offset;
        for &(offset, size) in data_bytes.0.iter() {
            assert!(
                offset >= padding_cleared_until,
                "reset_padding on {}: previous field ended at offset {}, next field starts at {} (and has a size of {} bytes)",
                mplace.layout.ty,
                (padding_cleared_until - start_offset).bytes(),
                (offset - start_offset).bytes(),
                size.bytes(),
            );
            if offset > padding_cleared_until {
                // We found padding. Adjust the range to be relative to `alloc`, and make it uninit.
                let padding_start = padding_cleared_until - start_offset;
                let padding_size = offset - padding_cleared_until;
                let range = alloc_range(padding_start, padding_size);
                trace!("reset_padding on {}: resetting padding range {range:?}", mplace.layout.ty);
                alloc.write_uninit(range);
            }
            padding_cleared_until = offset + size;
        }
        assert!(padding_cleared_until == start_offset + size);
        interp_ok(())
    }

    /// Computes the data range of this union type:
    /// which bytes are inside a field (i.e., not padding.)
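    ///
    /// Illustrative example: for `union U { a: u8, b: u32 }` the data range is
    /// `[0..4)` (the union of both fields' ranges), while for a union whose only
    /// field is a `#[repr(C)] struct S(u8, u32)` it is `{[0..1), [4..8)}`, since
    /// the struct's interior padding stays padding.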
    fn union_data_range<'e>(
        ecx: &'e mut InterpCx<'tcx, M>,
        layout: TyAndLayout<'tcx>,
    ) -> Cow<'e, RangeSet> {
        assert!(layout.ty.is_union());
        assert!(layout.is_sized(), "there are no unsized unions");
        let layout_cx = LayoutCx::new(*ecx.tcx, ecx.typing_env);
        return M::cached_union_data_range(ecx, layout.ty, || {
            let mut out = RangeSet(Vec::new());
            union_data_range_uncached(&layout_cx, layout, Size::ZERO, &mut out);
            out
        });

        /// Helper for recursive traversal: add data ranges of the given type to `out`.
        fn union_data_range_uncached<'tcx>(
            cx: &LayoutCx<'tcx>,
            layout: TyAndLayout<'tcx>,
            base_offset: Size,
            out: &mut RangeSet,
        ) {
            // If this is a ZST, we don't contain any data. In particular, this helps us to quickly
            // skip over huge arrays of ZST.
            if layout.is_zst() {
                return;
            }
            // Just recursively add all the fields of everything to the output.
            match &layout.fields {
                FieldsShape::Primitive => {
                    out.add_range(base_offset, layout.size);
                }
                &FieldsShape::Union(fields) => {
                    // Currently, all fields start at offset 0 (relative to `base_offset`).
                    for field in 0..fields.get() {
                        let field = layout.field(cx, field);
                        union_data_range_uncached(cx, field, base_offset, out);
                    }
                }
                &FieldsShape::Array { stride, count } => {
                    let elem = layout.field(cx, 0);

                    // Fast-path for large arrays of simple types that do not contain any padding.
                    if elem.backend_repr.is_scalar() {
                        out.add_range(base_offset, elem.size * count);
                    } else {
                        for idx in 0..count {
                            // This repeats the same computation for every array element... but the alternative
                            // is to allocate temporary storage for a dedicated `out` set for the array element,
                            // and replicating that N times. Is that better?
                            union_data_range_uncached(cx, elem, base_offset + idx * stride, out);
                        }
                    }
                }
                FieldsShape::Arbitrary { offsets, .. } => {
                    for (field, &offset) in offsets.iter_enumerated() {
                        let field = layout.field(cx, field.as_usize());
                        union_data_range_uncached(cx, field, base_offset + offset, out);
                    }
                }
            }
            // Don't forget potential other variants.
            match &layout.variants {
                Variants::Single { .. } | Variants::Empty => {
                    // Fully handled above.
                }
                Variants::Multiple { variants, .. } => {
                    for variant in variants.indices() {
                        let variant = layout.for_variant(cx, variant);
                        union_data_range_uncached(cx, variant, base_offset, out);
                    }
                }
            }
        }
    }
}

impl<'rt, 'tcx, M: Machine<'tcx>> ValueVisitor<'tcx, M> for ValidityVisitor<'rt, 'tcx, M> {
    type V = PlaceTy<'tcx, M::Provenance>;

    #[inline(always)]
    fn ecx(&self) -> &InterpCx<'tcx, M> {
        self.ecx
    }

    fn read_discriminant(
        &mut self,
        val: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, VariantIdx> {
        self.with_elem(PathElem::EnumTag, move |this| {
            interp_ok(try_validation!(
                this.ecx.read_discriminant(val),
                this.path,
                Ub(InvalidTag(val)) => InvalidEnumTag {
                    value: format!("{val:x}"),
                },
                Ub(UninhabitedEnumVariantRead(_)) => UninhabitedEnumVariant,
                // Uninit / bad provenance are not possible since the field was already previously
                // checked at its integer type.
            ))
        })
    }

    #[inline]
    fn visit_field(
        &mut self,
        old_val: &PlaceTy<'tcx, M::Provenance>,
        field: usize,
        new_val: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        let elem = self.aggregate_field_path_elem(old_val.layout, field);
        self.with_elem(elem, move |this| this.visit_value(new_val))
    }

    #[inline]
    fn visit_variant(
        &mut self,
        old_val: &PlaceTy<'tcx, M::Provenance>,
        variant_id: VariantIdx,
        new_val: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        let name = match old_val.layout.ty.kind() {
            ty::Adt(adt, _) => PathElem::Variant(adt.variant(variant_id).name),
            // Coroutines also have variants
            ty::Coroutine(..) => PathElem::CoroutineState(variant_id),
            _ => bug!("Unexpected type with variant: {:?}", old_val.layout.ty),
        };
        self.with_elem(name, move |this| this.visit_value(new_val))
    }

    #[inline(always)]
    fn visit_union(
        &mut self,
        val: &PlaceTy<'tcx, M::Provenance>,
        _fields: NonZero<usize>,
    ) -> InterpResult<'tcx> {
        // Special check for CTFE validation, preventing `UnsafeCell` inside unions in immutable memory.
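        // For example (illustrative): a const like
        //     const C: &MaybeUninit<Cell<i32>> = &MaybeUninit::new(Cell::new(0));
        // would be rejected here, since the union is not `Freeze` (it contains a `Cell`)
        // yet ends up in immutable memory.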
        if self.ctfe_mode.is_some_and(|c| !c.allow_immutable_unsafe_cell()) {
            // Unsized unions are currently not a thing, but let's keep this code consistent with
            // the check in `visit_value`.
            let zst = self.ecx.size_and_align_of_val(val)?.is_some_and(|(s, _a)| s.bytes() == 0);
            if !zst && !val.layout.ty.is_freeze(*self.ecx.tcx, self.ecx.typing_env) {
                if !self.in_mutable_memory(val) {
                    throw_validation_failure!(self.path, UnsafeCellInImmutable);
                }
            }
        }
        if self.reset_provenance_and_padding
            && let Some(data_bytes) = self.data_bytes.as_mut()
        {
            let base_offset = Self::data_range_offset(self.ecx, val);
            // Determine and add data range for this union.
            let union_data_range = Self::union_data_range(self.ecx, val.layout);
            for &(offset, size) in union_data_range.0.iter() {
                data_bytes.add_range(base_offset + offset, size);
            }
        }
        interp_ok(())
    }

    #[inline]
    fn visit_box(
        &mut self,
        _box_ty: Ty<'tcx>,
        val: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        self.check_safe_pointer(val, PointerKind::Box)?;
        interp_ok(())
    }

    #[inline]
    fn visit_value(&mut self, val: &PlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
        trace!("visit_value: {:?}, {:?}", *val, val.layout);

        // Check primitive types -- the leaves of our recursive descent.
        // This is called even for enum discriminants (which are "fields" of their enum),
        // so for integer-typed discriminants the provenance reset will happen here.
        // We assume that the Scalar validity range does not restrict these values
        // any further than `try_visit_primitive` does!
        if self.try_visit_primitive(val)? {
            return interp_ok(());
        }

        // Special check preventing `UnsafeCell` in the inner part of constants
        if self.ctfe_mode.is_some_and(|c| !c.allow_immutable_unsafe_cell()) {
            // Exclude ZST values. We need to compute the dynamic size/align to properly
            // handle slices and trait objects.
            let zst = self.ecx.size_and_align_of_val(val)?.is_some_and(|(s, _a)| s.bytes() == 0);
            if !zst
                && let Some(def) = val.layout.ty.ty_adt_def()
                && def.is_unsafe_cell()
            {
                if !self.in_mutable_memory(val) {
                    throw_validation_failure!(self.path, UnsafeCellInImmutable);
                }
            }
        }

        // Recursively walk the value at its type. Apply optimizations for some large types.
        match val.layout.ty.kind() {
            ty::Str => {
                let mplace = val.assert_mem_place(); // strings are unsized and hence never immediate
                let len = mplace.len(self.ecx)?;
                try_validation!(
                    self.ecx.read_bytes_ptr_strip_provenance(mplace.ptr(), Size::from_bytes(len)),
                    self.path,
                    Ub(InvalidUninitBytes(..)) => Uninit { expected: ExpectedKind::Str },
                    Unsup(ReadPointerAsInt(_)) => PointerAsInt { expected: ExpectedKind::Str }
                );
            }
            ty::Array(tys, ..) | ty::Slice(tys)
                // This optimization applies for types that can hold arbitrary non-provenance bytes (such as
                // integer and floating point types).
                // FIXME(wesleywiser) This logic could be extended further to arbitrary structs or
                // tuples made up of integer/floating point types or inhabited ZSTs with no padding.
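                // For example (illustrative): `[u64; 1024]` and `[f32]` take this fast path,
                // while `[bool; 4]` and `[char; 4]` do not, since those element types have
                // restricted valid ranges and are not plain integer/float types.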
                if matches!(tys.kind(), ty::Int(..) | ty::Uint(..) | ty::Float(..)) =>
            {
                let expected = if tys.is_integral() { ExpectedKind::Int } else { ExpectedKind::Float };
                // Optimized handling for arrays of integer/float type.

                // This is the length of the array/slice.
                let len = val.len(self.ecx)?;
                // This is the element type size.
                let layout = self.ecx.layout_of(*tys)?;
                // This is the size in bytes of the whole array. (This checks for overflow.)
                let size = layout.size * len;
                // If the size is 0, there is nothing to check.
                // (`size` can only be 0 if `len` is 0, and empty arrays are always valid.)
                if size == Size::ZERO {
                    return interp_ok(());
                }
                // Now that we definitely have a non-ZST array, we know it lives in memory -- except it may
                // be an uninitialized local variable; those are also "immediate".
                let mplace = match val.to_op(self.ecx)?.as_mplace_or_imm() {
                    Left(mplace) => mplace,
                    Right(imm) => match *imm {
                        Immediate::Uninit =>
                            throw_validation_failure!(self.path, Uninit { expected }),
                        Immediate::Scalar(..) | Immediate::ScalarPair(..) =>
                            bug!("arrays/slices can never have Scalar/ScalarPair layout"),
                    }
                };

                // Optimization: we just check the entire range at once.
                // NOTE: Keep this in sync with the handling of integer and float
                // types above, in `visit_primitive`.
                // No need for an alignment check here, this is not an actual memory access.
                let alloc = self.ecx.get_ptr_alloc(mplace.ptr(), size)?.expect("we already excluded size 0");

                alloc.get_bytes_strip_provenance().map_err_kind(|kind| {
                    // Some error happened, try to provide a more detailed description.
                    // For some errors we might be able to provide extra information.
                    // (This custom logic does not fit the `try_validation!` macro.)
                    match kind {
                        Ub(InvalidUninitBytes(Some((_alloc_id, access)))) | Unsup(ReadPointerAsInt(Some((_alloc_id, access)))) => {
                            // Some byte was uninitialized; determine which
                            // element that byte belongs to so we can
                            // provide an index.
                            let i = usize::try_from(
                                access.bad.start.bytes() / layout.size.bytes(),
                            )
                            .unwrap();
                            self.path.push(PathElem::ArrayElem(i));

                            if matches!(kind, Ub(InvalidUninitBytes(_))) {
                                err_validation_failure!(self.path, Uninit { expected })
                            } else {
                                err_validation_failure!(self.path, PointerAsInt { expected })
                            }
                        }

                        // Propagate upwards (that will also check for unexpected errors).
                        err => err,
                    }
                })?;

                // Don't forget that these are all non-pointer types, and thus do not preserve
                // provenance.
                if self.reset_provenance_and_padding {
                    // We can't reuse the allocation handle from above: there, we may have been
                    // looking at read-only memory.
                    let mut alloc = self.ecx.get_ptr_alloc_mut(mplace.ptr(), size)?.expect("we already excluded size 0");
                    alloc.clear_provenance();
                    // Also, mark this as containing data, not padding.
                    self.add_data_range(mplace.ptr(), size);
                }
            }
            // Fast path for arrays and slices of ZSTs. We only need to check a single ZST element
            // of an array and not all of them, because there's only a single value of a specific
            // ZST type, so either validation fails for all elements or none.
            ty::Array(tys, ..) | ty::Slice(tys) if self.ecx.layout_of(*tys)?.is_zst() => {
                // Validate just the first element (if any).
                if val.len(self.ecx)? > 0 {
                    self.visit_field(val, 0, &self.ecx.project_index(val, 0)?)?;
                }
            }
            ty::Pat(base, pat) => {
                // First check that the base type is valid.
                self.visit_value(&val.transmute(self.ecx.layout_of(*base)?, self.ecx)?)?;
                // When you extend this match, make sure to also add tests to
                // tests/ui/type/pattern_types/validity.rs.
                match **pat {
                    // Range and non-null patterns are precisely reflected into `valid_range` and thus
                    // handled fully by `visit_scalar` (called below).
                    ty::PatternKind::Range { .. } => {},
                    ty::PatternKind::NotNull => {},

                    // FIXME(pattern_types): check that the value is covered by one of the variants.
                    // For now, we rely on layout computation setting the scalar's `valid_range` to
                    // match the pattern. However, this cannot always work; the layout may
                    // pessimistically cover actually illegal ranges and Miri would miss that UB.
                    // The consolation here is that codegen also will miss that UB, so at least
                    // we won't see optimizations actually breaking such programs.
                    ty::PatternKind::Or(_patterns) => {}
                }
            }
            _ => {
                // default handler
                try_validation!(
                    self.walk_value(val),
                    self.path,
                    // It's not great to catch errors here, since we can't give a very good path,
                    // but it's better than ICEing.
                    Ub(InvalidVTableTrait { vtable_dyn_type, expected_dyn_type }) => {
                        InvalidMetaWrongTrait { vtable_dyn_type, expected_dyn_type }
                    },
                );
            }
        }

        // *After* all of this, check further information stored in the layout. We need to check
        // this to handle types like `NonNull` where the `Scalar` info is more restrictive than what
        // the fields say (`rustc_layout_scalar_valid_range_start`). But in most cases, this will
        // just propagate what the fields say, and then we want the error to point at the field --
        // so, we first recurse, then we do this check.
        //
        // FIXME: We could avoid some redundant checks here. For newtypes wrapping
        // scalars, we do the same check on every "level" (e.g., first we check
        // MyNewtype and then the scalar in there).
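        // For example (illustrative): a type declared like
        //     #[rustc_layout_scalar_valid_range_start(1)]
        //     struct NonNullIsh(*const u8);
        // gets a scalar `valid_range` that excludes 0, so the null check happens in
        // `visit_scalar` below rather than when visiting the raw-pointer field.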
        if val.layout.is_uninhabited() {
            let ty = val.layout.ty;
            throw_validation_failure!(self.path, UninhabitedVal { ty });
        }
        match val.layout.backend_repr {
            BackendRepr::Scalar(scalar_layout) => {
                if !scalar_layout.is_uninit_valid() {
                    // There is something to check here.
                    let scalar = self.read_scalar(val, ExpectedKind::InitScalar)?;
                    self.visit_scalar(scalar, scalar_layout)?;
                }
            }
            BackendRepr::ScalarPair(a_layout, b_layout) => {
                // We can only proceed if *both* scalars need to be initialized.
                // FIXME: find a way to also check ScalarPair when one side can be uninit but
                // the other must be init.
                if !a_layout.is_uninit_valid() && !b_layout.is_uninit_valid() {
                    let (a, b) =
                        self.read_immediate(val, ExpectedKind::InitScalar)?.to_scalar_pair();
                    self.visit_scalar(a, a_layout)?;
                    self.visit_scalar(b, b_layout)?;
                }
            }
            BackendRepr::SimdVector { .. } | BackendRepr::ScalableVector { .. } => {
                // No checks here, we assume layout computation gets this right.
                // (This is harder to check since Miri does not represent these as `Immediate`. We
                // also cannot use field projections since this might be a newtype around a vector.)
            }
            BackendRepr::Memory { .. } => {
                // Nothing to do.
            }
        }

        interp_ok(())
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    fn validate_operand_internal(
        &mut self,
        val: &PlaceTy<'tcx, M::Provenance>,
        path: Vec<PathElem>,
        ref_tracking: Option<&mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Vec<PathElem>>>,
        ctfe_mode: Option<CtfeValidationMode>,
        reset_provenance_and_padding: bool,
    ) -> InterpResult<'tcx> {
        trace!("validate_operand_internal: {:?}, {:?}", *val, val.layout.ty);

        // Run the visitor.
        self.run_for_validation_mut(|ecx| {
            let reset_padding = reset_provenance_and_padding && {
                // Check if `val` is actually stored in memory. If not, padding is not even
                // represented and we need not reset it.
                ecx.place_to_op(val)?.as_mplace_or_imm().is_left()
            };
            let mut v = ValidityVisitor {
                path,
                ref_tracking,
                ctfe_mode,
                ecx,
                reset_provenance_and_padding,
                data_bytes: reset_padding.then_some(RangeSet(Vec::new())),
            };
            v.visit_value(val)?;
            v.reset_padding(val)?;
            interp_ok(())
        })
        .map_err_info(|err| {
            if !matches!(
                err.kind(),
                err_ub!(ValidationError { .. })
                    | InterpErrorKind::InvalidProgram(_)
                    | InterpErrorKind::Unsupported(UnsupportedOpInfo::ExternTypeField)
            ) {
                bug!(
                    "Unexpected error during validation: {}",
                    format_interp_error(self.tcx.dcx(), err)
                );
            }
            err
        })
    }

    /// This function checks the data at `val` to be const-valid.
    /// `val` is assumed to cover valid memory if it is an indirect operand.
    /// It will error if the bits at the destination do not match the ones described by the layout.
    ///
    /// `ref_tracking` is used to record references that we encounter so that they
    /// can be checked recursively by an outside driving loop.
    ///
    /// `ctfe_mode` controls whether this must satisfy the rules for constants:
    /// - no pointers to statics.
    /// - no `UnsafeCell` or non-ZST `&mut`.
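    ///
    /// A sketch of the outside driving loop (illustrative only; `ecx`, `val`, and `ctfe_mode`
    /// are assumed to exist):
    ///
    /// ```ignore(illustrative)
    /// let mut ref_tracking = RefTracking::empty();
    /// ecx.const_validate_operand(&val, vec![], &mut ref_tracking, ctfe_mode)?;
    /// while let Some((mplace, path)) = ref_tracking.todo.pop() {
    ///     ecx.const_validate_operand(&mplace.into(), path, &mut ref_tracking, ctfe_mode)?;
    /// }
    /// ```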
    #[inline(always)]
    pub(crate) fn const_validate_operand(
        &mut self,
        val: &PlaceTy<'tcx, M::Provenance>,
        path: Vec<PathElem>,
        ref_tracking: &mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Vec<PathElem>>,
        ctfe_mode: CtfeValidationMode,
    ) -> InterpResult<'tcx> {
        self.validate_operand_internal(
            val,
            path,
            Some(ref_tracking),
            Some(ctfe_mode),
            /*reset_provenance_and_padding*/ false,
        )
    }

    /// This function checks the data at `val` to be runtime-valid.
    /// `val` is assumed to cover valid memory if it is an indirect operand.
    /// It will error if the bits at the destination do not match the ones described by the layout.
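    ///
    /// A sketch of typical use (illustrative only; `ecx` and `place` are assumed to exist):
    ///
    /// ```ignore(illustrative)
    /// // Shallowly validate a freshly written value, without following references and
    /// // without normalizing provenance/padding.
    /// ecx.validate_operand(&place, /*recursive*/ false, /*reset_provenance_and_padding*/ false)?;
    /// ```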
    #[inline(always)]
    pub fn validate_operand(
        &mut self,
        val: &PlaceTy<'tcx, M::Provenance>,
        recursive: bool,
        reset_provenance_and_padding: bool,
    ) -> InterpResult<'tcx> {
        let _trace = enter_trace_span!(
            M,
            "validate_operand",
            recursive,
            reset_provenance_and_padding,
            ?val,
        );

        // Note that we *could* actually be in CTFE here with `-Zextra-const-ub-checks`, but it's
        // still correct to not use `ctfe_mode`: that mode is for validation of the final constant
        // value, it rules out things like `UnsafeCell` in awkward places.
        if !recursive {
            return self.validate_operand_internal(
                val,
                vec![],
                None,
                None,
                reset_provenance_and_padding,
            );
        }
        // Do a recursive check.
        let mut ref_tracking = RefTracking::empty();
        self.validate_operand_internal(
            val,
            vec![],
            Some(&mut ref_tracking),
            None,
            reset_provenance_and_padding,
        )?;
        while let Some((mplace, path)) = ref_tracking.todo.pop() {
            // Things behind references do *not* have their provenance reset.
            self.validate_operand_internal(
                &mplace.into(),
                path,
                Some(&mut ref_tracking),
                None,
                /*reset_provenance_and_padding*/ false,
            )?;
        }
        interp_ok(())
    }
}