//! Computations on places -- field projections, going from mir::Place, and writing
//! into a place.
//! All high-level functions to write to memory work on places as destinations.

use std::assert_matches::assert_matches;

use either::{Either, Left, Right};
use rustc_abi::{BackendRepr, HasDataLayout, Size};
use rustc_middle::ty::Ty;
use rustc_middle::ty::layout::TyAndLayout;
use rustc_middle::{bug, mir, span_bug};
use tracing::field::Empty;
use tracing::{instrument, trace};

use super::{
    AllocInit, AllocRef, AllocRefMut, CheckAlignMsg, CtfeProvenance, ImmTy, Immediate, InterpCx,
    InterpResult, Machine, MemoryKind, Misalignment, OffsetMode, OpTy, Operand, Pointer,
    Projectable, Provenance, Scalar, alloc_range, interp_ok, mir_assign_valid_types,
};
use crate::enter_trace_span;

#[derive(Copy, Clone, Hash, PartialEq, Eq, Debug)]
/// Information required for the sound usage of a `MemPlace`.
pub enum MemPlaceMeta<Prov: Provenance = CtfeProvenance> {
    /// The unsized payload (e.g. length for slices or vtable pointer for trait objects).
    Meta(Scalar<Prov>),
    /// `Sized` types or unsized `extern type`
    None,
}
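
// Illustrative sketch (not itself part of the interpreter): what `meta` holds for a few
// pointee types, following the usual Rust unsized-type metadata:
//
//     &u32          -> MemPlaceMeta::None          (thin pointer, sized pointee)
//     &[u8]         -> MemPlaceMeta::Meta(len)     (slice length as a `Scalar`)
//     &dyn Debug    -> MemPlaceMeta::Meta(vtable)  (vtable pointer as a `Scalar`)
//     &SomeExternTy -> MemPlaceMeta::None          (`extern type`: unsized, no metadata)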

impl<Prov: Provenance> MemPlaceMeta<Prov> {
    #[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
    pub fn unwrap_meta(self) -> Scalar<Prov> {
        match self {
            Self::Meta(s) => s,
            Self::None => {
                bug!("expected wide pointer extra data (e.g. slice length or trait object vtable)")
            }
        }
    }

    #[inline(always)]
    pub fn has_meta(self) -> bool {
        match self {
            Self::Meta(_) => true,
            Self::None => false,
        }
    }
}

#[derive(Copy, Clone, Hash, PartialEq, Eq, Debug)]
pub(super) struct MemPlace<Prov: Provenance = CtfeProvenance> {
    /// The pointer can be a pure integer, with the `None` provenance.
    pub ptr: Pointer<Option<Prov>>,
    /// Metadata for unsized places. Interpretation is up to the type.
    /// Must not be present for sized types, but can be missing for unsized types
    /// (e.g., `extern type`).
    pub meta: MemPlaceMeta<Prov>,
    /// Stores whether this place was created based on a sufficiently aligned pointer.
    misaligned: Option<Misalignment>,
}

impl<Prov: Provenance> MemPlace<Prov> {
    /// Adjust the provenance of the main pointer (metadata is unaffected).
    fn map_provenance(self, f: impl FnOnce(Prov) -> Prov) -> Self {
        MemPlace { ptr: self.ptr.map_provenance(|p| p.map(f)), ..self }
    }

    /// Turn an mplace into a (thin or wide) pointer, as a reference, pointing to the same space.
    #[inline]
    fn to_ref(self, cx: &impl HasDataLayout) -> Immediate<Prov> {
        Immediate::new_pointer_with_meta(self.ptr, self.meta, cx)
    }

    #[inline]
    // Not called `offset_with_meta` to avoid confusion with the trait method.
    fn offset_with_meta_<'tcx, M: Machine<'tcx, Provenance = Prov>>(
        self,
        offset: Size,
        mode: OffsetMode,
        meta: MemPlaceMeta<Prov>,
        ecx: &InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, Self> {
        debug_assert!(
            !meta.has_meta() || self.meta.has_meta(),
            "cannot use `offset_with_meta` to add metadata to a place"
        );
        let ptr = match mode {
            OffsetMode::Inbounds => {
                ecx.ptr_offset_inbounds(self.ptr, offset.bytes().try_into().unwrap())?
            }
            OffsetMode::Wrapping => self.ptr.wrapping_offset(offset, ecx),
        };
        interp_ok(MemPlace { ptr, meta, misaligned: self.misaligned })
    }
}
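
// Illustrative note (hypothetical offset): offsetting a place by `Size::from_bytes(8)` with
// `OffsetMode::Inbounds` insists the result stays inside the allocation (akin to
// `ptr::offset`), while `OffsetMode::Wrapping` does plain wrapping arithmetic (akin to
// `ptr::wrapping_offset`) and leaves any inbounds check to the eventual memory access.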

/// A MemPlace with its layout. Constructing it is only possible in this module.
#[derive(Clone, Hash, Eq, PartialEq)]
pub struct MPlaceTy<'tcx, Prov: Provenance = CtfeProvenance> {
    mplace: MemPlace<Prov>,
    pub layout: TyAndLayout<'tcx>,
}

impl<Prov: Provenance> std::fmt::Debug for MPlaceTy<'_, Prov> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Printing `layout` results in too much noise; just print a nice version of the type.
        f.debug_struct("MPlaceTy")
            .field("mplace", &self.mplace)
            .field("ty", &format_args!("{}", self.layout.ty))
            .finish()
    }
}

impl<'tcx, Prov: Provenance> MPlaceTy<'tcx, Prov> {
    /// Produces a MemPlace that works for ZST but nothing else.
    /// Conceptually this is a new allocation, but it doesn't actually create an allocation so you
    /// don't need to worry about memory leaks.
    #[inline]
    pub fn fake_alloc_zst(layout: TyAndLayout<'tcx>) -> Self {
        assert!(layout.is_zst());
        let align = layout.align.abi;
        let ptr = Pointer::without_provenance(align.bytes()); // no provenance, absolute address
        MPlaceTy { mplace: MemPlace { ptr, meta: MemPlaceMeta::None, misaligned: None }, layout }
    }

    /// Adjust the provenance of the main pointer (metadata is unaffected).
    pub fn map_provenance(self, f: impl FnOnce(Prov) -> Prov) -> Self {
        MPlaceTy { mplace: self.mplace.map_provenance(f), ..self }
    }

    #[inline(always)]
    pub(super) fn mplace(&self) -> &MemPlace<Prov> {
        &self.mplace
    }

    #[inline(always)]
    pub fn ptr(&self) -> Pointer<Option<Prov>> {
        self.mplace.ptr
    }

    #[inline(always)]
    pub fn to_ref(&self, cx: &impl HasDataLayout) -> Immediate<Prov> {
        self.mplace.to_ref(cx)
    }
}

impl<'tcx, Prov: Provenance> Projectable<'tcx, Prov> for MPlaceTy<'tcx, Prov> {
    #[inline(always)]
    fn layout(&self) -> TyAndLayout<'tcx> {
        self.layout
    }

    #[inline(always)]
    fn meta(&self) -> MemPlaceMeta<Prov> {
        self.mplace.meta
    }

    fn offset_with_meta<M: Machine<'tcx, Provenance = Prov>>(
        &self,
        offset: Size,
        mode: OffsetMode,
        meta: MemPlaceMeta<Prov>,
        layout: TyAndLayout<'tcx>,
        ecx: &InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, Self> {
        interp_ok(MPlaceTy {
            mplace: self.mplace.offset_with_meta_(offset, mode, meta, ecx)?,
            layout,
        })
    }

    #[inline(always)]
    fn to_op<M: Machine<'tcx, Provenance = Prov>>(
        &self,
        _ecx: &InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        interp_ok(self.clone().into())
    }
}

#[derive(Copy, Clone, Debug)]
pub(super) enum Place<Prov: Provenance = CtfeProvenance> {
    /// A place referring to a value allocated in the `Memory` system.
    Ptr(MemPlace<Prov>),

    /// To support alloc-free locals, we are able to write directly to a local. The offset indicates
    /// where in the local this place is located; if it is `None`, no projection has been applied
    /// and the type of the place is exactly the type of the local.
    /// Such projections are meaningful even if the offset is 0, since they can change layouts.
    /// (Without that optimization, we'd just always be a `MemPlace`.)
    /// `Local` places always refer to the current stack frame, so they are unstable under
    /// function calls/returns and switching between stacks of different threads!
    /// We carry around the address of the `locals` buffer of the correct stack frame as a sanity
    /// check to be able to catch some cases of using a dangling `Place`.
    ///
    /// This variant shall not be used for unsized types -- those must always live in memory.
    Local { local: mir::Local, offset: Option<Size>, locals_addr: usize },
}
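
// Illustrative sketch (hypothetical MIR and layout): if `_1: (u32, u32)` lives in an
// allocation-free local, evaluating `_1` yields `offset: None`, while evaluating `_1.1`
// yields something like `Place::Local { local: _1, offset: Some(Size::from_bytes(4)), .. }`
// with the field's layout, computed by `offset_with_meta` further down in this module.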

/// An evaluated place, together with its type.
///
/// This may reference a stack frame by its index, so `PlaceTy` should generally not be kept around
/// for longer than a single operation. Popping and then pushing a stack frame can make `PlaceTy`
/// point to the wrong destination. If the interpreter has multiple stacks, stack switching will
/// also invalidate a `PlaceTy`.
#[derive(Clone)]
pub struct PlaceTy<'tcx, Prov: Provenance = CtfeProvenance> {
    place: Place<Prov>, // Keep this private; it helps enforce invariants.
    pub layout: TyAndLayout<'tcx>,
}

impl<Prov: Provenance> std::fmt::Debug for PlaceTy<'_, Prov> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Printing `layout` results in too much noise; just print a nice version of the type.
        f.debug_struct("PlaceTy")
            .field("place", &self.place)
            .field("ty", &format_args!("{}", self.layout.ty))
            .finish()
    }
}

impl<'tcx, Prov: Provenance> From<MPlaceTy<'tcx, Prov>> for PlaceTy<'tcx, Prov> {
    #[inline(always)]
    fn from(mplace: MPlaceTy<'tcx, Prov>) -> Self {
        PlaceTy { place: Place::Ptr(mplace.mplace), layout: mplace.layout }
    }
}

impl<'tcx, Prov: Provenance> PlaceTy<'tcx, Prov> {
    #[inline(always)]
    pub(super) fn place(&self) -> &Place<Prov> {
        &self.place
    }

    /// A place is either an mplace or some local.
    ///
    /// Note that the return value can be different even for logically identical places!
    /// Specifically, if a local is stored in-memory, this may return `Local` or `MPlaceTy`
    /// depending on how the place was constructed. In other words, seeing `Local` here does *not*
    /// imply that this place does not point to memory. Every caller must therefore always handle
    /// both cases.
    #[inline(always)]
    pub fn as_mplace_or_local(
        &self,
    ) -> Either<MPlaceTy<'tcx, Prov>, (mir::Local, Option<Size>, usize, TyAndLayout<'tcx>)> {
        match self.place {
            Place::Ptr(mplace) => Left(MPlaceTy { mplace, layout: self.layout }),
            Place::Local { local, offset, locals_addr } => {
                Right((local, offset, locals_addr, self.layout))
            }
        }
    }
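
    // Illustrative caller sketch (hypothetical): since `Local` does not imply "not in memory",
    // consumers must always match on both sides of the `Either`:
    //
    //     match place.as_mplace_or_local() {
    //         Left(mplace) => { /* operate on the `MPlaceTy` */ }
    //         Right((local, offset, locals_addr, layout)) => { /* operate on the local */ }
    //     }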

    #[inline(always)]
    #[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
    pub fn assert_mem_place(&self) -> MPlaceTy<'tcx, Prov> {
        self.as_mplace_or_local().left().unwrap_or_else(|| {
            bug!(
                "PlaceTy of type {} was a local when it was expected to be an MPlace",
                self.layout.ty
            )
        })
    }
}

impl<'tcx, Prov: Provenance> Projectable<'tcx, Prov> for PlaceTy<'tcx, Prov> {
    #[inline(always)]
    fn layout(&self) -> TyAndLayout<'tcx> {
        self.layout
    }

    #[inline]
    fn meta(&self) -> MemPlaceMeta<Prov> {
        match self.as_mplace_or_local() {
            Left(mplace) => mplace.meta(),
            Right(_) => {
                debug_assert!(self.layout.is_sized(), "unsized locals should live in memory");
                MemPlaceMeta::None
            }
        }
    }

    fn offset_with_meta<M: Machine<'tcx, Provenance = Prov>>(
        &self,
        offset: Size,
        mode: OffsetMode,
        meta: MemPlaceMeta<Prov>,
        layout: TyAndLayout<'tcx>,
        ecx: &InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, Self> {
        interp_ok(match self.as_mplace_or_local() {
            Left(mplace) => mplace.offset_with_meta(offset, mode, meta, layout, ecx)?.into(),
            Right((local, old_offset, locals_addr, _)) => {
                debug_assert!(layout.is_sized(), "unsized locals should live in memory");
                assert_matches!(meta, MemPlaceMeta::None); // we couldn't store it anyway...
                // `Place::Local` are always in-bounds of their surrounding local, so we can just
                // check directly if this remains in-bounds. This cannot actually be violated since
                // projections are type-checked and bounds-checked.
                assert!(offset + layout.size <= self.layout.size);

                // Size `+`, ensures no overflow.
                let new_offset = old_offset.unwrap_or(Size::ZERO) + offset;

                PlaceTy {
                    place: Place::Local { local, offset: Some(new_offset), locals_addr },
                    layout,
                }
            }
        })
    }

    #[inline(always)]
    fn to_op<M: Machine<'tcx, Provenance = Prov>>(
        &self,
        ecx: &InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        ecx.place_to_op(self)
    }
}

// These are defined here because they produce a place.
impl<'tcx, Prov: Provenance> OpTy<'tcx, Prov> {
    #[inline(always)]
    pub fn as_mplace_or_imm(&self) -> Either<MPlaceTy<'tcx, Prov>, ImmTy<'tcx, Prov>> {
        match self.op() {
            Operand::Indirect(mplace) => Left(MPlaceTy { mplace: *mplace, layout: self.layout }),
            Operand::Immediate(imm) => Right(ImmTy::from_immediate(*imm, self.layout)),
        }
    }

    #[inline(always)]
    #[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
    pub fn assert_mem_place(&self) -> MPlaceTy<'tcx, Prov> {
        self.as_mplace_or_imm().left().unwrap_or_else(|| {
            bug!(
                "OpTy of type {} was immediate when it was expected to be an MPlace",
                self.layout.ty
            )
        })
    }
}

/// The `Writeable` trait describes interpreter values that can be written to.
pub trait Writeable<'tcx, Prov: Provenance>: Projectable<'tcx, Prov> {
    fn to_place(&self) -> PlaceTy<'tcx, Prov>;

    fn force_mplace<M: Machine<'tcx, Provenance = Prov>>(
        &self,
        ecx: &mut InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, Prov>>;
}

impl<'tcx, Prov: Provenance> Writeable<'tcx, Prov> for PlaceTy<'tcx, Prov> {
    #[inline(always)]
    fn to_place(&self) -> PlaceTy<'tcx, Prov> {
        self.clone()
    }

    #[inline(always)]
    fn force_mplace<M: Machine<'tcx, Provenance = Prov>>(
        &self,
        ecx: &mut InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, Prov>> {
        ecx.force_allocation(self)
    }
}

impl<'tcx, Prov: Provenance> Writeable<'tcx, Prov> for MPlaceTy<'tcx, Prov> {
    #[inline(always)]
    fn to_place(&self) -> PlaceTy<'tcx, Prov> {
        self.clone().into()
    }

    #[inline(always)]
    fn force_mplace<M: Machine<'tcx, Provenance = Prov>>(
        &self,
        _ecx: &mut InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, Prov>> {
        interp_ok(self.clone())
    }
}

// FIXME: Working around https://github.com/rust-lang/rust/issues/54385
impl<'tcx, Prov, M> InterpCx<'tcx, M>
where
    Prov: Provenance,
    M: Machine<'tcx, Provenance = Prov>,
{
    fn ptr_with_meta_to_mplace(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        meta: MemPlaceMeta<M::Provenance>,
        layout: TyAndLayout<'tcx>,
        unaligned: bool,
    ) -> MPlaceTy<'tcx, M::Provenance> {
        let misaligned =
            if unaligned { None } else { self.is_ptr_misaligned(ptr, layout.align.abi) };
        MPlaceTy { mplace: MemPlace { ptr, meta, misaligned }, layout }
    }

    pub fn ptr_to_mplace(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        layout: TyAndLayout<'tcx>,
    ) -> MPlaceTy<'tcx, M::Provenance> {
        assert!(layout.is_sized());
        self.ptr_with_meta_to_mplace(ptr, MemPlaceMeta::None, layout, /*unaligned*/ false)
    }

    pub fn ptr_to_mplace_unaligned(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        layout: TyAndLayout<'tcx>,
    ) -> MPlaceTy<'tcx, M::Provenance> {
        assert!(layout.is_sized());
        self.ptr_with_meta_to_mplace(ptr, MemPlaceMeta::None, layout, /*unaligned*/ true)
    }

    /// Take a value, which represents a (thin or wide) reference, and make it a place.
    /// Alignment is just based on the type. This is the inverse of `mplace_to_ref()`.
    ///
    /// Only call this if you are sure the place is "valid" (aligned and inbounds), or do not
    /// want to ever use the place for memory access!
    /// Generally prefer `deref_pointer`.
    pub fn ref_to_mplace(
        &self,
        val: &ImmTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
        let pointee_type =
            val.layout.ty.builtin_deref(true).expect("`ref_to_mplace` called on non-ptr type");
        let layout = self.layout_of(pointee_type)?;
        let (ptr, meta) = val.to_scalar_and_meta();

        // `ref_to_mplace` is called on raw pointers even if they don't actually get dereferenced;
        // we hence can't call `size_and_align_of` since that asserts more validity than we want.
        let ptr = ptr.to_pointer(self)?;
        interp_ok(self.ptr_with_meta_to_mplace(ptr, meta, layout, /*unaligned*/ false))
    }

    /// Turn an mplace into a (thin or wide) mutable raw pointer, pointing to the same space.
    /// `align` information is lost!
    /// This is the inverse of `ref_to_mplace`.
    pub fn mplace_to_ref(
        &self,
        mplace: &MPlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        let imm = mplace.mplace.to_ref(self);
        let layout = self.layout_of(Ty::new_mut_ptr(self.tcx.tcx, mplace.layout.ty))?;
        interp_ok(ImmTy::from_immediate(imm, layout))
    }

    /// Take an operand, representing a pointer, and dereference it to a place.
    /// Corresponds to the `*` operator in Rust.
    #[instrument(skip(self), level = "trace")]
    pub fn deref_pointer(
        &self,
        src: &impl Projectable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
        if src.layout().ty.is_box() {
            // Derefer should have removed all Box derefs.
            // Some `Box` are not immediates (if they have a custom allocator)
            // so the code below would fail.
            bug!("dereferencing {}", src.layout().ty);
        }

        let val = self.read_immediate(src)?;
        trace!("deref to {} on {:?}", val.layout.ty, *val);

        let mplace = self.ref_to_mplace(&val)?;
        interp_ok(mplace)
    }
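
    // Illustrative usage sketch (hypothetical caller): to interpret `let _2 = *_1;`, one
    // would first evaluate `_1` to an operand and then let `deref_pointer` turn the loaded
    // pointer value into the place it points to:
    //
    //     let ptr_op = ecx.eval_place_to_op(p, None)?; // `p` is the MIR place for `_1`
    //     let pointee: MPlaceTy<'_, _> = ecx.deref_pointer(&ptr_op)?;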

    #[inline]
    pub(super) fn get_place_alloc(
        &self,
        mplace: &MPlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, Option<AllocRef<'_, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
    {
        let (size, _align) = self
            .size_and_align_of_val(mplace)?
            .unwrap_or((mplace.layout.size, mplace.layout.align.abi));
        // We check alignment separately, and *after* checking everything else.
        // If an access is both OOB and misaligned, we want to see the bounds error.
        let a = self.get_ptr_alloc(mplace.ptr(), size)?;
        self.check_misalign(mplace.mplace.misaligned, CheckAlignMsg::BasedOn)?;
        interp_ok(a)
    }

    #[inline]
    pub(super) fn get_place_alloc_mut(
        &mut self,
        mplace: &MPlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, Option<AllocRefMut<'_, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
    {
        let (size, _align) = self
            .size_and_align_of_val(mplace)?
            .unwrap_or((mplace.layout.size, mplace.layout.align.abi));
        // We check alignment separately, and raise that error *after* checking everything else.
        // If an access is both OOB and misaligned, we want to see the bounds error.
        // However we have to call `check_misalign` first to make the borrow checker happy.
        let misalign_res = self.check_misalign(mplace.mplace.misaligned, CheckAlignMsg::BasedOn);
        // An error from get_ptr_alloc_mut takes precedence.
        let (a, ()) = self.get_ptr_alloc_mut(mplace.ptr(), size).and(misalign_res)?;
        interp_ok(a)
    }

    /// Turn a local in the current frame into a place.
    pub fn local_to_place(
        &self,
        local: mir::Local,
    ) -> InterpResult<'tcx, PlaceTy<'tcx, M::Provenance>> {
        let frame = self.frame();
        let layout = self.layout_of_local(frame, local, None)?;
        let place = if layout.is_sized() {
            // We can just always use the `Local` for sized values.
            Place::Local { local, offset: None, locals_addr: frame.locals_addr() }
        } else {
            // Other parts of the system rely on `Place::Local` never being unsized.
            match frame.locals[local].access()? {
                Operand::Immediate(_) => bug!(),
                Operand::Indirect(mplace) => Place::Ptr(*mplace),
            }
        };
        interp_ok(PlaceTy { place, layout })
    }

    /// Computes a place. You should only use this if you intend to write into this
    /// place; for reading, a more efficient alternative is `eval_place_to_op`.
    #[instrument(skip(self), level = "trace")]
    pub fn eval_place(
        &self,
        mir_place: mir::Place<'tcx>,
    ) -> InterpResult<'tcx, PlaceTy<'tcx, M::Provenance>> {
        let _trace =
            enter_trace_span!(M, step::eval_place, ?mir_place, tracing_separate_thread = Empty);

        let mut place = self.local_to_place(mir_place.local)?;
        // Using `try_fold` turned out to be bad for performance, hence the loop.
        for elem in mir_place.projection.iter() {
            place = self.project(&place, elem)?
        }

        trace!("{:?}", self.dump_place(&place));
        // Sanity-check the type we ended up with.
        if cfg!(debug_assertions) {
            let normalized_place_ty = self
                .instantiate_from_current_frame_and_normalize_erasing_regions(
                    mir_place.ty(&self.frame().body.local_decls, *self.tcx).ty,
                )?;
            if !mir_assign_valid_types(
                *self.tcx,
                self.typing_env,
                self.layout_of(normalized_place_ty)?,
                place.layout,
            ) {
                span_bug!(
                    self.cur_span(),
                    "eval_place of a MIR place with type {} produced an interpreter place with type {}",
                    normalized_place_ty,
                    place.layout.ty,
                )
            }
        }
        interp_ok(place)
    }
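
    // Illustrative walk-through (hypothetical MIR): for the place `(*_1).0`, this first turns
    // `_1` into a `PlaceTy` via `local_to_place`, then `self.project` applies the `Deref`
    // elem (reading the pointer and making it a place) and finally the `.0` field projection.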

    /// Given a place, returns either the underlying mplace or a reference to where the value of
    /// this place is stored.
    #[inline(always)]
    fn as_mplace_or_mutable_local(
        &mut self,
        place: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<
        'tcx,
        Either<
            MPlaceTy<'tcx, M::Provenance>,
            (&mut Immediate<M::Provenance>, TyAndLayout<'tcx>, mir::Local),
        >,
    > {
        interp_ok(match place.to_place().as_mplace_or_local() {
            Left(mplace) => Left(mplace),
            Right((local, offset, locals_addr, layout)) => {
                if offset.is_some() {
                    // This has been projected to a part of this local, or had the type changed.
                    // FIXME: there are cases where we could still avoid allocating an mplace.
                    Left(place.force_mplace(self)?)
                } else {
                    debug_assert_eq!(locals_addr, self.frame().locals_addr());
                    debug_assert_eq!(self.layout_of_local(self.frame(), local, None)?, layout);
                    match self.frame_mut().locals[local].access_mut()? {
                        Operand::Indirect(mplace) => {
                            // The local is in memory.
                            Left(MPlaceTy { mplace: *mplace, layout })
                        }
                        Operand::Immediate(local_val) => {
                            // The local still has the optimized representation.
                            Right((local_val, layout, local))
                        }
                    }
                }
            }
        })
    }

    /// Write an immediate to a place
    #[inline(always)]
    #[instrument(skip(self), level = "trace")]
    pub fn write_immediate(
        &mut self,
        src: Immediate<M::Provenance>,
        dest: &impl Writeable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        self.write_immediate_no_validate(src, dest)?;

        if M::enforce_validity(self, dest.layout()) {
            // Data got changed, better make sure it matches the type!
            // Also needed to reset padding.
            self.validate_operand(
                &dest.to_place(),
                M::enforce_validity_recursively(self, dest.layout()),
                /*reset_provenance_and_padding*/ true,
            )?;
        }

        interp_ok(())
    }

    /// Write a scalar to a place
    #[inline(always)]
    pub fn write_scalar(
        &mut self,
        val: impl Into<Scalar<M::Provenance>>,
        dest: &impl Writeable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        self.write_immediate(Immediate::Scalar(val.into()), dest)
    }

    /// Write a pointer to a place
    #[inline(always)]
    pub fn write_pointer(
        &mut self,
        ptr: impl Into<Pointer<Option<M::Provenance>>>,
        dest: &impl Writeable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        self.write_scalar(Scalar::from_maybe_pointer(ptr.into(), self), dest)
    }

    /// Write an immediate to a place.
    /// If you use this you are responsible for validating that things got copied at the
    /// right type.
    pub(super) fn write_immediate_no_validate(
        &mut self,
        src: Immediate<M::Provenance>,
        dest: &impl Writeable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        assert!(dest.layout().is_sized(), "Cannot write unsized immediate data");

        match self.as_mplace_or_mutable_local(&dest.to_place())? {
            Right((local_val, local_layout, local)) => {
                // Local can be updated in-place.
                *local_val = src;
                // Call the machine hook (the data race detector needs to know about this write).
                if !self.validation_in_progress() {
                    M::after_local_write(self, local, /*storage_live*/ false)?;
                }
                // Double-check that the value we are storing and the local fit to each other.
                // Things can go wrong in quite weird ways when this is violated.
                // Unfortunately this is too expensive to do in release builds.
                if cfg!(debug_assertions) {
                    src.assert_matches_abi(
                        local_layout.backend_repr,
                        "invalid immediate for given destination place",
                        self,
                    );
                }
            }
            Left(mplace) => {
                self.write_immediate_to_mplace_no_validate(src, mplace.layout, mplace.mplace)?;
            }
        }
        interp_ok(())
    }

    /// Write an immediate to memory.
    /// If you use this you are responsible for validating that things got copied at the
    /// right layout.
    fn write_immediate_to_mplace_no_validate(
        &mut self,
        value: Immediate<M::Provenance>,
        layout: TyAndLayout<'tcx>,
        dest: MemPlace<M::Provenance>,
    ) -> InterpResult<'tcx> {
        // We use the sizes from `value` below.
        // Ensure that matches the type of the place it is written to.
        value.assert_matches_abi(
            layout.backend_repr,
            "invalid immediate for given destination place",
            self,
        );
        // Note that it is really important that the type here is the right one, and matches the
        // type things are read at. In case `value` is a `ScalarPair`, we don't do any magic here
        // to handle padding properly, which is only correct if we never look at this data with the
        // wrong type.

        let tcx = *self.tcx;
        let will_later_validate = M::enforce_validity(self, layout);
        let Some(mut alloc) = self.get_place_alloc_mut(&MPlaceTy { mplace: dest, layout })? else {
            // zero-sized access
            return interp_ok(());
        };

        match value {
            Immediate::Scalar(scalar) => {
                alloc.write_scalar(alloc_range(Size::ZERO, scalar.size()), scalar)?;
            }
            Immediate::ScalarPair(a_val, b_val) => {
                let BackendRepr::ScalarPair(_a, b) = layout.backend_repr else {
                    span_bug!(
                        self.cur_span(),
                        "write_immediate_to_mplace: invalid ScalarPair layout: {:#?}",
                        layout
                    )
                };
                let a_size = a_val.size();
                let b_offset = a_size.align_to(b.align(&tcx).abi);
                assert!(b_offset.bytes() > 0); // in `operand_field` we use the offset to tell apart the fields
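                // Worked example (illustrative sizes): if `a_val` is a 1-byte scalar and `b`
                // has 4-byte alignment, then `b_offset` is 1 byte aligned up to 4, i.e. 4 bytes;
                // the three padding bytes in between are not touched by the two stores below.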
                // It is tempting to verify `b_offset` against `layout.fields.offset(1)`,
                // but that does not work: We could be a newtype around a pair, then the
                // fields do not match the `ScalarPair` components.

                // In preparation, if we do *not* later reset the padding, we clear the entire
                // destination now to ensure that no stray pointer fragments are being
                // preserved (see <https://github.com/rust-lang/rust/issues/148470>).
                // We can skip this if there is no padding (e.g. for wide pointers).
                if !will_later_validate && a_size + b_val.size() != layout.size {
                    alloc.write_uninit_full();
                }

                alloc.write_scalar(alloc_range(Size::ZERO, a_size), a_val)?;
                alloc.write_scalar(alloc_range(b_offset, b_val.size()), b_val)?;
            }
            Immediate::Uninit => alloc.write_uninit_full(),
        }
        interp_ok(())
    }

    pub fn write_uninit(
        &mut self,
        dest: &impl Writeable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        match self.as_mplace_or_mutable_local(&dest.to_place())? {
            Right((local_val, _local_layout, local)) => {
                *local_val = Immediate::Uninit;
                // Call the machine hook (the data race detector needs to know about this write).
                if !self.validation_in_progress() {
                    M::after_local_write(self, local, /*storage_live*/ false)?;
                }
            }
            Left(mplace) => {
                let Some(mut alloc) = self.get_place_alloc_mut(&mplace)? else {
                    // Zero-sized access
                    return interp_ok(());
                };
                alloc.write_uninit_full();
            }
        }
        interp_ok(())
    }

    /// Remove all provenance in the given place.
    pub fn clear_provenance(
        &mut self,
        dest: &impl Writeable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        // If this is an efficiently represented local variable without provenance, skip the
        // `as_mplace_or_mutable_local` that would otherwise force this local into memory.
        if let Right(imm) = dest.to_op(self)?.as_mplace_or_imm() {
            if !imm.has_provenance() {
                return interp_ok(());
            }
        }
        match self.as_mplace_or_mutable_local(&dest.to_place())? {
            Right((local_val, _local_layout, local)) => {
                local_val.clear_provenance()?;
                // Call the machine hook (the data race detector needs to know about this write).
                if !self.validation_in_progress() {
                    M::after_local_write(self, local, /*storage_live*/ false)?;
                }
            }
            Left(mplace) => {
                let Some(mut alloc) = self.get_place_alloc_mut(&mplace)? else {
                    // Zero-sized access
                    return interp_ok(());
                };
                alloc.clear_provenance();
            }
        }
        interp_ok(())
    }

    /// Copies the data from an operand to a place.
    /// The layouts of the `src` and `dest` may disagree.
    #[inline(always)]
    pub fn copy_op_allow_transmute(
        &mut self,
        src: &impl Projectable<'tcx, M::Provenance>,
        dest: &impl Writeable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        self.copy_op_inner(src, dest, /* allow_transmute */ true)
    }

    /// Copies the data from an operand to a place.
    /// `src` and `dest` must have the same layout and the copied value will be validated.
    #[inline(always)]
    pub fn copy_op(
        &mut self,
        src: &impl Projectable<'tcx, M::Provenance>,
        dest: &impl Writeable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        self.copy_op_inner(src, dest, /* allow_transmute */ false)
    }

    /// Copies the data from an operand to a place.
    /// `allow_transmute` indicates whether the layouts may disagree.
    #[inline(always)]
    #[instrument(skip(self), level = "trace")]
    fn copy_op_inner(
        &mut self,
        src: &impl Projectable<'tcx, M::Provenance>,
        dest: &impl Writeable<'tcx, M::Provenance>,
        allow_transmute: bool,
    ) -> InterpResult<'tcx> {
        // These are technically *two* typed copies: `src` is a not-yet-loaded value,
        // so we're doing a typed copy at `src` type from there to some intermediate storage.
        // And then we're doing a second typed copy from that intermediate storage to `dest`.
        // But as an optimization, we only make a single direct copy here.

        // Do the actual copy.
        self.copy_op_no_validate(src, dest, allow_transmute)?;

        if M::enforce_validity(self, dest.layout()) {
            let dest = dest.to_place();
            // Given that there were two typed copies, we have to ensure this is valid at both types,
            // and we have to ensure this loses provenance and padding according to both types.
            // But if the types are identical, we only do one pass.
            if src.layout().ty != dest.layout().ty {
                self.validate_operand(
                    &dest.transmute(src.layout(), self)?,
                    M::enforce_validity_recursively(self, src.layout()),
                    /*reset_provenance_and_padding*/ true,
                )?;
            }
            self.validate_operand(
                &dest,
                M::enforce_validity_recursively(self, dest.layout()),
                /*reset_provenance_and_padding*/ true,
            )?;
        }

        interp_ok(())
    }
864865/// Copies the data from an operand to a place.
866 /// `allow_transmute` indicates whether the layouts may disagree.
867 /// Also, if you use this you are responsible for validating that things get copied at the
868 /// right type.
    #[instrument(skip(self), level = "trace")]
    pub(super) fn copy_op_no_validate(
        &mut self,
        src: &impl Projectable<'tcx, M::Provenance>,
        dest: &impl Writeable<'tcx, M::Provenance>,
        allow_transmute: bool,
    ) -> InterpResult<'tcx> {
        // We do NOT compare the types for equality, because well-typed code can
        // actually "transmute" `&mut T` to `&T` in an assignment without a cast.
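        // (Illustrative example, not from the original comments: an assignment
        // whose right-hand side has type `&mut i32` and whose destination place
        // has type `&i32` is accepted here, since `mir_assign_valid_types` only
        // demands layout compatibility, not type equality.)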
        let layout_compat =
            mir_assign_valid_types(*self.tcx, self.typing_env, src.layout(), dest.layout());
        if !allow_transmute && !layout_compat {
            span_bug!(
                self.cur_span(),
                "type mismatch when copying!\nsrc: {},\ndest: {}",
                src.layout().ty,
                dest.layout().ty,
            );
        }

        // Let us see if the layout is simple so we take a shortcut,
        // avoid force_allocation.
        let src = match self.read_immediate_raw(src)? {
            Right(src_val) => {
                assert!(!src.layout().is_unsized());
                assert!(!dest.layout().is_unsized());
                assert_eq!(src.layout().size, dest.layout().size);
                // Yay, we got a value that we can write directly.
                return if layout_compat {
                    self.write_immediate_no_validate(*src_val, dest)
                } else {
                    // This is tricky. The problematic case is `ScalarPair`: the `src_val` was
                    // loaded using the offsets defined by `src.layout`. When we put this back into
                    // the destination, we have to use the same offsets! So (a) we make sure we
                    // write back to memory, and (b) we use `dest` *with the source layout*.
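                    // (Hypothetical example: a `ScalarPair` such as `(u16, u64)`
                    // transmuted to a 16-byte array keeps its two components at
                    // the pair's own field offsets, so the write below must use
                    // `src.layout()` rather than the destination's layout.)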
                    let dest_mem = dest.force_mplace(self)?;
                    self.write_immediate_to_mplace_no_validate(
                        *src_val,
                        src.layout(),
                        dest_mem.mplace,
                    )
                };
            }
            Left(mplace) => mplace,
        };
        // Slow path, this does not fit into an immediate. Just memcpy.
        trace!("copy_op: {:?} <- {:?}: {}", *dest, src, dest.layout().ty);

        let dest = dest.force_mplace(self)?;
        let Some((dest_size, _)) = self.size_and_align_of_val(&dest)? else {
            span_bug!(self.cur_span(), "copy_op needs (dynamically) sized values")
        };
        if cfg!(debug_assertions) {
            let src_size = self.size_and_align_of_val(&src)?.unwrap().0;
            assert_eq!(src_size, dest_size, "Cannot copy differently-sized data");
        } else {
            // As a cheap approximation, we compare the fixed parts of the size.
            assert_eq!(src.layout.size, dest.layout.size);
        }

        // Setting `nonoverlapping` here only has an effect when we don't hit the fast-path above,
        // but that should at least match what LLVM does where `memcpy` is also only used when the
        // type does not have Scalar/ScalarPair layout.
        // (Or as the `Assign` docs put it, assignments "not producing primitives" must be
        // non-overlapping.)
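        // (Illustrative: an assignment `*a = *b` of a large struct where `a`
        // and `b` are equal pointers would be an overlapping copy, which the
        // `nonoverlapping` flag passed to `mem_copy` below treats as UB.)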
        // We check alignment separately, and *after* checking everything else.
        // If an access is both OOB and misaligned, we want to see the bounds error.
        self.mem_copy(src.ptr(), dest.ptr(), dest_size, /*nonoverlapping*/ true)?;
        self.check_misalign(src.mplace.misaligned, CheckAlignMsg::BasedOn)?;
        self.check_misalign(dest.mplace.misaligned, CheckAlignMsg::BasedOn)?;
        interp_ok(())
    }

    /// Ensures that a place is in memory, and returns where it is.
    /// If the place currently refers to a local that doesn't yet have a matching allocation,
    /// create such an allocation.
    /// This is essentially `force_to_memplace`.
    #[instrument(skip(self), level = "trace")]
    pub fn force_allocation(
        &mut self,
        place: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
        let mplace = match place.place {
            Place::Local { local, offset, locals_addr } => {
                debug_assert_eq!(locals_addr, self.frame().locals_addr());
                let whole_local = match self.frame_mut().locals[local].access_mut()? {
                    &mut Operand::Immediate(local_val) => {
                        // We need to make an allocation.

                        // We need the layout of the local. We can NOT use the layout we got,
                        // that might e.g., be an inner field of a struct with `Scalar` layout,
                        // that has different alignment than the outer field.
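                        // (Hypothetical example: if `place` projects to a `u8`
                        // field of a local of type `(u64, u8)`, `place.layout`
                        // has size 1 and alignment 1, but the allocation must
                        // use the full, 8-aligned layout of the local.)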
                        let local_layout = self.layout_of_local(&self.frame(), local, None)?;
                        assert!(local_layout.is_sized(), "unsized locals cannot be immediate");
                        let mplace = self.allocate(local_layout, MemoryKind::Stack)?;
                        // Preserve old value. (As an optimization, we can skip this if it was uninit.)
                        if !matches!(local_val, Immediate::Uninit) {
                            // We don't have to validate as we can assume the local was already
                            // valid for its type. We must not use any part of `place` here, that
                            // could be a projection to a part of the local!
                            self.write_immediate_to_mplace_no_validate(
                                local_val,
                                local_layout,
                                mplace.mplace,
                            )?;
                        }
                        M::after_local_moved_to_memory(self, local, &mplace)?;
                        // Now we can call `access_mut` again, asserting it goes well, and actually
                        // overwrite things. This points to the entire allocation, not just the part
                        // the place refers to, i.e. we do this before we apply `offset`.
                        *self.frame_mut().locals[local].access_mut().unwrap() =
                            Operand::Indirect(mplace.mplace);
                        mplace.mplace
                    }
                    &mut Operand::Indirect(mplace) => mplace, // this already was an indirect local
                };
                if let Some(offset) = offset {
                    // This offset is always inbounds, no need to check it again.
                    whole_local.offset_with_meta_(
                        offset,
                        OffsetMode::Wrapping,
                        MemPlaceMeta::None,
                        self,
                    )?
                } else {
                    // Preserve wide place metadata, do not call `offset`.
                    whole_local
                }
            }
            Place::Ptr(mplace) => mplace,
        };
        // Return with the original layout and align, so that the caller can go on
        interp_ok(MPlaceTy { mplace, layout: place.layout })
    }
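
    // (Illustrative usage, not from the original source: evaluating `&local`
    // requires an address, so the interpreter calls something like
    //     let mplace = self.force_allocation(&place)?;
    // which spills an `Operand::Immediate` local into a stack allocation.)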

    pub fn allocate_dyn(
        &mut self,
        layout: TyAndLayout<'tcx>,
        kind: MemoryKind<M::MemoryKind>,
        meta: MemPlaceMeta<M::Provenance>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
        let Some((size, align)) = self.size_and_align_from_meta(&meta, &layout)? else {
            span_bug!(self.cur_span(), "cannot allocate space for `extern` type, size is not known")
        };
        let ptr = self.allocate_ptr(size, align, kind, AllocInit::Uninit)?;
        interp_ok(self.ptr_with_meta_to_mplace(ptr.into(), meta, layout, /*unaligned*/ false))
    }
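
    // (Illustrative: for an unsized type such as `[u8]`, callers pass
    // `MemPlaceMeta::Meta(len)` so that `size_and_align_from_meta` can compute
    // the dynamic size; only `extern type`s lack a computable size and hit the
    // `span_bug!` above.)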

    pub fn allocate(
        &mut self,
        layout: TyAndLayout<'tcx>,
        kind: MemoryKind<M::MemoryKind>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
        assert!(layout.is_sized());
        self.allocate_dyn(layout, kind, MemPlaceMeta::None)
    }

    /// Allocates a sequence of bytes in the interpreter's memory with alignment 1.
    /// This is allocated in immutable global memory and deduplicated.
    pub fn allocate_bytes_dedup(
        &mut self,
        bytes: &[u8],
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let salt = M::get_global_alloc_salt(self, None);
        let id = self.tcx.allocate_bytes_dedup(bytes, salt);

        // Turn untagged "global" pointers (obtained via `tcx`) into the machine pointer to the allocation.
        M::adjust_alloc_root_pointer(
            &self,
            Pointer::from(id),
            M::GLOBAL_KIND.map(MemoryKind::Machine),
        )
    }
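
    // (Illustrative: two calls with equal `bytes` and the same machine salt
    // return pointers to the same underlying global allocation; the salt lets
    // a machine keep otherwise-identical allocations distinct.)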

    /// Allocates a string in the interpreter's memory, returning it as a (wide) place.
    /// This is allocated in immutable global memory and deduplicated.
    pub fn allocate_str_dedup(
        &mut self,
        s: &str,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
        let bytes = s.as_bytes();
        let ptr = self.allocate_bytes_dedup(bytes)?;

        // Create length metadata for the string.
        let meta = Scalar::from_target_usize(u64::try_from(bytes.len()).unwrap(), self);

        // Get layout for Rust's str type.
        let layout = self.layout_of(self.tcx.types.str_).unwrap();

        // Combine pointer and metadata into a wide pointer.
        interp_ok(self.ptr_with_meta_to_mplace(
            ptr.into(),
            MemPlaceMeta::Meta(meta),
            layout,
            /*unaligned*/ false,
        ))
    }
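
    // (Illustrative: `allocate_str_dedup("hi")` yields a wide place whose
    // pointer targets the deduplicated bytes `b"hi"` and whose metadata is the
    // length `2`, mirroring a `&str` wide pointer.)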

    pub fn raw_const_to_mplace(
        &self,
        raw: mir::ConstAlloc<'tcx>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
        // This must be an allocation in `tcx`
        let _ = self.tcx.global_alloc(raw.alloc_id);
        let ptr = self.global_root_pointer(Pointer::from(raw.alloc_id))?;
        let layout = self.layout_of(raw.ty)?;
        interp_ok(self.ptr_to_mplace(ptr.into(), layout))
    }
}

// Some nodes are used a lot. Make sure they don't unintentionally get bigger.
#[cfg(target_pointer_width = "64")]
mod size_asserts {
    use rustc_data_structures::static_assert_size;

    use super::*;
    // tidy-alphabetical-start
    static_assert_size!(MPlaceTy<'_>, 64);
    static_assert_size!(MemPlace, 48);
    static_assert_size!(MemPlaceMeta, 24);
    static_assert_size!(Place, 48);
    static_assert_size!(PlaceTy<'_>, 64);
    // tidy-alphabetical-end
}