//! Computations on places -- field projections, going from mir::Place, and writing
//! into a place.
//! All high-level functions to write to memory work on places as destinations.
use either::{Either, Left, Right};
use rustc_abi::{BackendRepr, HasDataLayout, Size};
use rustc_data_structures::assert_matches;
use rustc_middle::ty::Ty;
use rustc_middle::ty::layout::TyAndLayout;
use rustc_middle::{bug, mir, span_bug};
use tracing::field::Empty;
use tracing::{instrument, trace};

use super::{
    AllocInit, AllocRef, AllocRefMut, CheckAlignMsg, CtfeProvenance, ImmTy, Immediate, InterpCx,
    InterpResult, Machine, MemoryKind, Misalignment, OffsetMode, OpTy, Operand, Pointer,
    Projectable, Provenance, Scalar, alloc_range, interp_ok, mir_assign_valid_types,
};
use crate::enter_trace_span;
2021#[derive(#[automatically_derived]
impl<Prov: ::core::marker::Copy + Provenance> ::core::marker::Copy for
MemPlaceMeta<Prov> {
}Copy, #[automatically_derived]
impl<Prov: ::core::clone::Clone + Provenance> ::core::clone::Clone for
MemPlaceMeta<Prov> {
#[inline]
fn clone(&self) -> MemPlaceMeta<Prov> {
match self {
MemPlaceMeta::Meta(__self_0) =>
MemPlaceMeta::Meta(::core::clone::Clone::clone(__self_0)),
MemPlaceMeta::None => MemPlaceMeta::None,
}
}
}Clone, #[automatically_derived]
impl<Prov: ::core::hash::Hash + Provenance> ::core::hash::Hash for
MemPlaceMeta<Prov> {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {
let __self_discr = ::core::intrinsics::discriminant_value(self);
::core::hash::Hash::hash(&__self_discr, state);
match self {
MemPlaceMeta::Meta(__self_0) =>
::core::hash::Hash::hash(__self_0, state),
_ => {}
}
}
}Hash, #[automatically_derived]
impl<Prov: ::core::cmp::PartialEq + Provenance> ::core::cmp::PartialEq for
MemPlaceMeta<Prov> {
#[inline]
fn eq(&self, other: &MemPlaceMeta<Prov>) -> bool {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
__self_discr == __arg1_discr &&
match (self, other) {
(MemPlaceMeta::Meta(__self_0), MemPlaceMeta::Meta(__arg1_0))
=> __self_0 == __arg1_0,
_ => true,
}
}
}PartialEq, #[automatically_derived]
impl<Prov: ::core::cmp::Eq + Provenance> ::core::cmp::Eq for
MemPlaceMeta<Prov> {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<Scalar<Prov>>;
}
}Eq, #[automatically_derived]
impl<Prov: ::core::fmt::Debug + Provenance> ::core::fmt::Debug for
MemPlaceMeta<Prov> {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match self {
MemPlaceMeta::Meta(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Meta",
&__self_0),
MemPlaceMeta::None =>
::core::fmt::Formatter::write_str(f, "None"),
}
}
}Debug)]
22/// Information required for the sound usage of a `MemPlace`.
23pub enum MemPlaceMeta<Prov: Provenance = CtfeProvenance> {
24/// The unsized payload (e.g. length for slices or vtable pointer for trait objects).
25Meta(Scalar<Prov>),
26/// `Sized` types or unsized `extern type`
27None,
28}
2930impl<Prov: Provenance> MemPlaceMeta<Prov> {
31#[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
32pub fn unwrap_meta(self) -> Scalar<Prov> {
33match self {
34Self::Meta(s) => s,
35Self::None => {
36::rustc_middle::util::bug::bug_fmt(format_args!("expected wide pointer extra data (e.g. slice length or trait object vtable)"))bug!("expected wide pointer extra data (e.g. slice length or trait object vtable)")37 }
38 }
39 }
4041#[inline(always)]
42pub fn has_meta(self) -> bool {
43match self {
44Self::Meta(_) => true,
45Self::None => false,
46 }
47 }
48}
4950#[derive(#[automatically_derived]
impl<Prov: ::core::marker::Copy + Provenance> ::core::marker::Copy for
MemPlace<Prov> {
}Copy, #[automatically_derived]
impl<Prov: ::core::clone::Clone + Provenance> ::core::clone::Clone for
MemPlace<Prov> {
#[inline]
fn clone(&self) -> MemPlace<Prov> {
MemPlace {
ptr: ::core::clone::Clone::clone(&self.ptr),
meta: ::core::clone::Clone::clone(&self.meta),
misaligned: ::core::clone::Clone::clone(&self.misaligned),
}
}
}Clone, #[automatically_derived]
impl<Prov: ::core::hash::Hash + Provenance> ::core::hash::Hash for
MemPlace<Prov> {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {
::core::hash::Hash::hash(&self.ptr, state);
::core::hash::Hash::hash(&self.meta, state);
::core::hash::Hash::hash(&self.misaligned, state)
}
}Hash, #[automatically_derived]
impl<Prov: ::core::cmp::PartialEq + Provenance> ::core::cmp::PartialEq for
MemPlace<Prov> {
#[inline]
fn eq(&self, other: &MemPlace<Prov>) -> bool {
self.ptr == other.ptr && self.meta == other.meta &&
self.misaligned == other.misaligned
}
}PartialEq, #[automatically_derived]
impl<Prov: ::core::cmp::Eq + Provenance> ::core::cmp::Eq for MemPlace<Prov> {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<Pointer<Option<Prov>>>;
let _: ::core::cmp::AssertParamIsEq<MemPlaceMeta<Prov>>;
let _: ::core::cmp::AssertParamIsEq<Option<Misalignment>>;
}
}Eq, #[automatically_derived]
impl<Prov: ::core::fmt::Debug + Provenance> ::core::fmt::Debug for
MemPlace<Prov> {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field3_finish(f, "MemPlace",
"ptr", &self.ptr, "meta", &self.meta, "misaligned",
&&self.misaligned)
}
}Debug)]
51pub(super) struct MemPlace<Prov: Provenance = CtfeProvenance> {
52/// The pointer can be a pure integer, with the `None` provenance.
53pub ptr: Pointer<Option<Prov>>,
54/// Metadata for unsized places. Interpretation is up to the type.
55 /// Must not be present for sized types, but can be missing for unsized types
56 /// (e.g., `extern type`).
57pub meta: MemPlaceMeta<Prov>,
58/// Stores whether this place was created based on a sufficiently aligned pointer.
59misaligned: Option<Misalignment>,
60}
6162impl<Prov: Provenance> MemPlace<Prov> {
63/// Adjust the provenance of the main pointer (metadata is unaffected).
64fn map_provenance(self, f: impl FnOnce(Prov) -> Prov) -> Self {
65MemPlace { ptr: self.ptr.map_provenance(|p| p.map(f)), ..self }
66 }
6768/// Turn a mplace into a (thin or wide) pointer, as a reference, pointing to the same space.
69#[inline]
70fn to_ref(self, cx: &impl HasDataLayout) -> Immediate<Prov> {
71Immediate::new_pointer_with_meta(self.ptr, self.meta, cx)
72 }
7374#[inline]
75// Not called `offset_with_meta` to avoid confusion with the trait method.
76fn offset_with_meta_<'tcx, M: Machine<'tcx, Provenance = Prov>>(
77self,
78 offset: Size,
79 mode: OffsetMode,
80 meta: MemPlaceMeta<Prov>,
81 ecx: &InterpCx<'tcx, M>,
82 ) -> InterpResult<'tcx, Self> {
83if true {
if !(!meta.has_meta() || self.meta.has_meta()) {
{
::core::panicking::panic_fmt(format_args!("cannot use `offset_with_meta` to add metadata to a place"));
}
};
};debug_assert!(
84 !meta.has_meta() || self.meta.has_meta(),
85"cannot use `offset_with_meta` to add metadata to a place"
86);
87let ptr = match mode {
88 OffsetMode::Inbounds => {
89ecx.ptr_offset_inbounds(self.ptr, offset.bytes().try_into().unwrap())?
90}
91 OffsetMode::Wrapping => self.ptr.wrapping_offset(offset, ecx),
92 };
93interp_ok(MemPlace { ptr, meta, misaligned: self.misaligned })
94 }
95}
9697/// A MemPlace with its layout. Constructing it is only possible in this module.
98#[derive(#[automatically_derived]
impl<'tcx, Prov: ::core::clone::Clone + Provenance> ::core::clone::Clone for
MPlaceTy<'tcx, Prov> {
#[inline]
fn clone(&self) -> MPlaceTy<'tcx, Prov> {
MPlaceTy {
mplace: ::core::clone::Clone::clone(&self.mplace),
layout: ::core::clone::Clone::clone(&self.layout),
}
}
}Clone, #[automatically_derived]
impl<'tcx, Prov: ::core::hash::Hash + Provenance> ::core::hash::Hash for
MPlaceTy<'tcx, Prov> {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {
::core::hash::Hash::hash(&self.mplace, state);
::core::hash::Hash::hash(&self.layout, state)
}
}Hash, #[automatically_derived]
impl<'tcx, Prov: ::core::cmp::Eq + Provenance> ::core::cmp::Eq for
MPlaceTy<'tcx, Prov> {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_receiver_is_total_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<MemPlace<Prov>>;
let _: ::core::cmp::AssertParamIsEq<TyAndLayout<'tcx>>;
}
}Eq, #[automatically_derived]
impl<'tcx, Prov: ::core::cmp::PartialEq + Provenance> ::core::cmp::PartialEq
for MPlaceTy<'tcx, Prov> {
#[inline]
fn eq(&self, other: &MPlaceTy<'tcx, Prov>) -> bool {
self.mplace == other.mplace && self.layout == other.layout
}
}PartialEq)]
99pub struct MPlaceTy<'tcx, Prov: Provenance = CtfeProvenance> {
100 mplace: MemPlace<Prov>,
101pub layout: TyAndLayout<'tcx>,
102}
103104impl<Prov: Provenance> std::fmt::Debugfor MPlaceTy<'_, Prov> {
105fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
106// Printing `layout` results in too much noise; just print a nice version of the type.
107f.debug_struct("MPlaceTy")
108 .field("mplace", &self.mplace)
109 .field("ty", &format_args!("{0}", self.layout.ty)format_args!("{}", self.layout.ty))
110 .finish()
111 }
112}
113114impl<'tcx, Prov: Provenance> MPlaceTy<'tcx, Prov> {
115/// Produces a MemPlace that works for ZST but nothing else.
116 /// Conceptually this is a new allocation, but it doesn't actually create an allocation so you
117 /// don't need to worry about memory leaks.
118#[inline]
119pub fn fake_alloc_zst(layout: TyAndLayout<'tcx>) -> Self {
120if !layout.is_zst() {
::core::panicking::panic("assertion failed: layout.is_zst()")
};assert!(layout.is_zst());
121let align = layout.align.abi;
122let ptr = Pointer::without_provenance(align.bytes()); // no provenance, absolute address
123MPlaceTy { mplace: MemPlace { ptr, meta: MemPlaceMeta::None, misaligned: None }, layout }
124 }
125126/// Adjust the provenance of the main pointer (metadata is unaffected).
127pub fn map_provenance(self, f: impl FnOnce(Prov) -> Prov) -> Self {
128MPlaceTy { mplace: self.mplace.map_provenance(f), ..self }
129 }
130131#[inline(always)]
132pub(super) fn mplace(&self) -> &MemPlace<Prov> {
133&self.mplace
134 }
135136#[inline(always)]
137pub fn ptr(&self) -> Pointer<Option<Prov>> {
138self.mplace.ptr
139 }
140141#[inline(always)]
142pub fn to_ref(&self, cx: &impl HasDataLayout) -> Immediate<Prov> {
143self.mplace.to_ref(cx)
144 }
145}
146147impl<'tcx, Prov: Provenance> Projectable<'tcx, Prov> for MPlaceTy<'tcx, Prov> {
148#[inline(always)]
149fn layout(&self) -> TyAndLayout<'tcx> {
150self.layout
151 }
152153#[inline(always)]
154fn meta(&self) -> MemPlaceMeta<Prov> {
155self.mplace.meta
156 }
157158fn offset_with_meta<M: Machine<'tcx, Provenance = Prov>>(
159&self,
160 offset: Size,
161 mode: OffsetMode,
162 meta: MemPlaceMeta<Prov>,
163 layout: TyAndLayout<'tcx>,
164 ecx: &InterpCx<'tcx, M>,
165 ) -> InterpResult<'tcx, Self> {
166interp_ok(MPlaceTy {
167 mplace: self.mplace.offset_with_meta_(offset, mode, meta, ecx)?,
168layout,
169 })
170 }
171172#[inline(always)]
173fn to_op<M: Machine<'tcx, Provenance = Prov>>(
174&self,
175 _ecx: &InterpCx<'tcx, M>,
176 ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
177interp_ok(self.clone().into())
178 }
179}
180181#[derive(#[automatically_derived]
impl<Prov: ::core::marker::Copy + Provenance> ::core::marker::Copy for
Place<Prov> {
}Copy, #[automatically_derived]
impl<Prov: ::core::clone::Clone + Provenance> ::core::clone::Clone for
Place<Prov> {
#[inline]
fn clone(&self) -> Place<Prov> {
match self {
Place::Ptr(__self_0) =>
Place::Ptr(::core::clone::Clone::clone(__self_0)),
Place::Local {
local: __self_0, offset: __self_1, locals_addr: __self_2 } =>
Place::Local {
local: ::core::clone::Clone::clone(__self_0),
offset: ::core::clone::Clone::clone(__self_1),
locals_addr: ::core::clone::Clone::clone(__self_2),
},
}
}
}Clone, #[automatically_derived]
impl<Prov: ::core::fmt::Debug + Provenance> ::core::fmt::Debug for Place<Prov>
{
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match self {
Place::Ptr(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Ptr",
&__self_0),
Place::Local {
local: __self_0, offset: __self_1, locals_addr: __self_2 } =>
::core::fmt::Formatter::debug_struct_field3_finish(f, "Local",
"local", __self_0, "offset", __self_1, "locals_addr",
&__self_2),
}
}
}Debug)]
182pub(super) enum Place<Prov: Provenance = CtfeProvenance> {
183/// A place referring to a value allocated in the `Memory` system.
184Ptr(MemPlace<Prov>),
185186/// To support alloc-free locals, we are able to write directly to a local. The offset indicates
187 /// where in the local this place is located; if it is `None`, no projection has been applied
188 /// and the type of the place is exactly the type of the local.
189 /// Such projections are meaningful even if the offset is 0, since they can change layouts.
190 /// (Without that optimization, we'd just always be a `MemPlace`.)
191 /// `Local` places always refer to the current stack frame, so they are unstable under
192 /// function calls/returns and switching betweens stacks of different threads!
193 /// We carry around the address of the `locals` buffer of the correct stack frame as a sanity
194 /// check to be able to catch some cases of using a dangling `Place`.
195 ///
196 /// This variant shall not be used for unsized types -- those must always live in memory.
197Local { local: mir::Local, offset: Option<Size>, locals_addr: usize },
198}
199200/// An evaluated place, together with its type.
201///
202/// This may reference a stack frame by its index, so `PlaceTy` should generally not be kept around
203/// for longer than a single operation. Popping and then pushing a stack frame can make `PlaceTy`
204/// point to the wrong destination. If the interpreter has multiple stacks, stack switching will
205/// also invalidate a `PlaceTy`.
206#[derive(#[automatically_derived]
impl<'tcx, Prov: ::core::clone::Clone + Provenance> ::core::clone::Clone for
PlaceTy<'tcx, Prov> {
#[inline]
fn clone(&self) -> PlaceTy<'tcx, Prov> {
PlaceTy {
place: ::core::clone::Clone::clone(&self.place),
layout: ::core::clone::Clone::clone(&self.layout),
}
}
}Clone)]
207pub struct PlaceTy<'tcx, Prov: Provenance = CtfeProvenance> {
208 place: Place<Prov>, // Keep this private; it helps enforce invariants.
209pub layout: TyAndLayout<'tcx>,
210}
211212impl<Prov: Provenance> std::fmt::Debugfor PlaceTy<'_, Prov> {
213fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
214// Printing `layout` results in too much noise; just print a nice version of the type.
215f.debug_struct("PlaceTy")
216 .field("place", &self.place)
217 .field("ty", &format_args!("{0}", self.layout.ty)format_args!("{}", self.layout.ty))
218 .finish()
219 }
220}
221222impl<'tcx, Prov: Provenance> From<MPlaceTy<'tcx, Prov>> for PlaceTy<'tcx, Prov> {
223#[inline(always)]
224fn from(mplace: MPlaceTy<'tcx, Prov>) -> Self {
225PlaceTy { place: Place::Ptr(mplace.mplace), layout: mplace.layout }
226 }
227}
228229impl<'tcx, Prov: Provenance> PlaceTy<'tcx, Prov> {
230#[inline(always)]
231pub(super) fn place(&self) -> &Place<Prov> {
232&self.place
233 }
234235/// A place is either an mplace or some local.
236 ///
237 /// Note that the return value can be different even for logically identical places!
238 /// Specifically, if a local is stored in-memory, this may return `Local` or `MPlaceTy`
239 /// depending on how the place was constructed. In other words, seeing `Local` here does *not*
240 /// imply that this place does not point to memory. Every caller must therefore always handle
241 /// both cases.
242#[inline(always)]
243pub fn as_mplace_or_local(
244&self,
245 ) -> Either<MPlaceTy<'tcx, Prov>, (mir::Local, Option<Size>, usize, TyAndLayout<'tcx>)> {
246match self.place {
247 Place::Ptr(mplace) => Left(MPlaceTy { mplace, layout: self.layout }),
248 Place::Local { local, offset, locals_addr } => {
249Right((local, offset, locals_addr, self.layout))
250 }
251 }
252 }
253254#[inline(always)]
255 #[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
256pub fn assert_mem_place(&self) -> MPlaceTy<'tcx, Prov> {
257self.as_mplace_or_local().left().unwrap_or_else(|| {
258::rustc_middle::util::bug::bug_fmt(format_args!("PlaceTy of type {0} was a local when it was expected to be an MPlace",
self.layout.ty))bug!(
259"PlaceTy of type {} was a local when it was expected to be an MPlace",
260self.layout.ty
261 )262 })
263 }
264}
265266impl<'tcx, Prov: Provenance> Projectable<'tcx, Prov> for PlaceTy<'tcx, Prov> {
267#[inline(always)]
268fn layout(&self) -> TyAndLayout<'tcx> {
269self.layout
270 }
271272#[inline]
273fn meta(&self) -> MemPlaceMeta<Prov> {
274match self.as_mplace_or_local() {
275Left(mplace) => mplace.meta(),
276Right(_) => {
277if true {
if !self.layout.is_sized() {
{
::core::panicking::panic_fmt(format_args!("unsized locals should live in memory"));
}
};
};debug_assert!(self.layout.is_sized(), "unsized locals should live in memory");
278 MemPlaceMeta::None279 }
280 }
281 }
282283fn offset_with_meta<M: Machine<'tcx, Provenance = Prov>>(
284&self,
285 offset: Size,
286 mode: OffsetMode,
287 meta: MemPlaceMeta<Prov>,
288 layout: TyAndLayout<'tcx>,
289 ecx: &InterpCx<'tcx, M>,
290 ) -> InterpResult<'tcx, Self> {
291interp_ok(match self.as_mplace_or_local() {
292Left(mplace) => mplace.offset_with_meta(offset, mode, meta, layout, ecx)?.into(),
293Right((local, old_offset, locals_addr, _)) => {
294if true {
if !layout.is_sized() {
{
::core::panicking::panic_fmt(format_args!("unsized locals should live in memory"));
}
};
};debug_assert!(layout.is_sized(), "unsized locals should live in memory");
295match meta {
MemPlaceMeta::None => {}
ref left_val => {
::core::panicking::assert_matches_failed(left_val,
"MemPlaceMeta::None", ::core::option::Option::None);
}
};assert_matches!(meta, MemPlaceMeta::None); // we couldn't store it anyway...
296 // `Place::Local` are always in-bounds of their surrounding local, so we can just
297 // check directly if this remains in-bounds. This cannot actually be violated since
298 // projections are type-checked and bounds-checked.
299if !(offset + layout.size <= self.layout.size) {
::core::panicking::panic("assertion failed: offset + layout.size <= self.layout.size")
};assert!(offset + layout.size <= self.layout.size);
300301// Size `+`, ensures no overflow.
302let new_offset = old_offset.unwrap_or(Size::ZERO) + offset;
303304PlaceTy {
305 place: Place::Local { local, offset: Some(new_offset), locals_addr },
306layout,
307 }
308 }
309 })
310 }
311312#[inline(always)]
313fn to_op<M: Machine<'tcx, Provenance = Prov>>(
314&self,
315 ecx: &InterpCx<'tcx, M>,
316 ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
317ecx.place_to_op(self)
318 }
319}
320321// These are defined here because they produce a place.
322impl<'tcx, Prov: Provenance> OpTy<'tcx, Prov> {
323#[inline(always)]
324pub fn as_mplace_or_imm(&self) -> Either<MPlaceTy<'tcx, Prov>, ImmTy<'tcx, Prov>> {
325match self.op() {
326 Operand::Indirect(mplace) => Left(MPlaceTy { mplace: *mplace, layout: self.layout }),
327 Operand::Immediate(imm) => Right(ImmTy::from_immediate(*imm, self.layout)),
328 }
329 }
330331#[inline(always)]
332 #[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
333pub fn assert_mem_place(&self) -> MPlaceTy<'tcx, Prov> {
334self.as_mplace_or_imm().left().unwrap_or_else(|| {
335::rustc_middle::util::bug::bug_fmt(format_args!("OpTy of type {0} was immediate when it was expected to be an MPlace",
self.layout.ty))bug!(
336"OpTy of type {} was immediate when it was expected to be an MPlace",
337self.layout.ty
338 )339 })
340 }
341}
342343/// The `Weiteable` trait describes interpreter values that can be written to.
344pub trait Writeable<'tcx, Prov: Provenance>: Projectable<'tcx, Prov> {
345fn to_place(&self) -> PlaceTy<'tcx, Prov>;
346347fn force_mplace<M: Machine<'tcx, Provenance = Prov>>(
348&self,
349 ecx: &mut InterpCx<'tcx, M>,
350 ) -> InterpResult<'tcx, MPlaceTy<'tcx, Prov>>;
351}
352353impl<'tcx, Prov: Provenance> Writeable<'tcx, Prov> for PlaceTy<'tcx, Prov> {
354#[inline(always)]
355fn to_place(&self) -> PlaceTy<'tcx, Prov> {
356self.clone()
357 }
358359#[inline(always)]
360fn force_mplace<M: Machine<'tcx, Provenance = Prov>>(
361&self,
362 ecx: &mut InterpCx<'tcx, M>,
363 ) -> InterpResult<'tcx, MPlaceTy<'tcx, Prov>> {
364ecx.force_allocation(self)
365 }
366}
367368impl<'tcx, Prov: Provenance> Writeable<'tcx, Prov> for MPlaceTy<'tcx, Prov> {
369#[inline(always)]
370fn to_place(&self) -> PlaceTy<'tcx, Prov> {
371self.clone().into()
372 }
373374#[inline(always)]
375fn force_mplace<M: Machine<'tcx, Provenance = Prov>>(
376&self,
377 _ecx: &mut InterpCx<'tcx, M>,
378 ) -> InterpResult<'tcx, MPlaceTy<'tcx, Prov>> {
379interp_ok(self.clone())
380 }
381}
382383// FIXME: Working around https://github.com/rust-lang/rust/issues/54385
384impl<'tcx, Prov, M> InterpCx<'tcx, M>
385where
386Prov: Provenance,
387 M: Machine<'tcx, Provenance = Prov>,
388{
389fn ptr_with_meta_to_mplace(
390&self,
391 ptr: Pointer<Option<M::Provenance>>,
392 meta: MemPlaceMeta<M::Provenance>,
393 layout: TyAndLayout<'tcx>,
394 unaligned: bool,
395 ) -> MPlaceTy<'tcx, M::Provenance> {
396let misaligned =
397if unaligned { None } else { self.is_ptr_misaligned(ptr, layout.align.abi) };
398MPlaceTy { mplace: MemPlace { ptr, meta, misaligned }, layout }
399 }
400401pub fn ptr_to_mplace(
402&self,
403 ptr: Pointer<Option<M::Provenance>>,
404 layout: TyAndLayout<'tcx>,
405 ) -> MPlaceTy<'tcx, M::Provenance> {
406if !layout.is_sized() {
::core::panicking::panic("assertion failed: layout.is_sized()")
};assert!(layout.is_sized());
407self.ptr_with_meta_to_mplace(ptr, MemPlaceMeta::None, layout, /*unaligned*/ false)
408 }
409410pub fn ptr_to_mplace_unaligned(
411&self,
412 ptr: Pointer<Option<M::Provenance>>,
413 layout: TyAndLayout<'tcx>,
414 ) -> MPlaceTy<'tcx, M::Provenance> {
415if !layout.is_sized() {
::core::panicking::panic("assertion failed: layout.is_sized()")
};assert!(layout.is_sized());
416self.ptr_with_meta_to_mplace(ptr, MemPlaceMeta::None, layout, /*unaligned*/ true)
417 }
418419/// Take a value, which represents a (thin or wide) reference, and make it a place.
420 /// Alignment is just based on the type. This is the inverse of `mplace_to_ref()`.
421 ///
422 /// Only call this if you are sure the place is "valid" (aligned and inbounds), or do not
423 /// want to ever use the place for memory access!
424 /// Generally prefer `deref_pointer`.
425pub fn ref_to_mplace(
426&self,
427 val: &ImmTy<'tcx, M::Provenance>,
428 ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
429let pointee_type =
430val.layout.ty.builtin_deref(true).expect("`ref_to_mplace` called on non-ptr type");
431let layout = self.layout_of(pointee_type)?;
432let (ptr, meta) = val.to_scalar_and_meta();
433434// `ref_to_mplace` is called on raw pointers even if they don't actually get dereferenced;
435 // we hence can't call `size_and_align_of` since that asserts more validity than we want.
436let ptr = ptr.to_pointer(self)?;
437interp_ok(self.ptr_with_meta_to_mplace(ptr, meta, layout, /*unaligned*/ false))
438 }
439440/// Turn a mplace into a (thin or wide) mutable raw pointer, pointing to the same space.
441 /// `align` information is lost!
442 /// This is the inverse of `ref_to_mplace`.
443pub fn mplace_to_ref(
444&self,
445 mplace: &MPlaceTy<'tcx, M::Provenance>,
446 ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
447let imm = mplace.mplace.to_ref(self);
448let layout = self.layout_of(Ty::new_mut_ptr(self.tcx.tcx, mplace.layout.ty))?;
449interp_ok(ImmTy::from_immediate(imm, layout))
450 }
451452/// Take an operand, representing a pointer, and dereference it to a place.
453 /// Corresponds to the `*` operator in Rust.
454#[allow(clippy :: suspicious_else_formatting)]
{
let __tracing_attr_span;
let __tracing_attr_guard;
if ::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() ||
{ false } {
__tracing_attr_span =
{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("deref_pointer",
"rustc_const_eval::interpret::place",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/place.rs"),
::tracing_core::__macro_support::Option::Some(454u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::place"),
::tracing_core::field::FieldSet::new(&["src"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::SPAN)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let mut interest = ::tracing::subscriber::Interest::never();
if ::tracing::Level::TRACE <=
::tracing::level_filters::STATIC_MAX_LEVEL &&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{ interest = __CALLSITE.interest(); !interest.is_never() }
&&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest) {
let meta = __CALLSITE.metadata();
::tracing::Span::new(meta,
&{
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = meta.fields().iter();
meta.fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&::tracing::field::debug(&src)
as &dyn Value))])
})
} else {
let span =
::tracing::__macro_support::__disabled_span(__CALLSITE.metadata());
{};
span
}
};
__tracing_attr_guard = __tracing_attr_span.enter();
}
#[warn(clippy :: suspicious_else_formatting)]
{
#[allow(unknown_lints, unreachable_code, clippy ::
diverging_sub_expression, clippy :: empty_loop, clippy ::
let_unit_value, clippy :: let_with_type_underscore, clippy ::
needless_return, clippy :: unreachable)]
if false {
let __tracing_attr_fake_return:
InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> = loop {};
return __tracing_attr_fake_return;
}
{
if src.layout().ty.is_box() {
::rustc_middle::util::bug::bug_fmt(format_args!("dereferencing {0}",
src.layout().ty));
}
let val = self.read_immediate(src)?;
{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/place.rs:467",
"rustc_const_eval::interpret::place",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/place.rs"),
::tracing_core::__macro_support::Option::Some(467u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::place"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::TRACE <=
::tracing::level_filters::STATIC_MAX_LEVEL &&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("deref to {0} on {1:?}",
val.layout.ty, *val) as &dyn Value))])
});
} else { ; }
};
let mplace = self.ref_to_mplace(&val)?;
interp_ok(mplace)
}
}
}#[instrument(skip(self), level = "trace")]455pub fn deref_pointer(
456&self,
457 src: &impl Projectable<'tcx, M::Provenance>,
458 ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
459if src.layout().ty.is_box() {
460// Derefer should have removed all Box derefs.
461 // Some `Box` are not immediates (if they have a custom allocator)
462 // so the code below would fail.
463bug!("dereferencing {}", src.layout().ty);
464 }
465466let val = self.read_immediate(src)?;
467trace!("deref to {} on {:?}", val.layout.ty, *val);
468469let mplace = self.ref_to_mplace(&val)?;
470 interp_ok(mplace)
471 }
472473#[inline]
474pub(super) fn get_place_alloc(
475&self,
476 mplace: &MPlaceTy<'tcx, M::Provenance>,
477 ) -> InterpResult<'tcx, Option<AllocRef<'_, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
478 {
479let (size, _align) = self480 .size_and_align_of_val(mplace)?
481.unwrap_or((mplace.layout.size, mplace.layout.align.abi));
482// We check alignment separately, and *after* checking everything else.
483 // If an access is both OOB and misaligned, we want to see the bounds error.
484let a = self.get_ptr_alloc(mplace.ptr(), size)?;
485self.check_misalign(mplace.mplace.misaligned, CheckAlignMsg::BasedOn)?;
486interp_ok(a)
487 }
488489#[inline]
490pub(super) fn get_place_alloc_mut(
491&mut self,
492 mplace: &MPlaceTy<'tcx, M::Provenance>,
493 ) -> InterpResult<'tcx, Option<AllocRefMut<'_, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
494 {
495let (size, _align) = self496 .size_and_align_of_val(mplace)?
497.unwrap_or((mplace.layout.size, mplace.layout.align.abi));
498// We check alignment separately, and raise that error *after* checking everything else.
499 // If an access is both OOB and misaligned, we want to see the bounds error.
500 // However we have to call `check_misalign` first to make the borrow checker happy.
501let misalign_res = self.check_misalign(mplace.mplace.misaligned, CheckAlignMsg::BasedOn);
502// An error from get_ptr_alloc_mut takes precedence.
503let (a, ()) = self.get_ptr_alloc_mut(mplace.ptr(), size).and(misalign_res)?;
504interp_ok(a)
505 }
506507/// Turn a local in the current frame into a place.
508pub fn local_to_place(
509&self,
510 local: mir::Local,
511 ) -> InterpResult<'tcx, PlaceTy<'tcx, M::Provenance>> {
512let frame = self.frame();
513let layout = self.layout_of_local(frame, local, None)?;
514let place = if layout.is_sized() {
515// We can just always use the `Local` for sized values.
516Place::Local { local, offset: None, locals_addr: frame.locals_addr() }
517 } else {
518// Other parts of the system rely on `Place::Local` never being unsized.
519match frame.locals[local].access()? {
520 Operand::Immediate(_) => ::rustc_middle::util::bug::bug_fmt(format_args!("impossible case reached"))bug!(),
521 Operand::Indirect(mplace) => Place::Ptr(*mplace),
522 }
523 };
524interp_ok(PlaceTy { place, layout })
525 }
526527/// Computes a place. You should only use this if you intend to write into this
528 /// place; for reading, a more efficient alternative is `eval_place_to_op`.
529#[allow(clippy :: suspicious_else_formatting)]
{
let __tracing_attr_span;
let __tracing_attr_guard;
if ::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() ||
{ false } {
__tracing_attr_span =
{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("eval_place",
"rustc_const_eval::interpret::place",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/place.rs"),
::tracing_core::__macro_support::Option::Some(529u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::place"),
::tracing_core::field::FieldSet::new(&["mir_place"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::SPAN)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let mut interest = ::tracing::subscriber::Interest::never();
if ::tracing::Level::TRACE <=
::tracing::level_filters::STATIC_MAX_LEVEL &&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{ interest = __CALLSITE.interest(); !interest.is_never() }
&&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest) {
let meta = __CALLSITE.metadata();
::tracing::Span::new(meta,
&{
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = meta.fields().iter();
meta.fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&::tracing::field::debug(&mir_place)
as &dyn Value))])
})
} else {
let span =
::tracing::__macro_support::__disabled_span(__CALLSITE.metadata());
{};
span
}
};
__tracing_attr_guard = __tracing_attr_span.enter();
}
#[warn(clippy :: suspicious_else_formatting)]
{
#[allow(unknown_lints, unreachable_code, clippy ::
diverging_sub_expression, clippy :: empty_loop, clippy ::
let_unit_value, clippy :: let_with_type_underscore, clippy ::
needless_return, clippy :: unreachable)]
if false {
let __tracing_attr_fake_return:
InterpResult<'tcx, PlaceTy<'tcx, M::Provenance>> = loop {};
return __tracing_attr_fake_return;
}
{
let _trace =
<M as
crate::interpret::Machine>::enter_trace_span(||
{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("step",
"rustc_const_eval::interpret::place",
::tracing::Level::INFO,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/place.rs"),
::tracing_core::__macro_support::Option::Some(535u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::place"),
::tracing_core::field::FieldSet::new(&["step", "mir_place",
"tracing_separate_thread"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::SPAN)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let mut interest = ::tracing::subscriber::Interest::never();
if ::tracing::Level::INFO <=
::tracing::level_filters::STATIC_MAX_LEVEL &&
::tracing::Level::INFO <=
::tracing::level_filters::LevelFilter::current() &&
{ interest = __CALLSITE.interest(); !interest.is_never() }
&&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest) {
let meta = __CALLSITE.metadata();
::tracing::Span::new(meta,
&{
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = meta.fields().iter();
meta.fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&display(&"eval_place")
as &dyn Value)),
(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&debug(&mir_place)
as &dyn Value)),
(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&Empty as
&dyn Value))])
})
} else {
let span =
::tracing::__macro_support::__disabled_span(__CALLSITE.metadata());
{};
span
}
});
let mut place = self.local_to_place(mir_place.local)?;
for elem in mir_place.projection.iter() {
place = self.project(&place, elem)?
}
{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/place.rs:543",
"rustc_const_eval::interpret::place",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/place.rs"),
::tracing_core::__macro_support::Option::Some(543u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::place"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::TRACE <=
::tracing::level_filters::STATIC_MAX_LEVEL &&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("{0:?}",
self.dump_place(&place)) as &dyn Value))])
});
} else { ; }
};
if true {
let normalized_place_ty =
self.instantiate_from_current_frame_and_normalize_erasing_regions(mir_place.ty(&self.frame().body.local_decls,
*self.tcx).ty)?;
if !mir_assign_valid_types(*self.tcx, self.typing_env,
self.layout_of(normalized_place_ty)?, place.layout) {
::rustc_middle::util::bug::span_bug_fmt(self.cur_span(),
format_args!("eval_place of a MIR place with type {0} produced an interpreter place with type {1}",
normalized_place_ty, place.layout.ty))
}
}
interp_ok(place)
}
}
}#[instrument(skip(self), level = "trace")]530pub fn eval_place(
531&self,
532 mir_place: mir::Place<'tcx>,
533 ) -> InterpResult<'tcx, PlaceTy<'tcx, M::Provenance>> {
534let _trace =
535enter_trace_span!(M, step::eval_place, ?mir_place, tracing_separate_thread = Empty);
536537let mut place = self.local_to_place(mir_place.local)?;
538// Using `try_fold` turned out to be bad for performance, hence the loop.
539for elem in mir_place.projection.iter() {
540 place = self.project(&place, elem)?
541}
542543trace!("{:?}", self.dump_place(&place));
544// Sanity-check the type we ended up with.
545if cfg!(debug_assertions) {
546let normalized_place_ty = self
547.instantiate_from_current_frame_and_normalize_erasing_regions(
548 mir_place.ty(&self.frame().body.local_decls, *self.tcx).ty,
549 )?;
550if !mir_assign_valid_types(
551*self.tcx,
552self.typing_env,
553self.layout_of(normalized_place_ty)?,
554 place.layout,
555 ) {
556span_bug!(
557self.cur_span(),
558"eval_place of a MIR place with type {} produced an interpreter place with type {}",
559 normalized_place_ty,
560 place.layout.ty,
561 )
562 }
563 }
564 interp_ok(place)
565 }
566567/// Given a place, returns either the underlying mplace or a reference to where the value of
568 /// this place is stored.
569#[inline(always)]
570fn as_mplace_or_mutable_local(
571&mut self,
572 place: &PlaceTy<'tcx, M::Provenance>,
573 ) -> InterpResult<
574'tcx,
575Either<
576MPlaceTy<'tcx, M::Provenance>,
577 (&mut Immediate<M::Provenance>, TyAndLayout<'tcx>, mir::Local),
578 >,
579 > {
580interp_ok(match place.to_place().as_mplace_or_local() {
581Left(mplace) => Left(mplace),
582Right((local, offset, locals_addr, layout)) => {
583if offset.is_some() {
584// This has been projected to a part of this local, or had the type changed.
585 // FIXME: there are cases where we could still avoid allocating an mplace.
586Left(place.force_mplace(self)?)
587 } else {
588if true {
match (&locals_addr, &self.frame().locals_addr()) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val,
&*right_val, ::core::option::Option::None);
}
}
};
};debug_assert_eq!(locals_addr, self.frame().locals_addr());
589if true {
match (&self.layout_of_local(self.frame(), local, None)?, &layout) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val,
&*right_val, ::core::option::Option::None);
}
}
};
};debug_assert_eq!(self.layout_of_local(self.frame(), local, None)?, layout);
590match self.frame_mut().locals[local].access_mut()? {
591 Operand::Indirect(mplace) => {
592// The local is in memory.
593Left(MPlaceTy { mplace: *mplace, layout })
594 }
595 Operand::Immediate(local_val) => {
596// The local still has the optimized representation.
597Right((local_val, layout, local))
598 }
599 }
600 }
601 }
602 })
603 }
604605/// Write an immediate to a place
606#[inline(always)]
607#[allow(clippy :: suspicious_else_formatting)]
{
let __tracing_attr_span;
let __tracing_attr_guard;
if ::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() ||
{ false } {
__tracing_attr_span =
{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("write_immediate",
"rustc_const_eval::interpret::place",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/place.rs"),
::tracing_core::__macro_support::Option::Some(607u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::place"),
::tracing_core::field::FieldSet::new(&["src", "dest"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::SPAN)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let mut interest = ::tracing::subscriber::Interest::never();
if ::tracing::Level::TRACE <=
::tracing::level_filters::STATIC_MAX_LEVEL &&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{ interest = __CALLSITE.interest(); !interest.is_never() }
&&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest) {
let meta = __CALLSITE.metadata();
::tracing::Span::new(meta,
&{
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = meta.fields().iter();
meta.fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&::tracing::field::debug(&src)
as &dyn Value)),
(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&::tracing::field::debug(&dest)
as &dyn Value))])
})
} else {
let span =
::tracing::__macro_support::__disabled_span(__CALLSITE.metadata());
{};
span
}
};
__tracing_attr_guard = __tracing_attr_span.enter();
}
#[warn(clippy :: suspicious_else_formatting)]
{
#[allow(unknown_lints, unreachable_code, clippy ::
diverging_sub_expression, clippy :: empty_loop, clippy ::
let_unit_value, clippy :: let_with_type_underscore, clippy ::
needless_return, clippy :: unreachable)]
if false {
let __tracing_attr_fake_return: InterpResult<'tcx> = loop {};
return __tracing_attr_fake_return;
}
{
self.write_immediate_no_validate(src, dest)?;
if M::enforce_validity(self, dest.layout()) {
self.validate_operand(&dest.to_place(),
M::enforce_validity_recursively(self, dest.layout()),
true)?;
}
interp_ok(())
}
}
}#[instrument(skip(self), level = "trace")]608pub fn write_immediate(
609&mut self,
610 src: Immediate<M::Provenance>,
611 dest: &impl Writeable<'tcx, M::Provenance>,
612 ) -> InterpResult<'tcx> {
613self.write_immediate_no_validate(src, dest)?;
614615if M::enforce_validity(self, dest.layout()) {
616// Data got changed, better make sure it matches the type!
617 // Also needed to reset padding.
618self.validate_operand(
619&dest.to_place(),
620 M::enforce_validity_recursively(self, dest.layout()),
621/*reset_provenance_and_padding*/ true,
622 )?;
623 }
624625 interp_ok(())
626 }
627628/// Write a scalar to a place
629#[inline(always)]
630pub fn write_scalar(
631&mut self,
632 val: impl Into<Scalar<M::Provenance>>,
633 dest: &impl Writeable<'tcx, M::Provenance>,
634 ) -> InterpResult<'tcx> {
635self.write_immediate(Immediate::Scalar(val.into()), dest)
636 }
637638/// Write a pointer to a place
639#[inline(always)]
640pub fn write_pointer(
641&mut self,
642 ptr: impl Into<Pointer<Option<M::Provenance>>>,
643 dest: &impl Writeable<'tcx, M::Provenance>,
644 ) -> InterpResult<'tcx> {
645self.write_scalar(Scalar::from_maybe_pointer(ptr.into(), self), dest)
646 }
647648/// Write an immediate to a place.
649 /// If you use this you are responsible for validating that things got copied at the
650 /// right type.
651pub(super) fn write_immediate_no_validate(
652&mut self,
653 src: Immediate<M::Provenance>,
654 dest: &impl Writeable<'tcx, M::Provenance>,
655 ) -> InterpResult<'tcx> {
656if !dest.layout().is_sized() {
{
::core::panicking::panic_fmt(format_args!("Cannot write unsized immediate data"));
}
};assert!(dest.layout().is_sized(), "Cannot write unsized immediate data");
657658match self.as_mplace_or_mutable_local(&dest.to_place())? {
659Right((local_val, local_layout, local)) => {
660// Local can be updated in-place.
661*local_val = src;
662// Call the machine hook (the data race detector needs to know about this write).
663if !self.validation_in_progress() {
664 M::after_local_write(self, local, /*storage_live*/ false)?;
665 }
666// Double-check that the value we are storing and the local fit to each other.
667 // Things can ge wrong in quite weird ways when this is violated.
668 // Unfortunately this is too expensive to do in release builds.
669if truecfg!(debug_assertions) {
670src.assert_matches_abi(
671local_layout.backend_repr,
672"invalid immediate for given destination place",
673self,
674 );
675 }
676 }
677Left(mplace) => {
678self.write_immediate_to_mplace_no_validate(src, mplace.layout, mplace.mplace)?;
679 }
680 }
681interp_ok(())
682 }
683684/// Write an immediate to memory.
685 /// If you use this you are responsible for validating that things got copied at the
686 /// right layout.
687fn write_immediate_to_mplace_no_validate(
688&mut self,
689 value: Immediate<M::Provenance>,
690 layout: TyAndLayout<'tcx>,
691 dest: MemPlace<M::Provenance>,
692 ) -> InterpResult<'tcx> {
693// We use the sizes from `value` below.
694 // Ensure that matches the type of the place it is written to.
695value.assert_matches_abi(
696layout.backend_repr,
697"invalid immediate for given destination place",
698self,
699 );
700// Note that it is really important that the type here is the right one, and matches the
701 // type things are read at. In case `value` is a `ScalarPair`, we don't do any magic here
702 // to handle padding properly, which is only correct if we never look at this data with the
703 // wrong type.
704705let tcx = *self.tcx;
706let will_later_validate = M::enforce_validity(self, layout);
707let Some(mut alloc) = self.get_place_alloc_mut(&MPlaceTy { mplace: dest, layout })? else {
708// zero-sized access
709return interp_ok(());
710 };
711712match value {
713 Immediate::Scalar(scalar) => {
714alloc.write_scalar(alloc_range(Size::ZERO, scalar.size()), scalar)?;
715 }
716 Immediate::ScalarPair(a_val, b_val) => {
717let BackendRepr::ScalarPair(_a, b) = layout.backend_repr else {
718::rustc_middle::util::bug::span_bug_fmt(self.cur_span(),
format_args!("write_immediate_to_mplace: invalid ScalarPair layout: {0:#?}",
layout))span_bug!(
719self.cur_span(),
720"write_immediate_to_mplace: invalid ScalarPair layout: {:#?}",
721 layout
722 )723 };
724let a_size = a_val.size();
725let b_offset = a_size.align_to(b.align(&tcx).abi);
726if !(b_offset.bytes() > 0) {
::core::panicking::panic("assertion failed: b_offset.bytes() > 0")
};assert!(b_offset.bytes() > 0); // in `operand_field` we use the offset to tell apart the fields
727728 // It is tempting to verify `b_offset` against `layout.fields.offset(1)`,
729 // but that does not work: We could be a newtype around a pair, then the
730 // fields do not match the `ScalarPair` components.
731732 // In preparation, if we do *not* later reset the padding, we clear the entire
733 // destination now to ensure that no stray pointer fragments are being
734 // preserved (see <https://github.com/rust-lang/rust/issues/148470>).
735 // We can skip this if there is no padding (e.g. for wide pointers).
736if !will_later_validate && a_size + b_val.size() != layout.size {
737alloc.write_uninit_full();
738 }
739740alloc.write_scalar(alloc_range(Size::ZERO, a_size), a_val)?;
741alloc.write_scalar(alloc_range(b_offset, b_val.size()), b_val)?;
742 }
743 Immediate::Uninit => alloc.write_uninit_full(),
744 }
745interp_ok(())
746 }
747748pub fn write_uninit(
749&mut self,
750 dest: &impl Writeable<'tcx, M::Provenance>,
751 ) -> InterpResult<'tcx> {
752match self.as_mplace_or_mutable_local(&dest.to_place())? {
753Right((local_val, _local_layout, local)) => {
754*local_val = Immediate::Uninit;
755// Call the machine hook (the data race detector needs to know about this write).
756if !self.validation_in_progress() {
757 M::after_local_write(self, local, /*storage_live*/ false)?;
758 }
759 }
760Left(mplace) => {
761let Some(mut alloc) = self.get_place_alloc_mut(&mplace)? else {
762// Zero-sized access
763return interp_ok(());
764 };
765alloc.write_uninit_full();
766 }
767 }
768interp_ok(())
769 }
770771/// Remove all provenance in the given place.
772pub fn clear_provenance(
773&mut self,
774 dest: &impl Writeable<'tcx, M::Provenance>,
775 ) -> InterpResult<'tcx> {
776// If this is an efficiently represented local variable without provenance, skip the
777 // `as_mplace_or_mutable_local` that would otherwise force this local into memory.
778if let Right(imm) = dest.to_op(self)?.as_mplace_or_imm() {
779if !imm.has_provenance() {
780return interp_ok(());
781 }
782 }
783match self.as_mplace_or_mutable_local(&dest.to_place())? {
784Right((local_val, _local_layout, local)) => {
785local_val.clear_provenance()?;
786// Call the machine hook (the data race detector needs to know about this write).
787if !self.validation_in_progress() {
788 M::after_local_write(self, local, /*storage_live*/ false)?;
789 }
790 }
791Left(mplace) => {
792let Some(mut alloc) = self.get_place_alloc_mut(&mplace)? else {
793// Zero-sized access
794return interp_ok(());
795 };
796alloc.clear_provenance();
797 }
798 }
799interp_ok(())
800 }
801802/// Copies the data from an operand to a place.
803 /// The layouts of the `src` and `dest` may disagree.
804#[inline(always)]
805pub fn copy_op_allow_transmute(
806&mut self,
807 src: &impl Projectable<'tcx, M::Provenance>,
808 dest: &impl Writeable<'tcx, M::Provenance>,
809 ) -> InterpResult<'tcx> {
810self.copy_op_inner(src, dest, /* allow_transmute */ true)
811 }
812813/// Copies the data from an operand to a place.
814 /// `src` and `dest` must have the same layout and the copied value will be validated.
815#[inline(always)]
816pub fn copy_op(
817&mut self,
818 src: &impl Projectable<'tcx, M::Provenance>,
819 dest: &impl Writeable<'tcx, M::Provenance>,
820 ) -> InterpResult<'tcx> {
821self.copy_op_inner(src, dest, /* allow_transmute */ false)
822 }
823824/// Copies the data from an operand to a place.
825 /// `allow_transmute` indicates whether the layouts may disagree.
826#[inline(always)]
827#[allow(clippy :: suspicious_else_formatting)]
{
let __tracing_attr_span;
let __tracing_attr_guard;
if ::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() ||
{ false } {
__tracing_attr_span =
{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("copy_op_inner",
"rustc_const_eval::interpret::place",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/place.rs"),
::tracing_core::__macro_support::Option::Some(827u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::place"),
::tracing_core::field::FieldSet::new(&["src", "dest",
"allow_transmute"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::SPAN)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let mut interest = ::tracing::subscriber::Interest::never();
if ::tracing::Level::TRACE <=
::tracing::level_filters::STATIC_MAX_LEVEL &&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{ interest = __CALLSITE.interest(); !interest.is_never() }
&&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest) {
let meta = __CALLSITE.metadata();
::tracing::Span::new(meta,
&{
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = meta.fields().iter();
meta.fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&::tracing::field::debug(&src)
as &dyn Value)),
(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&::tracing::field::debug(&dest)
as &dyn Value)),
(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&allow_transmute as
&dyn Value))])
})
} else {
let span =
::tracing::__macro_support::__disabled_span(__CALLSITE.metadata());
{};
span
}
};
__tracing_attr_guard = __tracing_attr_span.enter();
}
#[warn(clippy :: suspicious_else_formatting)]
{
#[allow(unknown_lints, unreachable_code, clippy ::
diverging_sub_expression, clippy :: empty_loop, clippy ::
let_unit_value, clippy :: let_with_type_underscore, clippy ::
needless_return, clippy :: unreachable)]
if false {
let __tracing_attr_fake_return: InterpResult<'tcx> = loop {};
return __tracing_attr_fake_return;
}
{
self.copy_op_no_validate(src, dest, allow_transmute)?;
if M::enforce_validity(self, dest.layout()) {
let dest = dest.to_place();
if src.layout().ty != dest.layout().ty {
self.validate_operand(&dest.transmute(src.layout(), self)?,
M::enforce_validity_recursively(self, src.layout()), true)?;
}
self.validate_operand(&dest,
M::enforce_validity_recursively(self, dest.layout()),
true)?;
}
interp_ok(())
}
}
}#[instrument(skip(self), level = "trace")]828fn copy_op_inner(
829&mut self,
830 src: &impl Projectable<'tcx, M::Provenance>,
831 dest: &impl Writeable<'tcx, M::Provenance>,
832 allow_transmute: bool,
833 ) -> InterpResult<'tcx> {
834// These are technically *two* typed copies: `src` is a not-yet-loaded value,
835 // so we're doing a typed copy at `src` type from there to some intermediate storage.
836 // And then we're doing a second typed copy from that intermediate storage to `dest`.
837 // But as an optimization, we only make a single direct copy here.
838839 // Do the actual copy.
840self.copy_op_no_validate(src, dest, allow_transmute)?;
841842if M::enforce_validity(self, dest.layout()) {
843let dest = dest.to_place();
844// Given that there were two typed copies, we have to ensure this is valid at both types,
845 // and we have to ensure this loses provenance and padding according to both types.
846 // But if the types are identical, we only do one pass.
847if src.layout().ty != dest.layout().ty {
848self.validate_operand(
849&dest.transmute(src.layout(), self)?,
850 M::enforce_validity_recursively(self, src.layout()),
851/*reset_provenance_and_padding*/ true,
852 )?;
853 }
854self.validate_operand(
855&dest,
856 M::enforce_validity_recursively(self, dest.layout()),
857/*reset_provenance_and_padding*/ true,
858 )?;
859 }
860861 interp_ok(())
862 }
863864/// Copies the data from an operand to a place.
865 /// `allow_transmute` indicates whether the layouts may disagree.
866 /// Also, if you use this you are responsible for validating that things get copied at the
867 /// right type.
868#[allow(clippy :: suspicious_else_formatting)]
{
let __tracing_attr_span;
let __tracing_attr_guard;
if ::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() ||
{ false } {
__tracing_attr_span =
{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("copy_op_no_validate",
"rustc_const_eval::interpret::place",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/place.rs"),
::tracing_core::__macro_support::Option::Some(868u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::place"),
::tracing_core::field::FieldSet::new(&["src", "dest",
"allow_transmute"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::SPAN)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let mut interest = ::tracing::subscriber::Interest::never();
if ::tracing::Level::TRACE <=
::tracing::level_filters::STATIC_MAX_LEVEL &&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{ interest = __CALLSITE.interest(); !interest.is_never() }
&&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest) {
let meta = __CALLSITE.metadata();
::tracing::Span::new(meta,
&{
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = meta.fields().iter();
meta.fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&::tracing::field::debug(&src)
as &dyn Value)),
(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&::tracing::field::debug(&dest)
as &dyn Value)),
(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&allow_transmute as
&dyn Value))])
})
} else {
let span =
::tracing::__macro_support::__disabled_span(__CALLSITE.metadata());
{};
span
}
};
__tracing_attr_guard = __tracing_attr_span.enter();
}
#[warn(clippy :: suspicious_else_formatting)]
{
#[allow(unknown_lints, unreachable_code, clippy ::
diverging_sub_expression, clippy :: empty_loop, clippy ::
let_unit_value, clippy :: let_with_type_underscore, clippy ::
needless_return, clippy :: unreachable)]
if false {
let __tracing_attr_fake_return: InterpResult<'tcx> = loop {};
return __tracing_attr_fake_return;
}
{
let layout_compat =
mir_assign_valid_types(*self.tcx, self.typing_env,
src.layout(), dest.layout());
if !allow_transmute && !layout_compat {
::rustc_middle::util::bug::span_bug_fmt(self.cur_span(),
format_args!("type mismatch when copying!\nsrc: {0},\ndest: {1}",
src.layout().ty, dest.layout().ty));
}
let src =
match self.read_immediate_raw(src)? {
Right(src_val) => {
if !!src.layout().is_unsized() {
::core::panicking::panic("assertion failed: !src.layout().is_unsized()")
};
if !!dest.layout().is_unsized() {
::core::panicking::panic("assertion failed: !dest.layout().is_unsized()")
};
match (&src.layout().size, &dest.layout().size) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val,
&*right_val, ::core::option::Option::None);
}
}
};
return if layout_compat {
self.write_immediate_no_validate(*src_val, dest)
} else {
let dest_mem = dest.force_mplace(self)?;
self.write_immediate_to_mplace_no_validate(*src_val,
src.layout(), dest_mem.mplace)
};
}
Left(mplace) => mplace,
};
{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_const_eval/src/interpret/place.rs:914",
"rustc_const_eval::interpret::place",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/place.rs"),
::tracing_core::__macro_support::Option::Some(914u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::place"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::TRACE <=
::tracing::level_filters::STATIC_MAX_LEVEL &&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("copy_op: {0:?} <- {1:?}: {2}",
*dest, src, dest.layout().ty) as &dyn Value))])
});
} else { ; }
};
let dest = dest.force_mplace(self)?;
let Some((dest_size, _)) =
self.size_and_align_of_val(&dest)? else {
::rustc_middle::util::bug::span_bug_fmt(self.cur_span(),
format_args!("copy_op needs (dynamically) sized values"))
};
if true {
let src_size = self.size_and_align_of_val(&src)?.unwrap().0;
match (&src_size, &dest_size) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val,
&*right_val,
::core::option::Option::Some(format_args!("Cannot copy differently-sized data")));
}
}
};
} else {
match (&src.layout.size, &dest.layout.size) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val,
&*right_val, ::core::option::Option::None);
}
}
};
}
self.mem_copy(src.ptr(), dest.ptr(), dest_size, true)?;
self.check_misalign(src.mplace.misaligned,
CheckAlignMsg::BasedOn)?;
self.check_misalign(dest.mplace.misaligned,
CheckAlignMsg::BasedOn)?;
interp_ok(())
}
}
}#[instrument(skip(self), level = "trace")]869pub(super) fn copy_op_no_validate(
870&mut self,
871 src: &impl Projectable<'tcx, M::Provenance>,
872 dest: &impl Writeable<'tcx, M::Provenance>,
873 allow_transmute: bool,
874 ) -> InterpResult<'tcx> {
875// We do NOT compare the types for equality, because well-typed code can
876 // actually "transmute" `&mut T` to `&T` in an assignment without a cast.
877let layout_compat =
878 mir_assign_valid_types(*self.tcx, self.typing_env, src.layout(), dest.layout());
879if !allow_transmute && !layout_compat {
880span_bug!(
881self.cur_span(),
882"type mismatch when copying!\nsrc: {},\ndest: {}",
883 src.layout().ty,
884 dest.layout().ty,
885 );
886 }
887888// Let us see if the layout is simple so we take a shortcut,
889 // avoid force_allocation.
890let src = match self.read_immediate_raw(src)? {
891 Right(src_val) => {
892assert!(!src.layout().is_unsized());
893assert!(!dest.layout().is_unsized());
894assert_eq!(src.layout().size, dest.layout().size);
895// Yay, we got a value that we can write directly.
896return if layout_compat {
897self.write_immediate_no_validate(*src_val, dest)
898 } else {
899// This is tricky. The problematic case is `ScalarPair`: the `src_val` was
900 // loaded using the offsets defined by `src.layout`. When we put this back into
901 // the destination, we have to use the same offsets! So (a) we make sure we
902 // write back to memory, and (b) we use `dest` *with the source layout*.
903let dest_mem = dest.force_mplace(self)?;
904self.write_immediate_to_mplace_no_validate(
905*src_val,
906 src.layout(),
907 dest_mem.mplace,
908 )
909 };
910 }
911 Left(mplace) => mplace,
912 };
913// Slow path, this does not fit into an immediate. Just memcpy.
914trace!("copy_op: {:?} <- {:?}: {}", *dest, src, dest.layout().ty);
915916let dest = dest.force_mplace(self)?;
917let Some((dest_size, _)) = self.size_and_align_of_val(&dest)? else {
918span_bug!(self.cur_span(), "copy_op needs (dynamically) sized values")
919 };
920if cfg!(debug_assertions) {
921let src_size = self.size_and_align_of_val(&src)?.unwrap().0;
922assert_eq!(src_size, dest_size, "Cannot copy differently-sized data");
923 } else {
924// As a cheap approximation, we compare the fixed parts of the size.
925assert_eq!(src.layout.size, dest.layout.size);
926 }
927928// Setting `nonoverlapping` here only has an effect when we don't hit the fast-path above,
929 // but that should at least match what LLVM does where `memcpy` is also only used when the
930 // type does not have Scalar/ScalarPair layout.
931 // (Or as the `Assign` docs put it, assignments "not producing primitives" must be
932 // non-overlapping.)
933 // We check alignment separately, and *after* checking everything else.
934 // If an access is both OOB and misaligned, we want to see the bounds error.
935self.mem_copy(src.ptr(), dest.ptr(), dest_size, /*nonoverlapping*/ true)?;
936self.check_misalign(src.mplace.misaligned, CheckAlignMsg::BasedOn)?;
937self.check_misalign(dest.mplace.misaligned, CheckAlignMsg::BasedOn)?;
938 interp_ok(())
939 }
940941/// Ensures that a place is in memory, and returns where it is.
942 /// If the place currently refers to a local that doesn't yet have a matching allocation,
943 /// create such an allocation.
944 /// This is essentially `force_to_memplace`.
945#[allow(clippy :: suspicious_else_formatting)]
{
let __tracing_attr_span;
let __tracing_attr_guard;
if ::tracing::Level::TRACE <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() ||
{ false } {
__tracing_attr_span =
{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("force_allocation",
"rustc_const_eval::interpret::place",
::tracing::Level::TRACE,
::tracing_core::__macro_support::Option::Some("compiler/rustc_const_eval/src/interpret/place.rs"),
::tracing_core::__macro_support::Option::Some(945u32),
::tracing_core::__macro_support::Option::Some("rustc_const_eval::interpret::place"),
::tracing_core::field::FieldSet::new(&["place"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::SPAN)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let mut interest = ::tracing::subscriber::Interest::never();
if ::tracing::Level::TRACE <=
::tracing::level_filters::STATIC_MAX_LEVEL &&
::tracing::Level::TRACE <=
::tracing::level_filters::LevelFilter::current() &&
{ interest = __CALLSITE.interest(); !interest.is_never() }
&&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest) {
let meta = __CALLSITE.metadata();
::tracing::Span::new(meta,
&{
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = meta.fields().iter();
meta.fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&::tracing::field::debug(&place)
as &dyn Value))])
})
} else {
let span =
::tracing::__macro_support::__disabled_span(__CALLSITE.metadata());
{};
span
}
};
__tracing_attr_guard = __tracing_attr_span.enter();
}
#[warn(clippy :: suspicious_else_formatting)]
{
#[allow(unknown_lints, unreachable_code, clippy ::
diverging_sub_expression, clippy :: empty_loop, clippy ::
let_unit_value, clippy :: let_with_type_underscore, clippy ::
needless_return, clippy :: unreachable)]
if false {
let __tracing_attr_fake_return:
InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> = loop {};
return __tracing_attr_fake_return;
}
{
let mplace =
match place.place {
Place::Local { local, offset, locals_addr } => {
if true {
match (&locals_addr, &self.frame().locals_addr()) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
let kind = ::core::panicking::AssertKind::Eq;
::core::panicking::assert_failed(kind, &*left_val,
&*right_val, ::core::option::Option::None);
}
}
};
};
let whole_local =
match self.frame_mut().locals[local].access_mut()? {
&mut Operand::Immediate(local_val) => {
let local_layout =
self.layout_of_local(&self.frame(), local, None)?;
if !local_layout.is_sized() {
{
::core::panicking::panic_fmt(format_args!("unsized locals cannot be immediate"));
}
};
let mplace =
self.allocate(local_layout, MemoryKind::Stack)?;
if !#[allow(non_exhaustive_omitted_patterns)] match local_val
{
Immediate::Uninit => true,
_ => false,
} {
self.write_immediate_to_mplace_no_validate(local_val,
local_layout, mplace.mplace)?;
}
M::after_local_moved_to_memory(self, local, &mplace)?;
*self.frame_mut().locals[local].access_mut().unwrap() =
Operand::Indirect(mplace.mplace);
mplace.mplace
}
&mut Operand::Indirect(mplace) => mplace,
};
if let Some(offset) = offset {
whole_local.offset_with_meta_(offset, OffsetMode::Wrapping,
MemPlaceMeta::None, self)?
} else { whole_local }
}
Place::Ptr(mplace) => mplace,
};
interp_ok(MPlaceTy { mplace, layout: place.layout })
}
}
}#[instrument(skip(self), level = "trace")]946pub fn force_allocation(
947&mut self,
948 place: &PlaceTy<'tcx, M::Provenance>,
949 ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
950let mplace = match place.place {
951 Place::Local { local, offset, locals_addr } => {
952debug_assert_eq!(locals_addr, self.frame().locals_addr());
953let whole_local = match self.frame_mut().locals[local].access_mut()? {
954&mut Operand::Immediate(local_val) => {
955// We need to make an allocation.
956957 // We need the layout of the local. We can NOT use the layout we got,
958 // that might e.g., be an inner field of a struct with `Scalar` layout,
959 // that has different alignment than the outer field.
960let local_layout = self.layout_of_local(&self.frame(), local, None)?;
961assert!(local_layout.is_sized(), "unsized locals cannot be immediate");
962let mplace = self.allocate(local_layout, MemoryKind::Stack)?;
963// Preserve old value. (As an optimization, we can skip this if it was uninit.)
964if !matches!(local_val, Immediate::Uninit) {
965// We don't have to validate as we can assume the local was already
966 // valid for its type. We must not use any part of `place` here, that
967 // could be a projection to a part of the local!
968self.write_immediate_to_mplace_no_validate(
969 local_val,
970 local_layout,
971 mplace.mplace,
972 )?;
973 }
974 M::after_local_moved_to_memory(self, local, &mplace)?;
975// Now we can call `access_mut` again, asserting it goes well, and actually
976 // overwrite things. This points to the entire allocation, not just the part
977 // the place refers to, i.e. we do this before we apply `offset`.
978*self.frame_mut().locals[local].access_mut().unwrap() =
979 Operand::Indirect(mplace.mplace);
980 mplace.mplace
981 }
982&mut Operand::Indirect(mplace) => mplace, // this already was an indirect local
983};
984if let Some(offset) = offset {
985// This offset is always inbounds, no need to check it again.
986whole_local.offset_with_meta_(
987 offset,
988 OffsetMode::Wrapping,
989 MemPlaceMeta::None,
990self,
991 )?
992} else {
993// Preserve wide place metadata, do not call `offset`.
994whole_local
995 }
996 }
997 Place::Ptr(mplace) => mplace,
998 };
999// Return with the original layout and align, so that the caller can go on
1000interp_ok(MPlaceTy { mplace, layout: place.layout })
1001 }
10021003pub fn allocate_dyn(
1004&mut self,
1005 layout: TyAndLayout<'tcx>,
1006 kind: MemoryKind<M::MemoryKind>,
1007 meta: MemPlaceMeta<M::Provenance>,
1008 ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
1009let Some((size, align)) = self.size_and_align_from_meta(&meta, &layout)? else {
1010::rustc_middle::util::bug::span_bug_fmt(self.cur_span(),
format_args!("cannot allocate space for `extern` type, size is not known"))span_bug!(self.cur_span(), "cannot allocate space for `extern` type, size is not known")1011 };
1012let ptr = self.allocate_ptr(size, align, kind, AllocInit::Uninit)?;
1013interp_ok(self.ptr_with_meta_to_mplace(ptr.into(), meta, layout, /*unaligned*/ false))
1014 }
10151016pub fn allocate(
1017&mut self,
1018 layout: TyAndLayout<'tcx>,
1019 kind: MemoryKind<M::MemoryKind>,
1020 ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
1021if !layout.is_sized() {
::core::panicking::panic("assertion failed: layout.is_sized()")
};assert!(layout.is_sized());
1022self.allocate_dyn(layout, kind, MemPlaceMeta::None)
1023 }
10241025/// Allocates a sequence of bytes in the interpreter's memory with alignment 1.
1026 /// This is allocated in immutable global memory and deduplicated.
1027pub fn allocate_bytes_dedup(
1028&mut self,
1029 bytes: &[u8],
1030 ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
1031let salt = M::get_global_alloc_salt(self, None);
1032let id = self.tcx.allocate_bytes_dedup(bytes, salt);
10331034// Turn untagged "global" pointers (obtained via `tcx`) into the machine pointer to the allocation.
1035M::adjust_alloc_root_pointer(
1036&self,
1037Pointer::from(id),
1038 M::GLOBAL_KIND.map(MemoryKind::Machine),
1039 )
1040 }
10411042/// Allocates a string in the interpreter's memory, returning it as a (wide) place.
1043 /// This is allocated in immutable global memory and deduplicated.
1044pub fn allocate_str_dedup(
1045&mut self,
1046 s: &str,
1047 ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
1048let bytes = s.as_bytes();
1049let ptr = self.allocate_bytes_dedup(bytes)?;
10501051// Create length metadata for the string.
1052let meta = Scalar::from_target_usize(u64::try_from(bytes.len()).unwrap(), self);
10531054// Get layout for Rust's str type.
1055let layout = self.layout_of(self.tcx.types.str_).unwrap();
10561057// Combine pointer and metadata into a wide pointer.
1058interp_ok(self.ptr_with_meta_to_mplace(
1059ptr.into(),
1060 MemPlaceMeta::Meta(meta),
1061layout,
1062/*unaligned*/ false,
1063 ))
1064 }
10651066pub fn raw_const_to_mplace(
1067&self,
1068 raw: mir::ConstAlloc<'tcx>,
1069 ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
1070// This must be an allocation in `tcx`
1071let _ = self.tcx.global_alloc(raw.alloc_id);
1072let ptr = self.global_root_pointer(Pointer::from(raw.alloc_id))?;
1073let layout = self.layout_of(raw.ty)?;
1074interp_ok(self.ptr_to_mplace(ptr.into(), layout))
1075 }
1076}
10771078// Some nodes are used a lot. Make sure they don't unintentionally get bigger.
1079#[cfg(target_pointer_width = "64")]
1080mod size_asserts {
1081use rustc_data_structures::static_assert_size;
10821083use super::*;
1084// tidy-alphabetical-start
1085const _: [(); 64] = [(); ::std::mem::size_of::<MPlaceTy<'_>>()];static_assert_size!(MPlaceTy<'_>, 64);
1086const _: [(); 48] = [(); ::std::mem::size_of::<MemPlace>()];static_assert_size!(MemPlace, 48);
1087const _: [(); 24] = [(); ::std::mem::size_of::<MemPlaceMeta>()];static_assert_size!(MemPlaceMeta, 24);
1088const _: [(); 48] = [(); ::std::mem::size_of::<Place>()];static_assert_size!(Place, 48);
1089const _: [(); 64] = [(); ::std::mem::size_of::<PlaceTy<'_>>()];static_assert_size!(PlaceTy<'_>, 64);
1090// tidy-alphabetical-end
1091}