use std::assert_matches::assert_matches;

use either::{Either, Left, Right};
use rustc_abi::{BackendRepr, HasDataLayout, Size};
use rustc_middle::ty::Ty;
use rustc_middle::ty::layout::TyAndLayout;
use rustc_middle::{bug, mir, span_bug};
use tracing::field::Empty;
use tracing::{instrument, trace};

use super::{
    AllocInit, AllocRef, AllocRefMut, CheckAlignMsg, CtfeProvenance, ImmTy, Immediate, InterpCx,
    InterpResult, Machine, MemoryKind, Misalignment, OffsetMode, OpTy, Operand, Pointer,
    Projectable, Provenance, Scalar, alloc_range, interp_ok, mir_assign_valid_types,
};
use crate::enter_trace_span;

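/// The metadata attached to a place: either nothing (for sized values), or the extra data of a
/// wide pointer, e.g. a slice length or a trait object vtable.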
#[derive(Copy, Clone, Hash, PartialEq, Eq, Debug)]
pub enum MemPlaceMeta<Prov: Provenance = CtfeProvenance> {
    Meta(Scalar<Prov>),
    None,
}

impl<Prov: Provenance> MemPlaceMeta<Prov> {
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn unwrap_meta(self) -> Scalar<Prov> {
        match self {
            Self::Meta(s) => s,
            Self::None => {
                bug!("expected wide pointer extra data (e.g. slice length or trait object vtable)")
            }
        }
    }

    #[inline(always)]
    pub fn has_meta(self) -> bool {
        match self {
            Self::Meta(_) => true,
            Self::None => false,
        }
    }
}

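/// A location in memory: a pointer, the wide-pointer metadata (if any), and the misalignment
/// that was detected when the place was created (if it was created with alignment tracking).
/// The misalignment is only reported once the place is actually accessed.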
#[derive(Copy, Clone, Hash, PartialEq, Eq, Debug)]
pub(super) struct MemPlace<Prov: Provenance = CtfeProvenance> {
    pub ptr: Pointer<Option<Prov>>,
    pub meta: MemPlaceMeta<Prov>,
    misaligned: Option<Misalignment>,
}

impl<Prov: Provenance> MemPlace<Prov> {
    fn map_provenance(self, f: impl FnOnce(Prov) -> Prov) -> Self {
        MemPlace { ptr: self.ptr.map_provenance(|p| p.map(f)), ..self }
    }

    #[inline]
    fn to_ref(self, cx: &impl HasDataLayout) -> Immediate<Prov> {
        Immediate::new_pointer_with_meta(self.ptr, self.meta, cx)
    }

    #[inline]
    fn offset_with_meta_<'tcx, M: Machine<'tcx, Provenance = Prov>>(
        self,
        offset: Size,
        mode: OffsetMode,
        meta: MemPlaceMeta<Prov>,
        ecx: &InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, Self> {
        debug_assert!(
            !meta.has_meta() || self.meta.has_meta(),
            "cannot use `offset_with_meta` to add metadata to a place"
        );
        let ptr = match mode {
            OffsetMode::Inbounds => {
                ecx.ptr_offset_inbounds(self.ptr, offset.bytes().try_into().unwrap())?
            }
            OffsetMode::Wrapping => self.ptr.wrapping_offset(offset, ecx),
        };
        interp_ok(MemPlace { ptr, meta, misaligned: self.misaligned })
    }
}

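/// A `MemPlace` together with its type and layout. Unlike `PlaceTy`, this is always backed by
/// actual memory, which makes it the right type for operations that need a memory location.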
#[derive(Clone, Hash, Eq, PartialEq)]
pub struct MPlaceTy<'tcx, Prov: Provenance = CtfeProvenance> {
    mplace: MemPlace<Prov>,
    pub layout: TyAndLayout<'tcx>,
}

impl<Prov: Provenance> std::fmt::Debug for MPlaceTy<'_, Prov> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("MPlaceTy")
            .field("mplace", &self.mplace)
            .field("ty", &format_args!("{}", self.layout.ty))
            .finish()
    }
}

impl<'tcx, Prov: Provenance> MPlaceTy<'tcx, Prov> {
    #[inline]
    pub fn fake_alloc_zst(layout: TyAndLayout<'tcx>) -> Self {
        assert!(layout.is_zst());
        let align = layout.align.abi;
        let ptr = Pointer::without_provenance(align.bytes());
        MPlaceTy { mplace: MemPlace { ptr, meta: MemPlaceMeta::None, misaligned: None }, layout }
    }

    pub fn map_provenance(self, f: impl FnOnce(Prov) -> Prov) -> Self {
        MPlaceTy { mplace: self.mplace.map_provenance(f), ..self }
    }

    #[inline(always)]
    pub(super) fn mplace(&self) -> &MemPlace<Prov> {
        &self.mplace
    }

    #[inline(always)]
    pub fn ptr(&self) -> Pointer<Option<Prov>> {
        self.mplace.ptr
    }

    #[inline(always)]
    pub fn to_ref(&self, cx: &impl HasDataLayout) -> Immediate<Prov> {
        self.mplace.to_ref(cx)
    }
}

impl<'tcx, Prov: Provenance> Projectable<'tcx, Prov> for MPlaceTy<'tcx, Prov> {
    #[inline(always)]
    fn layout(&self) -> TyAndLayout<'tcx> {
        self.layout
    }

    #[inline(always)]
    fn meta(&self) -> MemPlaceMeta<Prov> {
        self.mplace.meta
    }

    fn offset_with_meta<M: Machine<'tcx, Provenance = Prov>>(
        &self,
        offset: Size,
        mode: OffsetMode,
        meta: MemPlaceMeta<Prov>,
        layout: TyAndLayout<'tcx>,
        ecx: &InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, Self> {
        interp_ok(MPlaceTy {
            mplace: self.mplace.offset_with_meta_(offset, mode, meta, ecx)?,
            layout,
        })
    }

    #[inline(always)]
    fn to_op<M: Machine<'tcx, Provenance = Prov>>(
        &self,
        _ecx: &InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        interp_ok(self.clone().into())
    }
}

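/// Where a value can be written to: either a location in memory, or a local that is (still)
/// stored directly in its stack frame as an `Immediate`. The `Local` variant additionally
/// carries an optional byte offset into the local (from projections) and the address of the
/// frame's locals, which is only used for debug assertions.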
#[derive(Copy, Clone, Debug)]
pub(super) enum Place<Prov: Provenance = CtfeProvenance> {
    Ptr(MemPlace<Prov>),

    Local { local: mir::Local, offset: Option<Size>, locals_addr: usize },
}

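/// A `Place` together with its type and layout. This is the general type used as the
/// destination of writes; use `MPlaceTy` when the place is known to live in memory.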
#[derive(Clone)]
pub struct PlaceTy<'tcx, Prov: Provenance = CtfeProvenance> {
    place: Place<Prov>,
    pub layout: TyAndLayout<'tcx>,
}

impl<Prov: Provenance> std::fmt::Debug for PlaceTy<'_, Prov> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("PlaceTy")
            .field("place", &self.place)
            .field("ty", &format_args!("{}", self.layout.ty))
            .finish()
    }
}

impl<'tcx, Prov: Provenance> From<MPlaceTy<'tcx, Prov>> for PlaceTy<'tcx, Prov> {
    #[inline(always)]
    fn from(mplace: MPlaceTy<'tcx, Prov>) -> Self {
        PlaceTy { place: Place::Ptr(mplace.mplace), layout: mplace.layout }
    }
}

impl<'tcx, Prov: Provenance> PlaceTy<'tcx, Prov> {
    #[inline(always)]
    pub(super) fn place(&self) -> &Place<Prov> {
        &self.place
    }

    #[inline(always)]
    pub fn as_mplace_or_local(
        &self,
    ) -> Either<MPlaceTy<'tcx, Prov>, (mir::Local, Option<Size>, usize, TyAndLayout<'tcx>)> {
        match self.place {
            Place::Ptr(mplace) => Left(MPlaceTy { mplace, layout: self.layout }),
            Place::Local { local, offset, locals_addr } => {
                Right((local, offset, locals_addr, self.layout))
            }
        }
    }

    #[inline(always)]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn assert_mem_place(&self) -> MPlaceTy<'tcx, Prov> {
        self.as_mplace_or_local().left().unwrap_or_else(|| {
            bug!(
                "PlaceTy of type {} was a local when it was expected to be an MPlace",
                self.layout.ty
            )
        })
    }
}

impl<'tcx, Prov: Provenance> Projectable<'tcx, Prov> for PlaceTy<'tcx, Prov> {
    #[inline(always)]
    fn layout(&self) -> TyAndLayout<'tcx> {
        self.layout
    }

    #[inline]
    fn meta(&self) -> MemPlaceMeta<Prov> {
        match self.as_mplace_or_local() {
            Left(mplace) => mplace.meta(),
            Right(_) => {
                debug_assert!(self.layout.is_sized(), "unsized locals should live in memory");
                MemPlaceMeta::None
            }
        }
    }

    fn offset_with_meta<M: Machine<'tcx, Provenance = Prov>>(
        &self,
        offset: Size,
        mode: OffsetMode,
        meta: MemPlaceMeta<Prov>,
        layout: TyAndLayout<'tcx>,
        ecx: &InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, Self> {
        interp_ok(match self.as_mplace_or_local() {
            Left(mplace) => mplace.offset_with_meta(offset, mode, meta, layout, ecx)?.into(),
            Right((local, old_offset, locals_addr, _)) => {
                debug_assert!(layout.is_sized(), "unsized locals should live in memory");
                assert_matches!(meta, MemPlaceMeta::None);
                assert!(offset + layout.size <= self.layout.size);

                let new_offset = old_offset.unwrap_or(Size::ZERO) + offset;

                PlaceTy {
                    place: Place::Local { local, offset: Some(new_offset), locals_addr },
                    layout,
                }
            }
        })
    }

    #[inline(always)]
    fn to_op<M: Machine<'tcx, Provenance = Prov>>(
        &self,
        ecx: &InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
        ecx.place_to_op(self)
    }
}

impl<'tcx, Prov: Provenance> OpTy<'tcx, Prov> {
    #[inline(always)]
    pub fn as_mplace_or_imm(&self) -> Either<MPlaceTy<'tcx, Prov>, ImmTy<'tcx, Prov>> {
        match self.op() {
            Operand::Indirect(mplace) => Left(MPlaceTy { mplace: *mplace, layout: self.layout }),
            Operand::Immediate(imm) => Right(ImmTy::from_immediate(*imm, self.layout)),
        }
    }

    #[inline(always)]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn assert_mem_place(&self) -> MPlaceTy<'tcx, Prov> {
        self.as_mplace_or_imm().left().unwrap_or_else(|| {
            bug!(
                "OpTy of type {} was immediate when it was expected to be an MPlace",
                self.layout.ty
            )
        })
    }
}

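/// A type that can be written to: both `PlaceTy` and `MPlaceTy` implement this, so the
/// `write_*` and `copy_op` methods below can accept either one. `force_mplace` turns the
/// writeable thing into a place that is definitely backed by memory, allocating if necessary.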
pub trait Writeable<'tcx, Prov: Provenance>: Projectable<'tcx, Prov> {
    fn to_place(&self) -> PlaceTy<'tcx, Prov>;

    fn force_mplace<M: Machine<'tcx, Provenance = Prov>>(
        &self,
        ecx: &mut InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, Prov>>;
}

impl<'tcx, Prov: Provenance> Writeable<'tcx, Prov> for PlaceTy<'tcx, Prov> {
    #[inline(always)]
    fn to_place(&self) -> PlaceTy<'tcx, Prov> {
        self.clone()
    }

    #[inline(always)]
    fn force_mplace<M: Machine<'tcx, Provenance = Prov>>(
        &self,
        ecx: &mut InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, Prov>> {
        ecx.force_allocation(self)
    }
}

impl<'tcx, Prov: Provenance> Writeable<'tcx, Prov> for MPlaceTy<'tcx, Prov> {
    #[inline(always)]
    fn to_place(&self) -> PlaceTy<'tcx, Prov> {
        self.clone().into()
    }

    #[inline(always)]
    fn force_mplace<M: Machine<'tcx, Provenance = Prov>>(
        &self,
        _ecx: &mut InterpCx<'tcx, M>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, Prov>> {
        interp_ok(self.clone())
    }
}

impl<'tcx, Prov, M> InterpCx<'tcx, M>
where
    Prov: Provenance,
    M: Machine<'tcx, Provenance = Prov>,
{
    fn ptr_with_meta_to_mplace(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        meta: MemPlaceMeta<M::Provenance>,
        layout: TyAndLayout<'tcx>,
        unaligned: bool,
    ) -> MPlaceTy<'tcx, M::Provenance> {
        let misaligned =
            if unaligned { None } else { self.is_ptr_misaligned(ptr, layout.align.abi) };
        MPlaceTy { mplace: MemPlace { ptr, meta, misaligned }, layout }
    }

    pub fn ptr_to_mplace(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        layout: TyAndLayout<'tcx>,
    ) -> MPlaceTy<'tcx, M::Provenance> {
        assert!(layout.is_sized());
        self.ptr_with_meta_to_mplace(ptr, MemPlaceMeta::None, layout, false)
    }

    pub fn ptr_to_mplace_unaligned(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        layout: TyAndLayout<'tcx>,
    ) -> MPlaceTy<'tcx, M::Provenance> {
        assert!(layout.is_sized());
        self.ptr_with_meta_to_mplace(ptr, MemPlaceMeta::None, layout, true)
    }

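    /// Take an `ImmTy` that represents a pointer (including wide-pointer metadata) and turn it
    /// into the place it points to. This does not access memory beyond the pointer value
    /// itself; any misalignment is recorded on the place and only reported when it is accessed.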
    pub fn ref_to_mplace(
        &self,
        val: &ImmTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
        let pointee_type =
            val.layout.ty.builtin_deref(true).expect("`ref_to_mplace` called on non-ptr type");
        let layout = self.layout_of(pointee_type)?;
        let (ptr, meta) = val.to_scalar_and_meta();

        let ptr = ptr.to_pointer(self)?;
        interp_ok(self.ptr_with_meta_to_mplace(ptr, meta, layout, false))
    }

    pub fn mplace_to_ref(
        &self,
        mplace: &MPlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        let imm = mplace.mplace.to_ref(self);
        let layout = self.layout_of(Ty::new_mut_ptr(self.tcx.tcx, mplace.layout.ty))?;
        interp_ok(ImmTy::from_immediate(imm, layout))
    }

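    /// Dereference a pointer operand: read the pointer value from `src` and return the place it
    /// points to. Note that `Box` is explicitly not handled here and triggers a `bug!`; callers
    /// must never pass a `Box` to this function.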
    #[instrument(skip(self), level = "trace")]
    pub fn deref_pointer(
        &self,
        src: &impl Projectable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
        if src.layout().ty.is_box() {
            bug!("dereferencing {}", src.layout().ty);
        }

        let val = self.read_immediate(src)?;
        trace!("deref to {} on {:?}", val.layout.ty, *val);

        let mplace = self.ref_to_mplace(&val)?;
        interp_ok(mplace)
    }

    #[inline]
    pub(super) fn get_place_alloc(
        &self,
        mplace: &MPlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, Option<AllocRef<'_, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
    {
        let (size, _align) = self
            .size_and_align_of_val(mplace)?
            .unwrap_or((mplace.layout.size, mplace.layout.align.abi));
        let a = self.get_ptr_alloc(mplace.ptr(), size)?;
        self.check_misalign(mplace.mplace.misaligned, CheckAlignMsg::BasedOn)?;
        interp_ok(a)
    }

    #[inline]
    pub(super) fn get_place_alloc_mut(
        &mut self,
        mplace: &MPlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, Option<AllocRefMut<'_, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
    {
        let (size, _align) = self
            .size_and_align_of_val(mplace)?
            .unwrap_or((mplace.layout.size, mplace.layout.align.abi));
        let misalign_res = self.check_misalign(mplace.mplace.misaligned, CheckAlignMsg::BasedOn);
        let (a, ()) = self.get_ptr_alloc_mut(mplace.ptr(), size).and(misalign_res)?;
        interp_ok(a)
    }

    pub fn local_to_place(
        &self,
        local: mir::Local,
    ) -> InterpResult<'tcx, PlaceTy<'tcx, M::Provenance>> {
        let frame = self.frame();
        let layout = self.layout_of_local(frame, local, None)?;
        let place = if layout.is_sized() {
            Place::Local { local, offset: None, locals_addr: frame.locals_addr() }
        } else {
            match frame.locals[local].access()? {
                Operand::Immediate(_) => bug!(),
                Operand::Indirect(mplace) => Place::Ptr(*mplace),
            }
        };
        interp_ok(PlaceTy { place, layout })
    }

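    /// Evaluate a MIR place expression to an interpreter `PlaceTy`: start from the base local
    /// and apply all projection elements. With debug assertions enabled, the resulting layout
    /// is checked against the type that MIR ascribes to the place.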
    #[instrument(skip(self), level = "trace")]
    pub fn eval_place(
        &self,
        mir_place: mir::Place<'tcx>,
    ) -> InterpResult<'tcx, PlaceTy<'tcx, M::Provenance>> {
        let _trace =
            enter_trace_span!(M, step::eval_place, ?mir_place, tracing_separate_thread = Empty);

        let mut place = self.local_to_place(mir_place.local)?;
        for elem in mir_place.projection.iter() {
            place = self.project(&place, elem)?
        }

        trace!("{:?}", self.dump_place(&place));
        if cfg!(debug_assertions) {
            let normalized_place_ty = self
                .instantiate_from_current_frame_and_normalize_erasing_regions(
                    mir_place.ty(&self.frame().body.local_decls, *self.tcx).ty,
                )?;
            if !mir_assign_valid_types(
                *self.tcx,
                self.typing_env,
                self.layout_of(normalized_place_ty)?,
                place.layout,
            ) {
                span_bug!(
                    self.cur_span(),
                    "eval_place of a MIR place with type {} produced an interpreter place with type {}",
                    normalized_place_ty,
                    place.layout.ty,
                )
            }
        }
        interp_ok(place)
    }

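    /// Split a place into either an in-memory place or a mutable reference to the `Immediate`
    /// stored for a local. A local with a projection offset is forced into memory first
    /// (writing at an offset into an immediate is not supported).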
    #[inline(always)]
    fn as_mplace_or_mutable_local(
        &mut self,
        place: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<
        'tcx,
        Either<
            MPlaceTy<'tcx, M::Provenance>,
            (&mut Immediate<M::Provenance>, TyAndLayout<'tcx>, mir::Local),
        >,
    > {
        interp_ok(match place.to_place().as_mplace_or_local() {
            Left(mplace) => Left(mplace),
            Right((local, offset, locals_addr, layout)) => {
                if offset.is_some() {
                    Left(place.force_mplace(self)?)
                } else {
                    debug_assert_eq!(locals_addr, self.frame().locals_addr());
                    debug_assert_eq!(self.layout_of_local(self.frame(), local, None)?, layout);
                    match self.frame_mut().locals[local].access_mut()? {
                        Operand::Indirect(mplace) => {
                            Left(MPlaceTy { mplace: *mplace, layout })
                        }
                        Operand::Immediate(local_val) => {
                            Right((local_val, layout, local))
                        }
                    }
                }
            }
        })
    }

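    /// Write an immediate to a place, validating the result afterwards if the machine requires
    /// it. See `write_immediate_no_validate` for the variant that skips validation.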
    #[inline(always)]
    #[instrument(skip(self), level = "trace")]
    pub fn write_immediate(
        &mut self,
        src: Immediate<M::Provenance>,
        dest: &impl Writeable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        self.write_immediate_no_validate(src, dest)?;

        if M::enforce_validity(self, dest.layout()) {
            self.validate_operand(
                &dest.to_place(),
                M::enforce_validity_recursively(self, dest.layout()),
                true,
            )?;
        }

        interp_ok(())
    }

    #[inline(always)]
    pub fn write_scalar(
        &mut self,
        val: impl Into<Scalar<M::Provenance>>,
        dest: &impl Writeable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        self.write_immediate(Immediate::Scalar(val.into()), dest)
    }

    #[inline(always)]
    pub fn write_pointer(
        &mut self,
        ptr: impl Into<Pointer<Option<M::Provenance>>>,
        dest: &impl Writeable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        self.write_scalar(Scalar::from_maybe_pointer(ptr.into(), self), dest)
    }

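    /// Write an immediate to a place without performing validation afterwards. The destination
    /// must be sized. If the destination is a local that still lives as an immediate, the local
    /// is updated in place; otherwise the data is written to memory.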
    pub(super) fn write_immediate_no_validate(
        &mut self,
        src: Immediate<M::Provenance>,
        dest: &impl Writeable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        assert!(dest.layout().is_sized(), "Cannot write unsized immediate data");

        match self.as_mplace_or_mutable_local(&dest.to_place())? {
            Right((local_val, local_layout, local)) => {
                *local_val = src;
                if !self.validation_in_progress() {
                    M::after_local_write(self, local, false)?;
                }
                if cfg!(debug_assertions) {
                    src.assert_matches_abi(
                        local_layout.backend_repr,
                        "invalid immediate for given destination place",
                        self,
                    );
                }
            }
            Left(mplace) => {
                self.write_immediate_to_mplace_no_validate(src, mplace.layout, mplace.mplace)?;
            }
        }
        interp_ok(())
    }

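    /// Write an immediate directly into memory, without validation. The immediate must match
    /// the `BackendRepr` of the given layout. Note the special handling of `ScalarPair` values
    /// whose two scalars do not cover the entire layout: the destination may be fully
    /// de-initialized before the scalars are written.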
    fn write_immediate_to_mplace_no_validate(
        &mut self,
        value: Immediate<M::Provenance>,
        layout: TyAndLayout<'tcx>,
        dest: MemPlace<M::Provenance>,
    ) -> InterpResult<'tcx> {
        value.assert_matches_abi(
            layout.backend_repr,
            "invalid immediate for given destination place",
            self,
        );
        let tcx = *self.tcx;
        let will_later_validate = M::enforce_validity(self, layout);
        let Some(mut alloc) = self.get_place_alloc_mut(&MPlaceTy { mplace: dest, layout })? else {
            return interp_ok(());
        };

        match value {
            Immediate::Scalar(scalar) => {
                alloc.write_scalar(alloc_range(Size::ZERO, scalar.size()), scalar)?;
            }
            Immediate::ScalarPair(a_val, b_val) => {
                let BackendRepr::ScalarPair(_a, b) = layout.backend_repr else {
                    span_bug!(
                        self.cur_span(),
                        "write_immediate_to_mplace: invalid ScalarPair layout: {:#?}",
                        layout
                    )
                };
                let a_size = a_val.size();
                let b_offset = a_size.align_to(b.align(&tcx).abi);
                assert!(b_offset.bytes() > 0);
                if !will_later_validate && a_size + b_val.size() != layout.size {
                    alloc.write_uninit_full();
                }

                alloc.write_scalar(alloc_range(Size::ZERO, a_size), a_val)?;
                alloc.write_scalar(alloc_range(b_offset, b_val.size()), b_val)?;
            }
            Immediate::Uninit => alloc.write_uninit_full(),
        }
        interp_ok(())
    }

    pub fn write_uninit(
        &mut self,
        dest: &impl Writeable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        match self.as_mplace_or_mutable_local(&dest.to_place())? {
            Right((local_val, _local_layout, local)) => {
                *local_val = Immediate::Uninit;
                if !self.validation_in_progress() {
                    M::after_local_write(self, local, false)?;
                }
            }
            Left(mplace) => {
                let Some(mut alloc) = self.get_place_alloc_mut(&mplace)? else {
                    return interp_ok(());
                };
                alloc.write_uninit_full();
            }
        }
        interp_ok(())
    }

    pub fn clear_provenance(
        &mut self,
        dest: &impl Writeable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        if let Right(imm) = dest.to_op(self)?.as_mplace_or_imm() {
            if !imm.has_provenance() {
                return interp_ok(());
            }
        }
        match self.as_mplace_or_mutable_local(&dest.to_place())? {
            Right((local_val, _local_layout, local)) => {
                local_val.clear_provenance()?;
                if !self.validation_in_progress() {
                    M::after_local_write(self, local, false)?;
                }
            }
            Left(mplace) => {
                let Some(mut alloc) = self.get_place_alloc_mut(&mplace)? else {
                    return interp_ok(());
                };
                alloc.clear_provenance();
            }
        }
        interp_ok(())
    }

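    /// Copy `src` into `dest`, allowing the two to have different (but equally-sized) layouts.
    /// Prefer `copy_op` when source and destination are known to have compatible types.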
    #[inline(always)]
    pub fn copy_op_allow_transmute(
        &mut self,
        src: &impl Projectable<'tcx, M::Provenance>,
        dest: &impl Writeable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        self.copy_op_inner(src, dest, true)
    }

    #[inline(always)]
    pub fn copy_op(
        &mut self,
        src: &impl Projectable<'tcx, M::Provenance>,
        dest: &impl Writeable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        self.copy_op_inner(src, dest, false)
    }

    #[inline(always)]
    #[instrument(skip(self), level = "trace")]
    fn copy_op_inner(
        &mut self,
        src: &impl Projectable<'tcx, M::Provenance>,
        dest: &impl Writeable<'tcx, M::Provenance>,
        allow_transmute: bool,
    ) -> InterpResult<'tcx> {
        self.copy_op_no_validate(src, dest, allow_transmute)?;

        if M::enforce_validity(self, dest.layout()) {
            let dest = dest.to_place();
            if src.layout().ty != dest.layout().ty {
                self.validate_operand(
                    &dest.transmute(src.layout(), self)?,
                    M::enforce_validity_recursively(self, src.layout()),
                    true,
                )?;
            }
            self.validate_operand(
                &dest,
                M::enforce_validity_recursively(self, dest.layout()),
                true,
            )?;
        }

        interp_ok(())
    }

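    /// Copy `src` into `dest` without validating the result. If the source can be read as an
    /// immediate, the copy goes through `write_immediate_*`; otherwise it falls back to a raw
    /// memory copy via `mem_copy`. Misalignment of either side is still reported.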
    #[instrument(skip(self), level = "trace")]
    pub(super) fn copy_op_no_validate(
        &mut self,
        src: &impl Projectable<'tcx, M::Provenance>,
        dest: &impl Writeable<'tcx, M::Provenance>,
        allow_transmute: bool,
    ) -> InterpResult<'tcx> {
        let layout_compat =
            mir_assign_valid_types(*self.tcx, self.typing_env, src.layout(), dest.layout());
        if !allow_transmute && !layout_compat {
            span_bug!(
                self.cur_span(),
                "type mismatch when copying!\nsrc: {},\ndest: {}",
                src.layout().ty,
                dest.layout().ty,
            );
        }

        let src = match self.read_immediate_raw(src)? {
            Right(src_val) => {
                assert!(!src.layout().is_unsized());
                assert!(!dest.layout().is_unsized());
                assert_eq!(src.layout().size, dest.layout().size);
                return if layout_compat {
                    self.write_immediate_no_validate(*src_val, dest)
                } else {
                    let dest_mem = dest.force_mplace(self)?;
                    self.write_immediate_to_mplace_no_validate(
                        *src_val,
                        src.layout(),
                        dest_mem.mplace,
                    )
                };
            }
            Left(mplace) => mplace,
        };
        trace!("copy_op: {:?} <- {:?}: {}", *dest, src, dest.layout().ty);

        let dest = dest.force_mplace(self)?;
        let Some((dest_size, _)) = self.size_and_align_of_val(&dest)? else {
            span_bug!(self.cur_span(), "copy_op needs (dynamically) sized values")
        };
        if cfg!(debug_assertions) {
            let src_size = self.size_and_align_of_val(&src)?.unwrap().0;
            assert_eq!(src_size, dest_size, "Cannot copy differently-sized data");
        } else {
            assert_eq!(src.layout.size, dest.layout.size);
        }

        self.mem_copy(src.ptr(), dest.ptr(), dest_size, true)?;
        self.check_misalign(src.mplace.misaligned, CheckAlignMsg::BasedOn)?;
        self.check_misalign(dest.mplace.misaligned, CheckAlignMsg::BasedOn)?;
        interp_ok(())
    }

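    /// Ensure that a place is backed by actual memory. If the place is a local that is still
    /// stored as an immediate, stack memory is allocated for it, the current value is written
    /// out (unless it is `Uninit`), and the local is switched to `Operand::Indirect`.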
    #[instrument(skip(self), level = "trace")]
    pub fn force_allocation(
        &mut self,
        place: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
        let mplace = match place.place {
            Place::Local { local, offset, locals_addr } => {
                debug_assert_eq!(locals_addr, self.frame().locals_addr());
                let whole_local = match self.frame_mut().locals[local].access_mut()? {
                    &mut Operand::Immediate(local_val) => {
                        let local_layout = self.layout_of_local(&self.frame(), local, None)?;
                        assert!(local_layout.is_sized(), "unsized locals cannot be immediate");
                        let mplace = self.allocate(local_layout, MemoryKind::Stack)?;
                        if !matches!(local_val, Immediate::Uninit) {
                            self.write_immediate_to_mplace_no_validate(
                                local_val,
                                local_layout,
                                mplace.mplace,
                            )?;
                        }
                        M::after_local_moved_to_memory(self, local, &mplace)?;
                        *self.frame_mut().locals[local].access_mut().unwrap() =
                            Operand::Indirect(mplace.mplace);
                        mplace.mplace
                    }
                    &mut Operand::Indirect(mplace) => mplace,
                };
                if let Some(offset) = offset {
                    whole_local.offset_with_meta_(
                        offset,
                        OffsetMode::Wrapping,
                        MemPlaceMeta::None,
                        self,
                    )?
                } else {
                    whole_local
                }
            }
            Place::Ptr(mplace) => mplace,
        };
        interp_ok(MPlaceTy { mplace, layout: place.layout })
    }

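    /// Allocate memory for a value of the given layout, which may be unsized; the metadata is
    /// used to compute the dynamic size and alignment. `extern` types cannot be allocated since
    /// their size is not known.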
    pub fn allocate_dyn(
        &mut self,
        layout: TyAndLayout<'tcx>,
        kind: MemoryKind<M::MemoryKind>,
        meta: MemPlaceMeta<M::Provenance>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
        let Some((size, align)) = self.size_and_align_from_meta(&meta, &layout)? else {
            span_bug!(self.cur_span(), "cannot allocate space for `extern` type, size is not known")
        };
        let ptr = self.allocate_ptr(size, align, kind, AllocInit::Uninit)?;
        interp_ok(self.ptr_with_meta_to_mplace(ptr.into(), meta, layout, false))
    }

    pub fn allocate(
        &mut self,
        layout: TyAndLayout<'tcx>,
        kind: MemoryKind<M::MemoryKind>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
        assert!(layout.is_sized());
        self.allocate_dyn(layout, kind, MemPlaceMeta::None)
    }

    pub fn allocate_bytes_dedup(
        &mut self,
        bytes: &[u8],
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let salt = M::get_global_alloc_salt(self, None);
        let id = self.tcx.allocate_bytes_dedup(bytes, salt);

        M::adjust_alloc_root_pointer(
            &self,
            Pointer::from(id),
            M::GLOBAL_KIND.map(MemoryKind::Machine),
        )
    }

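    /// Allocate a deduplicated global allocation for the given string and return a place of
    /// type `str` pointing to it, with the length stored as wide-pointer metadata.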
    pub fn allocate_str_dedup(
        &mut self,
        s: &str,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
        let bytes = s.as_bytes();
        let ptr = self.allocate_bytes_dedup(bytes)?;

        let meta = Scalar::from_target_usize(u64::try_from(bytes.len()).unwrap(), self);

        let layout = self.layout_of(self.tcx.types.str_).unwrap();

        interp_ok(self.ptr_with_meta_to_mplace(
            ptr.into(),
            MemPlaceMeta::Meta(meta),
            layout,
            false,
        ))
    }

    pub fn raw_const_to_mplace(
        &self,
        raw: mir::ConstAlloc<'tcx>,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
        let _ = self.tcx.global_alloc(raw.alloc_id);
        let ptr = self.global_root_pointer(Pointer::from(raw.alloc_id))?;
        let layout = self.layout_of(raw.ty)?;
        interp_ok(self.ptr_to_mplace(ptr.into(), layout))
    }
}

#[cfg(target_pointer_width = "64")]
mod size_asserts {
    use rustc_data_structures::static_assert_size;

    use super::*;
    static_assert_size!(MPlaceTy<'_>, 64);
    static_assert_size!(MemPlace, 48);
    static_assert_size!(MemPlaceMeta, 24);
    static_assert_size!(Place, 48);
    static_assert_size!(PlaceTy<'_>, 64);
}